Actual source code: gamg.c

petsc-master 2016-09-23
  1: /*
  2:  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
  3:  */
 4:  #include <petsc/private/matimpl.h>
 5:  #include <../src/ksp/pc/impls/gamg/gamg.h>
  6: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

  8: #if defined PETSC_GAMG_USE_LOG
  9: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
 10: #endif

 12: #if defined PETSC_USE_LOG
 13: PetscLogEvent PC_GAMGGraph_AGG;
 14: PetscLogEvent PC_GAMGGraph_GEO;
 15: PetscLogEvent PC_GAMGCoarsen_AGG;
 16: PetscLogEvent PC_GAMGCoarsen_GEO;
 17: PetscLogEvent PC_GAMGProlongator_AGG;
 18: PetscLogEvent PC_GAMGProlongator_GEO;
 19: PetscLogEvent PC_GAMGOptProlongator_AGG;
 20: #endif

 22: #define GAMG_MAXLEVELS 30

 24: /* #define GAMG_STAGES */
 25: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
 26: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
 27: #endif

 29: static PetscFunctionList GAMGList = 0;
 30: static PetscBool PCGAMGPackageInitialized;

 32: /* ----------------------------------------------------------------------------- */
 35: PetscErrorCode PCReset_GAMG(PC pc)
 36: {
 38:   PC_MG          *mg      = (PC_MG*)pc->data;
 39:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

 42:   if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
 43:   pc_gamg->data_sz = 0;
 44:   PetscFree(pc_gamg->orig_data);
 45:   return(0);
 46: }

 48: /* -------------------------------------------------------------------------- */
 49: /*
 50:    PCGAMGCreateLevel_GAMG: create coarse op with RAP.  repartition and/or reduce number
 51:      of active processors.

 53:    Input Parameter:
 54:    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
 55:           'pc_gamg->data_sz' are changed via repartitioning/reduction.
 56:    . Amat_fine - matrix on this fine (k) level
 57:    . cr_bs - coarse block size
 58:    In/Output Parameter:
 59:    . a_P_inout - prolongation operator to the next level (k-->k-1)
 60:    . a_nactive_proc - number of active procs
 61:    Output Parameter:
 62:    . a_Amat_crs - coarse matrix that is created (k-1)
 63: */
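/* Outline (editor's summary of the routine below, not part of the original source): form
   Cmat = P^T A P with MatPtAP(); decide how many processes should remain active from
   'min_eq_proc' (or 1 on the last level unless a parallel coarse solve is requested); if
   repartitioning or process reduction is needed, build a per-equation "new owner" IS (via
   MatPartitioning, or by simple aggregation of consecutive ranks), renumber the equations with
   ISPartitioningToNumbering(), move the auxiliary node data with a VecScatter, and finally
   extract the permuted coarse operator and prolongator columns with MatGetSubMatrix(). */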

 67: static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm, PetscBool is_last)
 68: {
 69:   PetscErrorCode  ierr;
 70:   PC_MG           *mg         = (PC_MG*)pc->data;
 71:   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
 72:   Mat             Cmat,Pold=*a_P_inout;
 73:   MPI_Comm        comm;
 74:   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
 75:   PetscInt        ncrs_eq,ncrs,f_bs;

 78:   PetscObjectGetComm((PetscObject)Amat_fine,&comm);
 79:   MPI_Comm_rank(comm, &rank);
 80:   MPI_Comm_size(comm, &size);
 81:   MatGetBlockSize(Amat_fine, &f_bs);
 82:   MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);

 84:   /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
 85:   MatGetLocalSize(Cmat, &ncrs_eq, NULL);
 86:   if (pc_gamg->data_cell_rows>0) {
 87:     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
 88:   } else {
 89:     PetscInt  bs;
 90:     MatGetBlockSize(Cmat, &bs);
 91:     ncrs = ncrs_eq/bs;
 92:   }

 94:   /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
 95:   if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
 96:   else {
 97:     PetscInt ncrs_eq_glob;
 98:     MatGetSize(Cmat, &ncrs_eq_glob, NULL);
 99:     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
100:     if (!new_size) new_size = 1; /* not likely, possible? */
101:     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
102:   }
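  /* Worked example (editor's note): with ncrs_eq_glob = 10000 global coarse equations and the
     default min_eq_proc = 50, new_size = (PetscMPIInt)(10000./50. + 0.5) = 200; if only
     nactive = 64 processes are currently active, new_size is then clamped back to 64. */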

104:   if (Pcolumnperm) *Pcolumnperm = NULL;

106:   if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
107:   else {
108:     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
109:     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

111:     nloc_old = ncrs_eq/cr_bs;
112:     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
113: #if defined PETSC_GAMG_USE_LOG
114:     PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
115: #endif
116:     /* make 'is_eq_newproc' */
117:     PetscMalloc1(size, &counts);
118:     if (pc_gamg->repart) {
119:       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
120:       Mat adj;

122:      PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);

124:       /* get 'adj' */
125:       if (cr_bs == 1) {
126:         MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
127:       } else {
128:         /* make a scalar matrix to partition (no Stokes here) */
129:         Mat               tMat;
130:         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
131:         const PetscScalar *vals;
132:         const PetscInt    *idx;
133:         PetscInt          *d_nnz, *o_nnz, M, N;
134:         static PetscInt   llev = 0;
135:         MatType           mtype;

137:         PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);
138:         MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);
139:         MatGetSize(Cmat, &M, &N);
140:         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
141:           MatGetRow(Cmat,Ii,&ncols,0,0);
142:           d_nnz[jj] = ncols/cr_bs;
143:           o_nnz[jj] = ncols/cr_bs;
144:           MatRestoreRow(Cmat,Ii,&ncols,0,0);
145:           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
146:           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
147:         }

149:         MatGetType(Amat_fine,&mtype);
150:         MatCreate(comm, &tMat);
151:         MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);
152:         MatSetType(tMat,mtype);
153:         MatSeqAIJSetPreallocation(tMat,0,d_nnz);
154:         MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
155:         PetscFree2(d_nnz,o_nnz);

157:         for (ii = Istart_crs; ii < Iend_crs; ii++) {
158:           PetscInt dest_row = ii/cr_bs;
159:           MatGetRow(Cmat,ii,&ncols,&idx,&vals);
160:           for (jj = 0; jj < ncols; jj++) {
161:             PetscInt    dest_col = idx[jj]/cr_bs;
162:             PetscScalar v        = 1.0;
163:             MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
164:           }
165:           MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
166:         }
167:         MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
168:         MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);

170:         if (llev++ == -1) {
171:           PetscViewer viewer; char fname[32];
172:           PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);
173:           PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
174:           MatView(tMat, viewer);
175:           PetscViewerDestroy(&viewer);
176:         }
177:         MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
178:         MatDestroy(&tMat);
179:       } /* create 'adj' */

181:       { /* partition: get newproc_idx */
182:         char            prefix[256];
183:         const char      *pcpre;
184:         const PetscInt  *is_idx;
185:         MatPartitioning mpart;
186:         IS              proc_is;
187:         PetscInt        targetPE;

189:         MatPartitioningCreate(comm, &mpart);
190:         MatPartitioningSetAdjacency(mpart, adj);
191:         PCGetOptionsPrefix(pc, &pcpre);
192:         PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
193:         PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
194:         MatPartitioningSetFromOptions(mpart);
195:         MatPartitioningSetNParts(mpart, new_size);
196:         MatPartitioningApply(mpart, &proc_is);
197:         MatPartitioningDestroy(&mpart);

199:         /* collect IS info */
200:         PetscMalloc1(ncrs_eq, &newproc_idx);
201:         ISGetIndices(proc_is, &is_idx);
202:         targetPE = 1; /* bring to "front" of machine */
203:         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
204:         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
205:           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
206:             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
207:           }
208:         }
209:         ISRestoreIndices(proc_is, &is_idx);
210:         ISDestroy(&proc_is);
211:       }
212:       MatDestroy(&adj);

214:       ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);
215:       PetscFree(newproc_idx);
216:     } else { /* simple aggregation of parts -- 'is_eq_newproc' */
217:       PetscInt rfactor,targetPE;

219:       /* find factor */
220:       if (new_size == 1) rfactor = size; /* easy */
221:       else {
222:         PetscReal best_fact = 0.;
223:         jj = -1;
224:         for (kk = 1 ; kk <= size ; kk++) {
225:           if (!(size%kk)) { /* a candidate */
226:             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
227:             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
228:             if (fact > best_fact) {
229:               best_fact = fact; jj = kk;
230:             }
231:           }
232:         }
233:         if (jj != -1) rfactor = jj;
234:         else rfactor = 1; /* can this happen? e.g., size is prime */
235:       }
236:       new_size = size/rfactor;
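      /* Worked example (editor's note): with size = 12 ranks and a target new_size = 5, the
         divisors of 12 give candidate active-process counts 12, 6, 4, 3, 2, 1; the ratio closest
         to 1 relative to 5 is 6/5 (kk = 2), so rfactor = 2 and new_size becomes 12/2 = 6. */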

238:       if (new_size==nactive) {
239:         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
240:         PetscFree(counts);
241:         PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);
242: #if defined PETSC_GAMG_USE_LOG
243:         PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
244: #endif
245:         return(0);
246:       }

248:       PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);
249:       targetPE = rank/rfactor;
250:       ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);
251:     } /* end simple 'is_eq_newproc' */

253:     /*
254:      Create an index set from the is_eq_newproc index set to indicate the mapping TO the new global equation numbering
255:      */
256:     ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);
257:     is_eq_num_prim = is_eq_num;
258:     /*
259:       Determine how many equations/vertices are assigned to each processor
260:      */
261:     ISPartitioningCount(is_eq_newproc, size, counts);
262:     ncrs_eq_new = counts[rank];
263:     ISDestroy(&is_eq_newproc);
264:     ncrs_new = ncrs_eq_new/cr_bs; /* eqs */

266:     PetscFree(counts);
267: #if defined PETSC_GAMG_USE_LOG
268:     PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
269: #endif
270:     /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
271:     {
272:     Vec            src_crd, dest_crd;
273:     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
274:     VecScatter     vecscat;
275:     PetscScalar    *array;
276:     IS isscat;

278:     /* move data (for primal equations only) */
279:     /* Create a vector to contain the newly ordered element information */
280:     VecCreate(comm, &dest_crd);
281:     VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);
282:     VecSetType(dest_crd,VECSTANDARD); /* this is needed! */
283:     /*
284:      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
285:      a block size of ...).  Note, ISs are expanded into equation space by 'cr_bs'.
286:      */
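    /* Layout note (editor's summary): pc_gamg->data is stored column-blocked, i.e.
       data[ii*ndata_rows + kk + jj*(ncrs*ndata_rows)] holds row kk, column jj of node ii.  The
       scatter vectors below instead pack all node_data_sz values of a node contiguously
       (jx = ii*node_data_sz + kk*ndata_cols + jj), so the data can be moved node-by-node under
       the new equation numbering and then unpacked back into the column-blocked layout. */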
287:     PetscMalloc1(ncrs*node_data_sz, &tidx);
288:     ISGetIndices(is_eq_num_prim, &idx);
289:     for (ii=0,jj=0; ii<ncrs; ii++) {
290:       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
291:       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
292:     }
293:     ISRestoreIndices(is_eq_num_prim, &idx);
294:     ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);
295:     PetscFree(tidx);
296:     /*
297:      Create a vector to contain the original vertex information for each element
298:      */
299:     VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);
300:     for (jj=0; jj<ndata_cols; jj++) {
301:       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
302:       for (ii=0; ii<ncrs; ii++) {
303:         for (kk=0; kk<ndata_rows; kk++) {
304:           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
305:           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
306:           VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);
307:         }
308:       }
309:     }
310:     VecAssemblyBegin(src_crd);
311:     VecAssemblyEnd(src_crd);
312:     /*
313:       Scatter the element vertex information (still in the original vertex ordering)
314:       to the correct processor
315:     */
316:     VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);
317:     ISDestroy(&isscat);
318:     VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
319:     VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
320:     VecScatterDestroy(&vecscat);
321:     VecDestroy(&src_crd);
322:     /*
323:       Put the element vertex data into a new allocation of the gdata->ele
324:     */
325:     PetscFree(pc_gamg->data);
326:     PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);

328:     pc_gamg->data_sz = node_data_sz*ncrs_new;
329:     strideNew        = ncrs_new*ndata_rows;

331:     VecGetArray(dest_crd, &array);
332:     for (jj=0; jj<ndata_cols; jj++) {
333:       for (ii=0; ii<ncrs_new; ii++) {
334:         for (kk=0; kk<ndata_rows; kk++) {
335:           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
336:           pc_gamg->data[ix] = PetscRealPart(array[jx]);
337:         }
338:       }
339:     }
340:     VecRestoreArray(dest_crd, &array);
341:     VecDestroy(&dest_crd);
342:     }
343:     /* move A and P (columns) with new layout */
344: #if defined PETSC_GAMG_USE_LOG
345:     PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
346: #endif

348:     /*
349:       Invert for MatGetSubMatrix
350:     */
351:     ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);
352:     ISSort(new_eq_indices); /* is this needed? */
353:     ISSetBlockSize(new_eq_indices, cr_bs);
354:     if (is_eq_num != is_eq_num_prim) {
355:       ISDestroy(&is_eq_num_prim); /* could be same as 'is_eq_num' */
356:     }
357:     if (Pcolumnperm) {
358:       PetscObjectReference((PetscObject)new_eq_indices);
359:       *Pcolumnperm = new_eq_indices;
360:     }
361:     ISDestroy(&is_eq_num);
362: #if defined PETSC_GAMG_USE_LOG
363:     PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
364:     PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
365: #endif
366:     /* 'a_Amat_crs' output */
367:     {
368:       Mat mat;
369:       MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);
370:       *a_Amat_crs = mat;
371:     }
372:     MatDestroy(&Cmat);

374: #if defined PETSC_GAMG_USE_LOG
375:     PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
376: #endif
377:     /* prolongator */
378:     {
379:       IS       findices;
380:       PetscInt Istart,Iend;
381:       Mat      Pnew;

383:       MatGetOwnershipRange(Pold, &Istart, &Iend);
384: #if defined PETSC_GAMG_USE_LOG
385:       PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
386: #endif
387:       ISCreateStride(comm,Iend-Istart,Istart,1,&findices);
388:       ISSetBlockSize(findices,f_bs);
389:       MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);
390:       ISDestroy(&findices);

392: #if defined PETSC_GAMG_USE_LOG
393:       PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
394: #endif
395:       MatDestroy(a_P_inout);

397:       /* output - repartitioned */
398:       *a_P_inout = Pnew;
399:     }
400:     ISDestroy(&new_eq_indices);

402:     *a_nactive_proc = new_size; /* output */
403:   }
404:   return(0);
405: }

407: /* -------------------------------------------------------------------------- */
408: /*
409:    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
410:                     by setting data structures and options.

412:    Input Parameter:
413: .  pc - the preconditioner context

415: */
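/* Overall flow (editor's summary of the code below, not part of the original source):
   (1) on repeated setup either reset the hierarchy or, when prolongators are reused, just
   rebuild the Galerkin coarse operators; (2) create or copy the auxiliary "data" (e.g. near
   null space vectors) used by coarsening; (3) loop over levels calling the graph/coarsen/
   prolongator hooks and createlevel() (RAP plus optional repartitioning/reduction);
   (4) configure the level smoothers and the coarse grid solver; (5) finish with PCSetUp_MG(). */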
418: PetscErrorCode PCSetUp_GAMG(PC pc)
419: {
421:   PC_MG          *mg      = (PC_MG*)pc->data;
422:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
423:   Mat            Pmat     = pc->pmat;
424:   PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
425:   MPI_Comm       comm;
426:   PetscMPIInt    rank,size,nactivepe;
427:   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
428:   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
429:   PetscLogDouble nnz0=0.,nnztot=0.;
430:   MatInfo        info;
431:   PetscBool      is_last = PETSC_FALSE;

434:   PetscObjectGetComm((PetscObject)pc,&comm);
435:   MPI_Comm_rank(comm,&rank);
436:   MPI_Comm_size(comm,&size);

438:   if (pc_gamg->setup_count++ > 0) {
439:     if ((PetscBool)(!pc_gamg->reuse_prol)) {
440:       /* reset everything */
441:       PCReset_MG(pc);
442:       pc->setupcalled = 0;
443:     } else {
444:       PC_MG_Levels **mglevels = mg->levels;
445:       /* just do Galerkin grids */
446:       Mat          B,dA,dB;

448:      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
449:       if (pc_gamg->Nlevels > 1) {
450:         /* currently only handle case where mat and pmat are the same on coarser levels */
451:         KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);
452:         /* (re)set to get dirty flag */
453:         KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);

455:         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
456:           /* the first time through, the matrix structure has changed due to repartitioning */
457:           if (pc_gamg->setup_count==2) {
458:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
459:             MatDestroy(&mglevels[level]->A);

461:             mglevels[level]->A = B;
462:           } else {
463:             KSPGetOperators(mglevels[level]->smoothd,NULL,&B);
464:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
465:           }
466:           KSPSetOperators(mglevels[level]->smoothd,B,B);
467:           dB   = B;
468:         }
469:       }

471:       PCSetUp_MG(pc);
472:       return(0);
473:     }
474:   }

476:   if (!pc_gamg->data) {
477:     if (pc_gamg->orig_data) {
478:       MatGetBlockSize(Pmat, &bs);
479:       MatGetLocalSize(Pmat, &qq, NULL);

481:       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
482:       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
483:       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

485:       PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);
486:       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
487:     } else {
488:       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
489:       pc_gamg->ops->createdefaultdata(pc,Pmat);
490:     }
491:   }

493:   /* cache original data for reuse */
494:   if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
495:     PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);
496:     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
497:     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
498:     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
499:   }

501:   /* get basic dims */
502:   MatGetBlockSize(Pmat, &bs);
503:   MatGetSize(Pmat, &M, &N);

505:   MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info); /* global reduction */
506:   nnz0   = info.nz_used;
507:   nnztot = info.nz_used;
508:   PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);

510:   /* Get A_i and R_i */
511:   for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
512:     pc_gamg->current_level = level;
513:     level1 = level + 1;
514: #if defined PETSC_GAMG_USE_LOG
515:     PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
516: #if (defined GAMG_STAGES)
517:     PetscLogStagePush(gamg_stages[level]);
518: #endif
519: #endif
520:     { /* construct prolongator */
521:       Mat              Gmat;
522:       PetscCoarsenData *agg_lists;
523:       Mat              Prol11;

525:       pc_gamg->ops->graph(pc,Aarr[level], &Gmat);
526:       pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);
527:       pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);

529:       /* could have failed to create new level */
530:       if (Prol11) {
531:         /* get new block size of coarse matrices */
532:         MatGetBlockSizes(Prol11, NULL, &bs);

534:         if (pc_gamg->ops->optprolongator) {
535:           /* smooth */
536:           pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);
537:         }

539:         Parr[level1] = Prol11;
540:       } else Parr[level1] = NULL; /* failed to coarsen */

542:       if (pc_gamg->use_aggs_in_asm && Prol11) { /* Prol11 may be NULL if coarsening failed */
543:         PetscInt bs;
544:         MatGetBlockSizes(Prol11, &bs, NULL);
545:         PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);
546:       }

548:       MatDestroy(&Gmat);
549:       PetscCDDestroy(agg_lists);
550:     } /* construct prolongator scope */
551: #if defined PETSC_GAMG_USE_LOG
552:     PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
553: #endif
554:     if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
555:     if (!Parr[level1]) { /* failed to coarsen */
556:        PetscInfo1(pc,"Stop gridding, level %D\n",level);
557: #if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
558:       PetscLogStagePop();
559: #endif
560:       break;
561:     }
562: #if defined PETSC_GAMG_USE_LOG
563:     PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
564: #endif
565:     MatGetSize(Parr[level1], &M, &N); /* N is the next M, a loop test variable */
566:     if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Is last ????????");
567:     if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
568:     pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);

570: #if defined PETSC_GAMG_USE_LOG
571:     PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
572: #endif
573:     MatGetSize(Aarr[level1], &M, &N); /* M is the loop test variable */
574:     MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);
575:     nnztot += info.nz_used;
576:     PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);

578: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
579:     PetscLogStagePop();
580: #endif
581:     /* stop if one node or one proc -- could pull back for singular problems */
582:     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
583:        PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);
584:       level++;
585:       break;
586:     }
587:   } /* levels */
588:   PetscFree(pc_gamg->data);

590:   PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);
591:   pc_gamg->Nlevels = level + 1;
592:   fine_level       = level;
593:   PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);

595:   if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
596:     /* set default smoothers & set operators */
597:     for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
598:       KSP smoother;
599:       PC  subpc;

601:       PCMGGetSmoother(pc, lidx, &smoother);
602:       KSPGetPC(smoother, &subpc);

604:       KSPSetNormType(smoother, KSP_NORM_NONE);
605:       /* set ops */
606:       KSPSetOperators(smoother, Aarr[level], Aarr[level]);
607:       PCMGSetInterpolation(pc, lidx, Parr[level+1]);

609:       /* set defaults */
610:       KSPSetType(smoother, KSPCHEBYSHEV);

612:       /* set blocks for ASM smoother that uses the 'aggregates' */
613:       if (pc_gamg->use_aggs_in_asm) {
614:         PetscInt sz;
615:         IS       *iss;

617:         sz   = nASMBlocksArr[level];
618:         iss   = ASMLocalIDsArr[level];
619:         PCSetType(subpc, PCASM);
620:         PCASMSetOverlap(subpc, 0);
621:         PCASMSetType(subpc,PC_ASM_BASIC);
622:         if (!sz) {
623:           IS       is;
624:           ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);
625:           PCASMSetLocalSubdomains(subpc, 1, NULL, &is);
626:           ISDestroy(&is);
627:         } else {
628:           PetscInt kk;
629:           PCASMSetLocalSubdomains(subpc, sz, NULL, iss);
630:           for (kk=0; kk<sz; kk++) {
631:             ISDestroy(&iss[kk]);
632:           }
633:           PetscFree(iss);
634:         }
635:         ASMLocalIDsArr[level] = NULL;
636:         nASMBlocksArr[level]  = 0;
637:       } else {
638:         PCSetType(subpc, PCSOR);
639:       }
640:     }
641:     {
642:       /* coarse grid */
643:       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
644:       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
645:       PCMGGetSmoother(pc, lidx, &smoother);
646:       KSPSetOperators(smoother, Lmat, Lmat);
647:       if (!pc_gamg->use_parallel_coarse_grid_solver) {
648:         KSPSetNormType(smoother, KSP_NORM_NONE);
649:         KSPGetPC(smoother, &subpc);
650:         PCSetType(subpc, PCBJACOBI);
651:         PCSetUp(subpc);
652:         PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);
653:         if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
654:         KSPGetPC(k2[0],&pc2);
655:         PCSetType(pc2, PCLU);
656:         PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);
657:         KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);
658:         KSPSetType(k2[0], KSPPREONLY);
659:         /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
660:          * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
661:          * view every subdomain as though they were different. */
662:         ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
663:       }
664:     }

666:     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
667:     PetscObjectOptionsBegin((PetscObject)pc);
668:     PCSetFromOptions_MG(PetscOptionsObject,pc);
669:     PetscOptionsEnd();
670:     PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);

672:     /* clean up */
673:     for (level=1; level<pc_gamg->Nlevels; level++) {
674:       MatDestroy(&Parr[level]);
675:       MatDestroy(&Aarr[level]);
676:     }
677:     PCSetUp_MG(pc);
678:   } else {
679:     KSP smoother;
680:     PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");
681:     PCMGGetSmoother(pc, 0, &smoother);
682:     KSPSetOperators(smoother, Aarr[0], Aarr[0]);
683:     KSPSetType(smoother, KSPPREONLY);
684:     PCSetUp_MG(pc);
685:   }
686:   return(0);
687: }

689: /* ------------------------------------------------------------------------- */
690: /*
691:  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
692:    that was created with PCCreate_GAMG().

694:    Input Parameter:
695: .  pc - the preconditioner context

697:    Application Interface Routine: PCDestroy()
698: */
701: PetscErrorCode PCDestroy_GAMG(PC pc)
702: {
704:   PC_MG          *mg     = (PC_MG*)pc->data;
705:   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

708:   PCReset_GAMG(pc);
709:   if (pc_gamg->ops->destroy) {
710:     (*pc_gamg->ops->destroy)(pc);
711:   }
712:   PetscFree(pc_gamg->ops);
713:   PetscFree(pc_gamg->gamg_type_name);
714:   PetscFree(pc_gamg);
715:   PCDestroy_MG(pc);
716:   return(0);
717: }

721: /*@
722:    PCGAMGSetProcEqLim - Set number of equations to aim for per process on the coarse grids via processor reduction.

724:    Logically Collective on PC

726:    Input Parameters:
727: +  pc - the preconditioner context
728: -  n - the number of equations


731:    Options Database Key:
732: .  -pc_gamg_process_eq_limit <limit>

734:    Notes: GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit> equations on each process 
735:           that has degrees of freedom

737:    Level: intermediate

739:    Concepts: Unstructured multigrid preconditioner

741: .seealso: PCGAMGSetCoarseEqLim()
742: @*/
743: PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
744: {

749:   PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
750:   return(0);
751: }

755: static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
756: {
757:   PC_MG   *mg      = (PC_MG*)pc->data;
758:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

761:   if (n>0) pc_gamg->min_eq_proc = n;
762:   return(0);
763: }

767: /*@
768:    PCGAMGSetCoarseEqLim - Set maximum number of equations on coarsest grid.

770:  Collective on PC

772:    Input Parameters:
773: +  pc - the preconditioner context
774: -  n - maximum number of equations to aim for

776:    Options Database Key:
777: .  -pc_gamg_coarse_eq_limit <limit>

779:    Level: intermediate

781:    Concepts: Unstructured multigrid preconditioner

783: .seealso: PCGAMGSetProcEqLim()
784: @*/
785: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
786: {

791:   PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
792:   return(0);
793: }

797: static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
798: {
799:   PC_MG   *mg      = (PC_MG*)pc->data;
800:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

803:   if (n>0) pc_gamg->coarse_eq_limit = n;
804:   return(0);
805: }

809: /*@
810:    PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids

812:    Collective on PC

814:    Input Parameters:
815: +  pc - the preconditioner context
816: -  n - PETSC_TRUE or PETSC_FALSE

818:    Options Database Key:
819: .  -pc_gamg_repartition <true,false>

821:    Notes: this will generally improve the load balancing of the work on each level

823:    Level: intermediate

825:    Concepts: Unstructured multigrid preconditioner

827: .seealso: ()
828: @*/
829: PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
830: {

835:   PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));
836:   return(0);
837: }

841: static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
842: {
843:   PC_MG   *mg      = (PC_MG*)pc->data;
844:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

847:   pc_gamg->repart = n;
848:   return(0);
849: }

853: /*@
854:    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding algebraic multigrid preconditioner

856:    Collective on PC

858:    Input Parameters:
859: +  pc - the preconditioner context
860: -  n - PETSC_TRUE or PETSC_FALSE

862:    Options Database Key:
863: .  -pc_gamg_reuse_interpolation <true,false>

865:    Level: intermediate

867:    Notes: this may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
868:           rebuilding the preconditioner quicker.

870:    Concepts: Unstructured multigrid preconditioner

872: .seealso: ()
873: @*/
874: PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
875: {

880:   PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));
881:   return(0);
882: }

886: static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
887: {
888:   PC_MG   *mg      = (PC_MG*)pc->data;
889:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

892:   pc_gamg->reuse_prol = n;
893:   return(0);
894: }

898: /*@
899:    PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.

901:    Collective on PC

903:    Input Parameters:
904: +  pc - the preconditioner context
905: -  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not

907:    Options Database Key:
908: .  -pc_gamg_asm_use_agg

910:    Level: intermediate

912:    Concepts: Unstructured multigrid preconditioner

914: .seealso: ()
915: @*/
916: PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
917: {

922:   PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));
923:   return(0);
924: }

928: static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
929: {
930:   PC_MG   *mg      = (PC_MG*)pc->data;
931:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

934:   pc_gamg->use_aggs_in_asm = flg;
935:   return(0);
936: }

940: /*@
941:    PCGAMGSetUseParallelCoarseGridSolve - Allow a parallel coarse grid solver

943:    Collective on PC

945:    Input Parameters:
946: +  pc - the preconditioner context
947: -  flg - PETSC_TRUE to not force coarse grid onto one processor

949:    Options Database Key:
950: .  -pc_gamg_use_parallel_coarse_grid_solver

952:    Level: intermediate

954:    Concepts: Unstructured multigrid preconditioner

956: .seealso: ()
957: @*/
958: PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
959: {

964:   PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));
965:   return(0);
966: }

970: static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
971: {
972:   PC_MG   *mg      = (PC_MG*)pc->data;
973:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

976:   pc_gamg->use_parallel_coarse_grid_solver = flg;
977:   return(0);
978: }

982: /*@
983:    PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use

985:    Not collective on PC

987:    Input Parameters:
988: +  pc - the preconditioner
989: -  n - the maximum number of levels to use

991:    Options Database Key:
992: .  -pc_mg_levels

994:    Level: intermediate

996:    Concepts: Unstructured multigrid preconditioner

998: .seealso: ()
999: @*/
1000: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1001: {

1006:   PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1007:   return(0);
1008: }

1012: static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1013: {
1014:   PC_MG   *mg      = (PC_MG*)pc->data;
1015:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1018:   pc_gamg->Nlevels = n;
1019:   return(0);
1020: }

1024: /*@
1025:    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

1027:    Not collective on PC

1029:    Input Parameters:
1030: +  pc - the preconditioner context
1031: -  threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph

1033:    Options Database Key:
1034: .  -pc_gamg_threshold <threshold>

1036:    Notes: Before aggregating the graph, GAMG will remove small values from the graph, thus reducing the coupling in the graph and producing a different
1037:    (perhaps better) coarser set of points.

1039:    Level: intermediate

1041:    Concepts: Unstructured multigrid preconditioner

1043: .seealso: ()
1044: @*/
1045: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1046: {

1051:   PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1052:   return(0);
1053: }

1057: static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1058: {
1059:   PC_MG   *mg      = (PC_MG*)pc->data;
1060:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1063:   pc_gamg->threshold = n;
1064:   return(0);
1065: }

1069: /*@C
1070:    PCGAMGSetType - Set solution method

1072:    Collective on PC

1074:    Input Parameters:
1075: +  pc - the preconditioner context
1076: -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

1078:    Options Database Key:
1079: .  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply

1081:    Level: intermediate

1083:    Concepts: Unstructured multigrid preconditioner

1085: .seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
1086: @*/
1087: PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1088: {

1093:   PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
1094:   return(0);
1095: }

1099: /*@C
1100:    PCGAMGGetType - Get solution method

1102:    Collective on PC

1104:    Input Parameter:
1105: .  pc - the preconditioner context

1107:    Output Parameter:
1108: .  type - the type of algorithm used

1110:    Level: intermediate

1112:    Concepts: Unstructured multigrid preconditioner

1114: .seealso: PCGAMGSetType(), PCGAMGType
1115: @*/
1116: PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1117: {

1122:   PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));
1123:   return(0);
1124: }

1128: static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1129: {
1130:   PC_MG          *mg      = (PC_MG*)pc->data;
1131:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1134:   *type = pc_gamg->type;
1135:   return(0);
1136: }

1140: static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1141: {
1142:   PetscErrorCode ierr,(*r)(PC);
1143:   PC_MG          *mg      = (PC_MG*)pc->data;
1144:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1147:   pc_gamg->type = type;
1148:   PetscFunctionListFind(GAMGList,type,&r);
1149:   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1150:   if (pc_gamg->ops->destroy) {
1151:     (*pc_gamg->ops->destroy)(pc);
1152:     PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));
1153:     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1154:     /* cleaning up common data in pc_gamg - this should disappear someday */
1155:     pc_gamg->data_cell_cols = 0;
1156:     pc_gamg->data_cell_rows = 0;
1157:     pc_gamg->orig_data_cell_cols = 0;
1158:     pc_gamg->orig_data_cell_rows = 0;
1159:     PetscFree(pc_gamg->data);
1160:     pc_gamg->data_sz = 0;
1161:   }
1162:   PetscFree(pc_gamg->gamg_type_name);
1163:   PetscStrallocpy(type,&pc_gamg->gamg_type_name);
1164:   (*r)(pc);
1165:   return(0);
1166: }

1170: static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
1171: {
1173:   PC_MG          *mg      = (PC_MG*)pc->data;
1174:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1177:   PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");
1178:   PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);
1179:   if (pc_gamg->use_aggs_in_asm) {
1180:     PetscViewerASCIIPrintf(viewer,"      Using aggregates from coarsening process to define subdomains for PCASM\n");
1181:   }
1182:   if (pc_gamg->use_parallel_coarse_grid_solver) {
1183:     PetscViewerASCIIPrintf(viewer,"      Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");
1184:   }
1185:   if (pc_gamg->ops->view) {
1186:     (*pc_gamg->ops->view)(pc,viewer);
1187:   }
1188:   return(0);
1189: }

1193: PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
1194: {
1196:   PC_MG          *mg      = (PC_MG*)pc->data;
1197:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1198:   PetscBool      flag;
1199:   MPI_Comm       comm;
1200:   char           prefix[256];
1201:   const char     *pcpre;

1204:   PetscObjectGetComm((PetscObject)pc,&comm);
1205:   PetscOptionsHead(PetscOptionsObject,"GAMG options");
1206:   {
1207:     char tname[256];
1208:     PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);
1209:     if (flag) {
1210:       PCGAMGSetType(pc,tname);
1211:     }
1212:     PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);
1213:     PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);
1214:     PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation aggregates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);
1215:     PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);
1216:     PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);
1217:     PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);
1218:     PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);
1219:     PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);

1221:     /* set options for subtype */
1222:     if (pc_gamg->ops->setfromoptions) {(*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);}
1223:   }
1224:   PCGetOptionsPrefix(pc, &pcpre);
1225:   PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
1226:   PetscOptionsTail();
1227:   return(0);
1228: }

1230: /* -------------------------------------------------------------------------- */
1231: /*MC
1232:      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

1234:    Options Database Keys:
1235: +   -pc_gamg_type <type> - one of agg, geo, or classical
1236: .   -pc_gamg_repartition  <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
1237: .   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
1238: .   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
1239: .   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
1240:                                         equations on each process that has degrees of freedom
1241: .   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
1242: -   -pc_gamg_threshold <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph, thus reducing the coupling in the graph and producing a different (perhaps better) coarser set of points

1244:    Options Database Keys for default Aggregation:
1245: +  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation
1246: .  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
1247: -  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it

1249:    Multigrid options (inherited):
1250: +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1251: .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1252: .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1253: .  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade
1254: -  -pc_mg_levels <levels> - Number of levels of multigrid to use.


1257:   Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
1258: $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1259: $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1260: $       See the Users Manual Chapter 4 for more details

1262:   Level: intermediate

1264:   Concepts: algebraic multigrid

1266: .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
1267:            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType(), PCGAMGSetReuseInterpolation()
1268: M*/
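/* Example usage (editor's sketch, not part of the original source): a typical PCGAMG setup for a
   3D elasticity problem with 3 degrees of freedom per grid point.  'A', 'b', 'x', 'nloc' and
   'coords' are assumed to exist in the calling code; error checking is omitted as elsewhere in
   this listing.

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGAMG);
     PCGAMGSetType(pc,PCGAMGAGG);          (the default: smoothed aggregation)
     MatSetBlockSize(A,3);                 (dofs per grid point, see Notes above)
     PCSetCoordinates(pc,3,nloc,coords);   (builds the rigid body modes / near null space)
     KSPSetFromOptions(ksp);               (e.g. -pc_gamg_threshold 0.01 -pc_gamg_process_eq_limit 100)
     KSPSolve(ksp,b,x);
*/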

1272: PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1273: {
1275:   PC_GAMG        *pc_gamg;
1276:   PC_MG          *mg;

1279:    /* register AMG type */
1280:   PCGAMGInitializePackage();

1282:   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1283:   PCSetType(pc, PCMG);
1284:   PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);

1286:   /* create a supporting struct and attach it to pc */
1287:   PetscNewLog(pc,&pc_gamg);
1288:   PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);
1289:   mg           = (PC_MG*)pc->data;
1290:   mg->innerctx = pc_gamg;

1292:   PetscNewLog(pc,&pc_gamg->ops);

1294:   pc_gamg->setup_count = 0;
1295:   /* these should be in subctx but repartitioning needs simple arrays */
1296:   pc_gamg->data_sz = 0;
1297:   pc_gamg->data    = 0;

1299:   /* override the function pointers of the base class PCMG with the PCGAMG versions */
1300:   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1301:   pc->ops->setup          = PCSetUp_GAMG;
1302:   pc->ops->reset          = PCReset_GAMG;
1303:   pc->ops->destroy        = PCDestroy_GAMG;
1304:   mg->view                = PCView_GAMG;

1306:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);
1307:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);
1308:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);
1309:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);
1310:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);
1311:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);
1312:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);
1313:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);
1314:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);
1315:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);
1316:   pc_gamg->repart           = PETSC_FALSE;
1317:   pc_gamg->reuse_prol       = PETSC_FALSE;
1318:   pc_gamg->use_aggs_in_asm  = PETSC_FALSE;
1319:   pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
1320:   pc_gamg->min_eq_proc      = 50;
1321:   pc_gamg->coarse_eq_limit  = 50;
1322:   pc_gamg->threshold        = 0.;
1323:   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1324:   pc_gamg->current_level    = 0; /* don't need to init really */
1325:   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

1327:   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1328:   PCGAMGSetType(pc,PCGAMGAGG);
1329:   return(0);
1330: }

1334: /*@C
1335:  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1336:     from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1337:     when using static libraries.

1339:  Level: developer

1341:  .keywords: PC, PCGAMG, initialize, package
1342:  .seealso: PetscInitialize()
1343: @*/
1344: PetscErrorCode PCGAMGInitializePackage(void)
1345: {

1349:   if (PCGAMGPackageInitialized) return(0);
1350:   PCGAMGPackageInitialized = PETSC_TRUE;
1351:   PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);
1352:   PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);
1353:   PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);
1354:   PetscRegisterFinalize(PCGAMGFinalizePackage);

1356:   /* general events */
1357:   PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);
1358:   PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);
1359:   PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1360:   PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1361:   PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1362:   PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1363:   PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);

1365: #if defined PETSC_GAMG_USE_LOG
1366:   PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1367:   PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1368:   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1369:   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1370:   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1371:   PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1372:   PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1373:   PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1374:   PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1375:   PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1376:   PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1377:   PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1378:   PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1379:   PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1380:   PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1381:   PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1382:   PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);

1384:   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1385:   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1386:   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1387:   /* create timer stages */
1388: #if defined GAMG_STAGES
1389:   {
1390:     char     str[32];
1391:     PetscInt lidx;
1392:     sprintf(str,"MG Level %d (finest)",0);
1393:     PetscLogStageRegister(str, &gamg_stages[0]);
1394:     for (lidx=1; lidx<9; lidx++) {
1395:       sprintf(str,"MG Level %d",lidx);
1396:       PetscLogStageRegister(str, &gamg_stages[lidx]);
1397:     }
1398:   }
1399: #endif
1400: #endif
1401:   return(0);
1402: }

1406: /*@C
1407:  PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
1408:     called from PetscFinalize() automatically.

1410:  Level: developer

1412:  .keywords: Petsc, destroy, package
1413:  .seealso: PetscFinalize()
1414: @*/
1415: PetscErrorCode PCGAMGFinalizePackage(void)
1416: {

1420:   PCGAMGPackageInitialized = PETSC_FALSE;
1421:   PetscFunctionListDestroy(&GAMGList);
1422:   return(0);
1423: }

1427: /*@C
1428:  PCGAMGRegister - Register a PCGAMG implementation.

1430:  Input Parameters:
1431:  + type - string that will be used as the name of the GAMG type.
1432:  - create - function for creating the gamg context.

1434:   Level: advanced

1436:  .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
1437: @*/
1438: PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1439: {

1443:   PCGAMGInitializePackage();
1444:   PetscFunctionListAdd(&GAMGList,type,create);
1445:   return(0);
1446: }
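
/* Example (editor's sketch, not part of the original source): registering a custom GAMG variant.
   'PCCreateGAMG_MyAgg' is a hypothetical user-supplied creation routine; like PCCreateGAMG_AGG it
   must fill in pc_gamg->ops (graph, coarsen, prolongator, ...) and the data-creation hooks.

     extern PetscErrorCode PCCreateGAMG_MyAgg(PC);

     PCGAMGRegister("myagg", PCCreateGAMG_MyAgg);
     ...
     PCSetType(pc, PCGAMG);
     PCGAMGSetType(pc, "myagg");            (or on the command line: -pc_gamg_type myagg)
*/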