Actual source code: gamg.c

petsc-master 2016-07-29
  1: /*
  2:  GAMG geometric-algebric multigrid PC - Mark Adams 2011
  3:  */
  4: #include <petsc/private/matimpl.h>
  5: #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
  6: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

  8: #if defined PETSC_GAMG_USE_LOG
  9: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
 10: #endif

 12: #if defined PETSC_USE_LOG
 13: PetscLogEvent PC_GAMGGraph_AGG;
 14: PetscLogEvent PC_GAMGGraph_GEO;
 15: PetscLogEvent PC_GAMGCoarsen_AGG;
 16: PetscLogEvent PC_GAMGCoarsen_GEO;
 17: PetscLogEvent PC_GAMGProlongator_AGG;
 18: PetscLogEvent PC_GAMGProlongator_GEO;
 19: PetscLogEvent PC_GAMGOptProlongator_AGG;
 20: #endif

 22: #define GAMG_MAXLEVELS 30

 24: /* #define GAMG_STAGES */
 25: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
 26: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
 27: #endif

 29: static PetscFunctionList GAMGList = 0;
 30: static PetscBool PCGAMGPackageInitialized;

 32: /* ----------------------------------------------------------------------------- */
 35: PetscErrorCode PCReset_GAMG(PC pc)
 36: {
 38:   PC_MG          *mg      = (PC_MG*)pc->data;
 39:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

 42:   if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
 43:   pc_gamg->data_sz = 0;
 44:   PetscFree(pc_gamg->orig_data);
 45:   return(0);
 46: }

 48: /* -------------------------------------------------------------------------- */
 49: /*
 50:    PCGAMGCreateLevel_GAMG: create coarse op with RAP.  repartition and/or reduce number
 51:      of active processors.

 53:    Input Parameter:
 54:    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
 55:           'pc_gamg->data_sz' are changed via repartitioning/reduction.
 56:    . Amat_fine - matrix on this fine (k) level
 57:    . cr_bs - coarse block size
 58:    In/Output Parameter:
 59:    . a_P_inout - prolongation operator to the next level (k-->k-1)
 60:    . a_nactive_proc - number of active procs
 61:    Output Parameter:
 62:    . a_Amat_crs - coarse matrix that is created (k-1)
 63: */

 67: static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm)
 68: {
 69:   PetscErrorCode  ierr;
 70:   PC_MG           *mg         = (PC_MG*)pc->data;
 71:   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
 72:   Mat             Cmat,Pold=*a_P_inout;
 73:   MPI_Comm        comm;
 74:   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
 75:   PetscInt        ncrs_eq,ncrs,f_bs;

 78:   PetscObjectGetComm((PetscObject)Amat_fine,&comm);
 79:   MPI_Comm_rank(comm, &rank);
 80:   MPI_Comm_size(comm, &size);
 81:   MatGetBlockSize(Amat_fine, &f_bs);
 82:   MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);

 84:   /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
 85:   MatGetLocalSize(Cmat, &ncrs_eq, NULL);
 86:   if (pc_gamg->data_cell_rows>0) {
 87:     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
 88:   } else {
 89:     PetscInt  bs;
 90:     MatGetBlockSize(Cmat, &bs);
 91:     ncrs = ncrs_eq/bs;
 92:   }

 94:   /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
 95:   {
 96:     PetscInt ncrs_eq_glob;
 97:     MatGetSize(Cmat, &ncrs_eq_glob, NULL);
 98:     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
 99:     if (!new_size) new_size = 1; /* not likely, possible? */
100:     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
101:   }
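  /* Worked example (illustrative numbers, not from the source): with
     ncrs_eq_glob = 10000 global equations and the default min_eq_proc = 50,
     new_size = 10000/50 = 200; if only 64 processes are currently active it
     is capped at nactive = 64 and no reduction occurs. */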

103:   if (Pcolumnperm) *Pcolumnperm = NULL;

105:   if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
106:   else {
107:     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
108:     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

110:     nloc_old = ncrs_eq/cr_bs;
111:     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
112: #if defined PETSC_GAMG_USE_LOG
113:     PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
114: #endif
115:     /* make 'is_eq_newproc' */
116:     PetscMalloc1(size, &counts);
117:     if (pc_gamg->repart) {
118:       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
119:       Mat adj;

121:       PetscInfo3(pc,"Repartition: size (active): %D --> %D, neq = %D\n",*a_nactive_proc,new_size,ncrs_eq);

123:       /* get 'adj' */
124:       if (cr_bs == 1) {
125:         MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
126:       } else {
127:         /* make a scalar matrix to partition (no Stokes here) */
128:         Mat               tMat;
129:         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
130:         const PetscScalar *vals;
131:         const PetscInt    *idx;
132:         PetscInt          *d_nnz, *o_nnz, M, N;
133:         static PetscInt   llev = 0;
134:         MatType           mtype;

136:         PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);
137:         MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);
138:         MatGetSize(Cmat, &M, &N);
139:         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
140:           MatGetRow(Cmat,Ii,&ncols,0,0);
141:           d_nnz[jj] = ncols/cr_bs;
142:           o_nnz[jj] = ncols/cr_bs;
143:           MatRestoreRow(Cmat,Ii,&ncols,0,0);
144:           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
145:           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
146:         }
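        /* Preallocation sketch (illustrative): with cr_bs = 3, a block row
           holding ncols = 27 scalar nonzeros estimates 27/3 = 9 block columns
           for both the diagonal and off-diagonal parts; the clamps above keep
           the estimates within the ncrs local and M/cr_bs - ncrs off-process
           column counts, since ncols counts both parts together. */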

148:         MatGetType(Amat_fine,&mtype);
149:         MatCreate(comm, &tMat);
150:         MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);
151:         MatSetType(tMat,mtype);
152:         MatSeqAIJSetPreallocation(tMat,0,d_nnz);
153:         MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
154:         PetscFree2(d_nnz,o_nnz);

156:         for (ii = Istart_crs; ii < Iend_crs; ii++) {
157:           PetscInt dest_row = ii/cr_bs;
158:           MatGetRow(Cmat,ii,&ncols,&idx,&vals);
159:           for (jj = 0; jj < ncols; jj++) {
160:             PetscInt    dest_col = idx[jj]/cr_bs;
161:             PetscScalar v        = 1.0;
162:             MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
163:           }
164:           MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
165:         }
166:         MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
167:         MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);
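        /* tMat is now the scalar connectivity graph of Cmat: every nonzero
           (i,j) of Cmat adds 1.0 to entry (i/cr_bs, j/cr_bs), so a dense
           cr_bs x cr_bs block collapses to the single value cr_bs*cr_bs.
           Only the sparsity pattern matters to the partitioner below. */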

169:         if (llev++ == -1) {
170:           PetscViewer viewer; char fname[32];
171:           PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);
172:           PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
173:           MatView(tMat, viewer);
174:           PetscViewerDestroy(&viewer);
175:         }
176:         MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
177:         MatDestroy(&tMat);
178:       } /* create 'adj' */

180:       { /* partition: get newproc_idx */
181:         char            prefix[256];
182:         const char      *pcpre;
183:         const PetscInt  *is_idx;
184:         MatPartitioning mpart;
185:         IS              proc_is;
186:         PetscInt        targetPE;

188:         MatPartitioningCreate(comm, &mpart);
189:         MatPartitioningSetAdjacency(mpart, adj);
190:         PCGetOptionsPrefix(pc, &pcpre);
191:         PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
192:         PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
193:         MatPartitioningSetFromOptions(mpart);
194:         MatPartitioningSetNParts(mpart, new_size);
195:         MatPartitioningApply(mpart, &proc_is);
196:         MatPartitioningDestroy(&mpart);

198:         /* collect IS info */
199:         PetscMalloc1(ncrs_eq, &newproc_idx);
200:         ISGetIndices(proc_is, &is_idx);
201:         targetPE = 1; /* bring to "front" of machine */
202:         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
203:         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
204:           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
205:             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
206:           }
207:         }
208:         ISRestoreIndices(proc_is, &is_idx);
209:         ISDestroy(&proc_is);
210:       }
211:       MatDestroy(&adj);

213:       ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);
214:       PetscFree(newproc_idx);
215:     } else { /* simple aggregation of parts -- 'is_eq_newproc' */
216:       PetscInt rfactor,targetPE;

218:       /* find factor */
219:       if (new_size == 1) rfactor = size; /* easy */
220:       else {
221:         PetscReal best_fact = 0.;
222:         jj = -1;
223:         for (kk = 1 ; kk <= size ; kk++) {
224:           if (!(size%kk)) { /* a candidate */
225:             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
226:             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
227:             if (fact > best_fact) {
228:               best_fact = fact; jj = kk;
229:             }
230:           }
231:         }
232:         if (jj != -1) rfactor = jj;
233:         else rfactor = 1; /* defensive: kk=1 always divides size, so this should be unreachable */
234:       }
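      /* Worked example (illustrative): size = 12, new_size = 5.  The divisors
         kk = 1,2,3,4,6,12 give nactpe = 12,6,4,3,2,1 and, after folding into
         (0,1], fact = 0.42,0.83,0.80,0.60,0.40,0.20; the best is kk = 2, so
         rfactor = 2 and 12/2 = 6 processes stay active. */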
235:       new_size = size/rfactor;

237:       if (new_size==nactive) {
238:         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
239:         PetscFree(counts);
240:         PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);
241: #if defined PETSC_GAMG_USE_LOG
242:         PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
243: #endif
244:         return(0);
245:       }

247:       PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);
248:       targetPE = rank/rfactor;
249:       ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);
250:     } /* end simple 'is_eq_newproc' */

252:     /*
253:      Create an index set from is_eq_newproc that gives each equation its new global number (the mapping TO the new layout)
254:      */
255:     ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);
256:     is_eq_num_prim = is_eq_num;
257:     /*
258:       Determine how many equations/vertices are assigned to each processor
259:      */
260:     ISPartitioningCount(is_eq_newproc, size, counts);
261:     ncrs_eq_new = counts[rank];
262:     ISDestroy(&is_eq_newproc);
263:     ncrs_new = ncrs_eq_new/cr_bs; /* nodes */

265:     PetscFree(counts);
266: #if defined PETSC_GAMG_USE_LOG
267:     PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
268: #endif
269:     /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into one complex abstraction */
270:     {
271:     Vec            src_crd, dest_crd;
272:     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
273:     VecScatter     vecscat;
274:     PetscScalar    *array;
275:     IS isscat;

277:     /* move data (for primal equations only) */
278:     /* Create a vector to contain the newly ordered element information */
279:     VecCreate(comm, &dest_crd);
280:     VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);
281:     VecSetType(dest_crd,VECSTANDARD); /* this is needed! */
282:     /*
283:      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors
284:      as having a block size of 'node_data_sz').  Note, ISs are expanded into equation space by 'cr_bs'.
285:      */
286:     PetscMalloc1(ncrs*node_data_sz, &tidx);
287:     ISGetIndices(is_eq_num_prim, &idx);
288:     for (ii=0,jj=0; ii<ncrs; ii++) {
289:       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
290:       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
291:     }
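    /* Index recovery, illustrative: with cr_bs = 2 and node_data_sz = 6, a
       node whose first equation is renumbered to idx[ii*cr_bs] = 14 is node
       id = 14/2 = 7, and its data occupies tidx entries 42..47
       (id*node_data_sz + 0..5) in the new ordering. */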
292:     ISRestoreIndices(is_eq_num_prim, &idx);
293:     ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);
294:     PetscFree(tidx);
295:     /*
296:      Create a vector to contain the original vertex information for each element
297:      */
298:     VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);
299:     for (jj=0; jj<ndata_cols; jj++) {
300:       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
301:       for (ii=0; ii<ncrs; ii++) {
302:         for (kk=0; kk<ndata_rows; kk++) {
303:           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
304:           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
305:           VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);
306:         }
307:       }
308:     }
309:     VecAssemblyBegin(src_crd);
310:     VecAssemblyEnd(src_crd);
311:     /*
312:       Scatter the element vertex information (still in the original vertex ordering)
313:       to the correct processor
314:     */
315:     VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);
316:     ISDestroy(&isscat);
317:     VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
318:     VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
319:     VecScatterDestroy(&vecscat);
320:     VecDestroy(&src_crd);
321:     /*
322:       Put the element vertex data into a new allocation of the gdata->ele
323:     */
324:     PetscFree(pc_gamg->data);
325:     PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);

327:     pc_gamg->data_sz = node_data_sz*ncrs_new;
328:     strideNew        = ncrs_new*ndata_rows;

330:     VecGetArray(dest_crd, &array);
331:     for (jj=0; jj<ndata_cols; jj++) {
332:       for (ii=0; ii<ncrs_new; ii++) {
333:         for (kk=0; kk<ndata_rows; kk++) {
334:           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
335:           pc_gamg->data[ix] = PetscRealPart(array[jx]);
336:         }
337:       }
338:     }
339:     VecRestoreArray(dest_crd, &array);
340:     VecDestroy(&dest_crd);
341:     }
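    /* Layout note: pc_gamg->data stores the node data column-major over the
       ndata_cols data columns (stride ncrs_new*ndata_rows), while dest_crd
       interleaves all node_data_sz values of one node; the ix/jx pair in the
       copy-back loop above converts between the two orderings. */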
342:     /* move A and P (columns) with new layout */
343: #if defined PETSC_GAMG_USE_LOG
344:     PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
345: #endif

347:     /*
348:       Invert for MatGetSubMatrix
349:     */
350:     ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);
351:     ISSort(new_eq_indices); /* is this needed? */
352:     ISSetBlockSize(new_eq_indices, cr_bs);
353:     if (is_eq_num != is_eq_num_prim) {
354:       ISDestroy(&is_eq_num_prim); /* could be same as 'is_eq_num' */
355:     }
356:     if (Pcolumnperm) {
357:       PetscObjectReference((PetscObject)new_eq_indices);
358:       *Pcolumnperm = new_eq_indices;
359:     }
360:     ISDestroy(&is_eq_num);
361: #if defined PETSC_GAMG_USE_LOG
362:     PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
363:     PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
364: #endif
365:     /* 'a_Amat_crs' output */
366:     {
367:       Mat mat;
368:       MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);
369:       *a_Amat_crs = mat;
370:     }
371:     MatDestroy(&Cmat);

373: #if defined PETSC_GAMG_USE_LOG
374:     PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
375: #endif
376:     /* prolongator */
377:     {
378:       IS       findices;
379:       PetscInt Istart,Iend;
380:       Mat      Pnew;
381: 
382:       MatGetOwnershipRange(Pold, &Istart, &Iend);
383: #if defined PETSC_GAMG_USE_LOG
384:       PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
385: #endif
386:       ISCreateStride(comm,Iend-Istart,Istart,1,&findices);
387:       ISSetBlockSize(findices,f_bs);
388:       MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);
389:       ISDestroy(&findices);

391: #if defined PETSC_GAMG_USE_LOG
392:       PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
393: #endif
394:       MatDestroy(a_P_inout);

396:       /* output - repartitioned */
397:       *a_P_inout = Pnew;
398:     }
399:     ISDestroy(&new_eq_indices);

401:     *a_nactive_proc = new_size; /* output */
402:   }
403:   return(0);
404: }

406: /* -------------------------------------------------------------------------- */
407: /*
408:    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
409:                     by setting data structures and options.

411:    Input Parameter:
412: .  pc - the preconditioner context

414: */
417: PetscErrorCode PCSetUp_GAMG(PC pc)
418: {
420:   PC_MG          *mg      = (PC_MG*)pc->data;
421:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
422:   Mat            Pmat     = pc->pmat;
423:   PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
424:   MPI_Comm       comm;
425:   PetscMPIInt    rank,size,nactivepe;
426:   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
427:   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
428:   PetscLogDouble nnz0=0.,nnztot=0.;
429:   MatInfo        info;

432:   PetscObjectGetComm((PetscObject)pc,&comm);
433:   MPI_Comm_rank(comm,&rank);
434:   MPI_Comm_size(comm,&size);

436:   if (pc_gamg->setup_count++ > 0) {
437:     if ((PetscBool)(!pc_gamg->reuse_prol)) {
438:       /* reset everything */
439:       PCReset_MG(pc);
440:       pc->setupcalled = 0;
441:     } else {
442:       PC_MG_Levels **mglevels = mg->levels;
443:       /* just do Galerkin grids */
444:       Mat          B,dA,dB;

446:       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
447:       if (pc_gamg->Nlevels > 1) {
448:         /* currently only handle case where mat and pmat are the same on coarser levels */
449:         KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);
450:         /* (re)set to get dirty flag */
451:         KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);

453:         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
454:           /* the first time through, the matrix structure has changed from repartitioning */
455:           if (pc_gamg->setup_count==2) {
456:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
457:             MatDestroy(&mglevels[level]->A);

459:             mglevels[level]->A = B;
460:           } else {
461:             KSPGetOperators(mglevels[level]->smoothd,NULL,&B);
462:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
463:           }
464:           KSPSetOperators(mglevels[level]->smoothd,B,B);
465:           dB   = B;
466:         }
467:       }

469:       PCSetUp_MG(pc);
470:       return(0);
471:     }
472:   }

474:   if (!pc_gamg->data) {
475:     if (pc_gamg->orig_data) {
476:       MatGetBlockSize(Pmat, &bs);
477:       MatGetLocalSize(Pmat, &qq, NULL);

479:       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
480:       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
481:       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

483:       PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);
484:       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
485:     } else {
486:       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
487:       pc_gamg->ops->createdefaultdata(pc,Pmat);
488:     }
489:   }

491:   /* cache original data for reuse */
492:   if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
493:     PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);
494:     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
495:     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
496:     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
497:   }

499:   /* get basic dims */
500:   MatGetBlockSize(Pmat, &bs);
501:   MatGetSize(Pmat, &M, &qq);

503:   MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info); /* global reduction */
504:   nnz0   = info.nz_used;
505:   nnztot = info.nz_used;
506:   PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);

508:   /* Get A_i and R_i */
509:   for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
510:     pc_gamg->current_level = level;
511:     level1 = level + 1;
512: #if defined PETSC_GAMG_USE_LOG
513:     PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
514: #if (defined GAMG_STAGES)
515:     PetscLogStagePush(gamg_stages[level]);
516: #endif
517: #endif
518:     { /* construct prolongator */
519:       Mat              Gmat;
520:       PetscCoarsenData *agg_lists;
521:       Mat              Prol11;

523:       pc_gamg->ops->graph(pc,Aarr[level], &Gmat);
524:       pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);
525:       pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);

527:       /* could have failed to create new level */
528:       if (Prol11) {
529:         /* get new block size of coarse matrices */
530:         MatGetBlockSizes(Prol11, NULL, &bs);

532:         if (pc_gamg->ops->optprolongator) {
533:           /* smooth */
534:           pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);
535:         }

537:         Parr[level1] = Prol11;
538:       } else Parr[level1] = NULL;

540:       if (pc_gamg->use_aggs_in_gasm && Prol11) { /* guard: Prol11 can be NULL if coarsening failed */
541:         PetscInt bs;
542:         MatGetBlockSizes(Prol11, &bs, NULL);
543:         PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);
544:       }

546:       MatDestroy(&Gmat);
547:       PetscCDDestroy(agg_lists);
548:     } /* construct prolongator scope */
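    /* The graph/coarsen/prolongator/optprolongator hooks used above are
       installed by the GAMG subtype (e.g. PCCreateGAMG_AGG, registered in
       PCGAMGInitializePackage() below), so this loop is the common driver
       for every -pc_gamg_type implementation. */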
549: #if defined PETSC_GAMG_USE_LOG
550:     PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
551: #endif
552:     if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
553:     if (!Parr[level1]) {
554:       PetscInfo1(pc,"Stop gridding, level %D\n",level);
555: #if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
556:       PetscLogStagePop();
557: #endif
558:       break;
559:     }
560: #if defined PETSC_GAMG_USE_LOG
561:     PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
562: #endif

564:     pc_gamg->ops->createlevel(pc, Aarr[level], bs,&Parr[level1], &Aarr[level1], &nactivepe, NULL);

566: #if defined PETSC_GAMG_USE_LOG
567:     PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
568: #endif
569:     MatGetSize(Aarr[level1], &M, &qq);
570:     MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);
571:     nnztot += info.nz_used;
572:     PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);

574: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
575:     PetscLogStagePop();
576: #endif
577:     /* stop if one node or one proc -- could pull back for singular problems */
578:     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
579:       PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);
580:       level++;
581:       break;
582:     }
583:   } /* levels */
584:   PetscFree(pc_gamg->data);

586:   PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);
587:   pc_gamg->Nlevels = level + 1;
588:   fine_level       = level;
589:   PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);

591:   if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
592:     /* set default smoothers & set operators */
593:     for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
594:       KSP smoother;
595:       PC  subpc;

597:       PCMGGetSmoother(pc, lidx, &smoother);
598:       KSPGetPC(smoother, &subpc);

600:       KSPSetNormType(smoother, KSP_NORM_NONE);
601:       /* set ops */
602:       KSPSetOperators(smoother, Aarr[level], Aarr[level]);
603:       PCMGSetInterpolation(pc, lidx, Parr[level+1]);

605:       /* set defaults */
606:       KSPSetType(smoother, KSPCHEBYSHEV);

608:       /* set blocks for GASM smoother that uses the 'aggregates' */
609:       if (pc_gamg->use_aggs_in_gasm) {
610:         PetscInt sz;
611:         IS       *is;

613:         sz   = nASMBlocksArr[level];
614:         is   = ASMLocalIDsArr[level];
615:         PCSetType(subpc, PCGASM);
616:         PCGASMSetOverlap(subpc, 0);
617:         if (!sz) {
618:           IS       is;
619:           PetscInt my0,kk;
620:           MatGetOwnershipRange(Aarr[level], &my0, &kk);
621:           ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);
622:           PCGASMSetSubdomains(subpc, 1, &is, NULL);
623:           ISDestroy(&is);
624:         } else {
625:           PetscInt kk;
626:           PCGASMSetSubdomains(subpc, sz, is, NULL);
627:           for (kk=0; kk<sz; kk++) {
628:             ISDestroy(&is[kk]);
629:           }
630:           PetscFree(is);
631:         }
632:         ASMLocalIDsArr[level] = NULL;
633:         nASMBlocksArr[level]  = 0;
634:         PCGASMSetType(subpc, PC_GASM_BASIC);
635:       } else {
636:         PCSetType(subpc, PCSOR);
637:       }
638:     }
639:     {
640:       /* coarse grid */
641:       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
642:       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
643:       PCMGGetSmoother(pc, lidx, &smoother);
644:       KSPSetOperators(smoother, Lmat, Lmat);
645:       KSPSetNormType(smoother, KSP_NORM_NONE);
646:       KSPGetPC(smoother, &subpc);
647:       PCSetType(subpc, PCBJACOBI);
648:       PCSetUp(subpc);
649:       PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);
650:       if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
651:       KSPGetPC(k2[0],&pc2);
652:       PCSetType(pc2, PCLU);
653:       PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);
654:       KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);
655:       KSPSetType(k2[0], KSPPREONLY);
656:       /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
657:        * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
658:        * view every subdomain as though they were different. */
659:       ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
660:     }

662:     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
663:     PetscObjectOptionsBegin((PetscObject)pc);
664:     PCSetFromOptions_MG(PetscOptionsObject,pc);
665:     PetscOptionsEnd();
666:     if (!mg->galerkin) SETERRQ(comm,PETSC_ERR_USER,"PCGAMG must use Galerkin for coarse operators.");
667:     if (mg->galerkin == 1) mg->galerkin = 2;

669:     /* clean up */
670:     for (level=1; level<pc_gamg->Nlevels; level++) {
671:       MatDestroy(&Parr[level]);
672:       MatDestroy(&Aarr[level]);
673:     }
674:     PCSetUp_MG(pc);
675:   } else {
676:     KSP smoother;
677:     PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");
678:     PCMGGetSmoother(pc, 0, &smoother);
679:     KSPSetOperators(smoother, Aarr[0], Aarr[0]);
680:     KSPSetType(smoother, KSPPREONLY);
681:     PCSetUp_MG(pc);
682:   }
683:   return(0);
684: }

686: /* ------------------------------------------------------------------------- */
687: /*
688:  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
689:    that was created with PCCreate_GAMG().

691:    Input Parameter:
692: .  pc - the preconditioner context

694:    Application Interface Routine: PCDestroy()
695: */
698: PetscErrorCode PCDestroy_GAMG(PC pc)
699: {
701:   PC_MG          *mg     = (PC_MG*)pc->data;
702:   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

705:   PCReset_GAMG(pc);
706:   if (pc_gamg->ops->destroy) {
707:     (*pc_gamg->ops->destroy)(pc);
708:   }
709:   PetscRandomDestroy(&pc_gamg->random);
710:   PetscFree(pc_gamg->ops);
711:   PetscFree(pc_gamg->gamg_type_name);
712:   PetscFree(pc_gamg);
713:   PCDestroy_MG(pc);
714:   return(0);
715: }

719: /*@
720:    PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via processor reduction.

722:    Logically Collective on PC

724:    Input Parameters:
725: +  pc - the preconditioner context
726: -  n - the number of equations


729:    Options Database Key:
730: .  -pc_gamg_process_eq_limit <limit>

732:    Level: intermediate

734:    Concepts: Unstructured multigrid preconditioner

736: .seealso: PCGAMGSetCoarseEqLim()
737: @*/
738: PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
739: {

744:   PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
745:   return(0);
746: }
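/* Usage sketch (illustrative, not part of the library source): given a KSP
   'ksp', aim for roughly 1000 equations per process on coarse grids,

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGAMG);
     PCGAMGSetProcEqLim(pc,1000);

   or equivalently pass -pc_gamg_process_eq_limit 1000 on the command line. */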

750: static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
751: {
752:   PC_MG   *mg      = (PC_MG*)pc->data;
753:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

756:   if (n>0) pc_gamg->min_eq_proc = n;
757:   return(0);
758: }

762: /*@
763:    PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

765:  Collective on PC

767:    Input Parameters:
768: +  pc - the preconditioner context
769: -  n - maximum number of equations to aim for

771:    Options Database Key:
772: .  -pc_gamg_coarse_eq_limit <limit>

774:    Level: intermediate

776:    Concepts: Unstructured multigrid preconditioner

778: .seealso: PCGAMGSetProcEqLim()
779: @*/
780: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
781: {

786:   PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
787:   return(0);
788: }

792: static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
793: {
794:   PC_MG   *mg      = (PC_MG*)pc->data;
795:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

798:   if (n>0) pc_gamg->coarse_eq_limit = n;
799:   return(0);
800: }

804: /*@
805:    PCGAMGSetRepartitioning - Repartition the coarse grids

807:    Collective on PC

809:    Input Parameters:
810: +  pc - the preconditioner context
811: -  n - PETSC_TRUE or PETSC_FALSE

813:    Options Database Key:
814: .  -pc_gamg_repartition <true,false>

816:    Level: intermediate

818:    Concepts: Unstructured multigrid preconditioner

820: .seealso: ()
821: @*/
822: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
823: {

828:   PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
829:   return(0);
830: }

834: static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
835: {
836:   PC_MG   *mg      = (PC_MG*)pc->data;
837:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

840:   pc_gamg->repart = n;
841:   return(0);
842: }

846: /*@
847:    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner

849:    Collective on PC

851:    Input Parameters:
852: +  pc - the preconditioner context
853: -  n - PETSC_TRUE or PETSC_FALSE

855:    Options Database Key:
856: .  -pc_gamg_reuse_interpolation <true,false>

858:    Level: intermediate

860:    Concepts: Unstructured multigrid preconditioner

862: .seealso: ()
863: @*/
864: PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
865: {

870:   PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));
871:   return(0);
872: }

876: static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
877: {
878:   PC_MG   *mg      = (PC_MG*)pc->data;
879:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

882:   pc_gamg->reuse_prol = n;
883:   return(0);
884: }

888: /*@
889:    PCGAMGSetUseASMAggs - Use the aggregates from the coarsening process to define the subdomain blocks for the GASM smoother

891:    Collective on PC

893:    Input Parameters:
894: +  pc - the preconditioner context
895: -  n - PETSC_TRUE to use the aggregates


897:    Options Database Key:
898: .  -pc_gamg_use_agg_gasm

900:    Level: intermediate

902:    Concepts: Unstructured multigrid preconditioner

904: .seealso: ()
905: @*/
906: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
907: {

912:   PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
913:   return(0);
914: }

918: static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
919: {
920:   PC_MG   *mg      = (PC_MG*)pc->data;
921:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

924:   pc_gamg->use_aggs_in_gasm = n;
925:   return(0);
926: }

930: /*@
931:    PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use

933:    Not collective on PC

935:    Input Parameters:
936: +  pc - the preconditioner
937: -  n - the maximum number of levels to use

939:    Options Database Key:
940: .  -pc_mg_levels <n>

942:    Level: intermediate

944:    Concepts: Unstructured multigrid preconditioner

946: .seealso: ()
947: @*/
948: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
949: {

954:   PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
955:   return(0);
956: }

960: static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
961: {
962:   PC_MG   *mg      = (PC_MG*)pc->data;
963:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

966:   pc_gamg->Nlevels = n;
967:   return(0);
968: }

972: /*@
973:    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

975:    Not collective on PC

977:    Input Parameters:
978: +  pc - the preconditioner context
979: -  threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph

981:    Options Database Key:
982: .  -pc_gamg_threshold <threshold>

984:    Level: intermediate

986:    Concepts: Unstructured multigrid preconditioner

988: .seealso: ()
989: @*/
990: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
991: {

996:   PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
997:   return(0);
998: }
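/* Usage sketch (illustrative value): the docstring above defines 0.0 as
   "keep all nonzero entries" and negative as "keep even zero entries"; a
   small positive value drops weak couplings from the aggregation graph,

     PCGAMGSetThreshold(pc,0.05);    (or -pc_gamg_threshold 0.05)
*/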

1002: static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1003: {
1004:   PC_MG   *mg      = (PC_MG*)pc->data;
1005:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1008:   pc_gamg->threshold = n;
1009:   return(0);
1010: }

1014: /*@
1015:    PCGAMGSetType - Set solution method

1017:    Collective on PC

1019:    Input Parameters:
1020: +  pc - the preconditioner context
1021: -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

1023:    Options Database Key:
1024: .  -pc_gamg_type <agg,geo,classical>

1026:    Level: intermediate

1028:    Concepts: Unstructured multigrid preconditioner

1030: .seealso: PCGAMGGetType(), PCGAMG
1031: @*/
1032: PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1033: {

1038:   PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
1039:   return(0);
1040: }
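/* Usage sketch (illustrative): select the smoothed-aggregation implementation,

     PCGAMGSetType(pc,PCGAMGAGG);    (or -pc_gamg_type agg)

   PCGAMGAGG, PCGAMGGEO, and PCGAMGCLASSICAL are the types registered in
   PCGAMGInitializePackage() below. */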

1044: /*@
1045:    PCGAMGGetType - Get solution method

1047:    Collective on PC

1049:    Input Parameter:
1050: .  pc - the preconditioner context

1052:    Output Parameter:
1053: .  type - the type of algorithm used

1055:    Level: intermediate

1057:    Concepts: Unstructured multigrid preconditioner

1059: .seealso: PCGAMGSetType(), PCGAMGType
1060: @*/
1061: PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1062: {

1067:   PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));
1068:   return(0);
1069: }

1073: static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1074: {
1075:   PC_MG          *mg      = (PC_MG*)pc->data;
1076:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1079:   *type = pc_gamg->type;
1080:   return(0);
1081: }

1085: static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1086: {
1087:   PetscErrorCode ierr,(*r)(PC);
1088:   PC_MG          *mg      = (PC_MG*)pc->data;
1089:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1092:   pc_gamg->type = type;
1093:   PetscFunctionListFind(GAMGList,type,&r);
1094:   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1095:   if (pc_gamg->ops->destroy) {
1096:     (*pc_gamg->ops->destroy)(pc);
1097:     PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));
1098:     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1099:     /* cleaning up common data in pc_gamg - this should disappear someday */
1100:     pc_gamg->data_cell_cols = 0;
1101:     pc_gamg->data_cell_rows = 0;
1102:     pc_gamg->orig_data_cell_cols = 0;
1103:     pc_gamg->orig_data_cell_rows = 0;
1104:     PetscFree(pc_gamg->data);
1105:     pc_gamg->data_sz = 0;
1106:   }
1107:   PetscFree(pc_gamg->gamg_type_name);
1108:   PetscStrallocpy(type,&pc_gamg->gamg_type_name);
1109:   (*r)(pc);
1110:   return(0);
1111: }

1115: static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
1116: {
1118:   PC_MG          *mg      = (PC_MG*)pc->data;
1119:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1122:   PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");
1123:   PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);
1124:   if (pc_gamg->ops->view) {
1125:     (*pc_gamg->ops->view)(pc,viewer);
1126:   }
1127:   return(0);
1128: }

1132: PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
1133: {
1135:   PC_MG          *mg      = (PC_MG*)pc->data;
1136:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1137:   PetscBool      flag;
1138:   MPI_Comm       comm;
1139:   char           prefix[256];
1140:   const char     *pcpre;

1143:   PetscObjectGetComm((PetscObject)pc,&comm);
1144:   PetscOptionsHead(PetscOptionsObject,"GAMG options");
1145:   {
1146:     char tname[256];
1147:     PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);
1148:     if (flag) {
1149:       PCGAMGSetType(pc,tname);
1150:     }
1151:     PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);
1152:     PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);
1153:     PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregation aggregates for GASM smoother","PCGAMGSetUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);
1154:     PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);
1155:     PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);
1156:     PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);
1157:     PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);

1159:     /* set options for subtype */
1160:     if (pc_gamg->ops->setfromoptions) {(*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);}
1161:   }
1162:   PCGetOptionsPrefix(pc, &pcpre);
1163:   PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
1164:   PetscObjectSetOptionsPrefix((PetscObject)pc_gamg->random,prefix);
1165:   PetscRandomSetFromOptions(pc_gamg->random);
1166:   PetscOptionsTail();
1167:   return(0);
1168: }

1170: /* -------------------------------------------------------------------------- */
1171: /*MC
1172:      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

1174:    Options Database Keys:
1175:    Multigrid options (inherited):
1176: +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1177: .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp())
1178: .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown())
1179: -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade


1182:   Notes: In order to obtain good performance from PCGAMG for vector-valued problems you must
1183: $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1184: $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1185: $       See the Users Manual Chapter 4 for more details

1187:   Level: intermediate

1189:   Concepts: algebraic multigrid

1191: .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
1192:            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
1193: M*/
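/* Minimal usage sketch for the notes above (illustrative; assumes a 3-D
   elasticity operator A with 3 dofs per grid point and user-supplied local
   coordinate data nloc/coords):

     KSP ksp; PC pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGAMG);
     MatSetBlockSize(A,3);                 -- dofs per grid point
     PCSetCoordinates(pc,3,nloc,coords);   -- builds the elasticity near null space
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
*/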

1197: PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1198: {
1200:   PC_GAMG        *pc_gamg;
1201:   PC_MG          *mg;

1204:   /* register AMG type */
1205:   PCGAMGInitializePackage();

1207:   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1208:   PCSetType(pc, PCMG);
1209:   PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);

1211:   /* create a supporting struct and attach it to pc */
1212:   PetscNewLog(pc,&pc_gamg);
1213:   mg           = (PC_MG*)pc->data;
1214:   mg->galerkin = 2;             /* Use Galerkin, but it is computed externally from PCMG by GAMG code */
1215:   mg->innerctx = pc_gamg;

1217:   PetscNewLog(pc,&pc_gamg->ops);

1219:   pc_gamg->setup_count = 0;
1220:   /* these should be in subctx but repartitioning needs simple arrays */
1221:   pc_gamg->data_sz = 0;
1222:   pc_gamg->data    = 0;

1224:   /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
1225:   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1226:   pc->ops->setup          = PCSetUp_GAMG;
1227:   pc->ops->reset          = PCReset_GAMG;
1228:   pc->ops->destroy        = PCDestroy_GAMG;
1229:   mg->view                = PCView_GAMG;

1231:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);
1232:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);
1233:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);
1234:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);
1235:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);
1236:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);
1237:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);
1238:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);
1239:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);
1240:   pc_gamg->repart           = PETSC_FALSE;
1241:   pc_gamg->reuse_prol       = PETSC_FALSE;
1242:   pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1243:   pc_gamg->min_eq_proc      = 50;
1244:   pc_gamg->coarse_eq_limit  = 50;
1245:   pc_gamg->threshold        = 0.;
1246:   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1247:   pc_gamg->current_level    = 0; /* don't need to init really */
1248:   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

1250:   PetscRandomCreate(PetscObjectComm((PetscObject)pc),&pc_gamg->random);

1252:   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1253:   PCGAMGSetType(pc,PCGAMGAGG);
1254:   return(0);
1255: }

1259: /*@C
1260:  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1261:     from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1262:     when using static libraries.

1264:  Level: developer

1266:  .keywords: PC, PCGAMG, initialize, package
1267:  .seealso: PetscInitialize()
1268: @*/
1269: PetscErrorCode PCGAMGInitializePackage(void)
1270: {

1274:   if (PCGAMGPackageInitialized) return(0);
1275:   PCGAMGPackageInitialized = PETSC_TRUE;
1276:   PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);
1277:   PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);
1278:   PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);
1279:   PetscRegisterFinalize(PCGAMGFinalizePackage);

1281:   /* general events */
1282:   PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);
1283:   PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);
1284:   PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1285:   PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1286:   PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1287:   PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1288:   PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);

1290: #if defined PETSC_GAMG_USE_LOG
1291:   PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1292:   PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1293:   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1294:   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1295:   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1296:   PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1297:   PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1298:   PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1299:   PetscLogEventRegister("    search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1300:   PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1301:   PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1302:   PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1303:   PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1304:   PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1305:   PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1306:   PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1307:   PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);

1309:   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1310:   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1311:   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1312:   /* create timer stages */
1313: #if defined GAMG_STAGES
1314:   {
1315:     char     str[32];
1316:     PetscInt lidx;
1317:     sprintf(str,"MG Level %d (finest)",0);
1318:     PetscLogStageRegister(str, &gamg_stages[0]);
1319:     for (lidx=1; lidx<GAMG_MAXLEVELS; lidx++) {
1320:       sprintf(str,"MG Level %d",(int)lidx);
1321:       PetscLogStageRegister(str, &gamg_stages[lidx]);
1322:     }
1323:   }
1324: #endif
1325: #endif
1326:   return(0);
1327: }

1331: /*@C
1332:  PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
1333:     called from PetscFinalize() automatically.

1335:  Level: developer

1337:  .keywords: Petsc, destroy, package
1338:  .seealso: PetscFinalize()
1339: @*/
1340: PetscErrorCode PCGAMGFinalizePackage(void)
1341: {

1345:   PCGAMGPackageInitialized = PETSC_FALSE;
1346:   PetscFunctionListDestroy(&GAMGList);
1347:   return(0);
1348: }

1352: /*@C
1353:  PCGAMGRegister - Register a PCGAMG implementation.

1355:  Input Parameters:
1356:  + type - string that will be used as the name of the GAMG type.
1357:  - create - function for creating the gamg context.

1359:   Level: advanced

1361:  .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
1362: @*/
1363: PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1364: {

1368:   PCGAMGInitializePackage();
1369:   PetscFunctionListAdd(&GAMGList,type,create);
1370:   return(0);
1371: }
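
/* Registration sketch (hypothetical names): a user-defined GAMG flavor
   "mygamg" with creation routine PCCreateGAMG_My() would be made available
   to -pc_gamg_type via

     PCGAMGRegister("mygamg",PCCreateGAMG_My);

   called before the first PCGAMGSetType(pc,"mygamg"). */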