Actual source code: gamg.c

petsc-master 2015-02-27
  1: /*
  2:  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
  3:  */
  4: #include <petsc-private/matimpl.h>
  5: #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
  6: #include <petsc-private/kspimpl.h>
  7: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

  9: #if defined PETSC_GAMG_USE_LOG
 10: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
 11: #endif

 13: #if defined PETSC_USE_LOG
 14: PetscLogEvent PC_GAMGGgraph_AGG;
 15: PetscLogEvent PC_GAMGGgraph_GEO;
 16: PetscLogEvent PC_GAMGCoarsen_AGG;
 17: PetscLogEvent PC_GAMGCoarsen_GEO;
 18: PetscLogEvent PC_GAMGProlongator_AGG;
 19: PetscLogEvent PC_GAMGProlongator_GEO;
 20: PetscLogEvent PC_GAMGOptprol_AGG;
 21: #endif

 23: #define GAMG_MAXLEVELS 30

 25: /* #define GAMG_STAGES */
 26: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
 27: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
 28: #endif

 30: static PetscFunctionList GAMGList = 0;
 31: static PetscBool PCGAMGPackageInitialized;

 33: /* ----------------------------------------------------------------------------- */
 36: PetscErrorCode PCReset_GAMG(PC pc)
 37: {
 39:   PC_MG          *mg      = (PC_MG*)pc->data;
 40:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

 43:   if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
 44:     PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
 45:     PetscFree(pc_gamg->data);
 46:   }
 47:   pc_gamg->data = NULL; pc_gamg->data_sz = 0;

 49:   if (pc_gamg->orig_data) {
 50:     PetscFree(pc_gamg->orig_data);
 51:   }
 52:   return(0);
 53: }

 55: /* -------------------------------------------------------------------------- */
 56: /*
 57:    PCGAMGCreateLevel_GAMG: create coarse op with RAP.  repartition and/or reduce number
 58:      of active processors.

 60:    Input Parameter:
 61:    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
 62:           'pc_gamg->data_sz' are changed via repartitioning/reduction.
 63:    . Amat_fine - matrix on this fine (k) level
 64:    . cr_bs - coarse block size
 65:    In/Output Parameter:
 66:    . a_P_inout - prolongation operator to the next level (k-->k-1)
 67:    . a_nactive_proc - number of active procs
 68:    Output Parameter:
 69:    . a_Amat_crs - coarse matrix that is created (k-1)
 70: */

 74: static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,
 75:                                   Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,
 76:                                   IS * Pcolumnperm)
 77: {
 78:   PetscErrorCode  ierr;
 79:   PC_MG           *mg         = (PC_MG*)pc->data;
 80:   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
 81:   Mat             Cmat,Pold=*a_P_inout;
 82:   MPI_Comm        comm;
 83:   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
 84:   PetscInt        ncrs_eq,ncrs,f_bs;

 87:   PetscObjectGetComm((PetscObject)Amat_fine,&comm);
 88:   MPI_Comm_rank(comm, &rank);
 89:   MPI_Comm_size(comm, &size);
 90:   MatGetBlockSize(Amat_fine, &f_bs);
 91:   /* RAP */
 92:   MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);

 94:   /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
 95:   MatGetLocalSize(Cmat, &ncrs_eq, NULL);
 96:   if (pc_gamg->data_cell_rows>0) {
 97:     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
 98:   }
 99:   else {
100:     PetscInt  bs;
101:     MatGetBlockSize(Cmat, &bs);
102:     ncrs = ncrs_eq/bs;
103:   }

105:   /* get number of PEs to make active, 'new_size'; reduction can be to any integer in [1,P] */
106:   {
107:     PetscInt ncrs_eq_glob;
108:     MatGetSize(Cmat, &ncrs_eq_glob, NULL);
109:     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
110:     if (new_size == 0) new_size = 1; /* not likely, possible? */
111:     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
112:   }

114:   if (Pcolumnperm) *Pcolumnperm = NULL;

116:   if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
117:   else {
118:     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
119:     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

121:     nloc_old = ncrs_eq/cr_bs;
122:     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
123: #if defined PETSC_GAMG_USE_LOG
124:     PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
125: #endif
126:     /* make 'is_eq_newproc' */
127:     PetscMalloc1(size, &counts);
128:     if (pc_gamg->repart) {
129:       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
130:       Mat adj;

132:       if (pc_gamg->verbose>0) {
133:         if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
134:         else {
135:           PetscInt n;
136:           MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);
137:           PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
138:         }
139:       }

141:       /* get 'adj' */
142:       if (cr_bs == 1) {
143:         MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
144:       } else {
145:         /* make a scalar matrix to partition (no Stokes here) */
146:         Mat               tMat;
147:         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
148:         const PetscScalar *vals;
149:         const PetscInt    *idx;
150:         PetscInt          *d_nnz, *o_nnz, M, N;
151:         static PetscInt   llev = 0;

153:         PetscMalloc1(ncrs, &d_nnz);
154:         PetscMalloc1(ncrs, &o_nnz);
155:         MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);
156:         MatGetSize(Cmat, &M, &N);
157:         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
158:           MatGetRow(Cmat,Ii,&ncols,0,0);
159:           d_nnz[jj] = ncols/cr_bs;
160:           o_nnz[jj] = ncols/cr_bs;
161:           MatRestoreRow(Cmat,Ii,&ncols,0,0);
162:           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
163:           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
164:         }

166:         MatCreate(comm, &tMat);
167:         MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);
168:         MatSetType(tMat,MATAIJ);
169:         MatSeqAIJSetPreallocation(tMat,0,d_nnz);
170:         MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
171:         PetscFree(d_nnz);
172:         PetscFree(o_nnz);

174:         for (ii = Istart_crs; ii < Iend_crs; ii++) {
175:           PetscInt dest_row = ii/cr_bs;
176:           MatGetRow(Cmat,ii,&ncols,&idx,&vals);
177:           for (jj = 0; jj < ncols; jj++) {
178:             PetscInt    dest_col = idx[jj]/cr_bs;
179:             PetscScalar v        = 1.0;
180:             MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
181:           }
182:           MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
183:         }
184:         MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
185:         MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);

187:         if (llev++ == -1) {
188:           PetscViewer viewer; char fname[32];
189:           PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);
190:           PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
191:           MatView(tMat, viewer);
192:           PetscViewerDestroy(&viewer);
193:         }

195:         MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);

197:         MatDestroy(&tMat);
198:       } /* create 'adj' */

200:       { /* partition: get newproc_idx */
201:         char            prefix[256];
202:         const char      *pcpre;
203:         const PetscInt  *is_idx;
204:         MatPartitioning mpart;
205:         IS              proc_is;
206:         PetscInt        targetPE;

208:         MatPartitioningCreate(comm, &mpart);
209:         MatPartitioningSetAdjacency(mpart, adj);
210:         PCGetOptionsPrefix(pc, &pcpre);
211:         PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
212:         PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
213:         MatPartitioningSetFromOptions(mpart);
214:         MatPartitioningSetNParts(mpart, new_size);
215:         MatPartitioningApply(mpart, &proc_is);
216:         MatPartitioningDestroy(&mpart);

218:         /* collect IS info */
219:         PetscMalloc1(ncrs_eq, &newproc_idx);
220:         ISGetIndices(proc_is, &is_idx);
221:         targetPE = 1; /* bring to "front" of machine */
222:         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
223:         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
224:           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
225:             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
226:           }
227:         }
228:         ISRestoreIndices(proc_is, &is_idx);
229:         ISDestroy(&proc_is);
230:       }
231:       MatDestroy(&adj);

233:       ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);
234:       if (newproc_idx != 0) {
235:         PetscFree(newproc_idx);
236:       }
237:     } else { /* simple aggregation of parts -- 'is_eq_newproc' */

239:       PetscInt rfactor,targetPE;
240:       /* find factor */
241:       if (new_size == 1) rfactor = size; /* easy */
242:       else {
243:         PetscReal best_fact = 0.;
244:         jj = -1;
245:         for (kk = 1 ; kk <= size ; kk++) {
246:           if (size%kk==0) { /* a candidate */
247:             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
248:             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
249:             if (fact > best_fact) {
250:               best_fact = fact; jj = kk;
251:             }
252:           }
253:         }
254:         if (jj != -1) rfactor = jj;
255:         else rfactor = 1; /* can this happen? e.g., size is prime */
256:       }
257:       new_size = size/rfactor;

259:       if (new_size==nactive) {
260:         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
261:         PetscFree(counts);
262:         if (pc_gamg->verbose>0) {
263:           PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
264:         }
265:         return(0);
266:       }

268:       if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
269:       targetPE = rank/rfactor;
270:       ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);
271:     } /* end simple 'is_eq_newproc' */

273:     /*
274:      Create an index set from the is_eq_newproc index set to indicate the mapping TO
275:      */
276:     ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);
277:     is_eq_num_prim = is_eq_num;
278:     /*
279:       Determine how many equations/vertices are assigned to each processor
280:      */
281:     ISPartitioningCount(is_eq_newproc, size, counts);
282:     ncrs_eq_new = counts[rank];
283:     ISDestroy(&is_eq_newproc);
284:     ncrs_new = ncrs_eq_new/cr_bs; /* eqs */

286:     PetscFree(counts);
287: #if defined PETSC_GAMG_USE_LOG
288:     PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
289: #endif
290:     /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
291:     {
292:     Vec            src_crd, dest_crd;
293:     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
294:     VecScatter     vecscat;
295:     PetscScalar    *array;
296:     IS isscat;

298:     /* move data (for primal equations only) */
299:     /* Create a vector to contain the newly ordered element information */
300:     VecCreate(comm, &dest_crd);
301:     VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);
302:     VecSetType(dest_crd,VECSTANDARD); /* this is needed! */
303:     /*
304:      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
305:      a block size of ...).  Note, ISs are expanded into equation space by 'cr_bs'.
306:      */
307:     PetscMalloc1(ncrs*node_data_sz, &tidx);
308:     ISGetIndices(is_eq_num_prim, &idx);
309:     for (ii=0,jj=0; ii<ncrs; ii++) {
310:       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
311:       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
312:     }
313:     ISRestoreIndices(is_eq_num_prim, &idx);
314:     ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);
315:     PetscFree(tidx);
316:     /*
317:      Create a vector to contain the original vertex information for each element
318:      */
319:     VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);
320:     for (jj=0; jj<ndata_cols; jj++) {
321:       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
322:       for (ii=0; ii<ncrs; ii++) {
323:         for (kk=0; kk<ndata_rows; kk++) {
324:           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
325:           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
326:           VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);
327:         }
328:       }
329:     }
330:     VecAssemblyBegin(src_crd);
331:     VecAssemblyEnd(src_crd);
332:     /*
333:       Scatter the element vertex information (still in the original vertex ordering)
334:       to the correct processor
335:     */
336:     VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);
337:     ISDestroy(&isscat);
338:     VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
339:     VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
340:     VecScatterDestroy(&vecscat);
341:     VecDestroy(&src_crd);
342:     /*
343:       Put the element vertex data into a new allocation of the gdata->ele
344:     */
345:     PetscFree(pc_gamg->data);
346:     PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);

348:     pc_gamg->data_sz = node_data_sz*ncrs_new;
349:     strideNew        = ncrs_new*ndata_rows;

351:     VecGetArray(dest_crd, &array);
352:     for (jj=0; jj<ndata_cols; jj++) {
353:       for (ii=0; ii<ncrs_new; ii++) {
354:         for (kk=0; kk<ndata_rows; kk++) {
355:           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
356:           pc_gamg->data[ix] = PetscRealPart(array[jx]);
357:         }
358:       }
359:     }
360:     VecRestoreArray(dest_crd, &array);
361:     VecDestroy(&dest_crd);
362:     }
363:     /* move A and P (columns) with new layout */
364: #if defined PETSC_GAMG_USE_LOG
365:     PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
366: #endif

368:     /*
369:       Invert for MatGetSubMatrix
370:     */
371:     ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);
372:     ISSort(new_eq_indices); /* is this needed? */
373:     ISSetBlockSize(new_eq_indices, cr_bs);
374:     if (is_eq_num != is_eq_num_prim) {
375:       ISDestroy(&is_eq_num_prim); /* could be same as 'is_eq_num' */
376:     }
377:     if (Pcolumnperm) {
378:       PetscObjectReference((PetscObject)new_eq_indices);
379:       *Pcolumnperm = new_eq_indices;
380:     }
381:     ISDestroy(&is_eq_num);
382: #if defined PETSC_GAMG_USE_LOG
383:     PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
384:     PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
385: #endif
386:     /* 'a_Amat_crs' output */
387:     {
388:       Mat mat;
389:       MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);
390:       *a_Amat_crs = mat;

392:       if (!PETSC_TRUE) {
393:         PetscInt cbs, rbs;
394:         MatGetBlockSizes(Cmat, &rbs, &cbs);
395:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
396:         MatGetBlockSizes(mat, &rbs, &cbs);
397:         PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);
398:       }
399:     }
400:     MatDestroy(&Cmat);

402: #if defined PETSC_GAMG_USE_LOG
403:     PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
404: #endif
405:     /* prolongator */
406:     {
407:       IS       findices;
408:       PetscInt Istart,Iend;
409:       Mat      Pnew;
410:       MatGetOwnershipRange(Pold, &Istart, &Iend);
411: #if defined PETSC_GAMG_USE_LOG
412:       PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
413: #endif
414:       ISCreateStride(comm,Iend-Istart,Istart,1,&findices);
415:       ISSetBlockSize(findices,f_bs);
416:       MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);
417:       ISDestroy(&findices);

419:       if (!PETSC_TRUE) {
420:         PetscInt cbs, rbs;
421:         MatGetBlockSizes(Pold, &rbs, &cbs);
422:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
423:         MatGetBlockSizes(Pnew, &rbs, &cbs);
424:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
425:       }
426: #if defined PETSC_GAMG_USE_LOG
427:       PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
428: #endif
429:       MatDestroy(a_P_inout);

431:       /* output - repartitioned */
432:       *a_P_inout = Pnew;
433:     }
434:     ISDestroy(&new_eq_indices);

436:     *a_nactive_proc = new_size; /* output */
437:   }

439:   /* output matrix data */
440:   if (!PETSC_TRUE) {
441:     PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
442:     if (llev==0) {
443:       sprintf(fname,"Cmat_%d.m",llev++);
444:       PetscViewerASCIIOpen(comm,fname,&viewer);
445:       PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
446:       MatView(Amat_fine, viewer);
447:       PetscViewerDestroy(&viewer);
448:     }
449:     sprintf(fname,"Cmat_%d.m",llev++);
450:     PetscViewerASCIIOpen(comm,fname,&viewer);
451:     PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
452:     MatView(Cmat, viewer);
453:     PetscViewerDestroy(&viewer);
454:   }
455:   return(0);
456: }
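
/*
   A minimal sketch of the Galerkin (RAP) triple product this routine is built
   around, assuming an assembled fine-grid operator 'Afine' and prolongator 'P'
   (the names are illustrative, not part of gamg.c):

     Mat Afine, P, Acoarse;
     PetscErrorCode ierr;
     ierr = MatPtAP(Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse);CHKERRQ(ierr);

   Acoarse = P^T * Afine * P, with 2.0 as the fill estimate, as used above.  The
   rest of the routine optionally repartitions Acoarse and permutes the columns
   of P to match the new parallel layout.
*/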

458: /* -------------------------------------------------------------------------- */
459: /*
460:    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
461:                     by setting data structures and options.

463:    Input Parameter:
464: .  pc - the preconditioner context

466:    Application Interface Routine: PCSetUp()

468:    Notes:
469:    The interface routine PCSetUp() is not usually called directly by
470:    the user, but instead is called by PCApply() if necessary.
471: */
474: PetscErrorCode PCSetUp_GAMG(PC pc)
475: {
477:   PC_MG          *mg      = (PC_MG*)pc->data;
478:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
479:   Mat            Pmat     = pc->pmat;
480:   PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
481:   MPI_Comm       comm;
482:   PetscMPIInt    rank,size,nactivepe;
483:   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
484:   PetscReal      emaxs[GAMG_MAXLEVELS];
485:   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
486:   PetscLogDouble nnz0=0.,nnztot=0.;
487:   MatInfo        info;
488:   PetscBool      redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol);

491:   PetscObjectGetComm((PetscObject)pc,&comm);
492:   MPI_Comm_rank(comm,&rank);
493:   MPI_Comm_size(comm,&size);

495:   if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);

497:   if (pc_gamg->setup_count++ > 0) {
498:     if (redo_mesh_setup) {
499:       /* reset everything */
500:       PCReset_MG(pc);
501:       pc->setupcalled = 0;
502:     } else {
503:       PC_MG_Levels **mglevels = mg->levels;
504:       /* just do Galerkin grids */
505:       Mat          B,dA,dB;

507:       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
508:       if (pc_gamg->Nlevels > 1) {
509:         /* currently only handle case where mat and pmat are the same on coarser levels */
510:         KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);
511:         /* (re)set to get dirty flag */
512:         KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);

514:         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
515:           /* the first time through the matrix structure has changed from repartitioning */
516:           if (pc_gamg->setup_count==2) {
517:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
518:             MatDestroy(&mglevels[level]->A);

520:             mglevels[level]->A = B;
521:           } else {
522:             KSPGetOperators(mglevels[level]->smoothd,NULL,&B);
523:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
524:           }
525:           KSPSetOperators(mglevels[level]->smoothd,B,B);
526:           dB   = B;
527:         }
528:       }

530:       PCSetUp_MG(pc);

532:       return(0);
533:     }
534:   }

536:   if (!pc_gamg->data) {
537:     if (pc_gamg->orig_data) {
538:       MatGetBlockSize(Pmat, &bs);
539:       MatGetLocalSize(Pmat, &qq, NULL);

541:       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
542:       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
543:       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

545:       PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);
546:       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
547:     } else {
548:       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
549:       pc_gamg->ops->createdefaultdata(pc,Pmat);
550:     }
551:   }

553:   /* cache original data for reuse */
554:   if (!pc_gamg->orig_data && redo_mesh_setup) {
555:     PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);
556:     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
557:     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
558:     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
559:   }

561:   /* get basic dims */
562:   MatGetBlockSize(Pmat, &bs);

564:   MatGetSize(Pmat, &M, &qq);
565:   if (pc_gamg->verbose) {
566:     PetscInt NN = M;
567:     if (pc_gamg->verbose==1) {
568:        MatGetInfo(Pmat,MAT_LOCAL,&info);
569:       MatGetLocalSize(Pmat, &NN, &qq);
570:       if (!NN) NN=1;
571:     } else {
572:       MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
573:     }
574:     nnz0   = info.nz_used;
575:     nnztot = info.nz_used;
576:     PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
577:                          rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
578:                          (int)(nnz0/(PetscReal)NN),size);
579:   }

581:   /* Get A_i and R_i */
582:   for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
583:        level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit);
584:        level++) {
585:     pc_gamg->firstCoarsen = (level ? PETSC_FALSE : PETSC_TRUE);
586:     level1 = level + 1;
587: #if defined PETSC_GAMG_USE_LOG
588:     PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
589: #if (defined GAMG_STAGES)
590:     PetscLogStagePush(gamg_stages[level]);
591: #endif
592: #endif
593:     { /* construct prolongator */
594:       Mat              Gmat;
595:       PetscCoarsenData *agg_lists;
596:       Mat              Prol11;

598:       pc_gamg->ops->graph(pc,Aarr[level], &Gmat);
599:       pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);
600:       pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);

602:       /* could have failed to create new level */
603:       if (Prol11) {
604:         /* get new block size of coarse matrices */
605:         MatGetBlockSizes(Prol11, NULL, &bs);

607:         if (pc_gamg->ops->optprol) {
608:           /* smooth */
609:           pc_gamg->ops->optprol(pc, Aarr[level], &Prol11);
610:         }

612:         Parr[level1] = Prol11;
613:       } else Parr[level1] = NULL;

615:       if (pc_gamg->use_aggs_in_gasm) {
616:         PetscInt bs;
617:         MatGetBlockSizes(Prol11, &bs, NULL);
618:         PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);
619:       }

621:       MatDestroy(&Gmat);
622:       PetscCDDestroy(agg_lists);
623:     } /* construct prolongator scope */
624: #if defined PETSC_GAMG_USE_LOG
625:     PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
626: #endif
627:     /* cache eigen estimate */
628:     if (pc_gamg->emax_id != -1) {
629:       PetscBool flag;
630:       PetscObjectComposedDataGetReal((PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag);
631:       if (!flag) emaxs[level] = -1.;
632:     } else emaxs[level] = -1.;
633:     if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
634:     if (!Parr[level1]) {
635:       if (pc_gamg->verbose) {
636:          PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);
637:       }
638: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
639:       PetscLogStagePop();
640: #endif
641:       break;
642:     }
643: #if defined PETSC_GAMG_USE_LOG
644:     PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
645: #endif

647:     pc_gamg->ops->createlevel(pc, Aarr[level], bs,
648:                        &Parr[level1], &Aarr[level1], &nactivepe, NULL);

650: #if defined PETSC_GAMG_USE_LOG
651:     PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
652: #endif
653:     MatGetSize(Aarr[level1], &M, &qq);

655:     if (pc_gamg->verbose > 0) {
656:       PetscInt NN = M;
657:       if (pc_gamg->verbose==1) {
658:         MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
659:         MatGetLocalSize(Aarr[level1], &NN, &qq);
660:         if (!NN) NN=1;
661:       } else {
662:         MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);
663:       }

665:       nnztot += info.nz_used;
666:       PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
667:                             rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
668:                             (int)(info.nz_used/(PetscReal)NN), nactivepe);
669:     }
670: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
671:     PetscLogStagePop();
672: #endif
673:     /* stop if one node or one proc -- could pull back for singular problems */
674:     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M < 2) ) {
675:       if (pc_gamg->verbose) {
676:          PetscPrintf(comm,"\t[%d]%s HARD stop of coarsening, level %d\n",rank,__FUNCT__,level);
677:       }
678:       level++;
679:       break;
680:     }
681:   } /* levels */
682:   pc_gamg->firstCoarsen = PETSC_FALSE;

684:   if (pc_gamg->data) {
685:     PetscFree(pc_gamg->data);
686:     pc_gamg->data = NULL;
687:   }

689:   if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
690:   pc_gamg->Nlevels = level + 1;
691:   fine_level       = level;
692:   PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);

694:   /* simple setup */
695:   if (!PETSC_TRUE) {
696:     PC_MG_Levels **mglevels = mg->levels;
697:     for (lidx=0,level=pc_gamg->Nlevels-1;
698:          lidx<fine_level;
699:          lidx++, level--) {
700:       PCMGSetInterpolation(pc, lidx+1, Parr[level]);
701:       KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level]);
702:       MatDestroy(&Parr[level]);
703:       MatDestroy(&Aarr[level]);
704:     }
705:     KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0]);

707:     PCSetUp_MG(pc);
708:   } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
709:     /* set default smoothers & set operators */
710:     for (lidx = 1, level = pc_gamg->Nlevels-2;
711:          lidx <= fine_level;
712:          lidx++, level--) {
713:       KSP smoother;
714:       PC  subpc;

716:       PCMGGetSmoother(pc, lidx, &smoother);
717:       KSPGetPC(smoother, &subpc);

719:       KSPSetNormType(smoother, KSP_NORM_NONE);
720:       /* set ops */
721:       KSPSetOperators(smoother, Aarr[level], Aarr[level]);
722:       PCMGSetInterpolation(pc, lidx, Parr[level+1]);

724:       /* set defaults */
725:       KSPSetType(smoother, KSPCHEBYSHEV);

727:       /* set blocks for GASM smoother that uses the 'aggregates' */
728:       if (pc_gamg->use_aggs_in_gasm) {
729:         PetscInt sz;
730:         IS       *is;

732:         sz   = nASMBlocksArr[level];
733:         is   = ASMLocalIDsArr[level];
734:         PCSetType(subpc, PCGASM);
735:         PCGASMSetOverlap(subpc, 0);
736:         if (sz==0) {
737:           IS       is;
738:           PetscInt my0,kk;
739:           MatGetOwnershipRange(Aarr[level], &my0, &kk);
740:           ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);
741:           PCGASMSetSubdomains(subpc, 1, &is, NULL);
742:           ISDestroy(&is);
743:         } else {
744:           PetscInt kk;
745:           PCGASMSetSubdomains(subpc, sz, is, NULL);
746:           for (kk=0; kk<sz; kk++) {
747:             ISDestroy(&is[kk]);
748:           }
749:           PetscFree(is);
750:         }
751:         ASMLocalIDsArr[level] = NULL;
752:         nASMBlocksArr[level]  = 0;
753:         PCGASMSetType(subpc, PC_GASM_BASIC);
754:       } else {
755:         PCSetType(subpc, PCSOR);
756:       }
757:     }
758:     {
759:       /* coarse grid */
760:       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
761:       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
762:       PCMGGetSmoother(pc, lidx, &smoother);
763:       KSPSetOperators(smoother, Lmat, Lmat);
764:       KSPSetNormType(smoother, KSP_NORM_NONE);
765:       KSPGetPC(smoother, &subpc);
766:       PCSetType(subpc, PCBJACOBI);
767:       PCSetUp(subpc);
768:       PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);
769:       if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
770:       KSPGetPC(k2[0],&pc2);
771:       PCSetType(pc2, PCLU);
772:       PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);
773:       KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);
774:       /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
775:        * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
776:        * view every subdomain as though they were different. */
777:       ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
778:     }

780:     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
781:     PetscObjectOptionsBegin((PetscObject)pc);
782:     PCSetFromOptions_MG(PetscOptionsObject,pc);
783:     PetscOptionsEnd();
784:     if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");

786:     /* create cheby smoothers */
787:     for (lidx = 1, level = pc_gamg->Nlevels-2;
788:          lidx <= fine_level;
789:          lidx++, level--) {
790:       KSP       smoother;
791:       PetscBool flag,flag2;
792:       PC        subpc;

794:       PCMGGetSmoother(pc, lidx, &smoother);
795:       KSPGetPC(smoother, &subpc);

797:       /* do my own cheby */
798:       PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);
799:       if (flag) {
800:         PetscReal emax, emin;
801:         PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);
802:         PetscObjectTypeCompare((PetscObject)subpc, PCSOR, &flag2);
803:         if ((flag||flag2) && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only valid for a diagonal PC, but let's accept SOR because it is close and safe (always lower) */
804:         else { /* eigen estimate 'emax' -- this is done in cheby */
805:           KSP eksp;
806:           Mat Lmat = Aarr[level];
807:           Vec bb, xx;

809:           MatCreateVecs(Lmat, &bb, 0);
810:           MatCreateVecs(Lmat, &xx, 0);
811:           {
812:             PetscRandom rctx;
813:             PetscRandomCreate(comm,&rctx);
814:             PetscRandomSetFromOptions(rctx);
815:             VecSetRandom(bb,rctx);
816:             PetscRandomDestroy(&rctx);
817:           }

819:           /* zeroing out BC rows -- needed for crazy matrices */
820:           {
821:             PetscInt    Istart,Iend,ncols,jj,Ii;
822:             PetscScalar zero = 0.0;
823:             MatGetOwnershipRange(Lmat, &Istart, &Iend);
824:             for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
825:               MatGetRow(Lmat,Ii,&ncols,0,0);
826:               if (ncols <= 1) {
827:                 VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);
828:               }
829:               MatRestoreRow(Lmat,Ii,&ncols,0,0);
830:             }
831:             VecAssemblyBegin(bb);
832:             VecAssemblyEnd(bb);
833:           }

835:           KSPCreate(comm, &eksp);
836:           KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);
837:           KSPSetNormType(eksp, KSP_NORM_NONE);
838:           KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);
839:           KSPAppendOptionsPrefix(eksp, "gamg_est_");
840:           KSPSetFromOptions(eksp);

842:           KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);
843:           KSPSetOperators(eksp, Lmat, Lmat);
844:           KSPSetComputeSingularValues(eksp,PETSC_TRUE);

846:           /* set PC type to be same as smoother */
847:           KSPSetPC(eksp, subpc);

849:           /* solve - keep stuff out of logging */
850:           PetscLogEventDeactivate(KSP_Solve);
851:           PetscLogEventDeactivate(PC_Apply);
852:           KSPSolve(eksp, bb, xx);
853:           PetscLogEventActivate(KSP_Solve);
854:           PetscLogEventActivate(PC_Apply);

856:           KSPComputeExtremeSingularValues(eksp, &emax, &emin);

858:           VecDestroy(&xx);
859:           VecDestroy(&bb);
860:           KSPDestroy(&eksp);

862:           if (pc_gamg->verbose > 0) {
863:             PetscInt N1, tt;
864:             MatGetSize(Aarr[level], &N1, &tt);
865:             PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
866:           }
867:         }
868:         {
869:           PetscInt N1, N0;
870:           MatGetSize(Aarr[level], &N1, NULL);
871:           MatGetSize(Aarr[level+1], &N0, NULL);
872:           /* heuristic - is this crap? */
873:           /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */
874:           emin  = emax * pc_gamg->eigtarget[0];
875:           emax *= pc_gamg->eigtarget[1];
876:         }
877:         KSPChebyshevSetEigenvalues(smoother, emax, emin);
878:       } /* setup cheby flag */
879:     } /* non-coarse levels */

881:     /* clean up */
882:     for (level=1; level<pc_gamg->Nlevels; level++) {
883:       MatDestroy(&Parr[level]);
884:       MatDestroy(&Aarr[level]);
885:     }

887:     PCSetUp_MG(pc);
888:   } else {
889:     KSP smoother;
890:     if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one-level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__);
891:     PCMGGetSmoother(pc, 0, &smoother);
892:     KSPSetOperators(smoother, Aarr[0], Aarr[0]);
893:     KSPSetType(smoother, KSPPREONLY);
894:     PCSetUp_MG(pc);
895:   }
896:   return(0);
897: }
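
/*
   PCSetUp_GAMG() is normally reached through KSPSetUp()/KSPSolve() rather than
   called directly.  A minimal driver sketch, assuming an assembled Mat A and
   Vecs b and x (illustrative names):

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     ierr = KSPDestroy(&ksp);CHKERRQ(ierr);

   The first KSPSolve() (or an explicit KSPSetUp()) triggers PCSetUp_GAMG().
*/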

899: /* ------------------------------------------------------------------------- */
900: /*
901:  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
902:    that was created with PCCreate_GAMG().

904:    Input Parameter:
905: .  pc - the preconditioner context

907:    Application Interface Routine: PCDestroy()
908: */
911: PetscErrorCode PCDestroy_GAMG(PC pc)
912: {
914:   PC_MG          *mg     = (PC_MG*)pc->data;
915:   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

918:   PCReset_GAMG(pc);
919:   if (pc_gamg->ops->destroy) {
920:     (*pc_gamg->ops->destroy)(pc);
921:   }
922:   PetscFree(pc_gamg->ops);
923:   PetscFree(pc_gamg->gamg_type_name);
924:   PetscFree(pc_gamg);
925:   PCDestroy_MG(pc);
926:   return(0);
927: }


932: /*@
933:    PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via processor reduction.

935:    Logically Collective on PC

937:    Input Parameters:
938: +  pc - the preconditioner context
939: -  n - the number of equations


942:    Options Database Key:
943: .  -pc_gamg_process_eq_limit <limit>

945:    Level: intermediate

947:    Concepts: Unstructured multigrid preconditioner

949: .seealso: ()
950: @*/
951: PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
952: {

957:   PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
958:   return(0);
959: }
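
/*
   Usage sketch, assuming 'pc' is a PC of type PCGAMG:

     ierr = PCGAMGSetProcEqLim(pc,200);CHKERRQ(ierr);

   or, equivalently, -pc_gamg_process_eq_limit 200 on the command line.  As the
   grids coarsen, processes are deactivated so that each active process keeps
   roughly this many equations (see PCGAMGCreateLevel_GAMG() above).
*/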

963: static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
964: {
965:   PC_MG   *mg      = (PC_MG*)pc->data;
966:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

969:   if (n>0) pc_gamg->min_eq_proc = n;
970:   return(0);
971: }

975: /*@
976:    PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

978:  Collective on PC

980:    Input Parameters:
981: +  pc - the preconditioner context
982: -  n - maximum number of equations to aim for

984:    Options Database Key:
985: .  -pc_gamg_coarse_eq_limit <limit>

987:    Level: intermediate

989:    Concepts: Unstructured multigrid preconditioner

991: @*/
992: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
993: {

998:   PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
999:   return(0);
1000: }

1004: static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
1005: {
1006:   PC_MG   *mg      = (PC_MG*)pc->data;
1007:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1010:   if (n>0) pc_gamg->coarse_eq_limit = n;
1011:   return(0);
1012: }

1016: /*@
1017:    PCGAMGSetRepartitioning - Repartition the coarse grids

1019:    Collective on PC

1021:    Input Parameters:
1022: +  pc - the preconditioner context
1023: -  n - PETSC_TRUE or PETSC_FALSE

1025:    Options Database Key:
1026: .  -pc_gamg_repartition <true,false>

1028:    Level: intermediate

1030:    Concepts: Unstructured multigrid preconditioner

1032: .seealso: ()
1033: @*/
1034: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
1035: {

1040:   PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
1041:   return(0);
1042: }
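
/*
   Usage sketch, assuming 'pc' is a PC of type PCGAMG:

     ierr = PCGAMGSetRepartitioning(pc,PETSC_TRUE);CHKERRQ(ierr);

   or -pc_gamg_repartition true.  Repartitioning adds setup cost (it runs a
   MatPartitioning on each coarse grid) but can improve the load balance of the
   coarse-grid solves.
*/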

1046: static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
1047: {
1048:   PC_MG   *mg      = (PC_MG*)pc->data;
1049:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1052:   pc_gamg->repart = n;
1053:   return(0);
1054: }

1058: /*@
1059:    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner

1061:    Collective on PC

1063:    Input Parameters:
1064: +  pc - the preconditioner context
1065: -  n - PETSC_TRUE or PETSC_FALSE

1067:    Options Database Key:
1068: .  -pc_gamg_reuse_interpolation <true,false>

1070:    Level: intermediate

1072:    Concepts: Unstructured multigrid preconditioner

1074: .seealso: ()
1075: @*/
1076: PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
1077: {

1082:   PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));
1083:   return(0);
1084: }
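
/*
   Usage sketch for repeated solves where the matrix values change but the
   nonzero structure does not (names are illustrative):

     ierr = PCGAMGSetReuseInterpolation(pc,PETSC_TRUE);CHKERRQ(ierr);
     for (step=0; step<nsteps; step++) {
       ... update the values of A ...
       ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
       ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
     }

   With reuse enabled, subsequent setups keep the prolongators and only redo the
   Galerkin products (the 'just do Galerkin grids' branch of PCSetUp_GAMG()).
*/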

1088: static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
1089: {
1090:   PC_MG   *mg      = (PC_MG*)pc->data;
1091:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1094:   pc_gamg->reuse_prol = n;
1095:   return(0);
1096: }

1100: /*@
1101:    PCGAMGSetUseASMAggs - Use the aggregates from the coarsening process to define the subdomains of the GASM smoother

1103:    Collective on PC

1105:    Input Parameters:
1106: +  pc - the preconditioner context
-  n - PETSC_TRUE to use the aggregates, PETSC_FALSE otherwise


1109:    Options Database Key:
1110: .  -pc_gamg_use_agg_gasm <true,false>

1112:    Level: intermediate

1114:    Concepts: Unstructured multigrid preconditioner

1116: .seealso: ()
1117: @*/
1118: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1119: {

1124:   PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
1125:   return(0);
1126: }

1130: static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1131: {
1132:   PC_MG   *mg      = (PC_MG*)pc->data;
1133:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1136:   pc_gamg->use_aggs_in_gasm = n;
1137:   return(0);
1138: }

1142: /*@
1143:    PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use

1145:    Not collective on PC

1147:    Input Parameters:
1148: +  pc - the preconditioner
1149: -  n - the maximum number of levels to use

1151:    Options Database Key:
1152: .  -pc_mg_levels <n>

1154:    Level: intermediate

1156:    Concepts: Unstructured multigrid preconditioner

1158: .seealso: ()
1159: @*/
1160: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1161: {

1166:   PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1167:   return(0);
1168: }

1172: static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1173: {
1174:   PC_MG   *mg      = (PC_MG*)pc->data;
1175:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1178:   pc_gamg->Nlevels = n;
1179:   return(0);
1180: }

1184: /*@
1185:    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

1187:    Not collective on PC

1189:    Input Parameters:
1190: +  pc - the preconditioner context
1191: -  threshold - the threshold value, 0.0 is the lowest possible

1193:    Options Database Key:
1194: .  -pc_gamg_threshold <threshold>

1196:    Level: intermediate

1198:    Concepts: Unstructured multigrid preconditioner

1200: .seealso: ()
1201: @*/
1202: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1203: {

1208:   PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1209:   return(0);
1210: }
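
/*
   Usage sketch, assuming 'pc' is a PC of type PCGAMG:

     ierr = PCGAMGSetThreshold(pc,0.02);CHKERRQ(ierr);

   or -pc_gamg_threshold 0.02.  Larger values drop more (relatively weak) graph
   edges, giving faster coarsening at the possible cost of a weaker
   preconditioner; 0.0 keeps all edges.
*/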

1214: static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1215: {
1216:   PC_MG   *mg      = (PC_MG*)pc->data;
1217:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1220:   pc_gamg->threshold = n;
1221:   return(0);
1222: }

1226: /*@
1227:    PCGAMGSetType - Set solution method

1229:    Collective on PC

1231:    Input Parameters:
1232: +  pc - the preconditioner context
1233: -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

1235:    Options Database Key:
1236: .  -pc_gamg_type <agg,geo,classical>

1238:    Level: intermediate

1240:    Concepts: Unstructured multigrid preconditioner

1242: .seealso: PCGAMGGetType(), PCGAMG
1243: @*/
1244: PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1245: {

1250:   PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
1251:   return(0);
1252: }

1256: /*@
1257:    PCGAMGGetType - Get solution method

1259:    Collective on PC

1261:    Input Parameter:
1262: .  pc - the preconditioner context

1264:    Output Parameter:
1265: .  type - the type of algorithm used

1267:    Level: intermediate

1269:    Concepts: Unstructured multigrid preconditioner

1271: .seealso: PCGAMGSetType()
1272: @*/
1273: PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1274: {

1279:   PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));
1280:   return(0);
1281: }
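
/*
   Usage sketch, assuming 'pc' is a PC of type PCGAMG:

     PCGAMGType t;
     ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
     ierr = PCGAMGGetType(pc,&t);CHKERRQ(ierr);

   After these calls 't' compares equal to PCGAMGAGG.
*/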

1285: static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1286: {
1287:   PC_MG          *mg      = (PC_MG*)pc->data;
1288:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1291:   *type = pc_gamg->type;
1292:   return(0);
1293: }

1297: static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1298: {
1299:   PetscErrorCode ierr,(*r)(PC);
1300:   PC_MG          *mg      = (PC_MG*)pc->data;
1301:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1304:   pc_gamg->type = type;
1305:   PetscFunctionListFind(GAMGList,type,&r);
1306:   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1307:   if (pc_gamg->ops->destroy) {
1308:     /* there was something here - kill it */
1309:     (*pc_gamg->ops->destroy)(pc);
1310:     PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));
1311:     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1312:     /* cleaning up common data in pc_gamg - this should disappear someday */
1313:     pc_gamg->data_cell_cols = 0;
1314:     pc_gamg->data_cell_rows = 0;
1315:     pc_gamg->orig_data_cell_cols = 0;
1316:     pc_gamg->orig_data_cell_rows = 0;
1317:     if (pc_gamg->data_sz) {
1318:       PetscFree(pc_gamg->data);
1319:       pc_gamg->data_sz = 0;
1320:     } else if (pc_gamg->data) {
1321:       PetscFree(pc_gamg->data); /* can this happen ? */
1322:     }
1323:   }
1324:   PetscFree(pc_gamg->gamg_type_name);
1325:   PetscStrallocpy(type,&pc_gamg->gamg_type_name);
1326:   (*r)(pc);
1327:   return(0);
1328: }

1332: PetscErrorCode PCSetFromOptions_GAMG(PetscOptions *PetscOptionsObject,PC pc)
1333: {
1335:   PC_MG          *mg      = (PC_MG*)pc->data;
1336:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1337:   PetscBool      flag;
1338:   PetscInt       two   = 2;
1339:   MPI_Comm       comm;

1342:   PetscObjectGetComm((PetscObject)pc,&comm);
1343:   PetscOptionsHead(PetscOptionsObject,"GAMG options");
1344:   {
1345:     /* -pc_gamg_type */
1346:     {
1347:       char tname[256];
1348:       PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);
1349:       if (flag) {
1350:         PCGAMGSetType(pc,tname);
1351:       }
1352:     }
1353:     /* -pc_gamg_verbose */
1354:     PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG","none", pc_gamg->verbose,&pc_gamg->verbose, NULL);
1355:     /* -pc_gamg_repartition */
1356:     PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);
1357:     /* -pc_gamg_reuse_interpolation */
1358:     PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);
1359:     /* -pc_gamg_use_agg_gasm */
1360:     PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregates from coarsening for GASM smoother","PCGAMGUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);
1361:     /* -pc_gamg_process_eq_limit */
1362:     PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);
1363:     /* -pc_gamg_coarse_eq_limit */
1364:     PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);
1365:     /* -pc_gamg_threshold */
1366:     PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);
1367:     if (flag && pc_gamg->verbose) {
1368:       PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1369:     }
1370:     /* -pc_gamg_eigtarget */
1371:     PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);
1372:     PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);

1374:     /* set options for subtype */
1375:     if (pc_gamg->ops->setfromoptions) {(*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);}
1376:   }
1377:   PetscOptionsTail();
1378:   return(0);
1379: }
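
/*
   A typical options-database invocation exercising the keys parsed above
   (the values are illustrative):

     -pc_type gamg -pc_gamg_type agg -pc_gamg_verbose 1
     -pc_gamg_threshold 0.02 -pc_gamg_process_eq_limit 200
     -pc_gamg_coarse_eq_limit 1000 -pc_gamg_repartition true -pc_mg_levels 10
*/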

1381: /* -------------------------------------------------------------------------- */
1382: /*MC
1383:      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

1385:    Options Database Keys:
1386:    Multigrid options (inherited):
1387: +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1388: .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1389: .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1390: -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade


1393:   Notes: In order to obtain good performance for PCGAMG for vector-valued problems you must
1394: $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1395: $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1396: $       See the Users Manual Chapter 4 for more details

1398:   Level: intermediate

1400:   Concepts: algebraic multigrid

1402: .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(), 
1403:            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
1404: M*/
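
/*
   A setup sketch for 3D elasticity following the notes above, assuming an
   assembled Mat A with 3 dofs per grid point, 'nloc' local grid points, and an
   interlaced coordinate array 'coords' (all names illustrative):

     ierr = MatSetBlockSize(A,3);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCSetCoordinates(pc,3,nloc,coords);CHKERRQ(ierr);

   Alternatively, attach the near null space directly from a coordinate Vec:

     MatNullSpace nns;
     ierr = MatNullSpaceCreateRigidBody(coordVec,&nns);CHKERRQ(ierr);
     ierr = MatSetNearNullSpace(A,nns);CHKERRQ(ierr);
     ierr = MatNullSpaceDestroy(&nns);CHKERRQ(ierr);
*/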

1408: PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1409: {
1411:   PC_GAMG        *pc_gamg;
1412:   PC_MG          *mg;
1413: #if defined PETSC_GAMG_USE_LOG
1414:   static long count = 0;
1415: #endif

1418:   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1419:   PCSetType(pc, PCMG); /* calls PCCreate_MG() and MGCreate_Private() */
1420:   PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);

1422:   /* create a supporting struct and attach it to pc */
1423:   PetscNewLog(pc,&pc_gamg);
1424:   mg           = (PC_MG*)pc->data;
1425:   mg->galerkin = 2;             /* Use Galerkin, but it is computed externally */
1426:   mg->innerctx = pc_gamg;

1428:   PetscNewLog(pc,&pc_gamg->ops);

1430:   pc_gamg->setup_count = 0;
1431:   /* these should be in subctx but repartitioning needs simple arrays */
1432:   pc_gamg->data_sz = 0;
1433:   pc_gamg->data    = 0;

1435:   /* register AMG type */
1436:   PCGAMGInitializePackage();

1438:   /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
1439:   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1440:   pc->ops->setup          = PCSetUp_GAMG;
1441:   pc->ops->reset          = PCReset_GAMG;
1442:   pc->ops->destroy        = PCDestroy_GAMG;

1444:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);
1445:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);
1446:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);
1447:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);
1448:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);
1449:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);
1450:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);
1451:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);
1452:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);
1453:   pc_gamg->repart           = PETSC_FALSE;
1454:   pc_gamg->reuse_prol       = PETSC_FALSE;
1455:   pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1456:   pc_gamg->min_eq_proc      = 50;
1457:   pc_gamg->coarse_eq_limit  = 50;
1458:   pc_gamg->threshold        = 0.;
1459:   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1460:   pc_gamg->verbose          = 0;
1461:   pc_gamg->emax_id          = -1;
1462:   pc_gamg->firstCoarsen     = PETSC_FALSE;
1463:   pc_gamg->eigtarget[0]     = 0.05;
1464:   pc_gamg->eigtarget[1]     = 1.05;
1465:   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

1467:   /* private events */
1468: #if defined PETSC_GAMG_USE_LOG
1469:   if (count++ == 0) {
1470:     PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1471:     PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1472:     /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1473:     /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1474:     /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1475:     PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1476:     PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1477:     PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1478:     PetscLogEventRegister("    search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1479:     PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1480:     PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1481:     PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1482:     PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1483:     PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1484:     PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1485:     PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1486:     PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);

1488:     /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1489:     /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1490:     /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1491:     /* create timer stages */
1492: #if defined GAMG_STAGES
1493:     {
1494:       char     str[32];
1495:       PetscInt lidx;
1496:       sprintf(str,"MG Level %d (finest)",0);
1497:       PetscLogStageRegister(str, &gamg_stages[0]);
1498:       for (lidx=1; lidx<9; lidx++) {
1499:         sprintf(str,"MG Level %d",lidx);
1500:         PetscLogStageRegister(str, &gamg_stages[lidx]);
1501:       }
1502:     }
1503: #endif
1504:   }
1505: #endif
1506:   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1507:   PCGAMGSetType(pc,PCGAMGAGG);
1508:   return(0);
1509: }

1513: /*@C
1514:  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1515:  from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1516:  when using static libraries.

1518:  Level: developer

1520:  .keywords: PC, PCGAMG, initialize, package
1521:  .seealso: PetscInitialize()
1522: @*/
1523: PetscErrorCode PCGAMGInitializePackage(void)
1524: {

1528:   if (PCGAMGPackageInitialized) return(0);
1529:   PCGAMGPackageInitialized = PETSC_TRUE;
1530:   PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);
1531:   PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);
1532:   PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);
1533:   PetscRegisterFinalize(PCGAMGFinalizePackage);

1535:   /* general events */
1536: #if defined PETSC_USE_LOG
1537:   PetscLogEventRegister("PCGAMGgraph_AGG", PC_CLASSID, &PC_GAMGGgraph_AGG);
1538:   PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1539:   PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1540:   PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1541:   PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1542:   PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1543:   PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1544: #endif

1546:   return(0);
1547: }

1551: /*@C
1552:  PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is
1553:  called from PetscFinalize().

1555:  Level: developer

1557:  .keywords: Petsc, destroy, package
1558:  .seealso: PetscFinalize()
1559: @*/
1560: PetscErrorCode PCGAMGFinalizePackage(void)
1561: {

1565:   PCGAMGPackageInitialized = PETSC_FALSE;
1566:   PetscFunctionListDestroy(&GAMGList);
1567:   return(0);
1568: }

1572: /*@C
1573:  PCGAMGRegister - Register a PCGAMG implementation.

1575:  Input Parameters:
1576:  + type - string that will be used as the name of the GAMG type.
1577:  - create - function for creating the gamg context.

1579:   Level: advanced

1581:  .seealso: PCGAMGGetContext(), PCGAMGSetContext()
1582: @*/
1583: PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1584: {

1588:   PCGAMGInitializePackage();
1589:   PetscFunctionListAdd(&GAMGList,type,create);
1590:   return(0);
1591: }
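
/*
   Registration sketch for a user-defined GAMG variant; 'PCCreateGAMG_Mine' is a
   hypothetical creation routine that must fill in the PC_GAMG ops table
   (graph/coarsen/prolongator, ...) the way PCCreateGAMG_AGG() does:

     ierr = PCGAMGRegister("mine",PCCreateGAMG_Mine);CHKERRQ(ierr);

   Once registered, the variant is selectable with PCGAMGSetType(pc,"mine") or
   -pc_gamg_type mine.
*/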