Actual source code: gamg.c

petsc-master 2015-01-31
  1: /*
  2:  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
  3:  */
  4: #include <petsc-private/matimpl.h>
  5: #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
  6: #include <petsc-private/kspimpl.h>
  7: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

  9: #if defined PETSC_GAMG_USE_LOG
 10: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
 11: #endif

 13: #if defined PETSC_USE_LOG
 14: PetscLogEvent PC_GAMGGgraph_AGG;
 15: PetscLogEvent PC_GAMGGgraph_GEO;
 16: PetscLogEvent PC_GAMGCoarsen_AGG;
 17: PetscLogEvent PC_GAMGCoarsen_GEO;
 18: PetscLogEvent PC_GAMGProlongator_AGG;
 19: PetscLogEvent PC_GAMGProlongator_GEO;
 20: PetscLogEvent PC_GAMGOptprol_AGG;
 21: #endif

 23: #define GAMG_MAXLEVELS 30

 25: /* #define GAMG_STAGES */
 26: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
 27: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
 28: #endif

 30: static PetscFunctionList GAMGList = 0;
 31: static PetscBool PCGAMGPackageInitialized;

 33: /* ----------------------------------------------------------------------------- */
 36: PetscErrorCode PCReset_GAMG(PC pc)
 37: {
 39:   PC_MG          *mg      = (PC_MG*)pc->data;
 40:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

 43:   if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
 44:     PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
 45:     PetscFree(pc_gamg->data);
 46:   }
 47:   pc_gamg->data = NULL; pc_gamg->data_sz = 0;

 49:   if (pc_gamg->orig_data) {
 50:     PetscFree(pc_gamg->orig_data);
 51:   }
 52:   return(0);
 53: }

 55: /* -------------------------------------------------------------------------- */
 56: /*
 57:    PCGAMGCreateLevel_GAMG: create the coarse operator with RAP (Galerkin product); repartition and/or reduce the number
 58:      of active processors.

 60:    Input Parameters:
 61:    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
 62:           'pc_gamg->data_sz' are changed via repartitioning/reduction.
 63:    . Amat_fine - matrix on this fine (k) level
 64:    . cr_bs - coarse block size
 65:    Input/Output Parameters:
 66:    . a_P_inout - prolongation operator to the next level (k-->k-1)
 67:    . a_nactive_proc - number of active procs
 68:    Output Parameter:
 69:    . a_Amat_crs - coarse matrix that is created (k-1)
 70: */

 74: static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,
 75:                                   Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc)
 76: {
 77:   PetscErrorCode  ierr;
 78:   PC_MG           *mg         = (PC_MG*)pc->data;
 79:   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
 80:   const PetscBool repart      = pc_gamg->repart;
 81:   const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
 82:   Mat             Cmat,Pold=*a_P_inout;
 83:   MPI_Comm        comm;
 84:   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
 85:   PetscInt        ncrs_eq,ncrs,f_bs;

 88:   PetscObjectGetComm((PetscObject)Amat_fine,&comm);
 89:   MPI_Comm_rank(comm, &rank);
 90:   MPI_Comm_size(comm, &size);
 91:   MatGetBlockSize(Amat_fine, &f_bs);
 92:   /* RAP */
 93:   MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);

 95:   /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
 96:   MatGetLocalSize(Cmat, &ncrs_eq, NULL);
 97:   if (pc_gamg->data_cell_rows>0) {
 98:     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
 99:   }
100:   else {
101:     PetscInt  bs;
102:     MatGetBlockSize(Cmat, &bs);
103:     ncrs = ncrs_eq/bs;
104:   }

106:   /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
107:   {
108:     PetscInt ncrs_eq_glob;
109:     MatGetSize(Cmat, &ncrs_eq_glob, NULL);
110:     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
111:     if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1;
112:     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
113:   }

115:   if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
116:   else {
117:     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
118:     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

120:     nloc_old = ncrs_eq/cr_bs;
121:     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
122: #if defined PETSC_GAMG_USE_LOG
123:     PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
124: #endif
125:     /* make 'is_eq_newproc' */
126:     PetscMalloc1(size, &counts);
127:     if (repart) {
128:       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
129:       Mat adj;

131:       if (pc_gamg->verbose>0) {
132:         if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
133:         else {
134:           PetscInt n;
135:           MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);
136:           PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
137:         }
138:       }

140:       /* get 'adj' */
141:       if (cr_bs == 1) {
142:         MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
143:       } else {
144:         /* make a scalar matrix to partition (no Stokes here) */
145:         Mat               tMat;
146:         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
147:         const PetscScalar *vals;
148:         const PetscInt    *idx;
149:         PetscInt          *d_nnz, *o_nnz, M, N;
150:         static PetscInt   llev = 0;

152:         PetscMalloc1(ncrs, &d_nnz);
153:         PetscMalloc1(ncrs, &o_nnz);
154:         MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);
155:         MatGetSize(Cmat, &M, &N);
156:         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
157:           MatGetRow(Cmat,Ii,&ncols,0,0);
158:           d_nnz[jj] = ncols/cr_bs;
159:           o_nnz[jj] = ncols/cr_bs;
160:           MatRestoreRow(Cmat,Ii,&ncols,0,0);
161:           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
162:           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
163:         }

165:         MatCreate(comm, &tMat);
166:         MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);
167:         MatSetType(tMat,MATAIJ);
168:         MatSeqAIJSetPreallocation(tMat,0,d_nnz);
169:         MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
170:         PetscFree(d_nnz);
171:         PetscFree(o_nnz);

173:         for (ii = Istart_crs; ii < Iend_crs; ii++) {
174:           PetscInt dest_row = ii/cr_bs;
175:           MatGetRow(Cmat,ii,&ncols,&idx,&vals);
176:           for (jj = 0; jj < ncols; jj++) {
177:             PetscInt    dest_col = idx[jj]/cr_bs;
178:             PetscScalar v        = 1.0;
179:             MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
180:           }
181:           MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
182:         }
183:         MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
184:         MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);

186:         if (llev++ == -1) {
187:           PetscViewer viewer; char fname[32];
188:           PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);
189:           PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
190:           MatView(tMat, viewer);
191:           PetscViewerDestroy(&viewer);
192:         }

194:         MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);

196:         MatDestroy(&tMat);
197:       } /* create 'adj' */

199:       { /* partition: get newproc_idx */
200:         char            prefix[256];
201:         const char      *pcpre;
202:         const PetscInt  *is_idx;
203:         MatPartitioning mpart;
204:         IS              proc_is;
205:         PetscInt        targetPE;

207:         MatPartitioningCreate(comm, &mpart);
208:         MatPartitioningSetAdjacency(mpart, adj);
209:         PCGetOptionsPrefix(pc, &pcpre);
210:         PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
211:         PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
212:         MatPartitioningSetFromOptions(mpart);
213:         MatPartitioningSetNParts(mpart, new_size);
214:         MatPartitioningApply(mpart, &proc_is);
215:         MatPartitioningDestroy(&mpart);

217:         /* collect IS info */
218:         PetscMalloc1(ncrs_eq, &newproc_idx);
219:         ISGetIndices(proc_is, &is_idx);
220:         targetPE = 1; /* bring to "front" of machine */
221:         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
222:         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
223:           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
224:             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
225:           }
226:         }
227:         ISRestoreIndices(proc_is, &is_idx);
228:         ISDestroy(&proc_is);
229:       }
230:       MatDestroy(&adj);

232:       ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);
233:       if (newproc_idx != 0) {
234:         PetscFree(newproc_idx);
235:       }
236:     } else { /* simple aggregation of parts -- 'is_eq_newproc' */

238:       PetscInt rfactor,targetPE;
239:       /* find factor */
240:       if (new_size == 1) rfactor = size; /* easy */
241:       else {
242:         PetscReal best_fact = 0.;
243:         jj = -1;
244:         for (kk = 1 ; kk <= size ; kk++) {
245:           if (size%kk==0) { /* a candidate */
246:             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
247:             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
248:             if (fact > best_fact) {
249:               best_fact = fact; jj = kk;
250:             }
251:           }
252:         }
253:         if (jj != -1) rfactor = jj;
254:         else rfactor = 1; /* should not happen since kk=1 always divides size */
255:       }
256:       new_size = size/rfactor;

258:       if (new_size==nactive) {
259:         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
260:         PetscFree(counts);
261:         if (pc_gamg->verbose>0) {
262:           PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
263:         }
264:         return(0);
265:       }

267:       if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
268:       targetPE = rank/rfactor;
269:       ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);
270:     } /* end simple 'is_eq_newproc' */

272:     /*
273:      Create an index set from the is_eq_newproc index set to indicate the mapping TO
274:      */
275:     ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);
276:     is_eq_num_prim = is_eq_num;
277:     /*
278:       Determine how many equations/vertices are assigned to each processor
279:      */
280:     ISPartitioningCount(is_eq_newproc, size, counts);
281:     ncrs_eq_new = counts[rank];
282:     ISDestroy(&is_eq_newproc);
283:     ncrs_new = ncrs_eq_new/cr_bs; /* eqs */

285:     PetscFree(counts);
286: #if defined PETSC_GAMG_USE_LOG
287:     PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
288: #endif
289:     /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
290:     {
291:     Vec            src_crd, dest_crd;
292:     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
293:     VecScatter     vecscat;
294:     PetscScalar    *array;
295:     IS isscat;

297:     /* move data (for primal equations only) */
298:     /* Create a vector to contain the newly ordered element information */
299:     VecCreate(comm, &dest_crd);
300:     VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);
301:     VecSetType(dest_crd,VECSTANDARD); /* this is needed! */
302:     /*
303:      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
304:      a block size of 'node_data_sz').  Note, ISs are expanded into equation space by 'cr_bs'.
305:      */
306:     PetscMalloc1(ncrs*node_data_sz, &tidx);
307:     ISGetIndices(is_eq_num_prim, &idx);
308:     for (ii=0,jj=0; ii<ncrs; ii++) {
309:       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
310:       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
311:     }
312:     ISRestoreIndices(is_eq_num_prim, &idx);
313:     ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);
314:     PetscFree(tidx);
315:     /*
316:      Create a vector to contain the original vertex information for each element
317:      */
318:     VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);
319:     for (jj=0; jj<ndata_cols; jj++) {
320:       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
321:       for (ii=0; ii<ncrs; ii++) {
322:         for (kk=0; kk<ndata_rows; kk++) {
323:           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
324:           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
325:           VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);
326:         }
327:       }
328:     }
329:     VecAssemblyBegin(src_crd);
330:     VecAssemblyEnd(src_crd);
331:     /*
332:       Scatter the element vertex information (still in the original vertex ordering)
333:       to the correct processor
334:     */
335:     VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);
336:     ISDestroy(&isscat);
337:     VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
338:     VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
339:     VecScatterDestroy(&vecscat);
340:     VecDestroy(&src_crd);
341:     /*
342:       Put the element vertex data into a new allocation of the gdata->ele
343:     */
344:     PetscFree(pc_gamg->data);
345:     PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);

347:     pc_gamg->data_sz = node_data_sz*ncrs_new;
348:     strideNew        = ncrs_new*ndata_rows;

350:     VecGetArray(dest_crd, &array);
351:     for (jj=0; jj<ndata_cols; jj++) {
352:       for (ii=0; ii<ncrs_new; ii++) {
353:         for (kk=0; kk<ndata_rows; kk++) {
354:           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
355:           pc_gamg->data[ix] = PetscRealPart(array[jx]);
356:         }
357:       }
358:     }
359:     VecRestoreArray(dest_crd, &array);
360:     VecDestroy(&dest_crd);
361:     }
362:     /* move A and P (columns) with new layout */
363: #if defined PETSC_GAMG_USE_LOG
364:     PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
365: #endif

367:     /*
368:       Invert for MatGetSubMatrix
369:     */
370:     ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);
371:     ISSort(new_eq_indices); /* is this needed? */
372:     ISSetBlockSize(new_eq_indices, cr_bs);
373:     if (is_eq_num != is_eq_num_prim) {
374:       ISDestroy(&is_eq_num_prim); /* could be same as 'is_eq_num' */
375:     }
376:     ISDestroy(&is_eq_num);
377: #if defined PETSC_GAMG_USE_LOG
378:     PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
379:     PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
380: #endif
381:     /* 'a_Amat_crs' output */
382:     {
383:       Mat mat;
384:       MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);
385:       *a_Amat_crs = mat;

387:       if (!PETSC_TRUE) {
388:         PetscInt cbs, rbs;
389:         MatGetBlockSizes(Cmat, &rbs, &cbs);
390:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
391:         MatGetBlockSizes(mat, &rbs, &cbs);
392:         PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);
393:       }
394:     }
395:     MatDestroy(&Cmat);

397: #if defined PETSC_GAMG_USE_LOG
398:     PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
399: #endif
400:     /* prolongator */
401:     {
402:       IS       findices;
403:       PetscInt Istart,Iend;
404:       Mat      Pnew;
405:       MatGetOwnershipRange(Pold, &Istart, &Iend);
406: #if defined PETSC_GAMG_USE_LOG
407:       PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
408: #endif
409:       ISCreateStride(comm,Iend-Istart,Istart,1,&findices);
410:       ISSetBlockSize(findices,f_bs);
411:       MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);
412:       ISDestroy(&findices);

414:       if (!PETSC_TRUE) {
415:         PetscInt cbs, rbs;
416:         MatGetBlockSizes(Pold, &rbs, &cbs);
417:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
418:         MatGetBlockSizes(Pnew, &rbs, &cbs);
419:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
420:       }
421: #if defined PETSC_GAMG_USE_LOG
422:       PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
423: #endif
424:       MatDestroy(a_P_inout);

426:       /* output - repartitioned */
427:       *a_P_inout = Pnew;
428:     }
429:     ISDestroy(&new_eq_indices);

431:     *a_nactive_proc = new_size; /* output */
432:   }

434:   /* output matrix data */
435:   if (!PETSC_TRUE) {
436:     PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
437:     if (llev==0) {
438:       sprintf(fname,"Cmat_%d.m",llev++);
439:       PetscViewerASCIIOpen(comm,fname,&viewer);
440:       PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
441:       MatView(Amat_fine, viewer);
442:       PetscViewerDestroy(&viewer);
443:     }
444:     sprintf(fname,"Cmat_%d.m",llev++);
445:     PetscViewerASCIIOpen(comm,fname,&viewer);
446:     PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
447:     MatView(Cmat, viewer);
448:     PetscViewerDestroy(&viewer);
449:   }
450:   return(0);
451: }
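
/*
   Editor's sketch (not part of the original source): stripped of the
   repartitioning machinery above, the heart of PCGAMGCreateLevel_GAMG() is a
   single Galerkin triple product, A_{k-1} = P^T A_k P, done with the public
   MatPtAP() call.  Variable names here (Afine, P, Acoarse) are illustrative.

     Mat Afine, P, Acoarse;
     // ... assemble Afine (n x n) and the prolongator P (n x nc) ...
     MatPtAP(Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse);  // Acoarse = P^T Afine P
     // ... the next level is built from Acoarse ...
     MatDestroy(&Acoarse);
*/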

453: /* -------------------------------------------------------------------------- */
454: /*
455:    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
456:                     by setting data structures and options.

458:    Input Parameter:
459: .  pc - the preconditioner context

461:    Application Interface Routine: PCSetUp()

463:    Notes:
464:    The interface routine PCSetUp() is not usually called directly by
465:    the user, but instead is called by PCApply() if necessary.
466: */
469: PetscErrorCode PCSetUp_GAMG(PC pc)
470: {
472:   PC_MG          *mg      = (PC_MG*)pc->data;
473:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
474:   Mat            Pmat     = pc->pmat;
475:   PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
476:   MPI_Comm       comm;
477:   PetscMPIInt    rank,size,nactivepe;
478:   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
479:   PetscReal      emaxs[GAMG_MAXLEVELS];
480:   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
481:   PetscLogDouble nnz0=0.,nnztot=0.;
482:   MatInfo        info;
483:   PetscBool      redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol);

486:   PetscObjectGetComm((PetscObject)pc,&comm);
487:   MPI_Comm_rank(comm,&rank);
488:   MPI_Comm_size(comm,&size);

490:   if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);

492:   if (pc_gamg->setup_count++ > 0) {
493:     if (redo_mesh_setup) {
494:       /* reset everything */
495:       PCReset_MG(pc);
496:       pc->setupcalled = 0;
497:     } else {
498:       PC_MG_Levels **mglevels = mg->levels;
499:       /* just do Galerkin grids */
500:       Mat          B,dA,dB;

502:       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
503:       if (pc_gamg->Nlevels > 1) {
504:         /* currently only handle case where mat and pmat are the same on coarser levels */
505:         KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);
506:         /* (re)set to get dirty flag */
507:         KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);

509:         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
510:           /* the first time through the matrix structure has changed from repartitioning */
511:           if (pc_gamg->setup_count==2) {
512:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
513:             MatDestroy(&mglevels[level]->A);

515:             mglevels[level]->A = B;
516:           } else {
517:             KSPGetOperators(mglevels[level]->smoothd,NULL,&B);
518:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
519:           }
520:           KSPSetOperators(mglevels[level]->smoothd,B,B);
521:           dB   = B;
522:         }
523:       }

525:       PCSetUp_MG(pc);

527:       return(0);
528:     }
529:   }

531:   if (!pc_gamg->data) {
532:     if (pc_gamg->orig_data) {
533:       MatGetBlockSize(Pmat, &bs);
534:       MatGetLocalSize(Pmat, &qq, NULL);

536:       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
537:       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
538:       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

540:       PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);
541:       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
542:     } else {
543:       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
544:       pc_gamg->ops->createdefaultdata(pc,Pmat);
545:     }
546:   }

548:   /* cache original data for reuse */
549:   if (!pc_gamg->orig_data && redo_mesh_setup) {
550:     PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);
551:     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
552:     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
553:     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
554:   }

556:   /* get basic dims */
557:   MatGetBlockSize(Pmat, &bs);

559:   MatGetSize(Pmat, &M, &qq);
560:   if (pc_gamg->verbose) {
561:     PetscInt NN = M;
562:     if (pc_gamg->verbose==1) {
563:       MatGetInfo(Pmat,MAT_LOCAL,&info);
564:       MatGetLocalSize(Pmat, &NN, &qq);
565:       if (!NN) NN=1;
566:     } else {
567:       MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
568:     }
569:     nnz0   = info.nz_used;
570:     nnztot = info.nz_used;
571:     PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
572:                          rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
573:                          (int)(nnz0/(PetscReal)NN),size);
574:   }

576:   /* Get A_i and R_i */
577:   for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
578:        level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit);  /* && (size==1 || nactivepe>1); */
579:        level++) {
580:     level1 = level + 1;
581: #if defined PETSC_GAMG_USE_LOG
582:     PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
583: #if (defined GAMG_STAGES)
584:     PetscLogStagePush(gamg_stages[level]);
585: #endif
586: #endif
587:     { /* construct prolongator */
588:       Mat              Gmat;
589:       PetscCoarsenData *agg_lists;
590:       Mat              Prol11;

592:       pc_gamg->ops->graph(pc,Aarr[level], &Gmat);
593:       pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);
594:       pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);

596:       /* could have failed to create new level */
597:       if (Prol11) {
598:         /* get new block size of coarse matrices */
599:         MatGetBlockSizes(Prol11, NULL, &bs);

601:         if (pc_gamg->ops->optprol) {
602:           /* smooth */
603:           pc_gamg->ops->optprol(pc, Aarr[level], &Prol11);
604:         }

606:         Parr[level1] = Prol11;
607:       } else Parr[level1] = NULL;

609:       if (pc_gamg->use_aggs_in_gasm) {
610:         PetscInt bs;
611:         MatGetBlockSizes(Prol11, &bs, NULL);
612:         PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);
613:       }

615:       MatDestroy(&Gmat);
616:       PetscCDDestroy(agg_lists);
617:     } /* construct prolongator scope */
618: #if defined PETSC_GAMG_USE_LOG
619:     PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
620: #endif
621:     /* cache eigen estimate */
622:     if (pc_gamg->emax_id != -1) {
623:       PetscBool flag;
624:       PetscObjectComposedDataGetReal((PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag);
625:       if (!flag) emaxs[level] = -1.;
626:     } else emaxs[level] = -1.;
627:     if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
628:     if (!Parr[level1]) {
629:       if (pc_gamg->verbose) {
630:         PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);
631:       }
632:       break;
633:     }
634: #if defined PETSC_GAMG_USE_LOG
635:     PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
636: #endif

638:     pc_gamg->ops->createlevel(pc, Aarr[level], bs,
639:                        &Parr[level1], &Aarr[level1], &nactivepe);

641: #if defined PETSC_GAMG_USE_LOG
642:     PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
643: #endif
644:     MatGetSize(Aarr[level1], &M, &qq);

646:     if (pc_gamg->verbose > 0) {
647:       PetscInt NN = M;
648:       if (pc_gamg->verbose==1) {
649:         MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
650:         MatGetLocalSize(Aarr[level1], &NN, &qq);
651:         if (!NN) NN=1;
652:       } else {
653:         MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);
654:       }

656:       nnztot += info.nz_used;
657:       PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
658:                             rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
659:                             (int)(info.nz_used/(PetscReal)NN), nactivepe);
660:     }

662:     /* stop if one node or one proc -- could pull back for singular problems */
663:     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M < 2)
664:          || nactivepe==1 ) {
665:       level++;
666:       break;
667:     }
668: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
669:     PetscLogStagePop();
670: #endif
671:   } /* levels */

673:   if (pc_gamg->data) {
674:     PetscFree(pc_gamg->data);
675:     pc_gamg->data = NULL;
676:   }

678:   if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
679:   pc_gamg->Nlevels = level + 1;
680:   fine_level       = level;
681:   PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);

683:   /* simple setup */
684:   if (!PETSC_TRUE) {
685:     PC_MG_Levels **mglevels = mg->levels;
686:     for (lidx=0,level=pc_gamg->Nlevels-1;
687:          lidx<fine_level;
688:          lidx++, level--) {
689:       PCMGSetInterpolation(pc, lidx+1, Parr[level]);
690:       KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level]);
691:       MatDestroy(&Parr[level]);
692:       MatDestroy(&Aarr[level]);
693:     }
694:     KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0]);

696:     PCSetUp_MG(pc);
697:   } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
698:     /* set default smoothers & set operators */
699:     for (lidx = 1, level = pc_gamg->Nlevels-2;
700:          lidx <= fine_level;
701:          lidx++, level--) {
702:       KSP smoother;
703:       PC  subpc;

705:       PCMGGetSmoother(pc, lidx, &smoother);
706:       KSPGetPC(smoother, &subpc);

708:       KSPSetNormType(smoother, KSP_NORM_NONE);
709:       /* set ops */
710:       KSPSetOperators(smoother, Aarr[level], Aarr[level]);
711:       PCMGSetInterpolation(pc, lidx, Parr[level+1]);

713:       /* set defaults */
714:       KSPSetType(smoother, KSPCHEBYSHEV);

716:       /* set blocks for GASM smoother that uses the 'aggregates' */
717:       if (pc_gamg->use_aggs_in_gasm) {
718:         PetscInt sz;
719:         IS       *is;

721:         sz   = nASMBlocksArr[level];
722:         is   = ASMLocalIDsArr[level];
723:         PCSetType(subpc, PCGASM);
724:         PCGASMSetOverlap(subpc, 0);
725:         if (sz==0) {
726:           IS       is;
727:           PetscInt my0,kk;
728:           MatGetOwnershipRange(Aarr[level], &my0, &kk);
729:           ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);
730:           PCGASMSetSubdomains(subpc, 1, &is, NULL);
731:           ISDestroy(&is);
732:         } else {
733:           PetscInt kk;
734:           PCGASMSetSubdomains(subpc, sz, is, NULL);
735:           for (kk=0; kk<sz; kk++) {
736:             ISDestroy(&is[kk]);
737:           }
738:           PetscFree(is);
739:         }
740:         ASMLocalIDsArr[level] = NULL;
741:         nASMBlocksArr[level]  = 0;
742:         PCGASMSetType(subpc, PC_GASM_BASIC);
743:       } else {
744:         PCSetType(subpc, PCSOR);
745:       }
746:     }
747:     {
748:       /* coarse grid */
749:       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
750:       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
751:       PCMGGetSmoother(pc, lidx, &smoother);
752:       KSPSetOperators(smoother, Lmat, Lmat);
753:       KSPSetNormType(smoother, KSP_NORM_NONE);
754:       KSPGetPC(smoother, &subpc);
755:       PCSetType(subpc, PCBJACOBI);
756:       PCSetUp(subpc);
757:       PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);
758:       if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
759:       KSPGetPC(k2[0],&pc2);
760:       PCSetType(pc2, PCLU);
761:       PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);
762:       KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);
763:       /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
764:        * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
765:        * view every subdomain as though they were different. */
766:       ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
767:     }

769:     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
770:     PetscObjectOptionsBegin((PetscObject)pc);
771:     PCSetFromOptions_MG(PetscOptionsObject,pc);
772:     PetscOptionsEnd();
773:     if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");

775:     /* create cheby smoothers */
776:     for (lidx = 1, level = pc_gamg->Nlevels-2;
777:          lidx <= fine_level;
778:          lidx++, level--) {
779:       KSP       smoother;
780:       PetscBool flag,flag2;
781:       PC        subpc;

783:       PCMGGetSmoother(pc, lidx, &smoother);
784:       KSPGetPC(smoother, &subpc);

786:       /* do my own cheby */
787:       PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);
788:       if (flag) {
789:         PetscReal emax, emin;
790:         PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);
791:         PetscObjectTypeCompare((PetscObject)subpc, PCSOR, &flag2);
792:         if ((flag||flag2) && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only valid for diagonal PC, but let's accept SOR because it is close and safe (always lower) */
793:         else { /* eigen estimate 'emax' -- this is done in cheby */
794:           KSP eksp;
795:           Mat Lmat = Aarr[level];
796:           Vec bb, xx;

798:           MatCreateVecs(Lmat, &bb, 0);
799:           MatCreateVecs(Lmat, &xx, 0);
800:           {
801:             PetscRandom rctx;
802:             PetscRandomCreate(comm,&rctx);
803:             PetscRandomSetFromOptions(rctx);
804:             VecSetRandom(bb,rctx);
805:             PetscRandomDestroy(&rctx);
806:           }

808:           /* zeroing out BC rows -- needed for crazy matrices */
809:           {
810:             PetscInt    Istart,Iend,ncols,jj,Ii;
811:             PetscScalar zero = 0.0;
812:             MatGetOwnershipRange(Lmat, &Istart, &Iend);
813:             for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
814:               MatGetRow(Lmat,Ii,&ncols,0,0);
815:               if (ncols <= 1) {
816:                 VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);
817:               }
818:               MatRestoreRow(Lmat,Ii,&ncols,0,0);
819:             }
820:             VecAssemblyBegin(bb);
821:             VecAssemblyEnd(bb);
822:           }

824:           KSPCreate(comm, &eksp);
825:           KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);
826:           KSPSetNormType(eksp, KSP_NORM_NONE);
827:           KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);
828:           KSPAppendOptionsPrefix(eksp, "gamg_est_");
829:           KSPSetFromOptions(eksp);

831:           KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);
832:           KSPSetOperators(eksp, Lmat, Lmat);
833:           KSPSetComputeSingularValues(eksp,PETSC_TRUE);

835:           /* set PC type to be same as smoother */
836:           KSPSetPC(eksp, subpc);

838:           /* solve - keep stuff out of logging */
839:           PetscLogEventDeactivate(KSP_Solve);
840:           PetscLogEventDeactivate(PC_Apply);
841:           KSPSolve(eksp, bb, xx);
842:           PetscLogEventActivate(KSP_Solve);
843:           PetscLogEventActivate(PC_Apply);

845:           KSPComputeExtremeSingularValues(eksp, &emax, &emin);

847:           VecDestroy(&xx);
848:           VecDestroy(&bb);
849:           KSPDestroy(&eksp);

851:           if (pc_gamg->verbose > 0) {
852:             PetscInt N1, tt;
853:             MatGetSize(Aarr[level], &N1, &tt);
854:             PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
855:           }
856:         }
857:         {
858:           PetscInt N1, N0;
859:           MatGetSize(Aarr[level], &N1, NULL);
860:           MatGetSize(Aarr[level+1], &N0, NULL);
861:           /* heuristic - is this crap? */
862:           /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */
863:           emin  = emax * pc_gamg->eigtarget[0];
864:           emax *= pc_gamg->eigtarget[1];
865:         }
866:         KSPChebyshevSetEigenvalues(smoother, emax, emin);
867:       } /* setup Chebyshev flag */
868:     } /* non-coarse levels */

870:     /* clean up */
871:     for (level=1; level<pc_gamg->Nlevels; level++) {
872:       MatDestroy(&Parr[level]);
873:       MatDestroy(&Aarr[level]);
874:     }

876:     PCSetUp_MG(pc);
877:   } else {
878:     KSP smoother;
879:     if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__);
880:     PCMGGetSmoother(pc, 0, &smoother);
881:     KSPSetOperators(smoother, Aarr[0], Aarr[0]);
882:     KSPSetType(smoother, KSPPREONLY);
883:     PCSetUp_MG(pc);
884:   }
885:   return(0);
886: }

888: /* ------------------------------------------------------------------------- */
889: /*
890:  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
891:    that was created with PCCreate_GAMG().

893:    Input Parameter:
894: .  pc - the preconditioner context

896:    Application Interface Routine: PCDestroy()
897: */
900: PetscErrorCode PCDestroy_GAMG(PC pc)
901: {
903:   PC_MG          *mg     = (PC_MG*)pc->data;
904:   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

907:   PCReset_GAMG(pc);
908:   if (pc_gamg->ops->destroy) {
909:     (*pc_gamg->ops->destroy)(pc);
910:   }
911:   PetscFree(pc_gamg->ops);
912:   PetscFree(pc_gamg->gamg_type_name);
913:   PetscFree(pc_gamg);
914:   PCDestroy_MG(pc);
915:   return(0);
916: }


921: /*@
922:    PCGAMGSetProcEqLim - Set the number of equations per process to aim for on coarse grids via processor reduction.

924:    Logically Collective on PC

926:    Input Parameters:
927: +  pc - the preconditioner context
928: -  n - the number of equations


931:    Options Database Key:
932: .  -pc_gamg_process_eq_limit <limit>

934:    Level: intermediate

936:    Concepts: Unstructured multigrid preconditioner

938: .seealso: ()
939: @*/
940: PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
941: {

946:   PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
947:   return(0);
948: }
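
/*
   Editor's sketch (assumed usage, not from the original source): request
   roughly 1000 equations per process on coarse grids; 'ksp' is an existing KSP.

     PC pc;
     KSPGetPC(ksp, &pc);
     PCSetType(pc, PCGAMG);
     PCGAMGSetProcEqLim(pc, 1000);   // or: -pc_gamg_process_eq_limit 1000
*/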

952: static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
953: {
954:   PC_MG   *mg      = (PC_MG*)pc->data;
955:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

958:   if (n>0) pc_gamg->min_eq_proc = n;
959:   return(0);
960: }

964: /*@
965:    PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

967:  Collective on PC

969:    Input Parameters:
970: +  pc - the preconditioner context
971: -  n - maximum number of equations to aim for

973:    Options Database Key:
974: .  -pc_gamg_coarse_eq_limit <limit>

976:    Level: intermediate

978:    Concepts: Unstructured multigrid preconditioner

980: @*/
981: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
982: {

987:   PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
988:   return(0);
989: }

993: static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
994: {
995:   PC_MG   *mg      = (PC_MG*)pc->data;
996:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

999:   if (n>0) pc_gamg->coarse_eq_limit = n;
1000:   return(0);
1001: }

1005: /*@
1006:    PCGAMGSetRepartitioning - Repartition the coarse grids

1008:    Collective on PC

1010:    Input Parameters:
1011: +  pc - the preconditioner context
1012: -  n - PETSC_TRUE or PETSC_FALSE

1014:    Options Database Key:
1015: .  -pc_gamg_repartition <true,false>

1017:    Level: intermediate

1019:    Concepts: Unstructured multigrid preconditioner

1021: .seealso: ()
1022: @*/
1023: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
1024: {

1029:   PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
1030:   return(0);
1031: }

1035: static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
1036: {
1037:   PC_MG   *mg      = (PC_MG*)pc->data;
1038:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1041:   pc_gamg->repart = n;
1042:   return(0);
1043: }

1047: /*@
1048:    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner

1050:    Collective on PC

1052:    Input Parameters:
1053: +  pc - the preconditioner context
1054: -  n - PETSC_TRUE or PETSC_FALSE

1056:    Options Database Key:
1057: .  -pc_gamg_reuse_interpolation <true,false>

1059:    Level: intermediate

1061:    Concepts: Unstructured multigrid preconditioner

1063: .seealso: ()
1064: @*/
1065: PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
1066: {

1071:   PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));
1072:   return(0);
1073: }
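
/*
   Editor's sketch (illustrative, not from the original source): reusing the
   prolongation pays off when matrix values change between solves but the
   nonzero structure does not, e.g. in a time-stepping loop; 'ksp', 'nsteps',
   'A', 'b', 'x' are assumed to exist.

     PetscInt step;
     PCGAMGSetReuseInterpolation(pc, PETSC_TRUE);
     for (step = 0; step < nsteps; step++) {
       // ... update the values of A ...
       KSPSetOperators(ksp, A, A);   // Galerkin coarse operators are recomputed,
       KSPSolve(ksp, b, x);          // but coarsening/prolongators are reused
     }
*/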

1077: static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
1078: {
1079:   PC_MG   *mg      = (PC_MG*)pc->data;
1080:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1083:   pc_gamg->reuse_prol = n;
1084:   return(0);
1085: }

1089: /*@
1090:    PCGAMGSetUseASMAggs - Use the aggregates from the coarsening to define the subdomains of the GASM smoother

1092:    Collective on PC

1094:    Input Parameters:
1095: +  pc - the preconditioner context
1096: -  n - PETSC_TRUE to use the aggregates to define the GASM subdomains


1098:    Options Database Key:
1099: .  -pc_gamg_use_agg_gasm

1101:    Level: intermediate

1103:    Concepts: Unstructured multigrid preconditioner

1105: .seealso: ()
1106: @*/
1107: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1108: {

1113:   PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
1114:   return(0);
1115: }

1119: static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1120: {
1121:   PC_MG   *mg      = (PC_MG*)pc->data;
1122:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1125:   pc_gamg->use_aggs_in_gasm = n;
1126:   return(0);
1127: }

1131: /*@
1132:    PCGAMGSetNlevels -  Sets the maximum number of levels PCGAMG will use

1134:    Not collective on PC

1136:    Input Parameters:
1137: +  pc - the preconditioner
1138: -  n - the maximum number of levels to use

1140:    Options Database Key:
1141: .  -pc_mg_levels

1143:    Level: intermediate

1145:    Concepts: Unstructured multigrid preconditioner

1147: .seealso: ()
1148: @*/
1149: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1150: {

1155:   PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1156:   return(0);
1157: }

1161: static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1162: {
1163:   PC_MG   *mg      = (PC_MG*)pc->data;
1164:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1167:   pc_gamg->Nlevels = n;
1168:   return(0);
1169: }

1173: /*@
1174:    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

1176:    Not collective on PC

1178:    Input Parameters:
1179: +  pc - the preconditioner context
1180: -  threshold - the threshold value, 0.0 is the lowest possible

1182:    Options Database Key:
1183: .  -pc_gamg_threshold <threshold>

1185:    Level: intermediate

1187:    Concepts: Unstructured multigrid preconditioner

1189: .seealso: ()
1190: @*/
1191: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1192: {

1197:   PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1198:   return(0);
1199: }
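
/*
   Editor's sketch (assumed usage): a larger threshold drops more weak edges
   from the aggregation graph, giving faster coarsening at some cost in
   convergence; 0.0 keeps every edge.

     PCGAMGSetThreshold(pc, 0.05);   // or: -pc_gamg_threshold 0.05
*/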

1203: static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1204: {
1205:   PC_MG   *mg      = (PC_MG*)pc->data;
1206:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1209:   pc_gamg->threshold = n;
1210:   return(0);
1211: }

1215: /*@
1216:    PCGAMGSetType - Set the type of algorithm GAMG uses

1218:    Collective on PC

1220:    Input Parameters:
1221: +  pc - the preconditioner context
1222: -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

1224:    Options Database Key:
1225: .  -pc_gamg_type <agg,geo,classical>

1227:    Level: intermediate

1229:    Concepts: Unstructured multigrid preconditioner

1231: .seealso: PCGAMGGetType(), PCGAMG
1232: @*/
1233: PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1234: {

1239:   PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
1240:   return(0);
1241: }

1245: /*@
1246:    PCGAMGGetType - Get the type of algorithm GAMG uses

1248:    Collective on PC

1250:    Input Parameter:
1251: .  pc - the preconditioner context

1253:    Output Parameter:
1254: .  type - the type of algorithm used

1256:    Level: intermediate

1258:    Concepts: Unstructured multigrid preconditioner

1260: .seealso: PCGAMGSetType()
1261: @*/
1262: PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1263: {

1268:   PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));
1269:   return(0);
1270: }
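
/*
   Editor's sketch (assumed usage) of the type interface:

     PCGAMGType t;
     PCGAMGSetType(pc, PCGAMGAGG);   // smoothed aggregation (the default)
     PCGAMGGetType(pc, &t);          // t is now "agg"
*/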

1274: static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1275: {
1276:   PC_MG          *mg      = (PC_MG*)pc->data;
1277:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1280:   *type = pc_gamg->type;
1281:   return(0);
1282: }

1286: static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1287: {
1288:   PetscErrorCode ierr,(*r)(PC);
1289:   PC_MG          *mg      = (PC_MG*)pc->data;
1290:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1293:   pc_gamg->type = type;
1294:   PetscFunctionListFind(GAMGList,type,&r);
1295:   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1296:   if (pc_gamg->ops->destroy) {
1297:     /* there was something here - kill it */
1298:     (*pc_gamg->ops->destroy)(pc);
1299:     PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));
1300:     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1301:     /* cleaning up common data in pc_gamg - this should disappear someday */
1302:     pc_gamg->data_cell_cols = 0;
1303:     pc_gamg->data_cell_rows = 0;
1304:     pc_gamg->orig_data_cell_cols = 0;
1305:     pc_gamg->orig_data_cell_rows = 0;
1306:     if (pc_gamg->data_sz) {
1307:       PetscFree(pc_gamg->data);
1308:       pc_gamg->data_sz = 0;
1309:     } else if (pc_gamg->data) {
1310:       PetscFree(pc_gamg->data); /* can this happen ? */
1311:     }
1312:   }
1313:   PetscFree(pc_gamg->gamg_type_name);
1314:   PetscStrallocpy(type,&pc_gamg->gamg_type_name);
1315:   (*r)(pc);
1316:   return(0);
1317: }

1321: PetscErrorCode PCSetFromOptions_GAMG(PetscOptions *PetscOptionsObject,PC pc)
1322: {
1324:   PC_MG          *mg      = (PC_MG*)pc->data;
1325:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1326:   PetscBool      flag;
1327:   PetscInt       two   = 2;
1328:   MPI_Comm       comm;

1331:   PetscObjectGetComm((PetscObject)pc,&comm);
1332:   PetscOptionsHead(PetscOptionsObject,"GAMG options");
1333:   {
1334:     /* -pc_gamg_type */
1335:     {
1336:       char tname[256];
1337:       PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);
1338:       if (flag) {
1339:         PCGAMGSetType(pc,tname);
1340:       }
1341:     }
1342:     /* -pc_gamg_verbose */
1343:     PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG","none", pc_gamg->verbose,&pc_gamg->verbose, NULL);
1344:     /* -pc_gamg_repartition */
1345:     PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);
1346:     /* -pc_gamg_reuse_interpolation */
1347:     PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);
1348:     /* -pc_gamg_use_agg_gasm */
1349:     PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregation aggregates for GASM smoother","PCGAMGUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);
1350:     /* -pc_gamg_process_eq_limit */
1351:     PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);
1352:     /* -pc_gamg_coarse_eq_limit */
1353:     PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);
1354:     /* -pc_gamg_threshold */
1355:     PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);
1356:     if (flag && pc_gamg->verbose) {
1357:       PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1358:     }
1359:     /* -pc_gamg_eigtarget */
1360:     PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);
1361:     PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);

1363:     /* set options for subtype */
1364:     if (pc_gamg->ops->setfromoptions) {(*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);}
1365:   }
1366:   PetscOptionsTail();
1367:   return(0);
1368: }

1370: /* -------------------------------------------------------------------------- */
1371: /*MC
1372:      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

1374:    Options Database Keys:
1375:    Multigrid options(inherited)
1376: +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1377: .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1378: .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1379: -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade


1382:   Notes: In order to obtain good performance for PCGAMG for vector valued problems you must
1383: $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1384: $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1385: $       See the Users Manual Chapter 4 for more details

1387:   Level: intermediate

1389:   Concepts: algebraic multigrid

1391: .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(), 
1392:            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
1393: M*/
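
/*
   Editor's sketch (assumed usage; the names ksp, A, b, x, dim, nloc, coords
   are illustrative): the calls the Notes above recommend for a vector-valued
   problem such as 3D elasticity.

     KSP ksp;
     KSPCreate(PETSC_COMM_WORLD, &ksp);
     MatSetBlockSize(A, 3);                      // 3 dofs per grid point
     KSPSetOperators(ksp, A, A);
     {
       PC pc;
       KSPGetPC(ksp, &pc);
       PCSetType(pc, PCGAMG);
       PCSetCoordinates(pc, dim, nloc, coords);  // near null space for elasticity
     }
     KSPSetFromOptions(ksp);                     // e.g. -pc_gamg_repartition true
     KSPSolve(ksp, b, x);                        //      -pc_gamg_coarse_eq_limit 50
*/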

1397: PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1398: {
1400:   PC_GAMG        *pc_gamg;
1401:   PC_MG          *mg;
1402: #if defined PETSC_GAMG_USE_LOG
1403:   static long count = 0;
1404: #endif

1407:   /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1408:   PCSetType(pc, PCMG); /* calls PCCreate_MG() and MGCreate_Private() */
1409:   PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);

1411:   /* create a supporting struct and attach it to pc */
1412:   PetscNewLog(pc,&pc_gamg);
1413:   mg           = (PC_MG*)pc->data;
1414:   mg->galerkin = 2;             /* Use Galerkin, but it is computed externally */
1415:   mg->innerctx = pc_gamg;

1417:   PetscNewLog(pc,&pc_gamg->ops);

1419:   pc_gamg->setup_count = 0;
1420:   /* these should be in subctx but repartitioning needs simple arrays */
1421:   pc_gamg->data_sz = 0;
1422:   pc_gamg->data    = 0;

1424:   /* register AMG type */
1425:   PCGAMGInitializePackage();

1427:   /* overwrite the pointers of PCMG with the functions of the derived class PCGAMG */
1428:   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1429:   pc->ops->setup          = PCSetUp_GAMG;
1430:   pc->ops->reset          = PCReset_GAMG;
1431:   pc->ops->destroy        = PCDestroy_GAMG;

1433:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);
1434:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);
1435:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);
1436:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);
1437:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);
1438:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);
1439:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);
1440:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);
1441:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);
1442:   pc_gamg->repart           = PETSC_FALSE;
1443:   pc_gamg->reuse_prol       = PETSC_FALSE;
1444:   pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1445:   pc_gamg->min_eq_proc      = 50;
1446:   pc_gamg->coarse_eq_limit  = 50;
1447:   pc_gamg->threshold        = 0.;
1448:   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1449:   pc_gamg->verbose          = 0;
1450:   pc_gamg->emax_id          = -1;
1451:   pc_gamg->eigtarget[0]     = 0.05;
1452:   pc_gamg->eigtarget[1]     = 1.05;
1453:   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

1455:   /* private events */
1456: #if defined PETSC_GAMG_USE_LOG
1457:   if (count++ == 0) {
1458:     PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1459:     PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1460:     /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1461:     /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1462:     /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1463:     PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1464:     PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1465:     PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1466:     PetscLogEventRegister("    search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1467:     PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1468:     PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1469:     PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1470:     PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1471:     PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1472:     PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1473:     PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1474:     PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);

1476:     /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1477:     /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1478:     /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1479:     /* create timer stages */
1480: #if defined GAMG_STAGES
1481:     {
1482:       char     str[32];
1483:       PetscInt lidx;
1484:       sprintf(str,"MG Level %d (finest)",0);
1485:       PetscLogStageRegister(str, &gamg_stages[0]);
1486:       for (lidx=1; lidx<9; lidx++) {
1487:         sprintf(str,"MG Level %d",lidx);
1488:         PetscLogStageRegister(str, &gamg_stages[lidx]);
1489:       }
1490:     }
1491: #endif
1492:   }
1493: #endif
1494:   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1495:   PCGAMGSetType(pc,PCGAMGAGG);
1496:   return(0);
1497: }

1501: /*@C
1502:  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1503:  from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1504:  when using static libraries.

1506:  Level: developer

1508:  .keywords: PC, PCGAMG, initialize, package
1509:  .seealso: PetscInitialize()
1510: @*/
1511: PetscErrorCode PCGAMGInitializePackage(void)
1512: {

1516:   if (PCGAMGPackageInitialized) return(0);
1517:   PCGAMGPackageInitialized = PETSC_TRUE;
1518:   PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);
1519:   PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);
1520:   PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);
1521:   PetscRegisterFinalize(PCGAMGFinalizePackage);

1523:   /* general events */
1524: #if defined PETSC_USE_LOG
1525:   PetscLogEventRegister("PCGAMGgraph_AGG", PC_CLASSID, &PC_GAMGGgraph_AGG);
1526:   PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1527:   PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1528:   PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1529:   PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1530:   PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1531:   PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1532: #endif

1534:   return(0);
1535: }

1539: /*@C
1540:  PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is
1541:  called from PetscFinalize().

1543:  Level: developer

1545:  .keywords: Petsc, destroy, package
1546:  .seealso: PetscFinalize()
1547: @*/
1548: PetscErrorCode PCGAMGFinalizePackage(void)
1549: {

1553:   PCGAMGPackageInitialized = PETSC_FALSE;
1554:   PetscFunctionListDestroy(&GAMGList);
1555:   return(0);
1556: }

1560: /*@C
1561:  PCGAMGRegister - Register a PCGAMG implementation.

1563:  Input Parameters:
1564:  + type - string that will be used as the name of the GAMG type.
1565:  - create - function for creating the gamg context.

1567:   Level: advanced

1569:  .seealso: PCGAMGGetContext(), PCGAMGSetContext()
1570: @*/
1571: PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1572: {

1576:   PCGAMGInitializePackage();
1577:   PetscFunctionListAdd(&GAMGList,type,create);
1578:   return(0);
1579: }
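
/*
   Editor's sketch: registering a custom GAMG variant.  The name "my_gamg" and
   the creation routine PCCreateGAMG_My() are hypothetical.

     PetscErrorCode PCCreateGAMG_My(PC);   // must fill in pc_gamg->ops
     PCGAMGRegister("my_gamg", PCCreateGAMG_My);
     // ... later, after PCSetType(pc,PCGAMG):
     PCGAMGSetType(pc, "my_gamg");         // or: -pc_gamg_type my_gamg
*/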