Actual source code: asm.c

  2: /*
  3:   This file defines an additive Schwarz preconditioner for any Mat implementation.

  5:   Note that each processor may have any number of subdomains. But in order to
  6:   deal easily with the VecScatter(), we treat each processor as if it has the
  7:   same number of subdomains.

  9:        n - total number of true subdomains on all processors
 10:        n_local_true - actual number of subdomains on this processor
 11:        n_local = maximum over all processors of n_local_true
 12: */
 13: #include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
 14: #include <petscdm.h>

 16: typedef struct {
 17:   PetscInt   n, n_local, n_local_true;
 18:   PetscInt   overlap;             /* overlap requested by user */
 19:   KSP        *ksp;                /* linear solvers for each block */
 20:   VecScatter *restriction;        /* mapping from global to subregion */
 21:   VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion */
 22:   VecScatter *prolongation;       /* mapping from subregion to global */
 23:   Vec        *x,*y,*y_local;      /* work vectors */
 24:   IS         *is;                 /* index set that defines each overlapping subdomain */
 25:   IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
 26:   Mat        *mat,*pmat;          /* mat is not currently used */
 27:   PCASMType  type;                /* use reduced interpolation, restriction or both */
 28:   PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
 29:   PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
 30:   PetscBool  sort_indices;        /* flag to sort subdomain indices */
 31:   PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
 32: } PC_ASM;
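/*
   Worked example of the counters above (illustrative, assuming two MPI
   ranks): if rank 0 owns 3 subdomains and rank 1 owns 1, then n_local_true
   is 3 on rank 0 and 1 on rank 1, n = 3 + 1 = 4 everywhere, and
   n_local = max(3,1) = 3 everywhere; rank 1 is padded with two empty
   subdomains so that the collective VecScatter calls line up across ranks.
*/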

 36: static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
 37: {
 38:   PC_ASM         *osm = (PC_ASM*)pc->data;
 40:   PetscMPIInt    rank;
 41:   PetscInt       i,bsz;
 42:   PetscBool      iascii,isstring;
 43:   PetscViewer    sviewer;

 46:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 47:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
 48:   if (iascii) {
 49:     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
 50:     if (osm->overlap >= 0) {PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);}
 51:     if (osm->n > 0) {PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);}
 52:     PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: %s, %s\n",blocks,overlaps);
 53:     PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);
 54:     MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
 55:     if (osm->same_local_solves) {
 56:       if (osm->ksp) {
 57:         PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");
 58:         PetscViewerGetSingleton(viewer,&sviewer);
 59:         if (!rank) {
 60:           PetscViewerASCIIPushTab(viewer);
 61:           KSPView(osm->ksp[0],sviewer);
 62:           PetscViewerASCIIPopTab(viewer);
 63:         }
 64:         PetscViewerRestoreSingleton(viewer,&sviewer);
 65:       }
 66:     } else {
 67:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
 68:       PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);
 69:       PetscViewerFlush(viewer);
 70:       PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");
 71:       PetscViewerASCIIPushTab(viewer);
 72:       PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
 73:       PetscViewerGetSingleton(viewer,&sviewer);
 74:       for (i=0; i<osm->n_local_true; i++) {
 75:         ISGetLocalSize(osm->is[i],&bsz);
 76:         PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);
 77:         KSPView(osm->ksp[i],sviewer);
 78:         PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");
 79:       }
 80:       PetscViewerRestoreSingleton(viewer,&sviewer);
 81:       PetscViewerASCIIPopTab(viewer);
 82:       PetscViewerFlush(viewer);
 83:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
 84:     }
 85:   } else if (isstring) {
 86:     PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);
 87:     PetscViewerGetSingleton(viewer,&sviewer);
 88:     if (osm->ksp) {KSPView(osm->ksp[0],sviewer);}
 89:     PetscViewerRestoreSingleton(viewer,&sviewer);
 90:   }
 91:   return(0);
 92: }

 96: static PetscErrorCode PCASMPrintSubdomains(PC pc)
 97: {
 98:   PC_ASM         *osm = (PC_ASM*)pc->data;
 99:   const char     *prefix;
100:   char           fname[PETSC_MAX_PATH_LEN+1];
101:   PetscViewer    viewer, sviewer;
102:   char           *s;
103:   PetscInt       i,j,nidx;
104:   const PetscInt *idx;
105:   PetscMPIInt    rank, size;

109:   MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
110:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
111:   PCGetOptionsPrefix(pc,&prefix);
112:   PetscOptionsGetString(prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);
113: if (fname[0] == 0) { PetscStrcpy(fname,"stdout"); }
114:   PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
115:   for (i=0; i<osm->n_local; i++) {
116:     if (i < osm->n_local_true) {
117:       ISGetLocalSize(osm->is[i],&nidx);
118:       ISGetIndices(osm->is[i],&idx);
119:       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
120:       PetscMalloc(sizeof(char)*(16*(nidx+1)+512), &s);
121:       PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);
122:       PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D with overlap:\n", (int)rank, (int)size, i);
123:       for (j=0; j<nidx; j++) {
124:         PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
125:       }
126:       ISRestoreIndices(osm->is[i],&idx);
127:       PetscViewerStringSPrintf(sviewer,"\n");
128:       PetscViewerDestroy(&sviewer);
129:       PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);
130:       PetscViewerASCIISynchronizedPrintf(viewer, s);
131:       PetscViewerFlush(viewer);
132:       PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);
133:       PetscFree(s);
134:       if (osm->is_local) {
135:         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
136:         PetscMalloc(sizeof(char)*(16*(nidx+1)+512), &s);
137:         PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);
138:         PetscViewerStringSPrintf(sviewer, "[%d:%d] Subdomain %D without overlap:\n", (int)rank, (int)size, i);
139:         ISGetLocalSize(osm->is_local[i],&nidx);
140:         ISGetIndices(osm->is_local[i],&idx);
141:         for (j=0; j<nidx; j++) {
142:           PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
143:         }
144:         ISRestoreIndices(osm->is_local[i],&idx);
145:         PetscViewerStringSPrintf(sviewer,"\n");
146:         PetscViewerDestroy(&sviewer);
147:         PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);
148:         PetscViewerASCIISynchronizedPrintf(viewer, s);
149:         PetscViewerFlush(viewer);
150:         PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);
151:         PetscFree(s);
152:       }
153:     } else {
154:       /* Participate in collective viewer calls. */
155:       PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);
156:       PetscViewerFlush(viewer);
157:       PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);
158:       /* Assume either all ranks have is_local or none do. */
159:       if (osm->is_local) {
160:         PetscViewerASCIISynchronizedAllow(viewer, PETSC_TRUE);
161:         PetscViewerFlush(viewer);
162:         PetscViewerASCIISynchronizedAllow(viewer, PETSC_FALSE);
163:       }
164:     }
165:   }
166:   PetscViewerFlush(viewer);
167:   PetscViewerDestroy(&viewer);
168:   return(0);
169: }
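/*
   Usage sketch (illustrative, not part of asm.c): the routine above is
   triggered from PCSetUp_ASM() by the option shown below; the string form
   of the option, read at the top of the routine, selects the output file
   and defaults to stdout.

       ./app -pc_type asm -pc_asm_print_subdomains
*/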

173: static PetscErrorCode PCSetUp_ASM(PC pc)
174: {
175:   PC_ASM         *osm = (PC_ASM*)pc->data;
177:   PetscBool      symset,flg;
178:   PetscInt       i,m,m_local,firstRow,lastRow;
179:   MatReuse       scall = MAT_REUSE_MATRIX;
180:   IS             isl;
181:   KSP            ksp;
182:   PC             subpc;
183:   const char     *prefix,*pprefix;
184:   Vec            vec;
185:   DM             *domain_dm = NULL;

188:   if (!pc->setupcalled) {

190:     if (!osm->type_set) {
191:       MatIsSymmetricKnown(pc->pmat,&symset,&flg);
192:       if (symset && flg) osm->type = PC_ASM_BASIC;
193:     }

195:     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
196:     if (osm->n_local_true == PETSC_DECIDE) {
197:       /* no subdomains given */
198:       /* try pc->dm first, if allowed */
199:       if (osm->dm_subdomains && pc->dm) {
200:         PetscInt  num_domains, d;
201:         char      **domain_names;
202:         IS        *inner_domain_is, *outer_domain_is;
203:         DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, NULL, &inner_domain_is, &outer_domain_is, NULL, &domain_dm);
204:         if (num_domains) {
205:           PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);
206:         }
207:         for (d = 0; d < num_domains; ++d) {
208:           if (domain_names)    {PetscFree(domain_names[d]);}
209:           if (inner_domain_is) {ISDestroy(&inner_domain_is[d]);}
210:           if (outer_domain_is) {ISDestroy(&outer_domain_is[d]);}
211:         }
212:         PetscFree(domain_names);
213:         PetscFree(inner_domain_is);
214:         PetscFree(outer_domain_is);
215:       }
216:       if (osm->n_local_true == PETSC_DECIDE) {
217:         /* still no subdomains; use one subdomain per processor */
218:         osm->n_local_true = 1;
219:       }
220:     }
221:     { /* determine the global and max number of subdomains */
222:       struct {PetscInt max,sum;} inwork,outwork;
223:       inwork.max   = osm->n_local_true;
224:       inwork.sum   = osm->n_local_true;
225:       MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,PetscObjectComm((PetscObject)pc));
226:       osm->n_local = outwork.max;
227:       osm->n       = outwork.sum;
228:     }
229:     if (!osm->is) { /* create the index sets */
230:       PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);
231:     }
232:     if (osm->n_local_true > 1 && !osm->is_local) {
233:       PetscMalloc1(osm->n_local_true,&osm->is_local);
234:       for (i=0; i<osm->n_local_true; i++) {
235:         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
236:           ISDuplicate(osm->is[i],&osm->is_local[i]);
237:           ISCopy(osm->is[i],osm->is_local[i]);
238:         } else {
239:           PetscObjectReference((PetscObject)osm->is[i]);
240:           osm->is_local[i] = osm->is[i];
241:         }
242:       }
243:     }
244:     PCGetOptionsPrefix(pc,&prefix);
245:     flg  = PETSC_FALSE;
246:     PetscOptionsGetBool(prefix,"-pc_asm_print_subdomains",&flg,NULL);
247:     if (flg) { PCASMPrintSubdomains(pc); }

249:     if (osm->overlap > 0) {
250:       /* Extend the "overlapping" regions by a number of steps */
251:       MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);
252:     }
253:     if (osm->sort_indices) {
254:       for (i=0; i<osm->n_local_true; i++) {
255:         ISSort(osm->is[i]);
256:         if (osm->is_local) {
257:           ISSort(osm->is_local[i]);
258:         }
259:       }
260:     }
261:     /* Create the local work vectors and scatter contexts */
262:     MatGetVecs(pc->pmat,&vec,0);
263:     PetscMalloc1(osm->n_local,&osm->restriction);
264:     if (osm->is_local) {PetscMalloc1(osm->n_local,&osm->localization);}
265:     PetscMalloc1(osm->n_local,&osm->prolongation);
266:     PetscMalloc1(osm->n_local,&osm->x);
267:     PetscMalloc1(osm->n_local,&osm->y);
268:     PetscMalloc1(osm->n_local,&osm->y_local);
269:     VecGetOwnershipRange(vec, &firstRow, &lastRow);
270:     for (i=0; i<osm->n_local_true; ++i, firstRow += m_local) {
271:       ISGetLocalSize(osm->is[i],&m);
272:       VecCreateSeq(PETSC_COMM_SELF,m,&osm->x[i]);
273:       ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
274:       VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);
275:       ISDestroy(&isl);
276:       VecDuplicate(osm->x[i],&osm->y[i]);
277:       if (osm->is_local) {
278:         ISLocalToGlobalMapping ltog;
279:         IS                     isll;
280:         const PetscInt         *idx_local;
281:         PetscInt               *idx,nout;

283:         ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);
284:         ISGetLocalSize(osm->is_local[i],&m_local);
285:         ISGetIndices(osm->is_local[i], &idx_local);
286:         PetscMalloc1(m_local,&idx);
287:         ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);
288:         ISLocalToGlobalMappingDestroy(&ltog);
289:         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
290:         ISRestoreIndices(osm->is_local[i], &idx_local);
291:         ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);
292:         ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);
293:         VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);
294:         VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);
295:         ISDestroy(&isll);

297:         VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);
298:         ISDestroy(&isl);
299:       } else {
300:         VecGetLocalSize(vec,&m_local);

302:         osm->y_local[i] = osm->y[i];

304:         PetscObjectReference((PetscObject) osm->y[i]);

306:         osm->prolongation[i] = osm->restriction[i];

308:         PetscObjectReference((PetscObject) osm->restriction[i]);
309:       }
310:     }
311:     if (firstRow != lastRow) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Specified ASM subdomain sizes were invalid: %D != %D", firstRow, lastRow);
312:     for (i=osm->n_local_true; i<osm->n_local; i++) {
313:       VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);
314:       VecDuplicate(osm->x[i],&osm->y[i]);
315:       VecDuplicate(osm->x[i],&osm->y_local[i]);
316:       ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);
317:       VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);
318:       if (osm->is_local) {
319:         VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);
320:         VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);
321:       } else {
322:         osm->prolongation[i] = osm->restriction[i];
323:         PetscObjectReference((PetscObject) osm->restriction[i]);
324:       }
325:       ISDestroy(&isl);
326:     }
327:     VecDestroy(&vec);

329:     if (!osm->ksp) {
330:       /* Create the local solvers */
331:       PetscMalloc1(osm->n_local_true,&osm->ksp);
332:       if (domain_dm) {
333:         PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");
334:       }
335:       for (i=0; i<osm->n_local_true; i++) {
336:         KSPCreate(PETSC_COMM_SELF,&ksp);
337:         PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
338:         PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
339:         KSPSetType(ksp,KSPPREONLY);
340:         KSPGetPC(ksp,&subpc);
341:         PCGetOptionsPrefix(pc,&prefix);
342:         KSPSetOptionsPrefix(ksp,prefix);
343:         KSPAppendOptionsPrefix(ksp,"sub_");
344:         if (domain_dm) {
345:           KSPSetDM(ksp, domain_dm[i]);
346:           KSPSetDMActive(ksp, PETSC_FALSE);
347:           DMDestroy(&domain_dm[i]);
348:         }
349:         osm->ksp[i] = ksp;
350:       }
351:       if (domain_dm) {
352:         PetscFree(domain_dm);
353:       }
354:     }
355:     scall = MAT_INITIAL_MATRIX;
356:   } else {
357:     /*
358:        Destroy the blocks from the previous iteration
359:     */
360:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
361:       MatDestroyMatrices(osm->n_local_true,&osm->pmat);
362:       scall = MAT_INITIAL_MATRIX;
363:     }
364:   }

366:   /*
367:      Extract out the submatrices
368:   */
369:   MatGetSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);
370:   if (scall == MAT_INITIAL_MATRIX) {
371:     PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
372:     for (i=0; i<osm->n_local_true; i++) {
373:       PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);
374:       PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
375:     }
376:   }

378:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
379:      different boundary conditions for the submatrices than for the global problem) */
380:   PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);

382:   /*
383:      Loop over subdomains putting them into local ksp
384:   */
385:   for (i=0; i<osm->n_local_true; i++) {
386:     KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);
387:     if (!pc->setupcalled) {
388:       KSPSetFromOptions(osm->ksp[i]);
389:     }
390:   }
391:   return(0);
392: }

396: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
397: {
398:   PC_ASM         *osm = (PC_ASM*)pc->data;
400:   PetscInt       i;

403:   for (i=0; i<osm->n_local_true; i++) {
404:     KSPSetUp(osm->ksp[i]);
405:   }
406:   return(0);
407: }

411: static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
412: {
413:   PC_ASM         *osm = (PC_ASM*)pc->data;
415:   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
416:   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

419:   /*
420:      Support for limiting the restriction or interpolation to only local
421:      subdomain values (leaving the other values 0).
422:   */
423:   if (!(osm->type & PC_ASM_RESTRICT)) {
424:     forward = SCATTER_FORWARD_LOCAL;
425:     /* have to zero the work RHS since scatter may leave some slots empty */
426:     for (i=0; i<n_local_true; i++) {
427:       VecZeroEntries(osm->x[i]);
428:     }
429:   }
430:   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

432:   for (i=0; i<n_local; i++) {
433:     VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
434:   }
435:   VecZeroEntries(y);
436:   /* do the local solves */
437:   for (i=0; i<n_local_true; i++) {
438:     VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
439:     KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);
440:     if (osm->localization) {
441:       VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
442:       VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
443:     }
444:     VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
445:   }
446:   /* handle the rest of the scatters that do not have local solves */
447:   for (i=n_local_true; i<n_local; i++) {
448:     VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
449:     VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
450:   }
451:   for (i=0; i<n_local; i++) {
452:     VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
453:   }
454:   return(0);
455: }
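/*
   Summary of the loop above in formulas (illustrative): with R_i the
   restriction onto (overlapping) subdomain i and A_i = R_i A R_i^T the
   submatrix solved by osm->ksp[i], PC_ASM_BASIC applies

       y = sum_i R_i^T A_i^{-1} R_i x.

   PC_ASM_RESTRICT (the default) keeps the full restriction R_i but scatters
   the result back only locally (SCATTER_REVERSE_LOCAL), i.e. restricted
   additive Schwarz; PC_ASM_INTERPOLATE does the opposite; PC_ASM_NONE makes
   both scatters local.
*/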

459: static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
460: {
461:   PC_ASM         *osm = (PC_ASM*)pc->data;
463:   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
464:   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

467:   /*
468:      Support for limiting the restriction or interpolation to only local
469:      subdomain values (leaving the other values 0).

471:      Note: these are reversed from the PCApply_ASM() because we are applying the
472:      transpose of the three terms
473:   */
474:   if (!(osm->type & PC_ASM_INTERPOLATE)) {
475:     forward = SCATTER_FORWARD_LOCAL;
476:     /* have to zero the work RHS since scatter may leave some slots empty */
477:     for (i=0; i<n_local_true; i++) {
478:       VecZeroEntries(osm->x[i]);
479:     }
480:   }
481:   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

483:   for (i=0; i<n_local; i++) {
484:     VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
485:   }
486:   VecZeroEntries(y);
487:   /* do the local solves */
488:   for (i=0; i<n_local_true; i++) {
489:     VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
490:     KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);
491:     if (osm->localization) {
492:       VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
493:       VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
494:     }
495:     VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
496:   }
497:   /* handle the rest of the scatters that do not have local solves */
498:   for (i=n_local_true; i<n_local; i++) {
499:     VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
500:     VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
501:   }
502:   for (i=0; i<n_local; i++) {
503:     VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
504:   }
505:   return(0);
506: }

510: static PetscErrorCode PCReset_ASM(PC pc)
511: {
512:   PC_ASM         *osm = (PC_ASM*)pc->data;
514:   PetscInt       i;

517:   if (osm->ksp) {
518:     for (i=0; i<osm->n_local_true; i++) {
519:       KSPReset(osm->ksp[i]);
520:     }
521:   }
522:   if (osm->pmat) {
523:     if (osm->n_local_true > 0) {
524:       MatDestroyMatrices(osm->n_local_true,&osm->pmat);
525:     }
526:   }
527:   if (osm->restriction) {
528:     for (i=0; i<osm->n_local; i++) {
529:       VecScatterDestroy(&osm->restriction[i]);
530:       if (osm->localization) {VecScatterDestroy(&osm->localization[i]);}
531:       VecScatterDestroy(&osm->prolongation[i]);
532:       VecDestroy(&osm->x[i]);
533:       VecDestroy(&osm->y[i]);
534:       VecDestroy(&osm->y_local[i]);
535:     }
536:     PetscFree(osm->restriction);
537:     if (osm->localization) {PetscFree(osm->localization);}
538:     PetscFree(osm->prolongation);
539:     PetscFree(osm->x);
540:     PetscFree(osm->y);
541:     PetscFree(osm->y_local);
542:   }
543:   PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);

545:   osm->is       = 0;
546:   osm->is_local = 0;
547:   return(0);
548: }

552: static PetscErrorCode PCDestroy_ASM(PC pc)
553: {
554:   PC_ASM         *osm = (PC_ASM*)pc->data;
556:   PetscInt       i;

559:   PCReset_ASM(pc);
560:   if (osm->ksp) {
561:     for (i=0; i<osm->n_local_true; i++) {
562:       KSPDestroy(&osm->ksp[i]);
563:     }
564:     PetscFree(osm->ksp);
565:   }
566:   PetscFree(pc->data);
567:   return(0);
568: }

572: static PetscErrorCode PCSetFromOptions_ASM(PC pc)
573: {
574:   PC_ASM         *osm = (PC_ASM*)pc->data;
576:   PetscInt       blocks,ovl;
577:   PetscBool      symset,flg;
578:   PCASMType      asmtype;

581:   /* set the type to symmetric if matrix is symmetric */
582:   if (!osm->type_set && pc->pmat) {
583:     MatIsSymmetricKnown(pc->pmat,&symset,&flg);
584:     if (symset && flg) osm->type = PC_ASM_BASIC;
585:   }
586:   PetscOptionsHead("Additive Schwarz options");
587:   PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
588:   PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);
589:   if (flg) {
590:     PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);
591:     osm->dm_subdomains = PETSC_FALSE;
592:   }
593:   PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);
594:   if (flg) {
595:     PCASMSetOverlap(pc,ovl);
596:     osm->dm_subdomains = PETSC_FALSE;
597:   }
598:   flg  = PETSC_FALSE;
599:   PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);
600:   if (flg) {PCASMSetType(pc,asmtype); }
601:   PetscOptionsTail();
602:   return(0);
603: }

605: /*------------------------------------------------------------------------------------*/

609: static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
610: {
611:   PC_ASM         *osm = (PC_ASM*)pc->data;
613:   PetscInt       i;

616:   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
617:   if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

619:   if (!pc->setupcalled) {
620:     if (is) {
621:       for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is[i]);}
622:     }
623:     if (is_local) {
624:       for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is_local[i]);}
625:     }
626:     PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);

628:     osm->n_local_true = n;
629:     osm->is           = 0;
630:     osm->is_local     = 0;
631:     if (is) {
632:       PetscMalloc1(n,&osm->is);
633:       for (i=0; i<n; i++) osm->is[i] = is[i];
634:       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
635:       osm->overlap = -1;
636:     }
637:     if (is_local) {
638:       PetscMalloc1(n,&osm->is_local);
639:       for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
640:       if (!is) {
641:         PetscMalloc1(osm->n_local_true,&osm->is);
642:         for (i=0; i<osm->n_local_true; i++) {
643:           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
644:             ISDuplicate(osm->is_local[i],&osm->is[i]);
645:             ISCopy(osm->is_local[i],osm->is[i]);
646:           } else {
647:             PetscObjectReference((PetscObject)osm->is_local[i]);
648:             osm->is[i] = osm->is_local[i];
649:           }
650:         }
651:       }
652:     }
653:   }
654:   return(0);
655: }

659: static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
660: {
661:   PC_ASM         *osm = (PC_ASM*)pc->data;
663:   PetscMPIInt    rank,size;
664:   PetscInt       n;

667:   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
668:   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");

670:   /*
671:      Split the subdomains equally among all processors
672:   */
673:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
674:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
675:   n    = N/size + ((N % size) > rank);
676:   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
677:   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
678:   if (!pc->setupcalled) {
679:     PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);

681:     osm->n_local_true = n;
682:     osm->is           = 0;
683:     osm->is_local     = 0;
684:   }
685:   return(0);
686: }

690: static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
691: {
692:   PC_ASM *osm = (PC_ASM*)pc->data;

695:   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
696:   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
697:   if (!pc->setupcalled) osm->overlap = ovl;
698:   return(0);
699: }

703: static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
704: {
705:   PC_ASM *osm = (PC_ASM*)pc->data;

708:   osm->type     = type;
709:   osm->type_set = PETSC_TRUE;
710:   return(0);
711: }

715: static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
716: {
717:   PC_ASM *osm = (PC_ASM*)pc->data;

720:   osm->sort_indices = doSort;
721:   return(0);
722: }

726: static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
727: {
728:   PC_ASM         *osm = (PC_ASM*)pc->data;

732:   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

734:   if (n_local) *n_local = osm->n_local_true;
735:   if (first_local) {
736:     MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
737:     *first_local -= osm->n_local_true;
738:   }
739:   if (ksp) {
740:     /* Assume that local solves are now different; not necessarily
741:        true though!  This flag is used only for PCView_ASM() */
742:     *ksp                   = osm->ksp;
743:     osm->same_local_solves = PETSC_FALSE;
744:   }
745:   return(0);
746: }

750: /*@C
751:     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

753:     Collective on PC

755:     Input Parameters:
756: +   pc - the preconditioner context
757: .   n - the number of subdomains for this processor (default value = 1)
758: .   is - the index set that defines the subdomains for this processor
759:          (or NULL for PETSc to determine subdomains)
760: -   is_local - the index sets that define the local part of the subdomains for this processor
761:          (or NULL to use the default of 1 subdomain per process)

763:     Notes:
764:     The IS numbering is in the parallel, global numbering of the vector for both is and is_local

766:     By default the ASM preconditioner uses 1 block per processor.

768:     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

770:     Level: advanced

772: .keywords: PC, ASM, set, local, subdomains, additive Schwarz

774: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
775:           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
776: @*/
777: PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
778: {

783:   PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
784:   return(0);
785: }
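/* A minimal usage sketch (illustrative, not part of asm.c; `pc` and an
   assembled matrix `A` are assumed): two non-overlapping blocks on this
   process, split by rows in the global numbering as the notes above
   require, leaving any overlap to PCASMSetOverlap(). */
    PetscInt rstart,rend,nloc;
    IS       is[2];

    MatGetOwnershipRange(A,&rstart,&rend);
    nloc = rend - rstart;
    ISCreateStride(PETSC_COMM_SELF,nloc/2,rstart,1,&is[0]);
    ISCreateStride(PETSC_COMM_SELF,nloc - nloc/2,rstart + nloc/2,1,&is[1]);
    PCASMSetLocalSubdomains(pc,2,is,NULL);
    ISDestroy(&is[0]);  /* PCASM has taken its own references */
    ISDestroy(&is[1]);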

789: /*@C
790:     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
791:     additive Schwarz preconditioner.  Either all or no processors in the
792:     PC communicator must call this routine, with the same index sets.

794:     Collective on PC

796:     Input Parameters:
797: +   pc - the preconditioner context
798: .   N  - the number of subdomains for all processors
799: .   is - the index sets that define the subdomains for all processors
 800:          (or NULL to ask PETSc to come up with subdomains)
801: -   is_local - the index sets that define the local part of the subdomains for this processor
802:          (or NULL to use the default of 1 subdomain per process)

804:     Options Database Key:
805:     To set the total number of subdomain blocks rather than specify the
806:     index sets, use the option
807: .    -pc_asm_blocks <blks> - Sets total blocks

809:     Notes:
810:     Currently you cannot use this to set the actual subdomains with the argument is.

812:     By default the ASM preconditioner uses 1 block per processor.

814:     These index sets cannot be destroyed until after completion of the
815:     linear solves for which the ASM preconditioner is being used.

817:     Use PCASMSetLocalSubdomains() to set local subdomains.

819:     The IS numbering is in the parallel, global numbering of the vector for both is and is_local

821:     Level: advanced

823: .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

825: .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
826:           PCASMCreateSubdomains2D()
827: @*/
828: PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
829: {

834:   PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
835:   return(0);
836: }
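/* Options-database equivalent (illustrative): eight subdomain blocks split
   evenly across four processes, as done by the routine above.

       mpiexec -n 4 ./app -pc_type asm -pc_asm_blocks 8
*/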

840: /*@
841:     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
842:     additive Schwarz preconditioner.  Either all or no processors in the
843:     PC communicator must call this routine.

845:     Logically Collective on PC

847:     Input Parameters:
848: +   pc  - the preconditioner context
849: -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

851:     Options Database Key:
852: .   -pc_asm_overlap <ovl> - Sets overlap

854:     Notes:
855:     By default the ASM preconditioner uses 1 block per processor.  To use
 856:     multiple blocks per processor, see PCASMSetTotalSubdomains() and
857:     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

859:     The overlap defaults to 1, so if one desires that no additional
860:     overlap be computed beyond what may have been set with a call to
861:     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
 862:     must be set to 0.  In particular, if one does not explicitly set
 863:     the subdomains in an application code, then all overlap would be computed
864:     internally by PETSc, and using an overlap of 0 would result in an ASM
865:     variant that is equivalent to the block Jacobi preconditioner.

867:     Note that one can define initial index sets with any overlap via
868:     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine
869:     PCASMSetOverlap() merely allows PETSc to extend that overlap further
870:     if desired.

872:     Level: intermediate

874: .keywords: PC, ASM, set, overlap

876: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
877:           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains()
878: @*/
879: PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
880: {

886:   PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
887:   return(0);
888: }
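/* Usage sketch (illustrative; `pc` is assumed): with PETSc-generated
   subdomains, an overlap of 0 reduces ASM to block Jacobi, as the manual
   page above notes. */
    PCSetType(pc,PCASM);
    PCASMSetOverlap(pc,0);   /* or on the command line: -pc_asm_overlap 0 */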

892: /*@
893:     PCASMSetType - Sets the type of restriction and interpolation used
894:     for local problems in the additive Schwarz method.

896:     Logically Collective on PC

898:     Input Parameters:
899: +   pc  - the preconditioner context
900: -   type - variant of ASM, one of
901: .vb
902:       PC_ASM_BASIC       - full interpolation and restriction
903:       PC_ASM_RESTRICT    - full restriction, local processor interpolation
904:       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
905:       PC_ASM_NONE        - local processor restriction and interpolation
906: .ve

908:     Options Database Key:
909: .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

911:     Level: intermediate

913: .keywords: PC, ASM, set, type

915: .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
916:           PCASMCreateSubdomains2D()
917: @*/
918: PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
919: {

925:   PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
926:   return(0);
927: }

931: /*@
932:     PCASMSetSortIndices - Determines whether subdomain indices are sorted.

934:     Logically Collective on PC

936:     Input Parameters:
937: +   pc  - the preconditioner context
938: -   doSort - sort the subdomain indices

940:     Level: intermediate

942: .keywords: PC, ASM, set, type

944: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
945:           PCASMCreateSubdomains2D()
946: @*/
947: PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
948: {

954:   PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
955:   return(0);
956: }

960: /*@C
961:    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
962:    this processor.

964:    Collective on PC iff first_local is requested

966:    Input Parameter:
967: .  pc - the preconditioner context

969:    Output Parameters:
970: +  n_local - the number of blocks on this processor or NULL
971: .  first_local - the global number of the first block on this processor or NULL,
972:                  all processors must request or all must pass NULL
973: -  ksp - the array of KSP contexts

975:    Note:
 976:    After PCASMGetSubKSP() the array of KSP contexts must not be freed.

978:    Currently for some matrix implementations only 1 block per processor
979:    is supported.

981:    You must call KSPSetUp() before calling PCASMGetSubKSP().

983:    Fortran note:
984:    The output argument 'ksp' must be an array of sufficient length or NULL_OBJECT. The latter can be used to learn the necessary length.

986:    Level: advanced

988: .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

990: .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
991:           PCASMCreateSubdomains2D(),
992: @*/
993: PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
994: {

999:   PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
1000:   return(0);
1001: }
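/* Typical usage sketch (illustrative; `ksp` is the outer solver and `pc`
   its PCASM preconditioner): customize the solver on every local block
   after the outer setup, as the manual page above requires. */
    PetscInt nlocal,first,i;
    KSP      *subksp;
    PC       subpc;

    KSPSetUp(ksp);
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);    /* e.g. ILU(0) on each block */
    }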

1003: /* -------------------------------------------------------------------------------------*/
1004: /*MC
1005:    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1006:            its own KSP object.

1008:    Options Database Keys:
1009: +  -pc_asm_blocks <blks> - Sets total blocks
1010: .  -pc_asm_overlap <ovl> - Sets overlap
1011: -  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

1013:      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1014:       will get different convergence rates due to the default option of -pc_asm_type restrict. Use
1015:       -pc_asm_type basic to use the standard ASM.

1017:    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1018:      than one processor. Defaults to one block per processor.

1020:      To set options on the solvers for each block append -sub_ to all the KSP, and PC
1021:         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

1023:      To set the options on the solvers separate for each block call PCASMGetSubKSP()
1024:          and set the options directly on the resulting KSP object (you can access its PC
1025:          with KSPGetPC())


1028:    Level: beginner

1030:    Concepts: additive Schwarz method

1032:     References:
1033:     An additive variant of the Schwarz alternating method for the case of many subregions
1034:     M Dryja, OB Widlund - Courant Institute, New York University Technical report

1036:     Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1037:     Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X.

1039: .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1040:            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(),
1041:            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType()

1043: M*/
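/* A minimal end-to-end sketch (illustrative, not part of asm.c; an
   assembled matrix `A` and vectors `b`, `x` are assumed): */
    KSP ksp;
    PC  pc;

    KSPCreate(PETSC_COMM_WORLD,&ksp);
    KSPSetOperators(ksp,A,A);
    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCASM);
    PCASMSetOverlap(pc,2);       /* two layers of overlap */
    KSPSetFromOptions(ksp);      /* honors the -pc_asm_* options above */
    KSPSolve(ksp,b,x);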

1047: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1048: {
1050:   PC_ASM         *osm;

1053:   PetscNewLog(pc,&osm);

1055:   osm->n                 = PETSC_DECIDE;
1056:   osm->n_local           = 0;
1057:   osm->n_local_true      = PETSC_DECIDE;
1058:   osm->overlap           = 1;
1059:   osm->ksp               = 0;
1060:   osm->restriction       = 0;
1061:   osm->localization      = 0;
1062:   osm->prolongation      = 0;
1063:   osm->x                 = 0;
1064:   osm->y                 = 0;
1065:   osm->y_local           = 0;
1066:   osm->is                = 0;
1067:   osm->is_local          = 0;
1068:   osm->mat               = 0;
1069:   osm->pmat              = 0;
1070:   osm->type              = PC_ASM_RESTRICT;
1071:   osm->same_local_solves = PETSC_TRUE;
1072:   osm->sort_indices      = PETSC_TRUE;
1073:   osm->dm_subdomains     = PETSC_FALSE;

1075:   pc->data                 = (void*)osm;
1076:   pc->ops->apply           = PCApply_ASM;
1077:   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1078:   pc->ops->setup           = PCSetUp_ASM;
1079:   pc->ops->reset           = PCReset_ASM;
1080:   pc->ops->destroy         = PCDestroy_ASM;
1081:   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1082:   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1083:   pc->ops->view            = PCView_ASM;
1084:   pc->ops->applyrichardson = 0;

1086:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);
1087:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);
1088:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);
1089:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);
1090:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);
1091:   PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);
1092:   return(0);
1093: }

1097: /*@C
1098:    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1099:    preconditioner for any problem on a general grid.

1101:    Collective

1103:    Input Parameters:
1104: +  A - The global matrix operator
1105: -  n - the number of local blocks

1107:    Output Parameters:
1108: .  outis - the array of index sets defining the subdomains

1110:    Level: advanced

1112:    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1113:     from these if you use PCASMSetLocalSubdomains()

1115:     In the Fortran version you must provide the array outis[], already allocated with length n.

1117: .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

1119: .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1120: @*/
1121: PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1122: {
1123:   MatPartitioning mpart;
1124:   const char      *prefix;
1125:   PetscErrorCode  (*f)(Mat,Mat*);
1126:   PetscMPIInt     size;
1127:   PetscInt        i,j,rstart,rend,bs;
1128:   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1129:   Mat             Ad     = NULL, adj;
1130:   IS              ispart,isnumb,*is;
1131:   PetscErrorCode  ierr;

1136:   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

1138:   /* Get prefix, row distribution, and block size */
1139:   MatGetOptionsPrefix(A,&prefix);
1140:   MatGetOwnershipRange(A,&rstart,&rend);
1141:   MatGetBlockSize(A,&bs);
1142:   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

1144:   /* Get diagonal block from matrix if possible */
1145:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
1146:   PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",&f);
1147:   if (f) {
1148:     MatGetDiagonalBlock(A,&Ad);
1149:   } else if (size == 1) {
1150:     Ad = A;
1151:   }
1152:   if (Ad) {
1153:     PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1154:     if (!isbaij) {PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);}
1155:   }
1156:   if (Ad && n > 1) {
1157:     PetscBool match,done;
1158:     /* Try to setup a good matrix partitioning if available */
1159:     MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1160:     PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1161:     MatPartitioningSetFromOptions(mpart);
1162:     PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1163:     if (!match) {
1164:       PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1165:     }
1166:     if (!match) { /* assume a "good" partitioner is available */
1167:       PetscInt       na;
1168:       const PetscInt *ia,*ja;
1169:       MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1170:       if (done) {
1171:         /* Build adjacency matrix by hand. Unfortunately a call to
1172:            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1173:            remove the block-aij structure and we cannot expect
1174:            MatPartitioning to split vertices as we need */
1175:         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1176:         const PetscInt *row;
1177:         nnz = 0;
1178:         for (i=0; i<na; i++) { /* count number of nonzeros */
1179:           len = ia[i+1] - ia[i];
1180:           row = ja + ia[i];
1181:           for (j=0; j<len; j++) {
1182:             if (row[j] == i) { /* don't count diagonal */
1183:               len--; break;
1184:             }
1185:           }
1186:           nnz += len;
1187:         }
1188:         PetscMalloc1((na+1),&iia);
1189:         PetscMalloc1((nnz),&jja);
1190:         nnz    = 0;
1191:         iia[0] = 0;
1192:         for (i=0; i<na; i++) { /* fill adjacency */
1193:           cnt = 0;
1194:           len = ia[i+1] - ia[i];
1195:           row = ja + ia[i];
1196:           for (j=0; j<len; j++) {
1197:             if (row[j] != i) { /* if not diagonal */
1198:               jja[nnz+cnt++] = row[j];
1199:             }
1200:           }
1201:           nnz     += cnt;
1202:           iia[i+1] = nnz;
1203:         }
1204:         /* Partitioning of the adjacency matrix */
1205:         MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1206:         MatPartitioningSetAdjacency(mpart,adj);
1207:         MatPartitioningSetNParts(mpart,n);
1208:         MatPartitioningApply(mpart,&ispart);
1209:         ISPartitioningToNumbering(ispart,&isnumb);
1210:         MatDestroy(&adj);
1211:         foundpart = PETSC_TRUE;
1212:       }
1213:       MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1214:     }
1215:     MatPartitioningDestroy(&mpart);
1216:   }

1218:   PetscMalloc1(n,&is);
1219:   *outis = is;

1221:   if (!foundpart) {

1223:     /* Partitioning by contiguous chunks of rows */

1225:     PetscInt mbs   = (rend-rstart)/bs;
1226:     PetscInt start = rstart;
1227:     for (i=0; i<n; i++) {
1228:       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1229:       ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1230:       start += count;
1231:     }

1233:   } else {

1235:     /* Partitioning by adjacency of diagonal block  */

1237:     const PetscInt *numbering;
1238:     PetscInt       *count,nidx,*indices,*newidx,start=0;
1239:     /* Get node count in each partition */
1240:     PetscMalloc1(n,&count);
1241:     ISPartitioningCount(ispart,n,count);
1242:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1243:       for (i=0; i<n; i++) count[i] *= bs;
1244:     }
1245:     /* Build indices from node numbering */
1246:     ISGetLocalSize(isnumb,&nidx);
1247:     PetscMalloc1(nidx,&indices);
1248:     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1249:     ISGetIndices(isnumb,&numbering);
1250:     PetscSortIntWithPermutation(nidx,numbering,indices);
1251:     ISRestoreIndices(isnumb,&numbering);
1252:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1253:       PetscMalloc1(nidx*bs,&newidx);
1254:       for (i=0; i<nidx; i++) {
1255:         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1256:       }
1257:       PetscFree(indices);
1258:       nidx   *= bs;
1259:       indices = newidx;
1260:     }
1261:     /* Shift to get global indices */
1262:     for (i=0; i<nidx; i++) indices[i] += rstart;

1264:     /* Build the index sets for each block */
1265:     for (i=0; i<n; i++) {
1266:       ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1267:       ISSort(is[i]);
1268:       start += count[i];
1269:     }

1271:     PetscFree(count);
1272:     PetscFree(indices);
1273:     ISDestroy(&isnumb);
1274:     ISDestroy(&ispart);

1276:   }
1277:   return(0);
1278: }
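/* Usage sketch (illustrative; `A` and `pc` are assumed): let the
   partitioner build four non-overlapping local blocks, hand them to PCASM,
   then drop the caller's references. */
    IS *is;

    PCASMCreateSubdomains(A,4,&is);
    PCASMSetLocalSubdomains(pc,4,is,NULL);
    PCASMDestroySubdomains(4,is,NULL);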

1282: /*@C
1283:    PCASMDestroySubdomains - Destroys the index sets created with
1284:    PCASMCreateSubdomains(). Should be called after setting subdomains
1285:    with PCASMSetLocalSubdomains().

1287:    Collective

1289:    Input Parameters:
1290: +  n - the number of index sets
1291: .  is - the array of index sets
1292: -  is_local - the array of local index sets, can be NULL

1294:    Level: advanced

1296: .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

1298: .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1299: @*/
1300: PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1301: {
1302:   PetscInt       i;

1306:   if (n <= 0) return(0);
1307:   if (is) {
1309:     for (i=0; i<n; i++) { ISDestroy(&is[i]); }
1310:     PetscFree(is);
1311:   }
1312:   if (is_local) {
1314:     for (i=0; i<n; i++) { ISDestroy(&is_local[i]); }
1315:     PetscFree(is_local);
1316:   }
1317:   return(0);
1318: }

1322: /*@
1323:    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1324:    preconditioner for a two-dimensional problem on a regular grid.

1326:    Not Collective

1328:    Input Parameters:
1329: +  m, n - the number of mesh points in the x and y directions
1330: .  M, N - the number of subdomains in the x and y directions
1331: .  dof - degrees of freedom per node
1332: -  overlap - overlap in mesh lines

1334:    Output Parameters:
1335: +  Nsub - the number of subdomains created
1336: .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1337: -  is_local - array of index sets defining non-overlapping subdomains

1339:    Note:
1340:    Presently PCASMCreateSubdomains2D() is valid only for sequential
1341:    preconditioners.  More general related routines are
1342:    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().

1344:    Level: advanced

1346: .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

1348: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1349:           PCASMSetOverlap()
1350: @*/
1351: PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1352: {
1353:   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1355:   PetscInt       nidx,*idx,loc,ii,jj,count;

1358:   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Degrees of freedom per node must be 1");

1360:   *Nsub     = N*M;
1361:   PetscMalloc1((*Nsub),is);
1362:   PetscMalloc1((*Nsub),is_local);
1363:   ystart    = 0;
1364:   loc_outer = 0;
1365:   for (i=0; i<N; i++) {
1366:     height = n/N + ((n % N) > i); /* height of subdomain */
1367:     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1368:     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1369:     yright = ystart + height + overlap; if (yright > n) yright = n;
1370:     xstart = 0;
1371:     for (j=0; j<M; j++) {
1372:       width = m/M + ((m % M) > j); /* width of subdomain */
1373:       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1374:       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1375:       xright = xstart + width + overlap; if (xright > m) xright = m;
1376:       nidx   = (xright - xleft)*(yright - yleft);
1377:       PetscMalloc1(nidx,&idx);
1378:       loc    = 0;
1379:       for (ii=yleft; ii<yright; ii++) {
1380:         count = m*ii + xleft;
1381:         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1382:       }
1383:       ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);
1384:       if (overlap == 0) {
1385:         PetscObjectReference((PetscObject)(*is)[loc_outer]);

1387:         (*is_local)[loc_outer] = (*is)[loc_outer];
1388:       } else {
1389:         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1390:           for (jj=xstart; jj<xstart+width; jj++) {
1391:             idx[loc++] = m*ii + jj;
1392:           }
1393:         }
1394:         ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);
1395:       }
1396:       PetscFree(idx);
1397:       xstart += width;
1398:       loc_outer++;
1399:     }
1400:     ystart += height;
1401:   }
1402:   for (i=0; i<*Nsub; i++) { ISSort((*is)[i]); }
1403:   return(0);
1404: }
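/* Usage sketch (illustrative, sequential only per the note above; `pc` is
   assumed): a 64x64 grid cut into 4x4 subdomains with one mesh line of
   overlap. */
    PetscInt Nsub;
    IS       *is,*is_local;

    PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);
    PCASMSetLocalSubdomains(pc,Nsub,is,is_local);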

1408: /*@C
1409:     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1410:     only) for the additive Schwarz preconditioner.

1412:     Not Collective

1414:     Input Parameter:
1415: .   pc - the preconditioner context

1417:     Output Parameters:
1418: +   n - the number of subdomains for this processor (default value = 1)
1419: .   is - the index sets that define the subdomains for this processor
1420: -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)


1423:     Notes:
1424:     The IS numbering is in the parallel, global numbering of the vector.

1426:     Level: advanced

1428: .keywords: PC, ASM, set, local, subdomains, additive Schwarz

1430: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1431:           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1432: @*/
1433: PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1434: {
1435:   PC_ASM         *osm;
1437:   PetscBool      match;

1443:   PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1444:   if (!match) {
1445:     if (n) *n = 0;
1446:     if (is) *is = NULL;
1447:   } else {
1448:     osm = (PC_ASM*)pc->data;
1449:     if (n) *n = osm->n_local_true;
1450:     if (is) *is = osm->is;
1451:     if (is_local) *is_local = osm->is_local;
1452:   }
1453:   return(0);
1454: }

1458: /*@C
1459:     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1460:     only) for the additive Schwarz preconditioner.

1462:     Not Collective

1464:     Input Parameter:
1465: .   pc - the preconditioner context

1467:     Output Parameters:
1468: +   n - the number of matrices for this processor (default value = 1)
1469: -   mat - the matrices


1472:     Level: advanced

1474:     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()

1476:            Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner.

1478: .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi

1480: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1481:           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices()
1482: @*/
1483: PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1484: {
1485:   PC_ASM         *osm;
1487:   PetscBool      match;

1493:   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1494:   PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1495:   if (!match) {
1496:     if (n) *n = 0;
1497:     if (mat) *mat = NULL;
1498:   } else {
1499:     osm = (PC_ASM*)pc->data;
1500:     if (n) *n = osm->n_local_true;
1501:     if (mat) *mat = osm->pmat;
1502:   }
1503:   return(0);
1504: }
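/* Usage sketch (illustrative; `ksp` and `pc` are assumed): inspect the
   subdomain matrices once setup has created them. */
    PetscInt n;
    Mat      *submat;

    KSPSetUp(ksp);
    PCASMGetLocalSubmatrices(pc,&n,&submat);
    if (n) {MatView(submat[0],PETSC_VIEWER_STDOUT_SELF);}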

1508: /*@
1509:     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1510:     Logically Collective

1512:     Input Parameter:
1513: +   pc  - the preconditioner
1514: -   flg - boolean indicating whether to use subdomains defined by the DM

1516:     Options Database Key:
1517: .   -pc_asm_dm_subdomains <bool> - use DMCreateDomainDecomposition() to define subdomains

1519:     Level: intermediate

1521:     Notes:
1522:     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1523:     so setting either of the first two effectively turns the latter off.

1525: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz

1527: .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1528:           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1529: @*/
1530: PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1531: {
1532:   PC_ASM         *osm = (PC_ASM*)pc->data;
1534:   PetscBool      match;

1539:   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1540:   PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1541:   if (match) {
1542:     osm->dm_subdomains = flg;
1543:   }
1544:   return(0);
1545: }

1549: /*@
1550:     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.
1551:     Not Collective

1553:     Input Parameter:
1554: .   pc  - the preconditioner

1556:     Output Parameter:
1557: .   flg - boolean indicating whether to use subdomains defined by the DM

1559:     Level: intermediate

1561: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz

1563: .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap()
1564:           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1565: @*/
1566: PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1567: {
1568:   PC_ASM         *osm = (PC_ASM*)pc->data;
1570:   PetscBool      match;

1575:   PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1576:   if (match) {
1577:     if (flg) *flg = osm->dm_subdomains;
1578:   }
1579:   return(0);
1580: }