Actual source code: asm.c

petsc-master 2017-08-22

  2: /*
  3:   This file defines an additive Schwarz preconditioner for any Mat implementation.

  5:   Note that each processor may have any number of subdomains. But in order to
  6:   deal easily with the VecScatter(), we treat each processor as if it has the
  7:   same number of subdomains.

  9:        n - total number of true subdomains on all processors
 10:        n_local_true - actual number of subdomains on this processor
 11:        n_local = maximum over all processors of n_local_true
 12: */
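/*
   Illustrative example of this bookkeeping (added for clarity; not part of the
   original source): suppose rank 0 owns 3 subdomains and rank 1 owns 1.  Then

       n_local_true = 3 on rank 0, 1 on rank 1    (actual counts per process)
       n            = 4                           (sum over all processes)
       n_local      = 3                           (maximum over all processes)

   Rank 1 is padded with two empty (length-0) subdomains so that every process
   participates in the same number of collective VecScatter() setups and applications.
*/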
 13:  #include <petsc/private/pcimpl.h>
 14:  #include <petscdm.h>

 16: typedef struct {
 17:   PetscInt   n, n_local, n_local_true;
 18:   PetscInt   overlap;             /* overlap requested by user */
 19:   KSP        *ksp;                /* linear solvers for each block */
 20:   VecScatter *restriction;        /* mapping from global to subregion */
 21:   VecScatter *localization;       /* mapping from overlapping to non-overlapping subregion; used for restrict version of algorithms */
 22:   VecScatter *prolongation;       /* mapping from subregion to global */
 23:   Vec        *x,*y,*y_local;      /* work vectors */
 24:   IS         *is;                 /* index set that defines each overlapping subdomain */
 25:   IS         *is_local;           /* index set that defines each non-overlapping subdomain, may be NULL */
 26:   Mat        *mat,*pmat;          /* mat is not currently used */
 27:   PCASMType  type;                /* use reduced interpolation, restriction or both */
 28:   PetscBool  type_set;            /* if user set this value (so won't change it for symmetric problems) */
 29:   PetscBool  same_local_solves;   /* flag indicating whether all local solvers are same */
 30:   PetscBool  sort_indices;        /* flag to sort subdomain indices */
 31:   PetscBool  dm_subdomains;       /* whether DM is allowed to define subdomains */
 32:   PCCompositeType loctype;        /* the type of composition for local solves */
 33:   MatType    sub_mat_type;        /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */
 34:   /* For multiplicative solve */
 35:   Mat       *lmats;               /* submatrices for overlapping multiplicative (process) subdomain */
 36:   Vec        lx, ly;              /* work vectors */
 37:   IS         lis;                 /* index set that defines each overlapping multiplicative (process) subdomain */
 38: } PC_ASM;

 40: static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer)
 41: {
 42:   PC_ASM         *osm = (PC_ASM*)pc->data;
 44:   PetscMPIInt    rank;
 45:   PetscInt       i,bsz;
 46:   PetscBool      iascii,isstring;
 47:   PetscViewer    sviewer;

 50:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 51:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
 52:   if (iascii) {
 53:     char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set";
 54:     if (osm->overlap >= 0) {PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);}
 55:     if (osm->n > 0) {PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);}
 56:     PetscViewerASCIIPrintf(viewer,"  %s, %s\n",blocks,overlaps);
 57:     PetscViewerASCIIPrintf(viewer,"  restriction/interpolation type - %s\n",PCASMTypes[osm->type]);
 58:     if (osm->dm_subdomains) {PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: using DM to define subdomains\n");}
 59:     if (osm->loctype != PC_COMPOSITE_ADDITIVE) {PetscViewerASCIIPrintf(viewer,"  Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);}
 60:     MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
 61:     if (osm->same_local_solves) {
 62:       if (osm->ksp) {
 63:         PetscViewerASCIIPrintf(viewer,"  Local solve is same for all blocks, in the following KSP and PC objects:\n");
 64:         PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
 65:         if (!rank) {
 66:           PetscViewerASCIIPushTab(viewer);
 67:           KSPView(osm->ksp[0],sviewer);
 68:           PetscViewerASCIIPopTab(viewer);
 69:         }
 70:         PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
 71:       }
 72:     } else {
 73:       PetscViewerASCIIPushSynchronized(viewer);
 74:       PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);
 75:       PetscViewerFlush(viewer);
 76:       PetscViewerASCIIPrintf(viewer,"  Local solve info for each block is in the following KSP and PC objects:\n");
 77:       PetscViewerASCIIPushTab(viewer);
 78:       PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");
 79:       PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
 80:       for (i=0; i<osm->n_local_true; i++) {
 81:         ISGetLocalSize(osm->is[i],&bsz);
 82:         PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);
 83:         KSPView(osm->ksp[i],sviewer);
 84:         PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");
 85:       }
 86:       PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
 87:       PetscViewerASCIIPopTab(viewer);
 88:       PetscViewerFlush(viewer);
 89:       PetscViewerASCIIPopSynchronized(viewer);
 90:     }
 91:   } else if (isstring) {
 92:     PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);
 93:     PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
 94:     if (osm->ksp) {KSPView(osm->ksp[0],sviewer);}
 95:     PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
 96:   }
 97:   return(0);
 98: }

100: static PetscErrorCode PCASMPrintSubdomains(PC pc)
101: {
102:   PC_ASM         *osm = (PC_ASM*)pc->data;
103:   const char     *prefix;
104:   char           fname[PETSC_MAX_PATH_LEN+1];
105:   PetscViewer    viewer, sviewer;
106:   char           *s;
107:   PetscInt       i,j,nidx;
108:   const PetscInt *idx;
109:   PetscMPIInt    rank, size;

113:   MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
114:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
115:   PCGetOptionsPrefix(pc,&prefix);
116:   PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);
117:   if (fname[0] == 0) { PetscStrcpy(fname,"stdout"); }
118:   PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);
119:   for (i=0; i<osm->n_local; i++) {
120:     if (i < osm->n_local_true) {
121:       ISGetLocalSize(osm->is[i],&nidx);
122:       ISGetIndices(osm->is[i],&idx);
123:       /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
124:       PetscMalloc1(16*(nidx+1)+512, &s);
125:       PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);
126:       PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);
127:       for (j=0; j<nidx; j++) {
128:         PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
129:       }
130:       ISRestoreIndices(osm->is[i],&idx);
131:       PetscViewerStringSPrintf(sviewer,"\n");
132:       PetscViewerDestroy(&sviewer);
133:       PetscViewerASCIIPushSynchronized(viewer);
134:       PetscViewerASCIISynchronizedPrintf(viewer, s);
135:       PetscViewerFlush(viewer);
136:       PetscViewerASCIIPopSynchronized(viewer);
137:       PetscFree(s);
138:       if (osm->is_local) {
139:         /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/
140:         PetscMalloc1(16*(nidx+1)+512, &s);
141:         PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);
142:         PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);
143:         ISGetLocalSize(osm->is_local[i],&nidx);
144:         ISGetIndices(osm->is_local[i],&idx);
145:         for (j=0; j<nidx; j++) {
146:           PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);
147:         }
148:         ISRestoreIndices(osm->is_local[i],&idx);
149:         PetscViewerStringSPrintf(sviewer,"\n");
150:         PetscViewerDestroy(&sviewer);
151:         PetscViewerASCIIPushSynchronized(viewer);
152:         PetscViewerASCIISynchronizedPrintf(viewer, s);
153:         PetscViewerFlush(viewer);
154:         PetscViewerASCIIPopSynchronized(viewer);
155:         PetscFree(s);
156:       }
157:     } else {
158:       /* Participate in collective viewer calls. */
159:       PetscViewerASCIIPushSynchronized(viewer);
160:       PetscViewerFlush(viewer);
161:       PetscViewerASCIIPopSynchronized(viewer);
162:       /* Assume either all ranks have is_local or none do. */
163:       if (osm->is_local) {
164:         PetscViewerASCIIPushSynchronized(viewer);
165:         PetscViewerFlush(viewer);
166:         PetscViewerASCIIPopSynchronized(viewer);
167:       }
168:     }
169:   }
170:   PetscViewerFlush(viewer);
171:   PetscViewerDestroy(&viewer);
172:   return(0);
173: }
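/*
   Minimal sketch of the printing idiom used above (added for clarity; not part
   of the original source): each process formats its data into a private string
   viewer, and the result is emitted in rank order through the synchronized
   ASCII calls on the parallel viewer.  Here "viewer", "idx" and "nidx" are
   assumed to exist as in PCASMPrintSubdomains(); error checking is omitted as
   elsewhere in this listing.

     char        *s;
     PetscViewer  sviewer;
     PetscInt     j;

     PetscMalloc1(16*(nidx+1)+512,&s);
     PetscViewerStringOpen(PETSC_COMM_SELF,s,16*(nidx+1)+512,&sviewer);
     for (j=0; j<nidx; j++) {PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);}
     PetscViewerDestroy(&sviewer);
     PetscViewerASCIIPushSynchronized(viewer);
     PetscViewerASCIISynchronizedPrintf(viewer,"%s\n",s);
     PetscViewerFlush(viewer);
     PetscViewerASCIIPopSynchronized(viewer);
     PetscFree(s);
*/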

175: static PetscErrorCode PCSetUp_ASM(PC pc)
176: {
177:   PC_ASM         *osm = (PC_ASM*)pc->data;
179:   PetscBool      symset,flg;
180:   PetscInt       i,m,m_local;
181:   MatReuse       scall = MAT_REUSE_MATRIX;
182:   IS             isl;
183:   KSP            ksp;
184:   PC             subpc;
185:   const char     *prefix,*pprefix;
186:   Vec            vec;
187:   DM             *domain_dm = NULL;

190:   if (!pc->setupcalled) {

192:     if (!osm->type_set) {
193:       MatIsSymmetricKnown(pc->pmat,&symset,&flg);
194:       if (symset && flg) osm->type = PC_ASM_BASIC;
195:     }

197:     /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */
198:     if (osm->n_local_true == PETSC_DECIDE) {
199:       /* no subdomains given */
200:       /* try pc->dm first, if allowed */
201:       if (osm->dm_subdomains && pc->dm) {
202:         PetscInt  num_domains, d;
203:         char      **domain_names;
204:         IS        *inner_domain_is, *outer_domain_is;
205:         DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);
206:         osm->overlap = -1; /* We do not want to increase the overlap of the IS. 
207:                               A future improvement of this code might allow one to use 
208:                               DM-defined subdomains and also increase the overlap, 
209:                               but that is not currently supported */
210:         if (num_domains) {
211:           PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);
212:         }
213:         for (d = 0; d < num_domains; ++d) {
214:           if (domain_names)    {PetscFree(domain_names[d]);}
215:           if (inner_domain_is) {ISDestroy(&inner_domain_is[d]);}
216:           if (outer_domain_is) {ISDestroy(&outer_domain_is[d]);}
217:         }
218:         PetscFree(domain_names);
219:         PetscFree(inner_domain_is);
220:         PetscFree(outer_domain_is);
221:       }
222:       if (osm->n_local_true == PETSC_DECIDE) {
223:         /* still no subdomains; use one subdomain per processor */
224:         osm->n_local_true = 1;
225:       }
226:     }
227:     { /* determine the global and max number of subdomains */
228:       struct {PetscInt max,sum;} inwork,outwork;
229:       PetscMPIInt size;

231:       inwork.max   = osm->n_local_true;
232:       inwork.sum   = osm->n_local_true;
233:       MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));
234:       osm->n_local = outwork.max;
235:       osm->n       = outwork.sum;

237:       MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
238:       if (outwork.max == 1 && outwork.sum == size) {
 239:       /* osm->n_local_true = 1 on all processes; setting this option may enable use of an optimized MatCreateSubMatrices() implementation */
240:         MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);
241:       }
242:     }
243:     if (!osm->is) { /* create the index sets */
244:       PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);
245:     }
246:     if (osm->n_local_true > 1 && !osm->is_local) {
247:       PetscMalloc1(osm->n_local_true,&osm->is_local);
248:       for (i=0; i<osm->n_local_true; i++) {
249:         if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
250:           ISDuplicate(osm->is[i],&osm->is_local[i]);
251:           ISCopy(osm->is[i],osm->is_local[i]);
252:         } else {
253:           PetscObjectReference((PetscObject)osm->is[i]);
254:           osm->is_local[i] = osm->is[i];
255:         }
256:       }
257:     }
258:     PCGetOptionsPrefix(pc,&prefix);
259:     flg  = PETSC_FALSE;
260:     PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);
261:     if (flg) { PCASMPrintSubdomains(pc); }

263:     if (osm->overlap > 0) {
264:       /* Extend the "overlapping" regions by a number of steps */
265:       MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);
266:     }
267:     if (osm->sort_indices) {
268:       for (i=0; i<osm->n_local_true; i++) {
269:         ISSort(osm->is[i]);
270:         if (osm->is_local) {
271:           ISSort(osm->is_local[i]);
272:         }
273:       }
274:     }

276:     if (!osm->ksp) {
277:       /* Create the local solvers */
278:       PetscMalloc1(osm->n_local_true,&osm->ksp);
279:       if (domain_dm) {
280:         PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");
281:       }
282:       for (i=0; i<osm->n_local_true; i++) {
283:         KSPCreate(PETSC_COMM_SELF,&ksp);
284:         KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);
285:         PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);
286:         PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);
287:         KSPSetType(ksp,KSPPREONLY);
288:         KSPGetPC(ksp,&subpc);
289:         PCGetOptionsPrefix(pc,&prefix);
290:         KSPSetOptionsPrefix(ksp,prefix);
291:         KSPAppendOptionsPrefix(ksp,"sub_");
292:         if (domain_dm) {
293:           KSPSetDM(ksp, domain_dm[i]);
294:           KSPSetDMActive(ksp, PETSC_FALSE);
295:           DMDestroy(&domain_dm[i]);
296:         }
297:         osm->ksp[i] = ksp;
298:       }
299:       if (domain_dm) {
300:         PetscFree(domain_dm);
301:       }
302:     }
303:     if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
304:       PetscInt m;

306:       ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);
307:       ISSortRemoveDups(osm->lis);
308:       ISGetLocalSize(osm->lis, &m);
309:       VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);
310:       VecDuplicate(osm->lx, &osm->ly);
311:     }
312:     scall = MAT_INITIAL_MATRIX;
313:   } else {
314:     /*
315:        Destroy the blocks from the previous iteration
316:     */
317:     if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
318:       MatDestroyMatrices(osm->n_local_true,&osm->pmat);
319:       scall = MAT_INITIAL_MATRIX;
320:     }
321:   }

323:   /*
324:      Extract out the submatrices
325:   */
326:   MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);
327:   if (scall == MAT_INITIAL_MATRIX) {
328:     PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);
329:     for (i=0; i<osm->n_local_true; i++) {
330:       PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);
331:       PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);
332:     }
333:   }

 335:   /* Convert the types of the submatrices (if need be) */
336:   if (osm->sub_mat_type) {
337:     for (i=0; i<osm->n_local_true; i++) {
338:       MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));
339:     }
340:   }

 342:   if (!pc->setupcalled) {
343:     /* Create the local work vectors (from the local matrices) and scatter contexts */
344:     MatCreateVecs(pc->pmat,&vec,0);
345:     PetscMalloc1(osm->n_local,&osm->restriction);
346:     if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()");
347:     if (osm->is_local && osm->type == PC_ASM_RESTRICT) {PetscMalloc1(osm->n_local,&osm->localization);}
348:     PetscMalloc1(osm->n_local,&osm->prolongation);
349:     PetscMalloc1(osm->n_local,&osm->x);
350:     PetscMalloc1(osm->n_local,&osm->y);
351:     PetscMalloc1(osm->n_local,&osm->y_local);
352:     for (i=0; i<osm->n_local_true; ++i) {
353:       ISGetLocalSize(osm->is[i],&m);
354:       MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);
355:       ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);
356:       VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);
357:       ISDestroy(&isl);
358:       VecDuplicate(osm->x[i],&osm->y[i]);
359:       if (osm->localization) {
360:         ISLocalToGlobalMapping ltog;
361:         IS                     isll;
362:         const PetscInt         *idx_local;
363:         PetscInt               *idx,nout;

365:         ISLocalToGlobalMappingCreateIS(osm->is[i],&ltog);
366:         ISGetLocalSize(osm->is_local[i],&m_local);
367:         ISGetIndices(osm->is_local[i], &idx_local);
368:         PetscMalloc1(m_local,&idx);
369:         ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);
370:         ISLocalToGlobalMappingDestroy(&ltog);
371:         if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is");
372:         ISRestoreIndices(osm->is_local[i], &idx_local);
373:         ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);
374:         ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);
375:         VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);
376:         VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);
377:         ISDestroy(&isll);

379:         VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);
380:         ISDestroy(&isl);
381:       } else {
382:         osm->y_local[i] = osm->y[i];
383:         PetscObjectReference((PetscObject) osm->y[i]);
384:         osm->prolongation[i] = osm->restriction[i];
385:         PetscObjectReference((PetscObject) osm->restriction[i]);
386:       }
387:     }
388:     for (i=osm->n_local_true; i<osm->n_local; i++) {
389:       VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);
390:       VecDuplicate(osm->x[i],&osm->y[i]);
391:       VecDuplicate(osm->x[i],&osm->y_local[i]);
392:       ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);
393:       VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);
394:       if (osm->localization) {
395:         VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);
396:         VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);
397:       } else {
398:         osm->prolongation[i] = osm->restriction[i];
399:         PetscObjectReference((PetscObject) osm->restriction[i]);
400:       }
401:       ISDestroy(&isl);
402:     }
403:     VecDestroy(&vec);
404:   }

406:   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
407:     IS      *cis;
408:     PetscInt c;

410:     PetscMalloc1(osm->n_local_true, &cis);
411:     for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis;
412:     MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);
413:     PetscFree(cis);
414:   }

416:   /* Return control to the user so that the submatrices can be modified (e.g., to apply
417:      different boundary conditions for the submatrices than for the global problem) */
418:   PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);

420:   /*
421:      Loop over subdomains putting them into local ksp
422:   */
423:   for (i=0; i<osm->n_local_true; i++) {
424:     KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);
425:     if (!pc->setupcalled) {
426:       KSPSetFromOptions(osm->ksp[i]);
427:     }
428:   }
429:   return(0);
430: }

432: static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc)
433: {
434:   PC_ASM             *osm = (PC_ASM*)pc->data;
435:   PetscErrorCode     ierr;
436:   PetscInt           i;
437:   KSPConvergedReason reason;

440:   for (i=0; i<osm->n_local_true; i++) {
441:     KSPSetUp(osm->ksp[i]);
442:     KSPGetConvergedReason(osm->ksp[i],&reason);
443:     if (reason == KSP_DIVERGED_PCSETUP_FAILED) {
444:       pc->failedreason = PC_SUBPC_ERROR;
445:     }
446:   }
447:   return(0);
448: }

450: static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y)
451: {
452:   PC_ASM         *osm = (PC_ASM*)pc->data;
454:   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
455:   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

458:   /*
459:      Support for limiting the restriction or interpolation to only local
460:      subdomain values (leaving the other values 0).
461:   */
462:   if (!(osm->type & PC_ASM_RESTRICT)) {
463:     forward = SCATTER_FORWARD_LOCAL;
464:     /* have to zero the work RHS since scatter may leave some slots empty */
465:     for (i=0; i<n_local_true; i++) {
466:       VecZeroEntries(osm->x[i]);
467:     }
468:   }
469:   if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL;

471:   switch (osm->loctype)
472:   {
473:   case PC_COMPOSITE_ADDITIVE:
474:     for (i=0; i<n_local; i++) {
475:       VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
476:     }
477:     VecZeroEntries(y);
478:     /* do the local solves */
479:     for (i=0; i<n_local_true; i++) {
480:       VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
481:       KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);
482:       if (osm->localization) {
483:         VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
484:         VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
485:       }
486:       VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
487:     }
488:     /* handle the rest of the scatters that do not have local solves */
489:     for (i=n_local_true; i<n_local; i++) {
490:       VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
491:       VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
492:     }
493:     for (i=0; i<n_local; i++) {
494:       VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
495:     }
496:     break;
497:   case PC_COMPOSITE_MULTIPLICATIVE:
498:     VecZeroEntries(y);
499:     /* do the local solves */
500:     for (i = 0; i < n_local_true; ++i) {
501:       if (i > 0) {
502:         /* Update rhs */
503:         VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);
504:         VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);
505:       } else {
506:         VecZeroEntries(osm->x[i]);
507:       }
508:       VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);
509:       VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);
510:       KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);
511:       if (osm->localization) {
512:         VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);
513:         VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);
514:       }
515:       VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);
516:       VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);
517:       if (i < n_local_true-1) {
518:         VecSet(osm->ly, 0.0);
519:         VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);
520:         VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);
521:         VecScale(osm->ly, -1.0);
522:         MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);
523:         VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);
524:         VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);
525:       }
526:     }
527:     /* handle the rest of the scatters that do not have local solves */
528:     for (i = n_local_true; i < n_local; ++i) {
529:       VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);
530:       VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);
531:       VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);
532:       VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);
533:     }
534:     break;
535:   default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]);
536:   }
537:   return(0);
538: }
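/*
   Schematic operator form of the apply above (added for clarity; not part of
   the original source).  Let R_i denote the restriction onto the i-th
   overlapping subdomain, R'_i the restriction onto its local (nonoverlapping)
   part, and A_i the extracted subdomain matrix.  For PC_COMPOSITE_ADDITIVE one
   application computes, schematically,

       y = sum_i  P_i^T  A_i^{-1}  Q_i  x

   where the PCASMType selects

       PC_ASM_BASIC:       Q_i = R_i,  P_i = R_i     (full restriction and interpolation)
       PC_ASM_RESTRICT:    Q_i = R_i,  P_i = R'_i    (restricted ASM, the default)
       PC_ASM_INTERPOLATE: Q_i = R'_i, P_i = R_i
       PC_ASM_NONE:        Q_i = R'_i, P_i = R'_i

   For PC_COMPOSITE_MULTIPLICATIVE the subdomain solves are performed in
   sequence, and the right-hand side seen by subdomain i+1 is updated with the
   correction already computed on subdomain i (using osm->lmats and osm->lx/ly).
*/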

540: static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y)
541: {
542:   PC_ASM         *osm = (PC_ASM*)pc->data;
544:   PetscInt       i,n_local = osm->n_local,n_local_true = osm->n_local_true;
545:   ScatterMode    forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE;

548:   /*
549:      Support for limiting the restriction or interpolation to only local
550:      subdomain values (leaving the other values 0).

552:      Note: these are reversed from the PCApply_ASM() because we are applying the
553:      transpose of the three terms
554:   */
555:   if (!(osm->type & PC_ASM_INTERPOLATE)) {
556:     forward = SCATTER_FORWARD_LOCAL;
557:     /* have to zero the work RHS since scatter may leave some slots empty */
558:     for (i=0; i<n_local_true; i++) {
559:       VecZeroEntries(osm->x[i]);
560:     }
561:   }
562:   if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL;

564:   for (i=0; i<n_local; i++) {
565:     VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
566:   }
567:   VecZeroEntries(y);
568:   /* do the local solves */
569:   for (i=0; i<n_local_true; i++) {
570:     VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
571:     KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);
572:     if (osm->localization) {
573:       VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
574:       VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);
575:     }
576:     VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
577:   }
578:   /* handle the rest of the scatters that do not have local solves */
579:   for (i=n_local_true; i<n_local; i++) {
580:     VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);
581:     VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
582:   }
583:   for (i=0; i<n_local; i++) {
584:     VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);
585:   }
586:   return(0);
587: }

589: static PetscErrorCode PCReset_ASM(PC pc)
590: {
591:   PC_ASM         *osm = (PC_ASM*)pc->data;
593:   PetscInt       i;

596:   if (osm->ksp) {
597:     for (i=0; i<osm->n_local_true; i++) {
598:       KSPReset(osm->ksp[i]);
599:     }
600:   }
601:   if (osm->pmat) {
602:     if (osm->n_local_true > 0) {
603:       MatDestroySubMatrices(osm->n_local_true,&osm->pmat);
604:     }
605:   }
606:   if (osm->restriction) {
607:     for (i=0; i<osm->n_local; i++) {
608:       VecScatterDestroy(&osm->restriction[i]);
609:       if (osm->localization) {VecScatterDestroy(&osm->localization[i]);}
610:       VecScatterDestroy(&osm->prolongation[i]);
611:       VecDestroy(&osm->x[i]);
612:       VecDestroy(&osm->y[i]);
613:       VecDestroy(&osm->y_local[i]);
614:     }
615:     PetscFree(osm->restriction);
616:     if (osm->localization) {PetscFree(osm->localization);}
617:     PetscFree(osm->prolongation);
618:     PetscFree(osm->x);
619:     PetscFree(osm->y);
620:     PetscFree(osm->y_local);
621:   }
622:   PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);
623:   if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) {
624:     ISDestroy(&osm->lis);
625:     MatDestroyMatrices(osm->n_local_true, &osm->lmats);
626:     VecDestroy(&osm->lx);
627:     VecDestroy(&osm->ly);
628:   }

630:   PetscFree(osm->sub_mat_type);

632:   osm->is       = 0;
633:   osm->is_local = 0;
634:   return(0);
635: }

637: static PetscErrorCode PCDestroy_ASM(PC pc)
638: {
639:   PC_ASM         *osm = (PC_ASM*)pc->data;
641:   PetscInt       i;

644:   PCReset_ASM(pc);
645:   if (osm->ksp) {
646:     for (i=0; i<osm->n_local_true; i++) {
647:       KSPDestroy(&osm->ksp[i]);
648:     }
649:     PetscFree(osm->ksp);
650:   }
651:   PetscFree(pc->data);
652:   return(0);
653: }

655: static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
656: {
657:   PC_ASM         *osm = (PC_ASM*)pc->data;
659:   PetscInt       blocks,ovl;
660:   PetscBool      symset,flg;
661:   PCASMType      asmtype;
662:   PCCompositeType loctype;
663:   char           sub_mat_type[256];

666:   /* set the type to symmetric if matrix is symmetric */
667:   if (!osm->type_set && pc->pmat) {
668:     MatIsSymmetricKnown(pc->pmat,&symset,&flg);
669:     if (symset && flg) osm->type = PC_ASM_BASIC;
670:   }
671:   PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");
672:   PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);
673:   PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);
674:   if (flg) {
675:     PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);
676:     osm->dm_subdomains = PETSC_FALSE;
677:   }
678:   PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);
679:   if (flg) {
680:     PCASMSetOverlap(pc,ovl);
681:     osm->dm_subdomains = PETSC_FALSE;
682:   }
683:   flg  = PETSC_FALSE;
684:   PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);
685:   if (flg) {PCASMSetType(pc,asmtype); }
686:   flg  = PETSC_FALSE;
687:   PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);
688:   if (flg) {PCASMSetLocalType(pc,loctype); }
689:   PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);
 690:   if (flg) {
691:     PCASMSetSubMatType(pc,sub_mat_type);
692:   }
693:   PetscOptionsTail();
694:   return(0);
695: }

697: /*------------------------------------------------------------------------------------*/

699: static PetscErrorCode  PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
700: {
701:   PC_ASM         *osm = (PC_ASM*)pc->data;
703:   PetscInt       i;

706:   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
707:   if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

709:   if (!pc->setupcalled) {
710:     if (is) {
711:       for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is[i]);}
712:     }
713:     if (is_local) {
714:       for (i=0; i<n; i++) {PetscObjectReference((PetscObject)is_local[i]);}
715:     }
716:     PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);

718:     osm->n_local_true = n;
719:     osm->is           = 0;
720:     osm->is_local     = 0;
721:     if (is) {
722:       PetscMalloc1(n,&osm->is);
723:       for (i=0; i<n; i++) osm->is[i] = is[i];
724:       /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
725:       osm->overlap = -1;
726:     }
727:     if (is_local) {
728:       PetscMalloc1(n,&osm->is_local);
729:       for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
730:       if (!is) {
731:         PetscMalloc1(osm->n_local_true,&osm->is);
732:         for (i=0; i<osm->n_local_true; i++) {
733:           if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
734:             ISDuplicate(osm->is_local[i],&osm->is[i]);
735:             ISCopy(osm->is_local[i],osm->is[i]);
736:           } else {
737:             PetscObjectReference((PetscObject)osm->is_local[i]);
738:             osm->is[i] = osm->is_local[i];
739:           }
740:         }
741:       }
742:     }
743:   }
744:   return(0);
745: }

747: static PetscErrorCode  PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
748: {
749:   PC_ASM         *osm = (PC_ASM*)pc->data;
751:   PetscMPIInt    rank,size;
752:   PetscInt       n;

755:   if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
 756:   if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\tthey cannot be set globally yet.");

758:   /*
759:      Split the subdomains equally among all processors
760:   */
761:   MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);
762:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
763:   n    = N/size + ((N % size) > rank);
764:   if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
765:   if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
766:   if (!pc->setupcalled) {
767:     PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);

769:     osm->n_local_true = n;
770:     osm->is           = 0;
771:     osm->is_local     = 0;
772:   }
773:   return(0);
774: }

776: static PetscErrorCode  PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
777: {
778:   PC_ASM *osm = (PC_ASM*)pc->data;

781:   if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
782:   if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
783:   if (!pc->setupcalled) osm->overlap = ovl;
784:   return(0);
785: }

787: static PetscErrorCode  PCASMSetType_ASM(PC pc,PCASMType type)
788: {
789:   PC_ASM *osm = (PC_ASM*)pc->data;

792:   osm->type     = type;
793:   osm->type_set = PETSC_TRUE;
794:   return(0);
795: }

797: static PetscErrorCode  PCASMGetType_ASM(PC pc,PCASMType *type)
798: {
799:   PC_ASM *osm = (PC_ASM*)pc->data;

802:   *type = osm->type;
803:   return(0);
804: }

806: static PetscErrorCode  PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
807: {
808:   PC_ASM *osm = (PC_ASM *) pc->data;

811:   osm->loctype = type;
812:   return(0);
813: }

815: static PetscErrorCode  PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
816: {
817:   PC_ASM *osm = (PC_ASM *) pc->data;

820:   *type = osm->loctype;
821:   return(0);
822: }

824: static PetscErrorCode  PCASMSetSortIndices_ASM(PC pc,PetscBool  doSort)
825: {
826:   PC_ASM *osm = (PC_ASM*)pc->data;

829:   osm->sort_indices = doSort;
830:   return(0);
831: }

833: static PetscErrorCode  PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
834: {
835:   PC_ASM         *osm = (PC_ASM*)pc->data;

 839:   if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here");

841:   if (n_local) *n_local = osm->n_local_true;
842:   if (first_local) {
843:     MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));
844:     *first_local -= osm->n_local_true;
845:   }
846:   if (ksp) {
847:     /* Assume that local solves are now different; not necessarily
848:        true though!  This flag is used only for PCView_ASM() */
849:     *ksp                   = osm->ksp;
850:     osm->same_local_solves = PETSC_FALSE;
851:   }
852:   return(0);
853: }

855: static PetscErrorCode  PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
856: {
857:   PC_ASM         *osm = (PC_ASM*)pc->data;

862:   *sub_mat_type = osm->sub_mat_type;
863:   return(0);
864: }

866: static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
867: {
868:   PetscErrorCode    ierr;
869:   PC_ASM            *osm = (PC_ASM*)pc->data;

873:   PetscFree(osm->sub_mat_type);
874:   PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);
875:   return(0);
876: }

878: /*@C
879:     PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

881:     Collective on PC

883:     Input Parameters:
884: +   pc - the preconditioner context
885: .   n - the number of subdomains for this processor (default value = 1)
886: .   is - the index set that defines the subdomains for this processor
887:          (or NULL for PETSc to determine subdomains)
888: -   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
889:          (or NULL to not provide these)

891:     Notes:
892:     The IS numbering is in the parallel, global numbering of the vector for both is and is_local

894:     By default the ASM preconditioner uses 1 block per processor.

896:     Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

 898:     If is_local is provided and PCASMType is PC_ASM_RESTRICT then only the solution over the is_local region is interpolated
 899:     back to form the global solution (this is the standard restricted additive Schwarz method).

 901:     If is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated, since there is
 902:     no code to handle that case.

904:     Level: advanced

906: .keywords: PC, ASM, set, local, subdomains, additive Schwarz

908: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
909:           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
910: @*/
911: PetscErrorCode  PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
912: {

917:   PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));
918:   return(0);
919: }
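/*
   Illustrative usage sketch (not part of the original source): split the
   locally owned rows of a matrix into two contiguous blocks per process and
   use them as ASM subdomains.  The names A, ksp and pc are assumptions of the
   example; error checking is omitted as elsewhere in this listing.

     PetscInt rstart,rend,half;
     IS       sub[2];
     PC       pc;

     MatGetOwnershipRange(A,&rstart,&rend);
     half = rstart + (rend - rstart)/2;
     ISCreateStride(PETSC_COMM_SELF,half-rstart,rstart,1,&sub[0]);   global numbering, as required
     ISCreateStride(PETSC_COMM_SELF,rend-half,half,1,&sub[1]);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCASM);
     PCASMSetLocalSubdomains(pc,2,sub,NULL);   is_local not provided
     PCASMSetOverlap(pc,1);                    let PETSc extend these blocks by one level of overlap
     ISDestroy(&sub[0]);                       the PC keeps its own references (see PCASMSetLocalSubdomains_ASM() above)
     ISDestroy(&sub[1]);
*/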

921: /*@C
922:     PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
923:     additive Schwarz preconditioner.  Either all or no processors in the
924:     PC communicator must call this routine, with the same index sets.

926:     Collective on PC

928:     Input Parameters:
929: +   pc - the preconditioner context
930: .   N  - the number of subdomains for all processors
931: .   is - the index sets that define the subdomains for all processors
 932:          (or NULL to ask PETSc to come up with subdomains)
933: -   is_local - the index sets that define the local part of the subdomains for this processor
934:          (or NULL to not provide this information)

936:     Options Database Key:
937:     To set the total number of subdomain blocks rather than specify the
938:     index sets, use the option
939: .    -pc_asm_blocks <blks> - Sets total blocks

941:     Notes:
942:     Currently you cannot use this to set the actual subdomains with the argument is or is_local.

944:     By default the ASM preconditioner uses 1 block per processor.

946:     These index sets cannot be destroyed until after completion of the
947:     linear solves for which the ASM preconditioner is being used.

949:     Use PCASMSetLocalSubdomains() to set local subdomains.

951:     The IS numbering is in the parallel, global numbering of the vector for both is and is_local

953:     Level: advanced

955: .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

957: .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
958:           PCASMCreateSubdomains2D()
959: @*/
960: PetscErrorCode  PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
961: {

966:   PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));
967:   return(0);
968: }
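/*
   Illustrative usage sketch (not part of the original source): ask for 16
   subdomain blocks in total and let PETSc split them across the processes,
   equivalent to the command line option -pc_asm_blocks 16.  The PC object pc
   is assumed to already have type PCASM.

     PCASMSetTotalSubdomains(pc,16,NULL,NULL);
*/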

970: /*@
971:     PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
972:     additive Schwarz preconditioner.  Either all or no processors in the
973:     PC communicator must call this routine.

975:     Logically Collective on PC

977:     Input Parameters:
978: +   pc  - the preconditioner context
979: -   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

981:     Options Database Key:
982: .   -pc_asm_overlap <ovl> - Sets overlap

984:     Notes:
985:     By default the ASM preconditioner uses 1 block per processor.  To use
 986:     multiple blocks per processor, see PCASMSetTotalSubdomains() and
987:     PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

989:     The overlap defaults to 1, so if one desires that no additional
990:     overlap be computed beyond what may have been set with a call to
991:     PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
 992:     must be set to 0.  In particular, if one does not explicitly set
 993:     the subdomains in the application code, then all overlap would be computed
 994:     internally by PETSc, and using an overlap of 0 would result in an ASM
 995:     variant that is equivalent to the block Jacobi preconditioner.

 997:     The default algorithm used by PETSc to increase overlap is fast but not scalable;
 998:     use the option -mat_increase_overlap_scalable when the problem and the number of processes are large.

1000:     Note that one can define initial index sets with any overlap via
1001:     PCASMSetLocalSubdomains(); the routine
1002:     PCASMSetOverlap() merely allows PETSc to extend that overlap further
1003:     if desired.

1005:     Level: intermediate

1007: .keywords: PC, ASM, set, overlap

1009: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1010:           PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
1011: @*/
1012: PetscErrorCode  PCASMSetOverlap(PC pc,PetscInt ovl)
1013: {

1019:   PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));
1020:   return(0);
1021: }
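/*
   Illustrative usage sketch (not part of the original source): request two
   levels of overlap between subdomains, equivalent to -pc_asm_overlap 2.  As
   noted above, an overlap of 0 with PETSc-generated subdomains gives a method
   equivalent to block Jacobi.  The PC object pc is assumed to have type PCASM.

     PCASMSetOverlap(pc,2);
*/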

1023: /*@
1024:     PCASMSetType - Sets the type of restriction and interpolation used
1025:     for local problems in the additive Schwarz method.

1027:     Logically Collective on PC

1029:     Input Parameters:
1030: +   pc  - the preconditioner context
1031: -   type - variant of ASM, one of
1032: .vb
1033:       PC_ASM_BASIC       - full interpolation and restriction
1034:       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1035:       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1036:       PC_ASM_NONE        - local processor restriction and interpolation
1037: .ve

1039:     Options Database Key:
1040: .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

1042:     Notes: if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
1043:     to limit the local processor interpolation

1045:     Level: intermediate

1047: .keywords: PC, ASM, set, type

1049: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1050:           PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
1051: @*/
1052: PetscErrorCode  PCASMSetType(PC pc,PCASMType type)
1053: {

1059:   PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));
1060:   return(0);
1061: }
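/*
   Illustrative usage sketch (not part of the original source): select the
   standard (non-restricted) additive Schwarz variant, equivalent to the
   command line option -pc_asm_type basic.  The PC object pc is assumed to
   have type PCASM.

     PCASMSetType(pc,PC_ASM_BASIC);
*/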

1063: /*@
1064:     PCASMGetType - Gets the type of restriction and interpolation used
1065:     for local problems in the additive Schwarz method.

1067:     Logically Collective on PC

1069:     Input Parameter:
1070: .   pc  - the preconditioner context

1072:     Output Parameter:
1073: .   type - variant of ASM, one of

1075: .vb
1076:       PC_ASM_BASIC       - full interpolation and restriction
1077:       PC_ASM_RESTRICT    - full restriction, local processor interpolation
1078:       PC_ASM_INTERPOLATE - full interpolation, local processor restriction
1079:       PC_ASM_NONE        - local processor restriction and interpolation
1080: .ve

1082:     Options Database Key:
1083: .   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

1085:     Level: intermediate

1087: .keywords: PC, ASM, set, type

1089: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1090:           PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
1091: @*/
1092: PetscErrorCode  PCASMGetType(PC pc,PCASMType *type)
1093: {

1098:   PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));
1099:   return(0);
1100: }

1102: /*@
1103:   PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

1105:   Logically Collective on PC

1107:   Input Parameters:
1108: + pc  - the preconditioner context
1109: - type - type of composition, one of
1110: .vb
1111:   PC_COMPOSITE_ADDITIVE       - local additive combination
1112:   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1113: .ve

1115:   Options Database Key:
1116: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

1118:   Level: intermediate

1120: .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1121: @*/
1122: PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
1123: {

1129:   PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));
1130:   return(0);
1131: }

1133: /*@
1134:   PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

1136:   Logically Collective on PC

1138:   Input Parameter:
1139: . pc  - the preconditioner context

1141:   Output Parameter:
1142: . type - type of composition, one of
1143: .vb
1144:   PC_COMPOSITE_ADDITIVE       - local additive combination
1145:   PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
1146: .ve

1148:   Options Database Key:
1149: . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

1151:   Level: intermediate

1153: .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
1154: @*/
1155: PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
1156: {

1162:   PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));
1163:   return(0);
1164: }

1166: /*@
1167:     PCASMSetSortIndices - Determines whether subdomain indices are sorted.

1169:     Logically Collective on PC

1171:     Input Parameters:
1172: +   pc  - the preconditioner context
1173: -   doSort - sort the subdomain indices

1175:     Level: intermediate

1177: .keywords: PC, ASM, set, type

1179: .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
1180:           PCASMCreateSubdomains2D()
1181: @*/
1182: PetscErrorCode  PCASMSetSortIndices(PC pc,PetscBool doSort)
1183: {

1189:   PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));
1190:   return(0);
1191: }

1193: /*@C
1194:    PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
1195:    this processor.

1197:    Collective on PC iff first_local is requested

1199:    Input Parameter:
1200: .  pc - the preconditioner context

1202:    Output Parameters:
1203: +  n_local - the number of blocks on this processor or NULL
1204: .  first_local - the global number of the first block on this processor or NULL,
1205:                  all processors must request or all must pass NULL
1206: -  ksp - the array of KSP contexts

1208:    Note:
1209:    After PCASMGetSubKSP() the array of KSPs is not to be freed.

1211:    You must call KSPSetUp() before calling PCASMGetSubKSP().

1213:    Fortran note:
1214:    The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.

1216:    Level: advanced

1218: .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

1220: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMSetOverlap(),
1221:           PCASMCreateSubdomains2D()
1222: @*/
1223: PetscErrorCode  PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
1224: {

1229:   PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));
1230:   return(0);
1231: }
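/*
   Illustrative usage sketch (not part of the original source): after the outer
   KSP has been set up, retrieve the block solvers and configure each one
   directly, here GMRES with ILU(1) on every local block.  The outer KSP object
   ksp is an assumption of the example; error checking is omitted as elsewhere
   in this listing.

     KSP      *subksp;
     PC        pc,subpc;
     PetscInt  i,nlocal,first;

     KSPSetUp(ksp);                              must precede PCASMGetSubKSP()
     KSPGetPC(ksp,&pc);
     PCASMGetSubKSP(pc,&nlocal,&first,&subksp);
     for (i=0; i<nlocal; i++) {
       KSPSetType(subksp[i],KSPGMRES);
       KSPGetPC(subksp[i],&subpc);
       PCSetType(subpc,PCILU);
       PCFactorSetLevels(subpc,1);
     }
*/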

1233: /* -------------------------------------------------------------------------------------*/
1234: /*MC
1235:    PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
1236:            its own KSP object.

1238:    Options Database Keys:
1239: +  -pc_asm_blocks <blks> - Sets total blocks
1240: .  -pc_asm_overlap <ovl> - Sets overlap
1241: .  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
1242: -  -pc_asm_local_type [additive, multiplicative] - Sets local solver composition type, default is additive

1244:      IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
1245:       will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
1246:       -pc_asm_type basic to use the standard ASM.

1248:    Notes: Each processor can have one or more blocks, but a block cannot be shared by more
1249:      than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.

1251:      To set options on the solvers for each block append -sub_ to all the KSP and PC
1252:         options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

1254:      To set the options on the solvers separately for each block call PCASMGetSubKSP()
1255:          and set the options directly on the resulting KSP object (you can access its PC
1256:          with KSPGetPC())

1258:    Level: beginner

1260:    Concepts: additive Schwarz method

1262:     References:
1263: +   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
1264:      Courant Institute, New York University Technical report
1265: -   2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decomposition: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
1266:     Cambridge University Press.

1268: .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
1269:            PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
1270:            PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType

1272: M*/
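/*
   Illustrative end-to-end sketch (not part of the original source): solve a
   linear system with restricted additive Schwarz using one block per process
   and the default sub-solvers.  The objects A, b and x are assumed to exist;
   further options such as -pc_asm_overlap 2 or -sub_pc_type lu can then be
   given on the command line.

     KSP ksp;
     PC  pc;

     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCASM);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/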

1274: PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
1275: {
1277:   PC_ASM         *osm;

1280:   PetscNewLog(pc,&osm);

1282:   osm->n                 = PETSC_DECIDE;
1283:   osm->n_local           = 0;
1284:   osm->n_local_true      = PETSC_DECIDE;
1285:   osm->overlap           = 1;
1286:   osm->ksp               = 0;
1287:   osm->restriction       = 0;
1288:   osm->localization      = 0;
1289:   osm->prolongation      = 0;
1290:   osm->x                 = 0;
1291:   osm->y                 = 0;
1292:   osm->y_local           = 0;
1293:   osm->is                = 0;
1294:   osm->is_local          = 0;
1295:   osm->mat               = 0;
1296:   osm->pmat              = 0;
1297:   osm->type              = PC_ASM_RESTRICT;
1298:   osm->loctype           = PC_COMPOSITE_ADDITIVE;
1299:   osm->same_local_solves = PETSC_TRUE;
1300:   osm->sort_indices      = PETSC_TRUE;
1301:   osm->dm_subdomains     = PETSC_FALSE;
1302:   osm->sub_mat_type      = NULL;

1304:   pc->data                 = (void*)osm;
1305:   pc->ops->apply           = PCApply_ASM;
1306:   pc->ops->applytranspose  = PCApplyTranspose_ASM;
1307:   pc->ops->setup           = PCSetUp_ASM;
1308:   pc->ops->reset           = PCReset_ASM;
1309:   pc->ops->destroy         = PCDestroy_ASM;
1310:   pc->ops->setfromoptions  = PCSetFromOptions_ASM;
1311:   pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
1312:   pc->ops->view            = PCView_ASM;
1313:   pc->ops->applyrichardson = 0;

1315:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);
1316:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);
1317:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);
1318:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);
1319:   PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);
1320:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);
1321:   PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);
1322:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);
1323:   PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);
1324:   PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);
1325:   PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);
1326:   return(0);
1327: }

1329: /*@C
1330:    PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
1331:    preconditioner for any problem on a general grid.

1333:    Collective

1335:    Input Parameters:
1336: +  A - The global matrix operator
1337: -  n - the number of local blocks

1339:    Output Parameters:
1340: .  outis - the array of index sets defining the subdomains

1342:    Level: advanced

1344:    Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
1345:     from these if you use PCASMSetLocalSubdomains()

1347:     In the Fortran version you must provide the array outis[], already allocated with length n.

1349: .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

1351: .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
1352: @*/
1353: PetscErrorCode  PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
1354: {
1355:   MatPartitioning mpart;
1356:   const char      *prefix;
1357:   void            (*f)(void);
1358:   PetscInt        i,j,rstart,rend,bs;
1359:   PetscBool       isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
1360:   Mat             Ad     = NULL, adj;
1361:   IS              ispart,isnumb,*is;
1362:   PetscErrorCode  ierr;

1367:   if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

1369:   /* Get prefix, row distribution, and block size */
1370:   MatGetOptionsPrefix(A,&prefix);
1371:   MatGetOwnershipRange(A,&rstart,&rend);
1372:   MatGetBlockSize(A,&bs);
1373:   if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

1375:   /* Get diagonal block from matrix if possible */
1376:   MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);
1377:   if (f) {
1378:     MatGetDiagonalBlock(A,&Ad);
1379:   }
1380:   if (Ad) {
1381:     PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);
1382:     if (!isbaij) {PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);}
1383:   }
1384:   if (Ad && n > 1) {
1385:     PetscBool match,done;
1386:     /* Try to setup a good matrix partitioning if available */
1387:     MatPartitioningCreate(PETSC_COMM_SELF,&mpart);
1388:     PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
1389:     MatPartitioningSetFromOptions(mpart);
1390:     PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);
1391:     if (!match) {
1392:       PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);
1393:     }
1394:     if (!match) { /* assume a "good" partitioner is available */
1395:       PetscInt       na;
1396:       const PetscInt *ia,*ja;
1397:       MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1398:       if (done) {
1399:         /* Build adjacency matrix by hand. Unfortunately a call to
1400:            MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
1401:            remove the block-aij structure and we cannot expect
1402:            MatPartitioning to split vertices as we need */
1403:         PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
1404:         const PetscInt *row;
1405:         nnz = 0;
1406:         for (i=0; i<na; i++) { /* count number of nonzeros */
1407:           len = ia[i+1] - ia[i];
1408:           row = ja + ia[i];
1409:           for (j=0; j<len; j++) {
1410:             if (row[j] == i) { /* don't count diagonal */
1411:               len--; break;
1412:             }
1413:           }
1414:           nnz += len;
1415:         }
1416:         PetscMalloc1(na+1,&iia);
1417:         PetscMalloc1(nnz,&jja);
1418:         nnz    = 0;
1419:         iia[0] = 0;
1420:         for (i=0; i<na; i++) { /* fill adjacency */
1421:           cnt = 0;
1422:           len = ia[i+1] - ia[i];
1423:           row = ja + ia[i];
1424:           for (j=0; j<len; j++) {
1425:             if (row[j] != i) { /* if not diagonal */
1426:               jja[nnz+cnt++] = row[j];
1427:             }
1428:           }
1429:           nnz     += cnt;
1430:           iia[i+1] = nnz;
1431:         }
1432:         /* Partitioning of the adjacency matrix */
1433:         MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);
1434:         MatPartitioningSetAdjacency(mpart,adj);
1435:         MatPartitioningSetNParts(mpart,n);
1436:         MatPartitioningApply(mpart,&ispart);
1437:         ISPartitioningToNumbering(ispart,&isnumb);
1438:         MatDestroy(&adj);
1439:         foundpart = PETSC_TRUE;
1440:       }
1441:       MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);
1442:     }
1443:     MatPartitioningDestroy(&mpart);
1444:   }

1446:   PetscMalloc1(n,&is);
1447:   *outis = is;

1449:   if (!foundpart) {

1451:     /* Partitioning by contiguous chunks of rows */

1453:     PetscInt mbs   = (rend-rstart)/bs;
1454:     PetscInt start = rstart;
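    /* each of the n subdomains gets mbs/n whole blocks of rows; the first (mbs % n) subdomains get one extra block, so alignment with the block size bs is preserved */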
1455:     for (i=0; i<n; i++) {
1456:       PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
1457:       ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);
1458:       start += count;
1459:     }

1461:   } else {

1463:     /* Partitioning by adjacency of diagonal block  */

1465:     const PetscInt *numbering;
1466:     PetscInt       *count,nidx,*indices,*newidx,start=0;
1467:     /* Get node count in each partition */
1468:     PetscMalloc1(n,&count);
1469:     ISPartitioningCount(ispart,n,count);
1470:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1471:       for (i=0; i<n; i++) count[i] *= bs;
1472:     }
1473:     /* Build indices from node numbering */
1474:     ISGetLocalSize(isnumb,&nidx);
1475:     PetscMalloc1(nidx,&indices);
1476:     for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
1477:     ISGetIndices(isnumb,&numbering);
1478:     PetscSortIntWithPermutation(nidx,numbering,indices);
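    /* indices is now a permutation of the local rows grouped by subdomain: rows assigned to partition 0 come first, then partition 1, and so on */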
1479:     ISRestoreIndices(isnumb,&numbering);
1480:     if (isbaij && bs > 1) { /* adjust for the block-aij case */
1481:       PetscMalloc1(nidx*bs,&newidx);
1482:       for (i=0; i<nidx; i++) {
1483:         for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
1484:       }
1485:       PetscFree(indices);
1486:       nidx   *= bs;
1487:       indices = newidx;
1488:     }
1489:     /* Shift to get global indices */
1490:     for (i=0; i<nidx; i++) indices[i] += rstart;

1492:     /* Build the index sets for each block */
1493:     for (i=0; i<n; i++) {
1494:       ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);
1495:       ISSort(is[i]);
1496:       start += count[i];
1497:     }

1499:     PetscFree(count);
1500:     PetscFree(indices);
1501:     ISDestroy(&isnumb);
1502:     ISDestroy(&ispart);

1504:   }
1505:   return(0);
1506: }

1508: /*@C
1509:    PCASMDestroySubdomains - Destroys the index sets created with
1510:    PCASMCreateSubdomains(). Should be called after setting subdomains
1511:    with PCASMSetLocalSubdomains().

1513:    Collective

1515:    Input Parameters:
1516: +  n - the number of index sets
1517: .  is - the array of index sets
1518: -  is_local - the array of local index sets, can be NULL

1520:    Level: advanced

1522: .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

1524: .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
1525: @*/
1526: PetscErrorCode  PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
1527: {
1528:   PetscInt       i;

1532:   if (n <= 0) return(0);
1533:   if (is) {
1535:     for (i=0; i<n; i++) { ISDestroy(&is[i]); }
1536:     PetscFree(is);
1537:   }
1538:   if (is_local) {
1540:     for (i=0; i<n; i++) { ISDestroy(&is_local[i]); }
1541:     PetscFree(is_local);
1542:   }
1543:   return(0);
1544: }

1546: /*@
1547:    PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
1548:    preconditioner for a two-dimensional problem on a regular grid.

1550:    Not Collective

1552:    Input Parameters:
1553: +  m, n - the number of mesh points in the x and y directions
1554: .  M, N - the number of subdomains in the x and y directions
1555: .  dof - degrees of freedom per node
1556: -  overlap - overlap in mesh lines

1558:    Output Parameters:
1559: +  Nsub - the number of subdomains created
1560: .  is - array of index sets defining overlapping (if overlap > 0) subdomains
1561: -  is_local - array of index sets defining non-overlapping subdomains

1563:    Note:
1564:    Presently PCASMCreateSubdomains2D() is valid only for sequential
1565:    preconditioners.  More general related routines are
1566:    PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains().
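
    Example usage (a minimal sketch; assumes pc is a sequential PCASM and uses a hypothetical 64 x 64 mesh split into a 4 x 4 array of subdomains with overlap 1, error checking omitted):
.vb
      PetscInt Nsub;
      IS       *is,*is_local;
      PCASMCreateSubdomains2D(64,64,4,4,1,1,&Nsub,&is,&is_local);
      PCASMSetLocalSubdomains(pc,Nsub,is,is_local);
.ve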

1568:    Level: advanced

1570: .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid

1572: .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
1573:           PCASMSetOverlap()
1574: @*/
1575: PetscErrorCode  PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local)
1576: {
1577:   PetscInt       i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer;
1579:   PetscInt       nidx,*idx,loc,ii,jj,count;

1582:   if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Dof must be 1");

1584:   *Nsub     = N*M;
1585:   PetscMalloc1(*Nsub,is);
1586:   PetscMalloc1(*Nsub,is_local);
1587:   ystart    = 0;
1588:   loc_outer = 0;
1589:   for (i=0; i<N; i++) {
1590:     height = n/N + ((n % N) > i); /* height of subdomain */
1591:     if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n");
1592:     yleft  = ystart - overlap; if (yleft < 0) yleft = 0;
1593:     yright = ystart + height + overlap; if (yright > n) yright = n;
1594:     xstart = 0;
1595:     for (j=0; j<M; j++) {
1596:       width = m/M + ((m % M) > j); /* width of subdomain */
1597:       if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m");
1598:       xleft  = xstart - overlap; if (xleft < 0) xleft = 0;
1599:       xright = xstart + width + overlap; if (xright > m) xright = m;
1600:       nidx   = (xright - xleft)*(yright - yleft);
1601:       PetscMalloc1(nidx,&idx);
1602:       loc    = 0;
1603:       for (ii=yleft; ii<yright; ii++) {
1604:         count = m*ii + xleft;
1605:         for (jj=xleft; jj<xright; jj++) idx[loc++] = count++;
1606:       }
1607:       ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);
1608:       if (overlap == 0) {
1609:         PetscObjectReference((PetscObject)(*is)[loc_outer]);

1611:         (*is_local)[loc_outer] = (*is)[loc_outer];
1612:       } else {
1613:         for (loc=0,ii=ystart; ii<ystart+height; ii++) {
1614:           for (jj=xstart; jj<xstart+width; jj++) {
1615:             idx[loc++] = m*ii + jj;
1616:           }
1617:         }
1618:         ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);
1619:       }
1620:       PetscFree(idx);
1621:       xstart += width;
1622:       loc_outer++;
1623:     }
1624:     ystart += height;
1625:   }
1626:   for (i=0; i<*Nsub; i++) { ISSort((*is)[i]); }
1627:   return(0);
1628: }

1630: /*@C
1631:     PCASMGetLocalSubdomains - Gets the local subdomains (for this processor
1632:     only) for the additive Schwarz preconditioner.

1634:     Not Collective

1636:     Input Parameter:
1637: .   pc - the preconditioner context

1639:     Output Parameters:
1640: +   n - the number of subdomains for this processor (default value = 1)
1641: .   is - the index sets that define the subdomains for this processor
1642: -   is_local - the index sets that define the local part of the subdomains for this processor (can be NULL)


1645:     Notes:
1646:     The IS numbering is in the parallel, global numbering of the vector.
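
    Example usage (a minimal sketch; assumes pc is a PCASM whose subdomains have been set or set up, error checking omitted):
.vb
      PetscInt n;
      IS       *is,*is_local;
      PCASMGetLocalSubdomains(pc,&n,&is,&is_local);
.ve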

1648:     Level: advanced

1650: .keywords: PC, ASM, set, local, subdomains, additive Schwarz

1652: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1653:           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices()
1654: @*/
1655: PetscErrorCode  PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[])
1656: {
1657:   PC_ASM         *osm = (PC_ASM*)pc->data;
1659:   PetscBool      match;

1665:   PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1666:   if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM");
1667:   if (n) *n = osm->n_local_true;
1668:   if (is) *is = osm->is;
1669:   if (is_local) *is_local = osm->is_local;
1670:   return(0);
1671: }

1673: /*@C
1674:     PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor
1675:     only) for the additive Schwarz preconditioner.

1677:     Not Collective

1679:     Input Parameter:
1680: .   pc - the preconditioner context

1682:     Output Parameters:
1683: +   n - the number of matrices for this processor (default value = 1)
1684: -   mat - the matrices

1686:     Level: advanced

1688:     Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPSolve()) and before PCSetUpOnBlocks()

1690:            Usually one would use PCSetModifySubMatrices() to change the submatrices used in building the preconditioner.
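
           Example usage (a minimal sketch; assumes ksp uses a PCASM preconditioner and KSPSetUp() has been called, error checking omitted):
.vb
      PC       pc;
      PetscInt n;
      Mat      *mats;
      KSPGetPC(ksp,&pc);
      PCASMGetLocalSubmatrices(pc,&n,&mats);
.ve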

1692: .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi

1694: .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
1695:           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubMatrices()
1696: @*/
1697: PetscErrorCode  PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[])
1698: {
1699:   PC_ASM         *osm;
1701:   PetscBool      match;

1707:   if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp().");
1708:   PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1709:   if (!match) {
1710:     if (n) *n = 0;
1711:     if (mat) *mat = NULL;
1712:   } else {
1713:     osm = (PC_ASM*)pc->data;
1714:     if (n) *n = osm->n_local_true;
1715:     if (mat) *mat = osm->pmat;
1716:   }
1717:   return(0);
1718: }

1720: /*@
1721:     PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

1723:     Logically Collective

1725:     Input Parameters:
1726: +   pc  - the preconditioner
1727: -   flg - boolean indicating whether to use subdomains defined by the DM

1729:     Options Database Key:
1730: .   -pc_asm_dm_subdomains <bool> - use subdomains defined by the DM whenever possible

1732:     Level: intermediate

1734:     Notes:
1735:     PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(),
1736:     so setting either of the first two effectively turns the latter off.
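
    Example usage (a minimal sketch; assumes pc is a PCASM, a DM supporting DMCreateDomainDecomposition() is attached, and PCSetUp() has not yet been called):
.vb
      PCASMSetDMSubdomains(pc,PETSC_TRUE);
.ve
    or, equivalently, from the options database,
.vb
      -pc_asm_dm_subdomains
.ve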

1738: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz

1740: .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1741:           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1742: @*/
1743: PetscErrorCode  PCASMSetDMSubdomains(PC pc,PetscBool flg)
1744: {
1745:   PC_ASM         *osm = (PC_ASM*)pc->data;
1747:   PetscBool      match;

1752:   if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC.");
1753:   PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1754:   if (match) {
1755:     osm->dm_subdomains = flg;
1756:   }
1757:   return(0);
1758: }

1760: /*@
1761:     PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible.

1762:     Not Collective

1764:     Input Parameter:
1765: .   pc  - the preconditioner

1767:     Output Parameter:
1768: .   flg - boolean indicating whether to use subdomains defined by the DM

1770:     Level: intermediate

1772: .keywords: PC, ASM, DM, set, subdomains, additive Schwarz

1774: .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
1775:           PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains()
1776: @*/
1777: PetscErrorCode  PCASMGetDMSubdomains(PC pc,PetscBool* flg)
1778: {
1779:   PC_ASM         *osm = (PC_ASM*)pc->data;
1781:   PetscBool      match;

1786:   PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);
1787:   if (match) {
1788:     if (flg) *flg = osm->dm_subdomains;
1789:   }
1790:   return(0);
1791: }

1793: /*@
1794:      PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string.

1796:    Not Collective

1798:    Input Parameter:
1799: .  pc - the PC

1801:    Output Parameter:
1802: .  sub_mat_type - name of matrix type

1804:    Level: advanced

1806: .keywords: PC, PCASM, MatType, set

1808: .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1809: @*/
1810: PetscErrorCode  PCASMGetSubMatType(PC pc,MatType *sub_mat_type){

1813:   PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));
1814:   return(0);
1815: }

1817: /*@
1818:      PCASMSetSubMatType - Set the type of matrix used for ASM subsolves

1820:    Logically Collective on PC

1822:    Input Parameters:
1823: +  pc             - the PC object
1824: -  sub_mat_type   - matrix type

1826:    Options Database Key:
1827: .  -pc_asm_sub_mat_type  <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed.

1829:    Notes:
1830:    See "${PETSC_DIR}/include/petscmat.h" for available types
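
   Example usage (a minimal sketch; assumes pc is a PCASM and that PETSc was configured with ViennaCL so that MATSEQAIJVIENNACL is available):
.vb
      PCASMSetSubMatType(pc,MATSEQAIJVIENNACL);
.ve
   or, equivalently, from the options database,
.vb
      -pc_asm_sub_mat_type aijviennacl
.ve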

1832:   Level: advanced

1834: .keywords: PC, PCASM, MatType, set

1836: .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat
1837: @*/
1838: PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type)
1839: {

1842:   PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));
1843:   return(0);
1844: }