Actual source code: redundant.c

petsc-master 2019-10-20

/*
  This file defines a "solve the problem redundantly on each subgroup of processors" preconditioner.
*/
#include <petsc/private/pcimpl.h>
#include <petscksp.h>

typedef struct {
  KSP                ksp;
  PC                 pc;                   /* actual preconditioner used on each processor */
  Vec                xsub,ysub;            /* vectors of a subcommunicator to hold parallel vectors of PetscObjectComm((PetscObject)pc) */
  Vec                xdup,ydup;            /* parallel vectors that concatenate xsub or ysub to facilitate vector scattering */
  Mat                pmats;                /* matrix (and optional preconditioner matrix) belonging to a subcommunicator */
  VecScatter         scatterin,scatterout; /* scatters used to move all values to each processor group (subcommunicator) */
  PetscBool          useparallelmat;
  PetscSubcomm       psubcomm;
  PetscInt           nsubcomm;             /* number of subcommunicators (redundant solves) */
  PetscBool          shifttypeset;
  MatFactorShiftType shifttype;
} PC_Redundant;
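
/* Editorial sketch (not in the original source): one application of the
   preconditioner moves data as
     x --scatterin--> xdup --alias--> xsub --KSPSolve--> ysub --alias--> ydup --scatterout--> y
   so every subcommunicator redundantly solves the full problem on its own
   copy of the operator. */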

PetscErrorCode PCFactorSetShiftType_Redundant(PC pc,MatFactorShiftType shifttype)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  if (red->ksp) {
    PC pc;
    KSPGetPC(red->ksp,&pc);
    PCFactorSetShiftType(pc,shifttype);
  } else {
    red->shifttypeset = PETSC_TRUE;
    red->shifttype    = shifttype;
  }
  return(0);
}

static PetscErrorCode PCView_Redundant(PC pc,PetscViewer viewer)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscBool      iascii,isstring;
  PetscViewer    subviewer;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
  if (iascii) {
    if (!red->psubcomm) {
      PetscViewerASCIIPrintf(viewer,"  Not yet setup\n");
    } else {
      PetscViewerASCIIPrintf(viewer,"  First (color=0) of %D PCs follows\n",red->nsubcomm);
      PetscViewerGetSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);
      if (!red->psubcomm->color) { /* only view first redundant pc */
        PetscViewerASCIIPushTab(subviewer);
        KSPView(red->ksp,subviewer);
        PetscViewerASCIIPopTab(subviewer);
      }
      PetscViewerRestoreSubViewer(viewer,((PetscObject)red->pc)->comm,&subviewer);
    }
  } else if (isstring) {
    PetscViewerStringSPrintf(viewer," Redundant solver preconditioner");
  }
  return(0);
}

#include <../src/mat/impls/aij/mpi/mpiaij.h>
static PetscErrorCode PCSetUp_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscInt       mstart,mend,mlocal,M;
  PetscMPIInt    size;
  MPI_Comm       comm,subcomm;
  Vec            x;

  PetscObjectGetComm((PetscObject)pc,&comm);

  /* if the matrix given by the user is sequential, we do not need to gather the parallel matrix */
  MPI_Comm_size(comm,&size);
  if (size == 1) red->useparallelmat = PETSC_FALSE;

  if (!pc->setupcalled) {
    PetscInt mloc_sub;
    if (!red->psubcomm) { /* create red->psubcomm, and a new ksp and pc over the subcomm */
      KSP ksp;
      PCRedundantGetKSP(pc,&ksp);
    }
    subcomm = PetscSubcommChild(red->psubcomm);

    if (red->useparallelmat) {
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,subcomm,MAT_INITIAL_MATRIX,&red->pmats);

      MPI_Comm_size(subcomm,&size);
      if (size > 1) {
        PetscBool foundpack,issbaij;
        PetscObjectTypeCompare((PetscObject)red->pmats,MATMPISBAIJ,&issbaij);
        if (!issbaij) {
          MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_LU,&foundpack);
        } else {
          MatGetFactorAvailable(red->pmats,NULL,MAT_FACTOR_CHOLESKY,&foundpack);
        }
        if (!foundpack) { /* no parallel factorization is available; reset to the default ksp and pc */
          KSPSetType(red->ksp,KSPGMRES);
          PCSetType(red->pc,PCBJACOBI);
        } else {
          PCFactorSetMatSolverType(red->pc,NULL);
        }
      }

      KSPSetOperators(red->ksp,red->pmats,red->pmats);

      /* get working vectors xsub and ysub */
      MatCreateVecs(red->pmats,&red->xsub,&red->ysub);

      /* create working vectors xdup and ydup.
       xdup concatenates all xsub's contiguously to form an MPI vector over dupcomm (see PetscSubcommCreate_interlaced());
       ydup concatenates all ysub's and has empty local arrays because ysub's arrays will be placed into it.
       Note: we use the communicator dupcomm, not PetscObjectComm((PetscObject)pc)! */
      MatGetLocalSize(red->pmats,&mloc_sub,NULL);
      VecCreateMPI(PetscSubcommContiguousParent(red->psubcomm),mloc_sub,PETSC_DECIDE,&red->xdup);
      VecCreateMPIWithArray(PetscSubcommContiguousParent(red->psubcomm),1,mloc_sub,PETSC_DECIDE,NULL,&red->ydup);

      /* create vecscatters */
      if (!red->scatterin) { /* efficiency of scatterin is independent of the psubcomm type! */
        IS       is1,is2;
        PetscInt *idx1,*idx2,i,j,k;

        MatCreateVecs(pc->pmat,&x,NULL);
        VecGetSize(x,&M);
        VecGetOwnershipRange(x,&mstart,&mend);
        mlocal = mend - mstart;
        PetscMalloc2(red->psubcomm->n*mlocal,&idx1,red->psubcomm->n*mlocal,&idx2);
        j    = 0;
        for (k=0; k<red->psubcomm->n; k++) {
          for (i=mstart; i<mend; i++) {
            idx1[j]   = i;
            idx2[j++] = i + M*k;
          }
        }
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx1,PETSC_COPY_VALUES,&is1);
        ISCreateGeneral(comm,red->psubcomm->n*mlocal,idx2,PETSC_COPY_VALUES,&is2);
        VecScatterCreate(x,is1,red->xdup,is2,&red->scatterin);
        ISDestroy(&is1);
        ISDestroy(&is2);

        /* The implementation below works for PETSC_SUBCOMM_INTERLACED (no inter-process communication) and PETSC_SUBCOMM_CONTIGUOUS (communication within each subcomm) */
        ISCreateStride(comm,mlocal,mstart + red->psubcomm->color*M,1,&is1);
        ISCreateStride(comm,mlocal,mstart,1,&is2);
        VecScatterCreate(red->xdup,is1,x,is2,&red->scatterout);
        ISDestroy(&is1);
        ISDestroy(&is2);
        PetscFree2(idx1,idx2);
        VecDestroy(&x);
      }
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  } else { /* pc->setupcalled */
    if (red->useparallelmat) {
      MatReuse       reuse;
      /* grab the parallel matrix and put it into the processes of a subcommunicator */
      /*--------------------------------------------------------------------------*/
      if (pc->flag == DIFFERENT_NONZERO_PATTERN) {
        /* destroy old matrices */
        MatDestroy(&red->pmats);
        reuse = MAT_INITIAL_MATRIX;
      } else {
        reuse = MAT_REUSE_MATRIX;
      }
      MatCreateRedundantMatrix(pc->pmat,red->psubcomm->n,PetscSubcommChild(red->psubcomm),reuse,&red->pmats);
      KSPSetOperators(red->ksp,red->pmats,red->pmats);
    } else { /* !red->useparallelmat */
      KSPSetOperators(red->ksp,pc->mat,pc->pmat);
    }
  }

  if (pc->setfromoptionscalled) {
    KSPSetFromOptions(red->ksp);
  }
  KSPSetUp(red->ksp);
  return(0);
}

static PetscErrorCode PCApply_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscScalar    *array;

  if (!red->useparallelmat) {
    KSPSolve(red->ksp,x,y);
    KSPCheckSolve(red->ksp,pc,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);
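  /* Editorial note: VecPlaceArray() makes xsub alias xdup's local memory, so the
     subcommunicator solve reads the scattered values without an extra copy;
     VecResetArray() below removes the alias before the array is restored. */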

  /* apply preconditioner on each processor */
  KSPSolve(red->ksp,red->xsub,red->ysub);
  KSPCheckSolve(red->ksp,pc,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}

static PetscErrorCode PCApplyTranspose_Redundant(PC pc,Vec x,Vec y)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  PetscScalar    *array;

  if (!red->useparallelmat) {
    KSPSolveTranspose(red->ksp,x,y);
    KSPCheckSolve(red->ksp,pc,y);
    return(0);
  }

  /* scatter x to xdup */
  VecScatterBegin(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterin,x,red->xdup,INSERT_VALUES,SCATTER_FORWARD);

  /* place xdup's local array into xsub */
  VecGetArray(red->xdup,&array);
  VecPlaceArray(red->xsub,(const PetscScalar*)array);

  /* apply preconditioner on each processor */
  KSPSolveTranspose(red->ksp,red->xsub,red->ysub);
  KSPCheckSolve(red->ksp,pc,red->ysub);
  VecResetArray(red->xsub);
  VecRestoreArray(red->xdup,&array);

  /* place ysub's local array into ydup */
  VecGetArray(red->ysub,&array);
  VecPlaceArray(red->ydup,(const PetscScalar*)array);

  /* scatter ydup to y */
  VecScatterBegin(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatterout,red->ydup,y,INSERT_VALUES,SCATTER_FORWARD);
  VecResetArray(red->ydup);
  VecRestoreArray(red->ysub,&array);
  return(0);
}

static PetscErrorCode PCReset_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  if (red->useparallelmat) {
    VecScatterDestroy(&red->scatterin);
    VecScatterDestroy(&red->scatterout);
    VecDestroy(&red->ysub);
    VecDestroy(&red->xsub);
    VecDestroy(&red->xdup);
    VecDestroy(&red->ydup);
  }
  MatDestroy(&red->pmats);
  KSPReset(red->ksp);
  return(0);
}

static PetscErrorCode PCDestroy_Redundant(PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PCReset_Redundant(pc);
  KSPDestroy(&red->ksp);
  PetscSubcommDestroy(&red->psubcomm);
  PetscFree(pc->data);
  return(0);
}

static PetscErrorCode PCSetFromOptions_Redundant(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscOptionsHead(PetscOptionsObject,"Redundant options");
  PetscOptionsInt("-pc_redundant_number","Number of redundant pc","PCRedundantSetNumber",red->nsubcomm,&red->nsubcomm,0);
  PetscOptionsTail();
  return(0);
}

static PetscErrorCode PCRedundantSetNumber_Redundant(PC pc,PetscInt nreds)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  red->nsubcomm = nreds;
  return(0);
}

/*@
   PCRedundantSetNumber - Sets the number of redundant preconditioner contexts.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  nredundant - number of redundant preconditioner contexts; for example, if you are using 64 MPI processes and
                an nredundant of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: advanced

@*/
PetscErrorCode PCRedundantSetNumber(PC pc,PetscInt nredundant)
{
  if (nredundant <= 0) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"num of redundant pc %D must be positive",nredundant);
  PetscTryMethod(pc,"PCRedundantSetNumber_C",(PC,PetscInt),(pc,nredundant));
  return(0);
}
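
/* Usage sketch (illustrative, not part of the original source): with 64 MPI
   processes, requesting 4 redundant contexts yields 4 parallel solves, each
   on a 16-process subcommunicator. The pc below is assumed to come from
   KSPGetPC() on a 64-process KSP; error checking is omitted for brevity.

     PC pc;
     PCSetType(pc,PCREDUNDANT);
     PCRedundantSetNumber(pc,4);   // equivalent to -pc_redundant_number 4
*/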

static PetscErrorCode PCRedundantSetScatter_Redundant(PC pc,VecScatter in,VecScatter out)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;

  PetscObjectReference((PetscObject)in);
  VecScatterDestroy(&red->scatterin);

  red->scatterin  = in;

  PetscObjectReference((PetscObject)out);
  VecScatterDestroy(&red->scatterout);
  red->scatterout = out;
  return(0);
}

/*@
   PCRedundantSetScatter - Sets the scatter used to copy values into the
     redundant local solve and the scatter to move them back into the global
     vector.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  in - the scatter to move the values in
-  out - the scatter to move them out

   Level: advanced

@*/
PetscErrorCode PCRedundantSetScatter(PC pc,VecScatter in,VecScatter out)
{
  PetscTryMethod(pc,"PCRedundantSetScatter_C",(PC,VecScatter,VecScatter),(pc,in,out));
  return(0);
}

static PetscErrorCode PCRedundantGetKSP_Redundant(PC pc,KSP *innerksp)
{
  PC_Redundant   *red = (PC_Redundant*)pc->data;
  MPI_Comm       comm,subcomm;
  const char     *prefix;
  PetscBool      issbaij;

  if (!red->psubcomm) {
    PCGetOptionsPrefix(pc,&prefix);

    PetscObjectGetComm((PetscObject)pc,&comm);
    PetscSubcommCreate(comm,&red->psubcomm);
    PetscSubcommSetNumber(red->psubcomm,red->nsubcomm);
    PetscSubcommSetType(red->psubcomm,PETSC_SUBCOMM_CONTIGUOUS);

    PetscSubcommSetOptionsPrefix(red->psubcomm,prefix);
    PetscSubcommSetFromOptions(red->psubcomm);
    PetscLogObjectMemory((PetscObject)pc,sizeof(PetscSubcomm));

    /* create a new KSP (and PC) of which the processes in each subcomm share a copy */
    subcomm = PetscSubcommChild(red->psubcomm);

    KSPCreate(subcomm,&red->ksp);
    KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);
    PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);
    PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);
    KSPSetType(red->ksp,KSPPREONLY);
    KSPGetPC(red->ksp,&red->pc);
    PetscObjectTypeCompare((PetscObject)pc->pmat,MATSEQSBAIJ,&issbaij);
    if (!issbaij) {
      PetscObjectTypeCompare((PetscObject)pc->pmat,MATMPISBAIJ,&issbaij);
    }
    if (!issbaij) {
      PCSetType(red->pc,PCLU);
    } else {
      PCSetType(red->pc,PCCHOLESKY);
    }
    if (red->shifttypeset) {
      PCFactorSetShiftType(red->pc,red->shifttype);
      red->shifttypeset = PETSC_FALSE;
    }
    KSPSetOptionsPrefix(red->ksp,prefix);
    KSPAppendOptionsPrefix(red->ksp,"redundant_");
  }
  *innerksp = red->ksp;
  return(0);
}

/*@
   PCRedundantGetKSP - Gets the less-parallel KSP created by the redundant PC on the smaller communicator.

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the KSP on the smaller set of processes

   Level: advanced

@*/
PetscErrorCode PCRedundantGetKSP(PC pc,KSP *innerksp)
{
  PetscUseMethod(pc,"PCRedundantGetKSP_C",(PC,KSP*),(pc,innerksp));
  return(0);
}
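
/* Usage sketch (illustrative, not part of the original source): fetch the inner
   KSP to tune the subcommunicator solve programmatically; the same settings are
   reachable on the command line through the "redundant_" prefix, e.g.
   -redundant_ksp_type gmres -redundant_pc_type ilu. Error checking omitted.

     KSP inner;
     PC  innerpc;
     PCRedundantGetKSP(pc,&inner);
     KSPGetPC(inner,&innerpc);
     PCSetType(innerpc,PCILU);   // replace the default LU/Cholesky
*/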

static PetscErrorCode PCRedundantGetOperators_Redundant(PC pc,Mat *mat,Mat *pmat)
{
  PC_Redundant *red = (PC_Redundant*)pc->data;

  if (mat)  *mat  = red->pmats;
  if (pmat) *pmat = red->pmats;
  return(0);
}

/*@
   PCRedundantGetOperators - gets the redundant matrix and preconditioner matrix used on the subcommunicator

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  mat - the matrix
-  pmat - the (possibly different) preconditioner matrix

   Level: advanced

@*/
PetscErrorCode PCRedundantGetOperators(PC pc,Mat *mat,Mat *pmat)
{
  PetscUseMethod(pc,"PCRedundantGetOperators_C",(PC,Mat*,Mat*),(pc,mat,pmat));
  return(0);
}
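
/* Usage sketch (illustrative, not part of the original source): after PCSetUp()
   the gathered operator on the subcommunicator can be inspected, e.g. to view
   the redundant matrix each subgroup factors. Error checking omitted.

     Mat rmat;
     PCRedundantGetOperators(pc,&rmat,NULL);
     MatView(rmat,PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)rmat)));
*/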

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDUNDANT - Runs a KSP solver with preconditioner for the entire problem on subgroups of processors

     Options for the redundant preconditioners can be set using -redundant_pc_xxx; options for the redundant KSP can be set using -redundant_ksp_xxx

  Options Database:
.  -pc_redundant_number <n> - number of redundant solves; for example, if you are using 64 MPI processes and
                              use an n of 4, there will be 4 parallel solves, each on 16 = 64/4 processes.

   Level: intermediate

   Notes:
    The default KSP is preonly and the default PC is LU, or CHOLESKY if Pmat is of type MATSBAIJ.

   PCFactorSetShiftType() applied to this PC will convey the shift type into the inner PC if it is factorization based.

   Developer Notes:
    Note that PCSetInitialGuessNonzero() is not used by this class but likely should be.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedundantSetScatter(),
           PCRedundantGetKSP(), PCRedundantGetOperators(), PCRedundantSetNumber()
M*/
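
/* Usage sketch (illustrative, not part of the original source): a typical
   command line for a 64-process job with 4 redundant direct solves, assuming
   a hypothetical program ./app that calls KSPSetFromOptions():

     mpiexec -n 64 ./app -ksp_type fgmres -pc_type redundant \
                         -pc_redundant_number 4 \
                         -redundant_ksp_type preonly -redundant_pc_type lu
*/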

PETSC_EXTERN PetscErrorCode PCCreate_Redundant(PC pc)
{
  PC_Redundant   *red;
  PetscMPIInt    size;

  PetscNewLog(pc,&red);
  MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);

  red->nsubcomm       = size;
  red->useparallelmat = PETSC_TRUE;
  pc->data            = (void*)red;

  pc->ops->apply          = PCApply_Redundant;
  pc->ops->applytranspose = PCApplyTranspose_Redundant;
  pc->ops->setup          = PCSetUp_Redundant;
  pc->ops->destroy        = PCDestroy_Redundant;
  pc->ops->reset          = PCReset_Redundant;
  pc->ops->setfromoptions = PCSetFromOptions_Redundant;
  pc->ops->view           = PCView_Redundant;

  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetScatter_C",PCRedundantSetScatter_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantSetNumber_C",PCRedundantSetNumber_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetKSP_C",PCRedundantGetKSP_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCRedundantGetOperators_C",PCRedundantGetOperators_Redundant);
  PetscObjectComposeFunction((PetscObject)pc,"PCFactorSetShiftType_C",PCFactorSetShiftType_Redundant);
  return(0);
}