Actual source code: precon.c

petsc-3.12.2 2019-11-22
Report Typos and Errors

  2: /*
  3:     The PC (preconditioner) interface routines, callable by users.
  4: */
  5:  #include <petsc/private/pcimpl.h>
  6:  #include <petscdm.h>

  8: /* Logging support */
  9: PetscClassId  PC_CLASSID;
 10: PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
 11: PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
 12: PetscInt      PetscMGLevelId;

 14: PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
 15: {
 17:   PetscMPIInt    size;
 18:   PetscBool      hasop,flg1,flg2,set,flg3;

 21:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
 22:   if (pc->pmat) {
 23:     MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);
 24:     if (size == 1) {
 25:       MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);
 26:       MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);
 27:       MatIsSymmetricKnown(pc->pmat,&set,&flg3);
 28:       if (flg1 && (!flg2 || (set && flg3))) {
 29:         *type = PCICC;
 30:       } else if (flg2) {
 31:         *type = PCILU;
 32:       } else if (hasop) { /* likely is a parallel matrix run on one processor */
 33:         *type = PCBJACOBI;
 34:       } else {
 35:         *type = PCNONE;
 36:       }
 37:     } else {
 38:        if (hasop) {
 39:         *type = PCBJACOBI;
 40:       } else {
 41:         *type = PCNONE;
 42:       }
 43:     }
 44:   } else {
 45:     if (size == 1) {
 46:       *type = PCILU;
 47:     } else {
 48:       *type = PCBJACOBI;
 49:     }
 50:   }
 51:   return(0);
 52: }

 54: /*@
 55:    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats

 57:    Collective on PC

 59:    Input Parameter:
 60: .  pc - the preconditioner context

 62:    Level: developer

 64:    Notes:
 65:     This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC

 67: .seealso: PCCreate(), PCSetUp()
 68: @*/
PetscErrorCode  PCReset(PC pc)
{

  /* let the implementation release its type-specific data first */
  if (pc->ops->reset) {
    (*pc->ops->reset)(pc);
  }
  /* drop the diagonal scaling vectors and both operators */
  VecDestroy(&pc->diagonalscaleright);
  VecDestroy(&pc->diagonalscaleleft);
  MatDestroy(&pc->pmat);
  MatDestroy(&pc->mat);

  /* mark as never set up so the next PCSetUp() rebuilds everything */
  pc->setupcalled = 0;
  return(0);
}

 87: /*@
 88:    PCDestroy - Destroys PC context that was created with PCCreate().

 90:    Collective on PC

 92:    Input Parameter:
 93: .  pc - the preconditioner context

 95:    Level: developer

 97: .seealso: PCCreate(), PCSetUp()
 98: @*/
PetscErrorCode  PCDestroy(PC *pc)
{

  if (!*pc) return(0);
  /* only the last reference actually destroys the object */
  if (--((PetscObject)(*pc))->refct > 0) {*pc = 0; return(0);}

  /* release operators, scaling vectors, and implementation data */
  PCReset(*pc);

  /* if memory was published with SAWs then destroy it */
  PetscObjectSAWsViewOff((PetscObject)*pc);
  if ((*pc)->ops->destroy) {(*(*pc)->ops->destroy)((*pc));}
  DMDestroy(&(*pc)->dm);
  /* frees the header and sets *pc to NULL */
  PetscHeaderDestroy(pc);
  return(0);
}

118: /*@C
119:    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
120:       scaling as needed by certain time-stepping codes.

122:    Logically Collective on PC

124:    Input Parameter:
125: .  pc - the preconditioner context

127:    Output Parameter:
128: .  flag - PETSC_TRUE if it applies the scaling

130:    Level: developer

132:    Notes:
133:     If this returns PETSC_TRUE then the system solved via the Krylov method is
134: $           D M A D^{-1} y = D M b  for left preconditioning or
135: $           D A M D^{-1} z = D b for right preconditioning

137: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
138: @*/
PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
{
  /* report whether PCSetDiagonalScale() has installed scaling vectors */
  *flag = pc->diagonalscale;
  return(0);
}

148: /*@
149:    PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
150:       scaling as needed by certain time-stepping codes.

152:    Logically Collective on PC

154:    Input Parameters:
155: +  pc - the preconditioner context
156: -  s - scaling vector

158:    Level: intermediate

160:    Notes:
161:     The system solved via the Krylov method is
162: $           D M A D^{-1} y = D M b  for left preconditioning or
163: $           D A M D^{-1} z = D b for right preconditioning

165:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

167: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
168: @*/
169: PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
170: {

176:   pc->diagonalscale     = PETSC_TRUE;

178:   PetscObjectReference((PetscObject)s);
179:   VecDestroy(&pc->diagonalscaleleft);

181:   pc->diagonalscaleleft = s;

183:   VecDuplicate(s,&pc->diagonalscaleright);
184:   VecCopy(s,pc->diagonalscaleright);
185:   VecReciprocal(pc->diagonalscaleright);
186:   return(0);
187: }

189: /*@
190:    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

192:    Logically Collective on PC

194:    Input Parameters:
195: +  pc - the preconditioner context
196: .  in - input vector
-  out - scaled vector (may be the same as in)

199:    Level: intermediate

201:    Notes:
202:     The system solved via the Krylov method is
203: $           D M A D^{-1} y = D M b  for left preconditioning or
204: $           D A M D^{-1} z = D b for right preconditioning

206:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

208:    If diagonal scaling is turned off and in is not out then in is copied to out

210: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
211: @*/
212: PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
213: {

220:   if (pc->diagonalscale) {
221:     VecPointwiseMult(out,pc->diagonalscaleleft,in);
222:   } else if (in != out) {
223:     VecCopy(in,out);
224:   }
225:   return(0);
226: }

228: /*@
229:    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

231:    Logically Collective on PC

233:    Input Parameters:
234: +  pc - the preconditioner context
235: .  in - input vector
-  out - scaled vector (may be the same as in)

238:    Level: intermediate

240:    Notes:
241:     The system solved via the Krylov method is
242: $           D M A D^{-1} y = D M b  for left preconditioning or
243: $           D A M D^{-1} z = D b for right preconditioning

245:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

247:    If diagonal scaling is turned off and in is not out then in is copied to out

249: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
250: @*/
251: PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
252: {

259:   if (pc->diagonalscale) {
260:     VecPointwiseMult(out,pc->diagonalscaleright,in);
261:   } else if (in != out) {
262:     VecCopy(in,out);
263:   }
264:   return(0);
265: }

267: /*@
268:    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
269:    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.

272:    Logically Collective on PC

274:    Input Parameters:
275: +  pc - the preconditioner context
276: -  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

278:    Options Database Key:
279: .  -pc_use_amat <true,false>

281:    Notes:
282:    For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.

285:    Level: intermediate

.seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
288: @*/
PetscErrorCode  PCSetUseAmat(PC pc,PetscBool flg)
{
  /* record the flag; consulted when the PC needs to apply the operator */
  pc->useAmat = flg;
  return(0);
}

297: /*@
298:    PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected.

300:    Logically Collective on PC

302:    Input Parameters:
303: +  pc - iterative context obtained from PCCreate()
304: -  flg - PETSC_TRUE indicates you want the error generated

306:    Level: advanced

308:    Notes:
309:     Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve()
310:     to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
311:     detected.

313:     This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

315: .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
316: @*/
PetscErrorCode  PCSetErrorIfFailure(PC pc,PetscBool flg)
{
  /* record the flag; checked by apply/setup routines to decide whether to error */
  pc->erroriffailure = flg;
  return(0);
}

326: /*@
327:    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
328:    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
   TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators() not the Pmat.

331:    Logically Collective on PC

333:    Input Parameter:
334: .  pc - the preconditioner context

336:    Output Parameter:
337: .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

339:    Notes:
340:    For the common case in which the linear system matrix and the matrix used to construct the
   preconditioner are identical, this routine does nothing.

343:    Level: intermediate

.seealso: PCSetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
346: @*/
PetscErrorCode  PCGetUseAmat(PC pc,PetscBool *flg)
{
  /* return the flag set by PCSetUseAmat() (or the -pc_use_amat option) */
  *flg = pc->useAmat;
  return(0);
}

355: /*@
356:    PCCreate - Creates a preconditioner context.

358:    Collective

360:    Input Parameter:
361: .  comm - MPI communicator

363:    Output Parameter:
364: .  pc - location to put the preconditioner context

366:    Notes:
367:    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or ICC
368:    in parallel. For dense matrices it is always PCNONE.

370:    Level: developer

372: .seealso: PCSetUp(), PCApply(), PCDestroy()
373: @*/
PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
{
  PC             pc;

  *newpc = 0;
  /* make sure PC_CLASSID and the logging events are registered */
  PCInitializePackage();

  PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);

  /* initialize to an empty, not-yet-set-up state; the type is chosen later */
  pc->mat                  = 0;
  pc->pmat                 = 0;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = 0;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = 0;
  pc->diagonalscaleright   = 0;

  pc->modifysubmatrices  = 0;
  pc->modifysubmatricesP = 0;

  *newpc = pc;
  return(0);

}

403: /* -------------------------------------------------------------------------------*/

405: /*@
406:    PCApply - Applies the preconditioner to a vector.

408:    Collective on PC

410:    Input Parameters:
411: +  pc - the preconditioner context
412: -  x - input vector

414:    Output Parameter:
415: .  y - output vector

417:    Level: developer

419: .seealso: PCApplyTranspose(), PCApplyBAorAB()
420: @*/
421: PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
422: {
424:   PetscInt       m,n,mv,nv;

430:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
431:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
432:   /* use pmat to check vector sizes since for KSPLQR the pmat may be of a different size than mat */
433:   MatGetLocalSize(pc->pmat,&m,&n);
434:   VecGetLocalSize(x,&nv);
435:   VecGetLocalSize(y,&mv);
436:   if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal resulting vector number of rows %D",m,mv);
437:   if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal resulting vector number of rows %D",n,nv);
438:   VecSetErrorIfLocked(y,3);

440:   PCSetUp(pc);
441:   if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
442:   VecLockReadPush(x);
443:   PetscLogEventBegin(PC_Apply,pc,x,y,0);
444:   (*pc->ops->apply)(pc,x,y);
445:   PetscLogEventEnd(PC_Apply,pc,x,y,0);
446:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
447:   VecLockReadPop(x);
448:   return(0);
449: }

451: /*@
452:    PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

454:    Collective on PC

456:    Input Parameters:
457: +  pc - the preconditioner context
458: -  x - input vector

460:    Output Parameter:
461: .  y - output vector

463:    Notes:
464:    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

466:    Level: developer

468: .seealso: PCApply(), PCApplySymmetricRight()
469: @*/
PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
{

  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
  /* build (or rebuild) the preconditioner if needed before applying it */
  PCSetUp(pc);
  if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
  /* x is read-only during the apply; lock it to catch accidental writes */
  VecLockReadPush(x);
  PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);
  (*pc->ops->applysymmetricleft)(pc,x,y);
  PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);
  VecLockReadPop(x);
  if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
  return(0);
}

491: /*@
492:    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

494:    Collective on PC

496:    Input Parameters:
497: +  pc - the preconditioner context
498: -  x - input vector

500:    Output Parameter:
501: .  y - output vector

503:    Level: developer

505:    Notes:
506:    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

508: .seealso: PCApply(), PCApplySymmetricLeft()
509: @*/
510: PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
511: {

518:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
519:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
520:   PCSetUp(pc);
521:   if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
522:   VecLockReadPush(x);
523:   PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);
524:   (*pc->ops->applysymmetricright)(pc,x,y);
525:   PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);
526:   VecLockReadPop(x);
527:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
528:   return(0);
529: }

531: /*@
532:    PCApplyTranspose - Applies the transpose of preconditioner to a vector.

534:    Collective on PC

536:    Input Parameters:
537: +  pc - the preconditioner context
538: -  x - input vector

540:    Output Parameter:
541: .  y - output vector

543:    Notes:
544:     For complex numbers this applies the non-Hermitian transpose.

546:    Developer Notes:
547:     We need to implement a PCApplyHermitianTranspose()

549:    Level: developer

551: .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
552: @*/
PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
{

  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
  /* build (or rebuild) the preconditioner if needed before applying it */
  PCSetUp(pc);
  if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
  /* x is read-only during the apply; lock it to catch accidental writes */
  VecLockReadPush(x);
  /* note: logged under the same PC_Apply event as the forward apply */
  PetscLogEventBegin(PC_Apply,pc,x,y,0);
  (*pc->ops->applytranspose)(pc,x,y);
  PetscLogEventEnd(PC_Apply,pc,x,y,0);
  VecLockReadPop(x);
  if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
  return(0);
}

574: /*@
575:    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

577:    Collective on PC

579:    Input Parameters:
580: .  pc - the preconditioner context

582:    Output Parameter:
583: .  flg - PETSC_TRUE if a transpose operation is defined

585:    Level: developer

587: .seealso: PCApplyTranspose()
588: @*/
589: PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
590: {
594:   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
595:   else *flg = PETSC_FALSE;
596:   return(0);
597: }

599: /*@
600:    PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

602:    Collective on PC

604:    Input Parameters:
605: +  pc - the preconditioner context
606: .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
607: .  x - input vector
608: -  work - work vector

610:    Output Parameter:
611: .  y - output vector

613:    Level: developer

615:    Notes:
616:     If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or  D A M D^{-1} is actually applied. Note that the
617:    specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

619: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
620: @*/
PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
{

  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
  if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) {VecValidValues(x,3,PETSC_TRUE);}

  PCSetUp(pc);
  if (pc->diagonalscale) {
    /* scaled path: apply D^{-1} on the way in and D on the way out */
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      VecDuplicate(x,&work2);
      PCDiagonalScaleRight(pc,x,work2);
      (*pc->ops->applyBA)(pc,side,work2,y,work);
      PCDiagonalScaleLeft(pc,y,y);
      VecDestroy(&work2);
    } else if (side == PC_RIGHT) {
      /* y = D (A (B (D^{-1} x))) */
      PCDiagonalScaleRight(pc,x,y);
      PCApply(pc,y,work);
      MatMult(pc->mat,work,y);
      PCDiagonalScaleLeft(pc,y,y);
    } else if (side == PC_LEFT) {
      /* y = D (B (A (D^{-1} x))) */
      PCDiagonalScaleRight(pc,x,y);
      MatMult(pc->mat,y,work);
      PCApply(pc,work,y);
      PCDiagonalScaleLeft(pc,y,y);
    /* defensive: this combination is already rejected by the guard above */
    } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    /* unscaled path: prefer a fused implementation-provided applyBA */
    if (pc->ops->applyBA) {
      (*pc->ops->applyBA)(pc,side,x,y,work);
    } else if (side == PC_RIGHT) {
      /* y = A (B x) */
      PCApply(pc,x,work);
      MatMult(pc->mat,work,y);
    } else if (side == PC_LEFT) {
      /* y = B (A x) */
      MatMult(pc->mat,x,work);
      PCApply(pc,work,y);
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PCApplySymmetricRight(pc,x,work);
      MatMult(pc->mat,work,y);
      VecCopy(y,work);
      PCApplySymmetricLeft(pc,work,y);
    }
  }
  if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
  return(0);
}

676: /*@
677:    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
678:    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
679:    NOT tr(B*A) = tr(A)*tr(B).

681:    Collective on PC

683:    Input Parameters:
684: +  pc - the preconditioner context
685: .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
686: .  x - input vector
687: -  work - work vector

689:    Output Parameter:
690: .  y - output vector


693:    Notes:
694:     this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
695:       defined by B'. This is why this has the funny form that it computes tr(B) * tr(A)

697:     Level: developer

699: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
700: @*/
PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
{

  if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
  if (pc->erroriffailure) {VecValidValues(x,3,PETSC_TRUE);}
  /* prefer a fused implementation-provided transpose apply if one exists */
  if (pc->ops->applyBAtranspose) {
    (*pc->ops->applyBAtranspose)(pc,side,x,y,work);
    if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
    return(0);
  }
  if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

  PCSetUp(pc);
  if (side == PC_RIGHT) {
    /* y = tr(A) (tr(B) x) */
    PCApplyTranspose(pc,x,work);
    MatMultTranspose(pc->mat,work,y);
  } else if (side == PC_LEFT) {
    /* y = tr(B) (tr(A) x) */
    MatMultTranspose(pc->mat,x,work);
    PCApplyTranspose(pc,work,y);
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
  return(0);
}

732: /* -------------------------------------------------------------------------------*/

734: /*@
735:    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
736:    built-in fast application of Richardson's method.

738:    Not Collective

740:    Input Parameter:
741: .  pc - the preconditioner

743:    Output Parameter:
744: .  exists - PETSC_TRUE or PETSC_FALSE

746:    Level: developer

748: .seealso: PCApplyRichardson()
749: @*/
750: PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
751: {
755:   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
756:   else *exists = PETSC_FALSE;
757:   return(0);
758: }

760: /*@
761:    PCApplyRichardson - Applies several steps of Richardson iteration with
762:    the particular preconditioner. This routine is usually used by the
763:    Krylov solvers and not the application code directly.

765:    Collective on PC

767:    Input Parameters:
768: +  pc  - the preconditioner context
769: .  b   - the right hand side
770: .  w   - one work vector
771: .  rtol - relative decrease in residual norm convergence criteria
772: .  abstol - absolute residual norm convergence criteria
773: .  dtol - divergence residual norm increase criteria
774: .  its - the number of iterations to apply.
775: -  guesszero - if the input x contains nonzero initial guess

777:    Output Parameter:
778: +  outits - number of iterations actually used (for SOR this always equals its)
779: .  reason - the reason the apply terminated
-  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

782:    Notes:
783:    Most preconditioners do not support this function. Use the command
784:    PCApplyRichardsonExists() to determine if one does.

786:    Except for the multigrid PC this routine ignores the convergence tolerances
787:    and always runs for the number of iterations

789:    Level: developer

791: .seealso: PCApplyRichardsonExists()
792: @*/
PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
{

  if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
  /* build (or rebuild) the preconditioner if needed before iterating */
  PCSetUp(pc);
  if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
  /* delegate entirely to the implementation's Richardson sweep */
  (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);
  return(0);
}

809: /*@
810:    PCGetFailedReason - Gets the reason a PCSetUp() failed or 0 if it did not fail

812:    Logically Collective on PC

814:    Input Parameter:
815: .  pc - the preconditioner context

817:    Output Parameter:
818: .  reason - the reason it failed, currently only -1

820:    Level: advanced

822: .seealso: PCCreate(), PCApply(), PCDestroy()
823: @*/
824: PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
825: {
827:   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
828:   else *reason = pc->failedreason;
829:   return(0);
830: }


833: /*
834:       a setupcall of 0 indicates never setup,
835:                      1 indicates has been previously setup
836:                     -1 indicates a PCSetUp() was attempted and failed
837: */
838: /*@
839:    PCSetUp - Prepares for the use of a preconditioner.

841:    Collective on PC

843:    Input Parameter:
844: .  pc - the preconditioner context

846:    Level: developer

848: .seealso: PCCreate(), PCApply(), PCDestroy()
849: @*/
PetscErrorCode  PCSetUp(PC pc)
{
  PetscErrorCode   ierr;
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  /* user asked to keep the existing preconditioner regardless of operator changes */
  if (pc->setupcalled && pc->reusepreconditioner) {
    PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");
    return(0);
  }

  /* compare pmat's state counters against those recorded at the last setup
     to decide whether (and how much of) the preconditioner must be rebuilt */
  PetscObjectStateGet((PetscObject)pc->pmat,&matstate);
  MatGetNonzeroState(pc->pmat,&matnonzerostate);
  if (!pc->setupcalled) {
    PetscInfo(pc,"Setting up PC for first time\n");
    pc->flag        = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* operator values unchanged since last setup: nothing to do */
    PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");
    return(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
       PetscInfo(pc,"Setting up PC with different nonzero pattern\n");
       pc->flag            = DIFFERENT_NONZERO_PATTERN;
    } else {
      /* values changed but sparsity did not; implementations may reuse symbolic work */
      PetscInfo(pc,"Setting up PC with same nonzero pattern\n");
      pc->flag            = SAME_NONZERO_PATTERN;
    }
  }
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* no type chosen yet: pick a sensible default for this matrix/communicator */
  if (!((PetscObject)pc)->type_name) {
    PCGetDefaultType_Private(pc,&def);
    PCSetType(pc,def);
  }

  MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);
  MatSetErrorIfFailure(pc->mat,pc->erroriffailure);
  PetscLogEventBegin(PC_SetUp,pc,0,0,0);
  if (pc->ops->setup) {
    (*pc->ops->setup)(pc);
  }
  PetscLogEventEnd(PC_SetUp,pc,0,0,0);
  /* preserve a negative (failure) code if one was recorded; see comment above */
  if (!pc->setupcalled) pc->setupcalled = 1;
  return(0);
}

901: /*@
902:    PCSetUpOnBlocks - Sets up the preconditioner for each block in
903:    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
904:    methods.

906:    Collective on PC

908:    Input Parameters:
909: .  pc - the preconditioner context

911:    Level: developer

913: .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
914: @*/
PetscErrorCode  PCSetUpOnBlocks(PC pc)
{

  /* no-op for preconditioners without sub-block setup (only some, e.g. domain-decomposition types, provide it) */
  if (!pc->ops->setuponblocks) return(0);
  PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);
  (*pc->ops->setuponblocks)(pc);
  PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);
  return(0);
}

928: /*@C
929:    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
930:    submatrices that arise within certain subdomain-based preconditioners.
931:    The basic submatrices are extracted from the preconditioner matrix as
932:    usual; the user can then alter these (for example, to set different boundary
933:    conditions for each submatrix) before they are used for the local solves.

935:    Logically Collective on PC

937:    Input Parameters:
938: +  pc - the preconditioner context
939: .  func - routine for modifying the submatrices
940: -  ctx - optional user-defined context (may be null)

942:    Calling sequence of func:
943: $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

945: +  row - an array of index sets that contain the global row numbers
946:          that comprise each local submatrix
947: .  col - an array of index sets that contain the global column numbers
948:          that comprise each local submatrix
949: .  submat - array of local submatrices
950: -  ctx - optional user-defined context for private data for the
951:          user-defined func routine (may be null)

953:    Notes:
954:    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
955:    KSPSolve().

957:    A routine set by PCSetModifySubMatrices() is currently called within
958:    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
959:    preconditioners.  All other preconditioners ignore this routine.

961:    Level: advanced

963: .seealso: PCModifySubMatrices(), PCASMGetSubMatrices()
964: @*/
PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
{
  /* store the callback and its context; invoked later by PCModifySubMatrices() */
  pc->modifysubmatrices  = func;
  pc->modifysubmatricesP = ctx;
  return(0);
}

974: /*@C
975:    PCModifySubMatrices - Calls an optional user-defined routine within
   certain preconditioners if one has been set with PCSetModifySubMatrices().

978:    Collective on PC

980:    Input Parameters:
981: +  pc - the preconditioner context
982: .  nsub - the number of local submatrices
983: .  row - an array of index sets that contain the global row numbers
984:          that comprise each local submatrix
985: .  col - an array of index sets that contain the global column numbers
986:          that comprise each local submatrix
987: .  submat - array of local submatrices
988: -  ctx - optional user-defined context for private data for the
989:          user-defined routine (may be null)

991:    Output Parameter:
992: .  submat - array of local submatrices (the entries of which may
993:             have been modified)

995:    Notes:
996:    The user should NOT generally call this routine, as it will
997:    automatically be called within certain preconditioners (currently
998:    block Jacobi, additive Schwarz) if set.

1000:    The basic submatrices are extracted from the preconditioner matrix
1001:    as usual; the user can then alter these (for example, to set different
1002:    boundary conditions for each submatrix) before they are used for the
1003:    local solves.

1005:    Level: developer

1007: .seealso: PCSetModifySubMatrices()
1008: @*/
PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
{

  /* nothing to do unless the user registered a callback via PCSetModifySubMatrices() */
  if (!pc->modifysubmatrices) return(0);
  PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);
  (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);
  PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);
  return(0);
}

1022: /*@
1023:    PCSetOperators - Sets the matrix associated with the linear system and
1024:    a (possibly) different one associated with the preconditioner.

1026:    Logically Collective on PC

1028:    Input Parameters:
1029: +  pc - the preconditioner context
1030: .  Amat - the matrix that defines the linear system
1031: -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

1033:    Notes:
1034:     Passing a NULL for Amat or Pmat removes the matrix that is currently used.

1036:     If you wish to replace either Amat or Pmat but leave the other one untouched then
1037:     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1038:     on it and then pass it back in your call to KSPSetOperators().

1040:    More Notes about Repeated Solution of Linear Systems:
1041:    PETSc does NOT reset the matrix entries of either Amat or Pmat
1042:    to zero after a linear solve; the user is completely responsible for
1043:    matrix assembly.  See the routine MatZeroEntries() if desiring to
1044:    zero all elements of a matrix.

1046:    Level: intermediate

1048: .seealso: PCGetOperators(), MatZeroEntries()
1049:  @*/
1050: PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat)
1051: {
1052:   PetscErrorCode   ierr;
1053:   PetscInt         m1,n1,m2,n2;

      /* Once PCSetUp() has run, the local sizes of both operators are fixed;
         replacing them with differently-sized matrices is an error. */
1061:   if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
1062:     MatGetLocalSize(Amat,&m1,&n1);
1063:     MatGetLocalSize(pc->mat,&m2,&n2);
1064:     if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1065:     MatGetLocalSize(Pmat,&m1,&n1);
1066:     MatGetLocalSize(pc->pmat,&m2,&n2);
1067:     if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1068:   }

1070:   if (Pmat != pc->pmat) {
1071:     /* the operator that defines the preconditioner changed, so invalidate the cached states so a new preconditioner is built */
1072:     pc->matnonzerostate = -1;
1073:     pc->matstate        = -1;
1074:   }

1076:   /* reference first in case the matrices are the same */
1077:   if (Amat) {PetscObjectReference((PetscObject)Amat);}
1078:   MatDestroy(&pc->mat);
1079:   if (Pmat) {PetscObjectReference((PetscObject)Pmat);}
1080:   MatDestroy(&pc->pmat);
1081:   pc->mat  = Amat;
1082:   pc->pmat = Pmat;
1083:   return(0);
1084: }

1086: /*@
1087:    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

1089:    Logically Collective on PC

1091:    Input Parameters:
1092: +  pc - the preconditioner context
1093: -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

1095:     Level: intermediate

1097: .seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
1098:  @*/
1099: PetscErrorCode  PCSetReusePreconditioner(PC pc,PetscBool flag)
1100: {
1103:   pc->reusepreconditioner = flag;  /* just records the flag; the setup logic (not shown here) consults it when deciding whether to rebuild */
1104:   return(0);
1105: }

1107: /*@
1108:    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.

1110:    Not Collective

1112:    Input Parameter:
1113: .  pc - the preconditioner context

1115:    Output Parameter:
1116: .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

1118:    Level: intermediate

1120: .seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
1121:  @*/
1122: PetscErrorCode  PCGetReusePreconditioner(PC pc,PetscBool *flag)
1123: {
1126:   *flag = pc->reusepreconditioner;  /* report the flag set by PCSetReusePreconditioner() */
1127:   return(0);
1128: }

1130: /*@
1131:    PCGetOperators - Gets the matrix associated with the linear system and
1132:    possibly a different one associated with the preconditioner.

1134:    Not collective, though parallel Mats are returned if the PC is parallel

1136:    Input Parameter:
1137: .  pc - the preconditioner context

1139:    Output Parameters:
1140: +  Amat - the matrix defining the linear system
1141: -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

1143:    Level: intermediate

1145:    Notes:
1146:     Does not increase the reference count of the matrices, so you should not destroy them

1148:    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1149:       are created in PC and returned to the user. In this case, if both operators
1150:       mat and pmat are requested, two DIFFERENT operators will be returned. If
1151:       only one is requested both operators in the PC will be the same (i.e. as
1152:       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1153:       The user must set the sizes of the returned matrices and their type etc just
1154:       as if the user created them with MatCreate(). For example,

1156: $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1157: $           set size, type, etc of Amat

1159: $         MatCreate(comm,&mat);
1160: $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1161: $         PetscObjectDereference((PetscObject)mat);
1162: $           set size, type, etc of Amat

1164:      and

1166: $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1167: $           set size, type, etc of Amat and Pmat

1169: $         MatCreate(comm,&Amat);
1170: $         MatCreate(comm,&Pmat);
1171: $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1172: $         PetscObjectDereference((PetscObject)Amat);
1173: $         PetscObjectDereference((PetscObject)Pmat);
1174: $           set size, type, etc of Amat and Pmat

1176:    The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
1177:    of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1178:     managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1179:     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1180:     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1181:     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1182:     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1183:     it can be created for you?


1186: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
1187: @*/
1188: PetscErrorCode  PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
1189: {

      /* Lazily creates empty operator(s) when none were set; see the manual-page
         comment above for the ownership and aliasing rules this implements.
         Output pointers may be NULL when the caller does not want that operator. */
1194:   if (Amat) {
1195:     if (!pc->mat) {
1196:       if (pc->pmat && !Pmat) {  /* Apmat has been set, but user did not request it, so use for Amat */
1197:         pc->mat = pc->pmat;
1198:         PetscObjectReference((PetscObject)pc->mat);
1199:       } else {                  /* both Amat and Pmat are empty */
1200:         MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);
1201:         if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
1202:           pc->pmat = pc->mat;
1203:           PetscObjectReference((PetscObject)pc->pmat);
1204:         }
1205:       }
1206:     }
1207:     *Amat = pc->mat;
1208:   }
1209:   if (Pmat) {
1210:     if (!pc->pmat) {
1211:       if (pc->mat && !Amat) {    /* Amat has been set but was not requested, so use for pmat */
1212:         pc->pmat = pc->mat;
1213:         PetscObjectReference((PetscObject)pc->pmat);
1214:       } else {
1215:         MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);
1216:         if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
1217:           pc->mat = pc->pmat;
1218:           PetscObjectReference((PetscObject)pc->mat);
1219:         }
1220:       }
1221:     }
1222:     *Pmat = pc->pmat;
1223:   }
1224:   return(0);
1225: }

1227: /*@C
1228:    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1229:    possibly a different one associated with the preconditioner have been set in the PC.

1231:    Not collective, though the results on all processes should be the same

1233:    Input Parameter:
1234: .  pc - the preconditioner context

1236:    Output Parameters:
1237: +  mat - the matrix associated with the linear system was set
1238: -  pmat - matrix associated with the preconditioner was set, usually the same

1240:    Level: intermediate

1242: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1243: @*/
1244: PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1245: {
      /* NULL output pointers are allowed and simply skipped */
1248:   if (mat) *mat = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1249:   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1250:   return(0);
1251: }

1253: /*@
1254:    PCFactorGetMatrix - Gets the factored matrix from the
1255:    preconditioner context.  This routine is valid only for the LU,
1256:    incomplete LU, Cholesky, and incomplete Cholesky methods.

1258:    Not Collective on PC though Mat is parallel if PC is parallel

1260:    Input Parameters:
1261: .  pc - the preconditioner context

1263:    Output parameters:
1264: .  mat - the factored matrix

1266:    Level: advanced

1268:    Notes:
1269:     Does not increase the reference count for the matrix so DO NOT destroy it

1271: @*/
1272: PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1273: {

      /* Delegates to the type-specific implementation; PC types without a
         factored matrix (anything but LU/ILU/Cholesky/ICC-like types) error out. */
1279:   if (pc->ops->getfactoredmatrix) {
1280:     (*pc->ops->getfactoredmatrix)(pc,mat);
1281:   } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1282:   return(0);
1283: }

1285: /*@C
1286:    PCSetOptionsPrefix - Sets the prefix used for searching for all
1287:    PC options in the database.

1289:    Logically Collective on PC

1291:    Input Parameters:
1292: +  pc - the preconditioner context
1293: -  prefix - the prefix string to prepend to all PC option requests

1295:    Notes:
1296:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1297:    The first character of all runtime options is AUTOMATICALLY the
1298:    hyphen.

1300:    Level: advanced

1302: .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1303: @*/
1304: PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
1305: {

1310:   PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);  /* thin wrapper over the generic PetscObject prefix routine */
1311:   return(0);
1312: }

1314: /*@C
1315:    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1316:    PC options in the database.

1318:    Logically Collective on PC

1320:    Input Parameters:
1321: +  pc - the preconditioner context
1322: -  prefix - the prefix string to prepend to all PC option requests

1324:    Notes:
1325:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1326:    The first character of all runtime options is AUTOMATICALLY the
1327:    hyphen.

1329:    Level: advanced

1331: .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1332: @*/
1333: PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
1334: {

1339:   PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);  /* thin wrapper over the generic PetscObject prefix routine */
1340:   return(0);
1341: }

1343: /*@C
1344:    PCGetOptionsPrefix - Gets the prefix used for searching for all
1345:    PC options in the database.

1347:    Not Collective

1349:    Input Parameters:
1350: .  pc - the preconditioner context

1352:    Output Parameters:
1353: .  prefix - pointer to the prefix string used, is returned

1355:    Notes:
1356:    On the fortran side, the user should pass in a string 'prefix' of
1357:    sufficient length to hold the prefix.

1359:    Level: advanced

1361: .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1362: @*/
1363: PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
1364: {

1370:   PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);  /* returns a pointer into the object; caller must not free it */
1371:   return(0);
1372: }

1374: /*
1375:    Indicates the right hand side will be changed by KSPSolve(), this occurs for a few
1376:   preconditioners including BDDC and Eisenstat that transform the equations before applying
1377:   the Krylov methods
1378: */
1379: PETSC_INTERN PetscErrorCode  PCPreSolveChangeRHS(PC pc,PetscBool *change)
1380: {

1386:   *change = PETSC_FALSE;  /* default: the RHS is not modified unless the PC type provides PCPreSolveChangeRHS_C */
1387:   PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));  /* TryMethod: silently a no-op when the type has no override */
1388:   return(0);
1389: }

1391: /*@
1392:    PCPreSolve - Optional pre-solve phase, intended for any
1393:    preconditioner-specific actions that must be performed before
1394:    the iterative solve itself.

1396:    Collective on PC

1398:    Input Parameters:
1399: +  pc - the preconditioner context
1400: -  ksp - the Krylov subspace context

1402:    Level: developer

1404:    Sample of Usage:
1405: .vb
1406:     PCPreSolve(pc,ksp);
1407:     KSPSolve(ksp,b,x);
1408:     PCPostSolve(pc,ksp);
1409: .ve

1411:    Notes:
1412:    The pre-solve phase is distinct from the PCSetUp() phase.

1414:    KSPSolve() calls this directly, so is rarely called by the user.

1416: .seealso: PCPostSolve()
1417: @*/
1418: PetscErrorCode  PCPreSolve(PC pc,KSP ksp)
1419: {
1421:   Vec            x,rhs;

      /* presolvedone counts nesting of PCPreSolve() calls; it is decremented by
         the matching PCPostSolve().  More than two levels of nesting is refused. */
1426:   pc->presolvedone++;
1427:   if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1428:   KSPGetSolution(ksp,&x);
1429:   KSPGetRhs(ksp,&rhs);

1431:   if (pc->ops->presolve) {
1432:     (*pc->ops->presolve)(pc,ksp,rhs,x);  /* type-specific pre-solve hook; optional */
1433:   }
1434:   return(0);
1435: }

1437: /*@
1438:    PCPostSolve - Optional post-solve phase, intended for any
1439:    preconditioner-specific actions that must be performed after
1440:    the iterative solve itself.

1442:    Collective on PC

1444:    Input Parameters:
1445: +  pc - the preconditioner context
1446: -  ksp - the Krylov subspace context

1448:    Sample of Usage:
1449: .vb
1450:     PCPreSolve(pc,ksp);
1451:     KSPSolve(ksp,b,x);
1452:     PCPostSolve(pc,ksp);
1453: .ve

1455:    Note:
1456:    KSPSolve() calls this routine directly, so it is rarely called by the user.

1458:    Level: developer

1460: .seealso: PCPreSolve(), KSPSolve()
1461: @*/
1462: PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1463: {
1465:   Vec            x,rhs;

1470:   pc->presolvedone--;  /* undo the nesting count incremented by the matching PCPreSolve() */
1471:   KSPGetSolution(ksp,&x);
1472:   KSPGetRhs(ksp,&rhs);
1473:   if (pc->ops->postsolve) {
1474:      (*pc->ops->postsolve)(pc,ksp,rhs,x);  /* type-specific post-solve hook; optional */
1475:   }
1476:   return(0);
1477: }

1479: /*@C
1480:   PCLoad - Loads a PC that has been stored in binary  with PCView().

1482:   Collective on PetscViewer

1484:   Input Parameters:
1485: + newdm - the newly loaded PC, this needs to have been created with PCCreate() or
1486:            some related function before a call to PCLoad().
1487: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

1489:    Level: intermediate

1491:   Notes:
1492:    The type is determined by the data in the file, any type set into the PC before this call is ignored.

1494:   Notes for advanced users:
1495:   Most users should not need to know the details of the binary storage
1496:   format, since PCLoad() and PCView() completely hide these details.
1497:   But for anyone who's interested, the standard binary matrix storage
1498:   format is
1499: .vb
1500:      has not yet been determined
1501: .ve

1503: .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
1504: @*/
1505: PetscErrorCode  PCLoad(PC newdm, PetscViewer viewer)
1506: {
1508:   PetscBool      isbinary;
1509:   PetscInt       classid;
1510:   char           type[256];

      /* Only binary viewers are supported; the stored format (written by PCView())
         is: PC_FILE_CLASSID as a PetscInt, then the type name as 256 chars. */
1515:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1516:   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

1518:   PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);
1519:   if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1520:   PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);
1521:   PCSetType(newdm, type);  /* any type previously set into newdm is overridden by the file's type */
1522:   if (newdm->ops->load) {
1523:     (*newdm->ops->load)(newdm,viewer);  /* let the concrete type read its own data, if it implements load */
1524:   }
1525:   return(0);
1526: }

1528:  #include <petscdraw.h>
1529: #if defined(PETSC_HAVE_SAWS)
1530:  #include <petscviewersaws.h>
1531: #endif
1532: /*@C
1533:    PCView - Prints the PC data structure.

1535:    Collective on PC

1537:    Input Parameters:
1538: +  PC - the PC context
1539: -  viewer - optional visualization context

1541:    Note:
1542:    The available visualization contexts include
1543: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
1544: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1545:          output where only the first processor opens
1546:          the file.  All other processors send their
1547:          data to the first processor to print.

1549:    The user can open an alternative visualization contexts with
1550:    PetscViewerASCIIOpen() (output to a specified file).

1552:    Level: developer

1554: .seealso: KSPView(), PetscViewerASCIIOpen()
1555: @*/
1556: PetscErrorCode  PCView(PC pc,PetscViewer viewer)
1557: {
1558:   PCType         cstr;
1560:   PetscBool      iascii,isstring,isbinary,isdraw;
1561: #if defined(PETSC_HAVE_SAWS)
1562:   PetscBool      issaws;
1563: #endif

1567:   if (!viewer) {
1568:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);  /* default viewer: stdout on the PC's communicator */
1569:   }

      /* Dispatch on the viewer type: ASCII, string, binary, draw, and (optionally) SAWs are handled below. */
1573:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1574:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
1575:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1576:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
1577: #if defined(PETSC_HAVE_SAWS)
1578:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);
1579: #endif

1581:   if (iascii) {
1582:     PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);
1583:     if (!pc->setupcalled) {
1584:       PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n");
1585:     }
1586:     if (pc->ops->view) {
1587:       PetscViewerASCIIPushTab(viewer);
1588:       (*pc->ops->view)(pc,viewer);  /* type-specific details, indented one tab */
1589:       PetscViewerASCIIPopTab(viewer);
1590:     }
1591:     if (pc->mat) {
1592:       PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
1593:       if (pc->pmat == pc->mat) {
1594:         PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");
1595:         PetscViewerASCIIPushTab(viewer);
1596:         MatView(pc->mat,viewer);
1597:         PetscViewerASCIIPopTab(viewer);
1598:       } else {
1599:         if (pc->pmat) {
1600:           PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");
1601:         } else {
1602:           PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");
1603:         }
1604:         PetscViewerASCIIPushTab(viewer);
1605:         MatView(pc->mat,viewer);
1606:         if (pc->pmat) {MatView(pc->pmat,viewer);}
1607:         PetscViewerASCIIPopTab(viewer);
1608:       }
1609:       PetscViewerPopFormat(viewer);
1610:     }
1611:   } else if (isstring) {
1612:     PCGetType(pc,&cstr);
1613:     PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);
1614:     if (pc->ops->view) {(*pc->ops->view)(pc,viewer);}
1615:     if (pc->mat) {MatView(pc->mat,viewer);}
1616:     if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1617:   } else if (isbinary) {
1618:     PetscInt    classid = PC_FILE_CLASSID;
1619:     MPI_Comm    comm;
1620:     PetscMPIInt rank;
1621:     char        type[256];

      /* Binary format read back by PCLoad(): classid followed by a 256-char type name, written by rank 0 only. */
1623:     PetscObjectGetComm((PetscObject)pc,&comm);
1624:     MPI_Comm_rank(comm,&rank);
1625:     if (!rank) {
1626:       PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT,PETSC_FALSE);
1627:       PetscStrncpy(type,((PetscObject)pc)->type_name,256);
1628:       PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR,PETSC_FALSE);
1629:     }
1630:     if (pc->ops->view) {
1631:       (*pc->ops->view)(pc,viewer);
1632:     }
1633:   } else if (isdraw) {
1634:     PetscDraw draw;
1635:     char      str[25];
1636:     PetscReal x,y,bottom,h;
1637:     PetscInt  n;

1639:     PetscViewerDrawGetDraw(viewer,0,&draw);
1640:     PetscDrawGetCurrentPoint(draw,&x,&y);
1641:     if (pc->mat) {
1642:       MatGetSize(pc->mat,&n,NULL);
1643:       PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);
1644:     } else {
1645:       PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);
1646:     }
1647:     PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);
1648:     bottom = y - h;  /* draw any type-specific picture below the label box */
1649:     PetscDrawPushCurrentPoint(draw,x,bottom);
1650:     if (pc->ops->view) {
1651:       (*pc->ops->view)(pc,viewer);
1652:     }
1653:     PetscDrawPopCurrentPoint(draw);
1654: #if defined(PETSC_HAVE_SAWS)
1655:   } else if (issaws) {
1656:     PetscMPIInt rank;

1658:     PetscObjectName((PetscObject)pc);
1659:     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
1660:     if (!((PetscObject)pc)->amsmem && !rank) {
1661:       PetscObjectViewSAWs((PetscObject)pc,viewer);  /* publish the object to SAWs once, from rank 0 */
1662:     }
1663:     if (pc->mat) {MatView(pc->mat,viewer);}
1664:     if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1665: #endif
1666:   }
1667:   return(0);
1668: }

1670: /*@C
1671:   PCRegister -  Adds a method to the preconditioner package.

1673:    Not collective

1675:    Input Parameters:
1676: +  name_solver - name of a new user-defined solver
1677: -  routine_create - routine to create method context

1679:    Notes:
1680:    PCRegister() may be called multiple times to add several user-defined preconditioners.

1682:    Sample usage:
1683: .vb
1684:    PCRegister("my_solver", MySolverCreate);
1685: .ve

1687:    Then, your solver can be chosen with the procedural interface via
1688: $     PCSetType(pc,"my_solver")
1689:    or at runtime via the option
1690: $     -pc_type my_solver

1692:    Level: advanced

1694: .seealso: PCRegisterAll(), PCRegisterDestroy()
1695: @*/
1696: PetscErrorCode  PCRegister(const char sname[],PetscErrorCode (*function)(PC))
1697: {

1701:   PCInitializePackage();  /* make sure the PC package (and PCList) exists before registering */
1702:   PetscFunctionListAdd(&PCList,sname,function);
1703:   return(0);
1704: }

1706: static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
1707: {
1708:   PC             pc;

      /* MatShell callback used by PCComputeOperator(): Y = PC(X), with the PC
         stored as the shell context. */
1712:   MatShellGetContext(A,&pc);
1713:   PCApply(pc,X,Y);
1714:   return(0);
1715: }

1717: /*@
1718:     PCComputeOperator - Computes the explicit preconditioned operator.

1720:     Collective on PC

1722:     Input Parameter:
1723: +   pc - the preconditioner object
1724: -   mattype - the matrix type to be used for the operator

1726:     Output Parameter:
1727: .   mat - the explicit preconditioned operator

1729:     Notes:
1730:     This computation is done by applying the operators to columns of the identity matrix.
1731:     This routine is costly in general, and is recommended for use only with relatively small systems.
1732:     Currently, this routine uses a dense matrix format when mattype == NULL

1734:     Level: advanced

1736: .seealso: KSPComputeOperator(), MatType

1738: @*/
1739: PetscErrorCode  PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1740: {
1742:   PetscInt       N,M,m,n;
1743:   Mat            A,Apc;

      /* Wrap PCApply() in a MatShell with the same sizes as the system matrix,
         then let MatComputeOperator() build the explicit matrix from it
         (by applying the shell to columns of the identity -- costly for large N). */
1748:   PCGetOperators(pc,&A,NULL);
1749:   MatGetLocalSize(A,&m,&n);
1750:   MatGetSize(A,&M,&N);
1751:   MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);
1752:   MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);
1753:   MatComputeOperator(Apc,mattype,mat);
1754:   MatDestroy(&Apc);  /* the shell is temporary; only *mat is returned to the caller */
1755:   return(0);
1756: }

1758: /*@
1759:    PCSetCoordinates - sets the coordinates of all the nodes on the local process

1761:    Collective on PC

1763:    Input Parameters:
1764: +  pc - the solver context
1765: .  dim - the dimension of the coordinates 1, 2, or 3
1766: .  nloc - the blocked size of the coordinates array
1767: -  coords - the coordinates array

1769:    Level: intermediate

1771:    Notes:
1772:    coords is an array of the dim coordinates for the nodes on
1773:    the local processor, of size dim*nloc.
1774:    If there are 108 equations on a processor
1775:    for a displacement finite element discretization of elasticity (so
1776:    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1777:    double precision values (ie, 3 * 36).  These x y z coordinates
1778:    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1779:    ... , N-1.z ].

1781: .seealso: MatSetNearNullSpace()
1782: @*/
1783: PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
1784: {

1790:   PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));  /* TryMethod: a no-op for PC types that do not use coordinates */
1791:   return(0);
1792: }

1794: /*@
1795:    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

1797:    Logically Collective on PC

1799:    Input Parameters:
1800: +  pc - the precondition context

1802:    Output Parameter:
1803: -  num_levels - the number of levels
1804: .  interpolations - the interpolation matrices (size of num_levels-1)

1806:    Level: advanced

1808: .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level

1810: .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
1811: @*/
1812: PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
1813: {

1820:   PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));  /* UseMethod: errors if the PC type does not provide interpolations */
1821:   return(0);
1822: }

1824: /*@
1825:    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

1827:    Logically Collective on PC

1829:    Input Parameters:
1830: +  pc - the precondition context

1832:    Output Parameter:
1833: -  num_levels - the number of levels
1834: .  coarseOperators - the coarse operator matrices (size of num_levels-1)

1836:    Level: advanced

1838: .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level

1840: .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
1841: @*/
1842: PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
1843: {

1850:   PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));  /* UseMethod: errors if the PC type does not provide coarse operators */
1851:   return(0);
1852: }