Actual source code: hpc.c

petsc-3.3-p7 2013-05-11
#include <petsc-private/pcimpl.h>     /*I "petscpc.h" I*/
#include <petscksp.h>

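/*
     PCHMPI: the (sequential) outer process owns the original matrix (gmat below) and, through
   PetscHMPIRun(), drives a "local world" of worker processes. The workers hold a distributed
   copy of the matrix (mat) and a parallel KSP that does the actual solve; the *_MP routines in
   this file are the pieces that execute on every process of that local world.
*/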
typedef struct {
  MatStructure flag;               /* pc->flag */
  PetscInt     setupcalled;        /* pc->setupcalled */
  PetscInt     n;
  MPI_Comm     comm;               /* local world used by this preconditioner */
  KSP          ksp;                /* actual solver used across local world */
  Mat          mat;                /* matrix in local world */
  Mat          gmat;               /* matrix known only to process 0 in the local world */
  Vec          x,y,xdummy,ydummy;
  VecScatter   scatter;
  PetscBool    nonzero_guess;
} PC_HMPI;


/*
    Would like to have this simply call PCView() on the inner PC. The problem is that the
  outer comm does not live on the inside, so we cannot do that. Instead we handle the special
  case where the viewer is stdout by constructing a new viewer just for this call.
*/

static PetscErrorCode PCView_HMPI_MP(MPI_Comm comm,void *ctx)
{
  PC_HMPI      *red = (PC_HMPI*)ctx;
  PetscViewer    viewer;

  PetscViewerASCIIGetStdout(comm,&viewer);
  PetscViewerASCIIPushTab(viewer);         /* this is bogus in general */
  KSPView(red->ksp,viewer);
  PetscViewerASCIIPopTab(viewer);
  return(0);
}

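/*
   Runs on the outer process: prints the outer information and then asks the local world to
   print the inner (parallel) KSP.
*/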
static PetscErrorCode PCView_HMPI(PC pc,PetscViewer viewer)
{
  PC_HMPI      *red = (PC_HMPI*)pc->data;
  PetscMPIInt    size;
  PetscBool      iascii;

  MPI_Comm_size(red->comm,&size);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerASCIIPrintf(viewer,"  Size of solver nodes %d\n",size);
    PetscViewerASCIIPrintf(viewer,"  Parallel sub-solver given next\n");
    /* should only make the next call if the viewer is associated with stdout */
    PetscHMPIRun(red->comm,PCView_HMPI_MP,red);
  }
  return(0);
}

extern PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);

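/*
   Used when the local world consists of a single process; the inner KSP then works directly
   on the original (sequential) matrix, so no copying or scattering is needed.
*/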
static PetscErrorCode PCApply_HMPI_1(PC pc,Vec x,Vec y)
{
  PC_HMPI      *red = (PC_HMPI*)pc->data;

  KSPSetInitialGuessNonzero(red->ksp,pc->nonzero_guess);
  KSPSolve(red->ksp,x,y);
  return(0);
}

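/*
   Runs on every process of the local world (dispatched by PetscHMPIRun()). The setup state is
   broadcast from process 0; on the first call the parallel work vectors and the scatter to
   process 0 are created; the sequential matrix is then distributed across the local world and
   the inner parallel KSP is created or updated.
*/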
static PetscErrorCode PCSetUp_HMPI_MP(MPI_Comm comm,void *ctx)
{
  PC_HMPI      *red = (PC_HMPI*)ctx;
  PetscInt       m;
  MatReuse       scal;
  PetscMPIInt    rank;

  red->comm = comm;
  MPI_Bcast(&red->setupcalled,1,MPIU_INT,0,comm);
  MPI_Bcast(&red->flag,1,MPI_INT,0,comm);   /* MatStructure is an enum, so broadcast it as a plain int */
  if (!red->setupcalled) {
    /* setup vector communication */
    MPI_Bcast(&red->n,1,MPIU_INT,0,comm);
    VecCreateMPI(comm,PETSC_DECIDE,red->n,&red->x);
    VecCreateMPI(comm,PETSC_DECIDE,red->n,&red->y);
    VecScatterCreateToZero(red->x,&red->scatter,&red->xdummy);
    VecDuplicate(red->xdummy,&red->ydummy);
    MPI_Comm_rank(comm,&rank);
    if (!rank) {
      VecDestroy(&red->xdummy);
      VecDestroy(&red->ydummy);
    }
    scal = MAT_INITIAL_MATRIX;
  } else {
    if (red->flag == DIFFERENT_NONZERO_PATTERN) {
      MatDestroy(&red->mat);
      scal = MAT_INITIAL_MATRIX;
      CHKMEMQ;
    } else {
      scal = MAT_REUSE_MATRIX;
    }
  }

  /* copy matrix out onto processes */
  VecGetLocalSize(red->x,&m);
  MatDistribute_MPIAIJ(comm,red->gmat,m,scal,&red->mat);
  if (!red->setupcalled) {
    /* create the solver */
    KSPCreate(comm,&red->ksp);
    /* would like to set proper tablevel for KSP, but do not have direct access to parent pc */
    KSPSetOptionsPrefix(red->ksp,"hmpi_"); /* should actually append with global pc prefix */
    KSPSetOperators(red->ksp,red->mat,red->mat,red->flag);
    KSPSetFromOptions(red->ksp);
  } else {
    KSPSetOperators(red->ksp,red->mat,red->mat,red->flag);
  }
  return(0);
}

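/*
   Runs only on the outer process. When the local world has a single process the inner KSP is
   used directly on the original matrix; otherwise the setup is delegated to the local world.
*/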
static PetscErrorCode PCSetUp_HMPI(PC pc)
{
  PC_HMPI      *red = (PC_HMPI*)pc->data;
  PetscMPIInt    size;

  red->gmat        = pc->mat;
  red->flag        = pc->flag;
  red->setupcalled = pc->setupcalled;

  MPI_Comm_size(red->comm,&size);
  if (size == 1) {  /* special case where copy of matrix is not needed */
    if (!red->setupcalled) {
      /* create the solver */
      KSPCreate(((PetscObject)pc)->comm,&red->ksp);
      PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);
      KSPSetOptionsPrefix(red->ksp,"hmpi_"); /* should actually append with global pc prefix */
      KSPSetOperators(red->ksp,red->gmat,red->gmat,red->flag);
      KSPSetFromOptions(red->ksp);
    } else {
      KSPSetOperators(red->ksp,red->gmat,red->gmat,red->flag);
    }
    pc->ops->apply = PCApply_HMPI_1;
    return(0);
  } else {
    MatGetSize(pc->mat,&red->n,PETSC_IGNORE);
    PetscHMPIRun(red->comm,PCSetUp_HMPI_MP,red);
  }
  return(0);
}

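/*
   Runs on every process of the local world. The input vector known only to process 0 (xdummy)
   is scattered (in reverse) onto the parallel vector x, the inner KSP solve is done in
   parallel, and the solution is scattered back to process 0 (ydummy).
*/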
static PetscErrorCode PCApply_HMPI_MP(MPI_Comm comm,void *ctx)
{
  PC_HMPI      *red = (PC_HMPI*)ctx;

  VecScatterBegin(red->scatter,red->xdummy,red->x,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd(red->scatter,red->xdummy,red->x,INSERT_VALUES,SCATTER_REVERSE);
  MPI_Bcast(&red->nonzero_guess,1,MPI_INT,0,red->comm);   /* PetscBool is an enum, so broadcast it as a plain int */
  if (red->nonzero_guess) {
    VecScatterBegin(red->scatter,red->ydummy,red->y,INSERT_VALUES,SCATTER_REVERSE);
    VecScatterEnd(red->scatter,red->ydummy,red->y,INSERT_VALUES,SCATTER_REVERSE);
  }
  KSPSetInitialGuessNonzero(red->ksp,red->nonzero_guess);

  KSPSolve(red->ksp,red->x,red->y);

  VecScatterBegin(red->scatter,red->y,red->ydummy,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(red->scatter,red->y,red->ydummy,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}

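/*
   Called only on the outer process; it stashes the outer (sequential) vectors in the context
   and hands the actual solve over to the local world.
*/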
static PetscErrorCode PCApply_HMPI(PC pc,Vec x,Vec y)
{
  PC_HMPI      *red = (PC_HMPI*)pc->data;

  red->xdummy        = x;
  red->ydummy        = y;
  red->nonzero_guess = pc->nonzero_guess;
  PetscHMPIRun(red->comm,PCApply_HMPI_MP,red);
  return(0);
}

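/*
   Runs on every process of the local world and destroys the objects that live there. On
   process 0 the dummy vectors were already destroyed at setup time (the outer vectors are
   used in their place), so they are freed only on the remaining processes.
*/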
static PetscErrorCode PCDestroy_HMPI_MP(MPI_Comm comm,void *ctx)
{
  PC_HMPI      *red = (PC_HMPI*)ctx;
  PetscMPIInt    rank;

  VecScatterDestroy(&red->scatter);
  VecDestroy(&red->x);
  VecDestroy(&red->y);
  KSPDestroy(&red->ksp);
  MatDestroy(&red->mat);
  MPI_Comm_rank(comm,&rank);
  if (rank) {
    VecDestroy(&red->xdummy);
    VecDestroy(&red->ydummy);
  }
  return(0);
}

static PetscErrorCode PCDestroy_HMPI(PC pc)
{
  PC_HMPI      *red = (PC_HMPI*)pc->data;

  PetscHMPIRun(red->comm,PCDestroy_HMPI_MP,red);
  PetscHMPIFree(red->comm,red);
  return(0);
}

static PetscErrorCode PCSetFromOptions_HMPI(PC pc)
{
  return(0);
}


/* -------------------------------------------------------------------------------------*/
/*MC
     PCHMPI - Runs a preconditioner for a single-process matrix across several MPI processes

$     This will usually be run with -pc_type hmpi -ksp_type preonly.
$     The inner solver options are set with the -hmpi_ksp_... and -hmpi_pc_... prefixes; for example,
$     -hmpi_ksp_type cg selects CG as the Krylov method, -hmpi_ksp_monitor turns on its monitoring, and
$     -hmpi_pc_type hypre -hmpi_pc_hypre_type boomeramg selects BoomerAMG as the inner preconditioner.

       Always run with -ksp_view (or -snes_view) to see what solver is actually being used.

       Currently the solver options INSIDE the HMPI preconditioner can ONLY be set via the
      options database.

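       A minimal usage sketch from C (ksp, pc, A, b, and x are hypothetical user variables: a KSP,
      its PC, and the user's sequential matrix and vectors; the MPI processes are assumed to have
      already been prepared with PetscHMPISpawn() or PetscHMPIMerge()):

$     KSPCreate(PETSC_COMM_WORLD,&ksp);        /* the outer communicator must contain exactly one process */
$     KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);
$     KSPSetType(ksp,KSPPREONLY);              /* let the inner parallel solver do all the work */
$     KSPGetPC(ksp,&pc);
$     PCSetType(pc,PCHMPI);
$     KSPSetFromOptions(ksp);                  /* picks up the -hmpi_ksp_... and -hmpi_pc_... options */
$     KSPSolve(ksp,b,x);
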
   Level: intermediate

   See PetscHMPIMerge() and PetscHMPISpawn() for two ways to start up MPI for use with this preconditioner.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types)

M*/

EXTERN_C_BEGIN
PetscErrorCode  PCCreate_HMPI(PC pc)
{
  PC_HMPI      *red;
  PetscMPIInt    size;

  MPI_Comm_size(((PetscObject)pc)->comm,&size);
  if (size > 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_SIZ,"HMPI preconditioner only works for sequential solves");
  /* cast the struct length to a PetscInt for easier MPI calls */

  PetscHMPIMalloc(PETSC_COMM_LOCAL_WORLD,(PetscInt)sizeof(PC_HMPI),(void**)&red);
  red->comm = PETSC_COMM_LOCAL_WORLD;
  pc->data  = (void*) red;

  pc->ops->apply          = PCApply_HMPI;
  pc->ops->destroy        = PCDestroy_HMPI;
  pc->ops->setfromoptions = PCSetFromOptions_HMPI;
  pc->ops->setup          = PCSetUp_HMPI;
  pc->ops->view           = PCView_HMPI;
  return(0);
}
EXTERN_C_END