/*
  Actual source code: telescope_dmda.c
  petsc-3.11.1 2019-04-17
  Report Typos and Errors
*/


#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petsc/private/dmimpl.h>
#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>

#include "../src/ksp/pc/impls/telescope/telescope.h"

/* BibTeX entry registered once (guarded by 'cited') via PetscCitationsRegister()
   when the preconditioner is applied; see PCApply_Telescope_dmda(). */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {http://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";

/*
 Determine which rank of a re-partitioned (Mp x Np x Pp) processor grid owns
 the global grid index (i,j,k).

 Input:
   dim                       - spatial dimension (1, 2 or 3)
   i,j,k                     - global grid indices
   Mp,Np,Pp                  - number of ranks in each direction of the processor grid
   start_i/j/k[], span_i/j/k[] - per-rank starting offset and extent in each direction
 Output:
   _pi,_pj,_pk               - processor-grid coordinate in each direction (optional;
                               a direction is only searched if its pointer is non-NULL)
   rank_re                   - linearized rank within the re-partitioned processor grid

 Fails with PETSC_ERR_USER if a requested index lies outside every rank's span.
*/
static PetscErrorCode _DMDADetermineRankFromGlobalIJK(PetscInt dim,PetscInt i,PetscInt j,PetscInt k,
                                                      PetscInt Mp,PetscInt Np,PetscInt Pp,
                                                      PetscInt start_i[],PetscInt start_j[],PetscInt start_k[],
                                                      PetscInt span_i[],PetscInt span_j[],PetscInt span_k[],
                                                      PetscMPIInt *_pi,PetscMPIInt *_pj,PetscMPIInt *_pk,PetscMPIInt *rank_re)
{
  PetscInt pi,pj,pk,n;

  /* Initialize outputs to -1 so a skipped or failed search is detectable */
  *rank_re = -1;
  if (_pi) *_pi = -1;
  if (_pj) *_pj = -1;
  if (_pk) *_pk = -1;
  pi = pj = pk = -1;
  if (_pi) {
    /* linear search for the rank interval [start_i[n], start_i[n]+span_i[n]) containing i */
    for (n=0; n<Mp; n++) {
      if ( (i >= start_i[n]) && (i < start_i[n]+span_i[n]) ) {
        pi = n;
        break;
      }
    }
    if (pi == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pi cannot be determined : range %D, val %D",Mp,i);
    *_pi = pi;
  }

  if (_pj) {
    for (n=0; n<Np; n++) {
      if ( (j >= start_j[n]) && (j < start_j[n]+span_j[n]) ) {
        pj = n;
        break;
      }
    }
    if (pj == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pj cannot be determined : range %D, val %D",Np,j);
    *_pj = pj;
  }

  if (_pk) {
    for (n=0; n<Pp; n++) {
      if ( (k >= start_k[n]) && (k < start_k[n]+span_k[n]) ) {
        pk = n;
        break;
      }
    }
    if (pk == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmda-ijk] pk cannot be determined : range %D, val %D",Pp,k);
    *_pk = pk;
  }

  /* Linearize the processor-grid coordinates (i fastest, then j, then k).
     NOTE(review): callers must request all directions implied by dim, otherwise
     the unset coordinates remain -1 and the linearized rank is meaningless. */
  switch (dim) {
  case 1:
    *rank_re = pi;
    break;
  case 2:
    *rank_re = pi + pj * Mp;
    break;
  case 3:
    *rank_re = pi + pj * Mp + pk * (Mp*Np);
    break;
  }
  return(0);
}

/*
 Compute the global starting index (s0) of the sub-domain owned by rank_re on
 the re-partitioned DMDA: the total number of grid points owned by all ranks
 whose linearized rank is lower than rank_re.

 Input:
   dim                  - spatial dimension (1, 2 or 3)
   rank_re              - linearized rank within the (Mp_re x Np_re x Pp_re) grid
   Mp_re,Np_re,Pp_re    - processor-grid extents of the re-partitioned DMDA
   range_i/j/k_re[]     - number of grid points owned per rank in each direction
 Output:
   s0                   - global offset of the first point owned by rank_re
*/
static PetscErrorCode _DMDADetermineGlobalS0(PetscInt dim,PetscMPIInt rank_re,PetscInt Mp_re,PetscInt Np_re,PetscInt Pp_re,
                                      PetscInt range_i_re[],PetscInt range_j_re[],PetscInt range_k_re[],PetscInt *s0)
{
  PetscInt i,j,k,start_IJK = 0;
  PetscInt rank_ijk;

  switch (dim) {
  case 1:
    for (i=0; i<Mp_re; i++) {
      rank_ijk = i;
      if (rank_ijk < rank_re) {
        start_IJK += range_i_re[i];
      }
    }
    break;
  case 2:
    for (j=0; j<Np_re; j++) {
      for (i=0; i<Mp_re; i++) {
        rank_ijk = i + j*Mp_re;
        if (rank_ijk < rank_re) {
          /* each lower rank owns a (range_i x range_j) patch of points */
          start_IJK += range_i_re[i]*range_j_re[j];
        }
      }
    }
    break;
  case 3:
    for (k=0; k<Pp_re; k++) {
      for (j=0; j<Np_re; j++) {
        for (i=0; i<Mp_re; i++) {
          rank_ijk = i + j*Mp_re + k*Mp_re*Np_re;
          if (rank_ijk < rank_re) {
            start_IJK += range_i_re[i]*range_j_re[j]*range_k_re[k];
          }
        }
      }
    }
    break;
  }
  *s0 = start_IJK;
  return(0);
}

/*
 Transfer the coordinates of the parent 2D DMDA (dm) onto the re-partitioned
 DMDA (subdm) living on the child communicator.

 The parent coordinates are converted to natural ordering, then each active
 rank scatters exactly the (x,y) pairs belonging to its sub-domain of subdm
 into a local sequential vector and copies them into subdm's coordinate
 vector.  Inactive ranks participate with empty index sets, since the IS/
 VecScatter creation calls are collective on the parent communicator.

 No-op if the parent DM has no coordinates attached.
*/
static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors2d(PC_Telescope sred,DM dm,DM subdm)
{
  DM             cdm;
  Vec            coor,coor_natural,perm_coors;
  PetscInt       i,j,si,sj,ni,nj,M,N,Ml,Nl,c,nidx;
  PetscInt       *fine_indices;
  IS             is_fine,is_local;
  VecScatter     sctx;

  DMGetCoordinates(dm,&coor);
  if (!coor) return(0);
  if (PCTelescope_isActiveRank(sred)) {
    /* allocates subdm's coordinate vector; the values are overwritten below */
    DMDASetUniformCoordinates(subdm,0.0,1.0,0.0,1.0,0.0,1.0);
  }
  /* Get the coordinate vector from the distributed array */
  DMGetCoordinateDM(dm,&cdm);
  DMDACreateNaturalVector(cdm,&coor_natural);

  DMDAGlobalToNaturalBegin(cdm,coor,INSERT_VALUES,coor_natural);
  DMDAGlobalToNaturalEnd(cdm,coor,INSERT_VALUES,coor_natural);

  /* get indices of the guys I want to grab */
  DMDAGetInfo(dm,NULL,&M,&N,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  if (PCTelescope_isActiveRank(sred)) {
    DMDAGetCorners(subdm,&si,&sj,NULL,&ni,&nj,NULL);
    Ml = ni;
    Nl = nj;
  } else {
    /* inactive ranks contribute zero-length index sets below */
    si = sj = 0;
    ni = nj = 0;
    Ml = Nl = 0;
  }

  PetscMalloc1(Ml*Nl*2,&fine_indices);
  c = 0;
  if (PCTelescope_isActiveRank(sred)) {
    for (j=sj; j<sj+nj; j++) {
      for (i=si; i<si+ni; i++) {
        /* natural (lexicographic) grid index; each point carries 2 coordinate dofs */
        nidx = (i) + (j)*M;
        fine_indices[c  ] = 2 * nidx     ;
        fine_indices[c+1] = 2 * nidx + 1 ;
        c = c + 2;
      }
    }
    if (c != Ml*Nl*2) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c %D should equal 2 * Ml %D * Nl %D",c,Ml,Nl);
  }

  /* generate scatter */
  /* PETSC_USE_POINTER: is_fine references fine_indices, which must stay alive until ISDestroy */
  ISCreateGeneral(PetscObjectComm((PetscObject)dm),Ml*Nl*2,fine_indices,PETSC_USE_POINTER,&is_fine);
  ISCreateStride(PETSC_COMM_SELF,Ml*Nl*2,0,1,&is_local);

  /* scatter */
  VecCreate(PETSC_COMM_SELF,&perm_coors);
  VecSetSizes(perm_coors,PETSC_DECIDE,Ml*Nl*2);
  VecSetType(perm_coors,VECSEQ);

  VecScatterCreate(coor_natural,is_fine,perm_coors,is_local,&sctx);
  VecScatterBegin(sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(  sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
  /* access */
  if (PCTelescope_isActiveRank(sred)) {
    Vec               _coors;
    const PetscScalar *LA_perm;
    PetscScalar       *LA_coors;

    /* copy the scattered values into subdm's global coordinate vector */
    DMGetCoordinates(subdm,&_coors);
    VecGetArrayRead(perm_coors,&LA_perm);
    VecGetArray(_coors,&LA_coors);
    for (i=0; i<Ml*Nl*2; i++) {
      LA_coors[i] = LA_perm[i];
    }
    VecRestoreArray(_coors,&LA_coors);
    VecRestoreArrayRead(perm_coors,&LA_perm);
  }

  /* update local coords */
  if (PCTelescope_isActiveRank(sred)) {
    DM  _dmc;
    Vec _coors,_coors_local;
    DMGetCoordinateDM(subdm,&_dmc);
    DMGetCoordinates(subdm,&_coors);
    DMGetCoordinatesLocal(subdm,&_coors_local);
    DMGlobalToLocalBegin(_dmc,_coors,INSERT_VALUES,_coors_local);
    DMGlobalToLocalEnd(_dmc,_coors,INSERT_VALUES,_coors_local);
  }
  VecScatterDestroy(&sctx);
  ISDestroy(&is_fine);
  PetscFree(fine_indices);
  ISDestroy(&is_local);
  VecDestroy(&perm_coors);
  VecDestroy(&coor_natural);
  return(0);
}

233: static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors3d(PC_Telescope sred,DM dm,DM subdm)
234: {
236:   DM             cdm;
237:   Vec            coor,coor_natural,perm_coors;
238:   PetscInt       i,j,k,si,sj,sk,ni,nj,nk,M,N,P,Ml,Nl,Pl,c,nidx;
239:   PetscInt       *fine_indices;
240:   IS             is_fine,is_local;
241:   VecScatter     sctx;

244:   DMGetCoordinates(dm,&coor);
245:   if (!coor) return(0);

247:   if (PCTelescope_isActiveRank(sred)) {
248:     DMDASetUniformCoordinates(subdm,0.0,1.0,0.0,1.0,0.0,1.0);
249:   }

251:   /* Get the coordinate vector from the distributed array */
252:   DMGetCoordinateDM(dm,&cdm);
253:   DMDACreateNaturalVector(cdm,&coor_natural);
254:   DMDAGlobalToNaturalBegin(cdm,coor,INSERT_VALUES,coor_natural);
255:   DMDAGlobalToNaturalEnd(cdm,coor,INSERT_VALUES,coor_natural);

257:   /* get indices of the guys I want to grab */
258:   DMDAGetInfo(dm,NULL,&M,&N,&P,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);

260:   if (PCTelescope_isActiveRank(sred)) {
261:     DMDAGetCorners(subdm,&si,&sj,&sk,&ni,&nj,&nk);
262:     Ml = ni;
263:     Nl = nj;
264:     Pl = nk;
265:   } else {
266:     si = sj = sk = 0;
267:     ni = nj = nk = 0;
268:     Ml = Nl = Pl = 0;
269:   }

271:   PetscMalloc1(Ml*Nl*Pl*3,&fine_indices);

273:   c = 0;
274:   if (PCTelescope_isActiveRank(sred)) {
275:     for (k=sk; k<sk+nk; k++) {
276:       for (j=sj; j<sj+nj; j++) {
277:         for (i=si; i<si+ni; i++) {
278:           nidx = (i) + (j)*M + (k)*M*N;
279:           fine_indices[c  ] = 3 * nidx     ;
280:           fine_indices[c+1] = 3 * nidx + 1 ;
281:           fine_indices[c+2] = 3 * nidx + 2 ;
282:           c = c + 3;
283:         }
284:       }
285:     }
286:   }

288:   /* generate scatter */
289:   ISCreateGeneral(PetscObjectComm((PetscObject)dm),Ml*Nl*Pl*3,fine_indices,PETSC_USE_POINTER,&is_fine);
290:   ISCreateStride(PETSC_COMM_SELF,Ml*Nl*Pl*3,0,1,&is_local);

292:   /* scatter */
293:   VecCreate(PETSC_COMM_SELF,&perm_coors);
294:   VecSetSizes(perm_coors,PETSC_DECIDE,Ml*Nl*Pl*3);
295:   VecSetType(perm_coors,VECSEQ);
296:   VecScatterCreate(coor_natural,is_fine,perm_coors,is_local,&sctx);
297:   VecScatterBegin(sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
298:   VecScatterEnd(  sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);

300:   /* access */
301:   if (PCTelescope_isActiveRank(sred)) {
302:     Vec               _coors;
303:     const PetscScalar *LA_perm;
304:     PetscScalar       *LA_coors;

306:     DMGetCoordinates(subdm,&_coors);
307:     VecGetArrayRead(perm_coors,&LA_perm);
308:     VecGetArray(_coors,&LA_coors);
309:     for (i=0; i<Ml*Nl*Pl*3; i++) {
310:       LA_coors[i] = LA_perm[i];
311:     }
312:     VecRestoreArray(_coors,&LA_coors);
313:     VecRestoreArrayRead(perm_coors,&LA_perm);
314:   }

316:   /* update local coords */
317:   if (PCTelescope_isActiveRank(sred)) {
318:     DM  _dmc;
319:     Vec _coors,_coors_local;

321:     DMGetCoordinateDM(subdm,&_dmc);
322:     DMGetCoordinates(subdm,&_coors);
323:     DMGetCoordinatesLocal(subdm,&_coors_local);
324:     DMGlobalToLocalBegin(_dmc,_coors,INSERT_VALUES,_coors_local);
325:     DMGlobalToLocalEnd(_dmc,_coors,INSERT_VALUES,_coors_local);
326:   }

328:   VecScatterDestroy(&sctx);
329:   ISDestroy(&is_fine);
330:   PetscFree(fine_indices);
331:   ISDestroy(&is_local);
332:   VecDestroy(&perm_coors);
333:   VecDestroy(&coor_natural);
334:   return(0);
335: }

/*
 Dispatch coordinate transfer from the parent DMDA to the re-partitioned DMDA
 based on the spatial dimension.  No-op if the parent DM carries no
 coordinates; 1D repartitioning is not supported and raises PETSC_ERR_SUP.
*/
static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  PetscInt       dim;
  DM             dm,subdm;
  PetscSubcomm   psubcomm;
  MPI_Comm       comm;
  Vec            coor;

  PCGetDM(pc,&dm);
  DMGetCoordinates(dm,&coor);
  if (!coor) return(0);
  psubcomm = sred->psubcomm;
  comm = PetscSubcommParent(psubcomm);
  subdm = ctx->dmrepart;

  PetscInfo(pc,"PCTelescope: setting up the coordinates (DMDA)\n");
  DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  switch (dim) {
  case 1: SETERRQ(comm,PETSC_ERR_SUP,"Telescope: DMDA (1D) repartitioning not provided");
    break;
  case 2: PCTelescopeSetUp_dmda_repart_coors2d(sred,dm,subdm);
    break;
  case 3: PCTelescopeSetUp_dmda_repart_coors3d(sred,dm,subdm);
    break;
  }
  return(0);
}

367: /* setup repartitioned dm */
368: PetscErrorCode PCTelescopeSetUp_dmda_repart(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
369: {
370:   PetscErrorCode        ierr;
371:   DM                    dm;
372:   PetscInt              dim,nx,ny,nz,ndof,nsw,sum,k;
373:   DMBoundaryType        bx,by,bz;
374:   DMDAStencilType       stencil;
375:   const PetscInt        *_range_i_re;
376:   const PetscInt        *_range_j_re;
377:   const PetscInt        *_range_k_re;
378:   DMDAInterpolationType itype;
379:   PetscInt              refine_x,refine_y,refine_z;
380:   MPI_Comm              comm,subcomm;
381:   const char            *prefix;

384:   comm = PetscSubcommParent(sred->psubcomm);
385:   subcomm = PetscSubcommChild(sred->psubcomm);
386:   PCGetDM(pc,&dm);

388:   DMDAGetInfo(dm,&dim,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,&nsw,&bx,&by,&bz,&stencil);
389:   DMDAGetInterpolationType(dm,&itype);
390:   DMDAGetRefinementFactor(dm,&refine_x,&refine_y,&refine_z);

392:   ctx->dmrepart = NULL;
393:   _range_i_re = _range_j_re = _range_k_re = NULL;
394:   /* Create DMDA on the child communicator */
395:   if (PCTelescope_isActiveRank(sred)) {
396:     switch (dim) {
397:     case 1:
398:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (1D)\n");
399:       /*DMDACreate1d(subcomm,bx,nx,ndof,nsw,NULL,&ctx->dmrepart);*/
400:       ny = nz = 1;
401:       by = bz = DM_BOUNDARY_NONE;
402:       break;
403:     case 2:
404:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (2D)\n");
405:       /*DMDACreate2d(subcomm,bx,by,stencil,nx,ny, PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,&ctx->dmrepart);*/
406:       nz = 1;
407:       bz = DM_BOUNDARY_NONE;
408:       break;
409:     case 3:
410:       PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (3D)\n");
411:       /*DMDACreate3d(subcomm,bx,by,bz,stencil,nx,ny,nz, PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,NULL,&ctx->dmrepart);*/
412:       break;
413:     }
414:     /*
415:      The API DMDACreate1d(), DMDACreate2d(), DMDACreate3d() does not allow us to set/append
416:      a unique option prefix for the DM, thus I prefer to expose the contents of these API's here.
417:      This allows users to control the partitioning of the subDM.
418:     */
419:     DMDACreate(subcomm,&ctx->dmrepart);
420:     /* Set unique option prefix name */
421:     KSPGetOptionsPrefix(sred->ksp,&prefix);
422:     DMSetOptionsPrefix(ctx->dmrepart,prefix);
423:     DMAppendOptionsPrefix(ctx->dmrepart,"repart_");
424:     /* standard setup from DMDACreate{1,2,3}d() */
425:     DMSetDimension(ctx->dmrepart,dim);
426:     DMDASetSizes(ctx->dmrepart,nx,ny,nz);
427:     DMDASetNumProcs(ctx->dmrepart,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE);
428:     DMDASetBoundaryType(ctx->dmrepart,bx,by,bz);
429:     DMDASetDof(ctx->dmrepart,ndof);
430:     DMDASetStencilType(ctx->dmrepart,stencil);
431:     DMDASetStencilWidth(ctx->dmrepart,nsw);
432:     DMDASetOwnershipRanges(ctx->dmrepart,NULL,NULL,NULL);
433:     DMSetFromOptions(ctx->dmrepart);
434:     DMSetUp(ctx->dmrepart);
435:     /* Set refinement factors and interpolation type from the partent */
436:     DMDASetRefinementFactor(ctx->dmrepart,refine_x,refine_y,refine_z);
437:     DMDASetInterpolationType(ctx->dmrepart,itype);

439:     DMDAGetInfo(ctx->dmrepart,NULL,NULL,NULL,NULL,&ctx->Mp_re,&ctx->Np_re,&ctx->Pp_re,NULL,NULL,NULL,NULL,NULL,NULL);
440:     DMDAGetOwnershipRanges(ctx->dmrepart,&_range_i_re,&_range_j_re,&_range_k_re);

442:     ctx->dmrepart->ops->creatematrix = dm->ops->creatematrix;
443:     ctx->dmrepart->ops->createdomaindecomposition = dm->ops->createdomaindecomposition;
444:   }

446:   /* generate ranges for repartitioned dm */
447:   /* note - assume rank 0 always participates */
448:   MPI_Bcast(&ctx->Mp_re,1,MPIU_INT,0,comm);
449:   MPI_Bcast(&ctx->Np_re,1,MPIU_INT,0,comm);
450:   MPI_Bcast(&ctx->Pp_re,1,MPIU_INT,0,comm);

452:   PetscCalloc1(ctx->Mp_re,&ctx->range_i_re);
453:   PetscCalloc1(ctx->Np_re,&ctx->range_j_re);
454:   PetscCalloc1(ctx->Pp_re,&ctx->range_k_re);

456:   if (_range_i_re) {PetscMemcpy(ctx->range_i_re,_range_i_re,sizeof(PetscInt)*ctx->Mp_re);}
457:   if (_range_j_re) {PetscMemcpy(ctx->range_j_re,_range_j_re,sizeof(PetscInt)*ctx->Np_re);}
458:   if (_range_k_re) {PetscMemcpy(ctx->range_k_re,_range_k_re,sizeof(PetscInt)*ctx->Pp_re);}

460:   MPI_Bcast(ctx->range_i_re,ctx->Mp_re,MPIU_INT,0,comm);
461:   MPI_Bcast(ctx->range_j_re,ctx->Np_re,MPIU_INT,0,comm);
462:   MPI_Bcast(ctx->range_k_re,ctx->Pp_re,MPIU_INT,0,comm);

464:   PetscMalloc1(ctx->Mp_re,&ctx->start_i_re);
465:   PetscMalloc1(ctx->Np_re,&ctx->start_j_re);
466:   PetscMalloc1(ctx->Pp_re,&ctx->start_k_re);

468:   sum = 0;
469:   for (k=0; k<ctx->Mp_re; k++) {
470:     ctx->start_i_re[k] = sum;
471:     sum += ctx->range_i_re[k];
472:   }

474:   sum = 0;
475:   for (k=0; k<ctx->Np_re; k++) {
476:     ctx->start_j_re[k] = sum;
477:     sum += ctx->range_j_re[k];
478:   }

480:   sum = 0;
481:   for (k=0; k<ctx->Pp_re; k++) {
482:     ctx->start_k_re[k] = sum;
483:     sum += ctx->range_k_re[k];
484:   }

486:   /* attach repartitioned dm to child ksp */
487:   {
488:     PetscErrorCode (*dmksp_func)(KSP,Mat,Mat,void*);
489:     void           *dmksp_ctx;

491:     DMKSPGetComputeOperators(dm,&dmksp_func,&dmksp_ctx);

493:     /* attach dm to ksp on sub communicator */
494:     if (PCTelescope_isActiveRank(sred)) {
495:       KSPSetDM(sred->ksp,ctx->dmrepart);

497:       if (!dmksp_func || sred->ignore_kspcomputeoperators) {
498:         KSPSetDMActive(sred->ksp,PETSC_FALSE);
499:       } else {
500:         /* sub ksp inherits dmksp_func and context provided by user */
501:         KSPSetComputeOperators(sred->ksp,dmksp_func,dmksp_ctx);
502:         KSPSetDMActive(sred->ksp,PETSC_TRUE);
503:       }
504:     }
505:   }
506:   return(0);
507: }

/*
 Build the permutation matrix (3D case) that maps vectors ordered for the
 parent DMDA onto the ordering of the re-partitioned DMDA.

 A scalar permutation matrix Pscalar is assembled with exactly one unit entry
 per grid point: row = parent global index of the point, column = the point's
 global index under the re-partitioned layout.  The multi-dof permutation is
 obtained by wrapping Pscalar with MatCreateMAIJ(ndof) and stored in
 ctx->permutation.
*/
PetscErrorCode PCTelescopeSetUp_dmda_permutation_3d(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  DM             dm;
  MPI_Comm       comm;
  Mat            Pscalar,P;
  PetscInt       ndof;
  PetscInt       i,j,k,location,startI[3],endI[3],lenI[3],nx,ny,nz;
  PetscInt       sr,er,Mr;
  Vec            V;

  PetscInfo(pc,"PCTelescope: setting up the permutation matrix (DMDA-3D)\n");
  PetscObjectGetComm((PetscObject)pc,&comm);

  PCGetDM(pc,&dm);
  DMDAGetInfo(dm,NULL,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);

  /* use a temporary global vector to obtain the per-rank row distribution */
  DMGetGlobalVector(dm,&V);
  VecGetSize(V,&Mr);
  VecGetOwnershipRange(V,&sr,&er);
  DMRestoreGlobalVector(dm,&V);
  /* convert dof-counts into grid-point counts */
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  MatCreate(comm,&Pscalar);
  MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
  MatSetType(Pscalar,MATAIJ);
  /* exactly one nonzero per row */
  MatSeqAIJSetPreallocation(Pscalar,1,NULL);
  MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

  /* first call: local extents; second call: corners + extents, converted to exclusive ends */
  DMDAGetCorners(dm,NULL,NULL,NULL,&lenI[0],&lenI[1],&lenI[2]);
  DMDAGetCorners(dm,&startI[0],&startI[1],&startI[2],&endI[0],&endI[1],&endI[2]);
  endI[0] += startI[0];
  endI[1] += startI[1];
  endI[2] += startI[2];

  for (k=startI[2]; k<endI[2]; k++) {
    for (j=startI[1]; j<endI[1]; j++) {
      for (i=startI[0]; i<endI[0]; i++) {
        PetscMPIInt rank_ijk_re,rank_reI[3];
        PetscInt    s0_re;
        PetscInt    ii,jj,kk,local_ijk_re,mapped_ijk;
        PetscInt    lenI_re[3];

        /* row: local offset of (i,j,k) within this rank's patch of the parent DMDA */
        location = (i - startI[0]) + (j - startI[1])*lenI[0] + (k - startI[2])*lenI[0]*lenI[1];
        /* which rank of the re-partitioned grid owns (i,j,k), and its global start offset */
        _DMDADetermineRankFromGlobalIJK(3,i,j,k,   ctx->Mp_re,ctx->Np_re,ctx->Pp_re,
                                               ctx->start_i_re,ctx->start_j_re,ctx->start_k_re,
                                               ctx->range_i_re,ctx->range_j_re,ctx->range_k_re,
                                               &rank_reI[0],&rank_reI[1],&rank_reI[2],&rank_ijk_re);
        _DMDADetermineGlobalS0(3,rank_ijk_re, ctx->Mp_re,ctx->Np_re,ctx->Pp_re, ctx->range_i_re,ctx->range_j_re,ctx->range_k_re, &s0_re);
        /* local coordinates of (i,j,k) within the owning rank's patch */
        ii = i - ctx->start_i_re[ rank_reI[0] ];
        if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error ii");
        jj = j - ctx->start_j_re[ rank_reI[1] ];
        if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error jj");
        kk = k - ctx->start_k_re[ rank_reI[2] ];
        if (kk < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm3d] index error kk");
        lenI_re[0] = ctx->range_i_re[ rank_reI[0] ];
        lenI_re[1] = ctx->range_j_re[ rank_reI[1] ];
        lenI_re[2] = ctx->range_k_re[ rank_reI[2] ];
        local_ijk_re = ii + jj * lenI_re[0] + kk * lenI_re[0] * lenI_re[1];
        /* column: global index under the re-partitioned layout */
        mapped_ijk = s0_re + local_ijk_re;
        MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);
  MatCreateMAIJ(Pscalar,ndof,&P);
  MatDestroy(&Pscalar);
  ctx->permutation = P;
  return(0);
}

/*
 Build the permutation matrix (2D case) that maps vectors ordered for the
 parent DMDA onto the ordering of the re-partitioned DMDA.
 Same construction as the 3D variant, restricted to two directions; see
 PCTelescopeSetUp_dmda_permutation_3d().
*/
PetscErrorCode PCTelescopeSetUp_dmda_permutation_2d(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  DM             dm;
  MPI_Comm       comm;
  Mat            Pscalar,P;
  PetscInt       ndof;
  PetscInt       i,j,location,startI[2],endI[2],lenI[2],nx,ny,nz;
  PetscInt       sr,er,Mr;
  Vec            V;

  PetscInfo(pc,"PCTelescope: setting up the permutation matrix (DMDA-2D)\n");
  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetDM(pc,&dm);
  DMDAGetInfo(dm,NULL,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);
  /* use a temporary global vector to obtain the per-rank row distribution */
  DMGetGlobalVector(dm,&V);
  VecGetSize(V,&Mr);
  VecGetOwnershipRange(V,&sr,&er);
  DMRestoreGlobalVector(dm,&V);
  /* convert dof-counts into grid-point counts */
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  MatCreate(comm,&Pscalar);
  MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
  MatSetType(Pscalar,MATAIJ);
  /* exactly one nonzero per row */
  MatSeqAIJSetPreallocation(Pscalar,1,NULL);
  MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

  /* first call: local extents; second call: corners + extents, converted to exclusive ends */
  DMDAGetCorners(dm,NULL,NULL,NULL,&lenI[0],&lenI[1],NULL);
  DMDAGetCorners(dm,&startI[0],&startI[1],NULL,&endI[0],&endI[1],NULL);
  endI[0] += startI[0];
  endI[1] += startI[1];

  for (j=startI[1]; j<endI[1]; j++) {
    for (i=startI[0]; i<endI[0]; i++) {
      PetscMPIInt rank_ijk_re,rank_reI[3];
      PetscInt    s0_re;
      PetscInt    ii,jj,local_ijk_re,mapped_ijk;
      PetscInt    lenI_re[3];

      /* row: local offset of (i,j) within this rank's patch of the parent DMDA */
      location = (i - startI[0]) + (j - startI[1])*lenI[0];
      /* which rank of the re-partitioned grid owns (i,j), and its global start offset */
      _DMDADetermineRankFromGlobalIJK(2,i,j,0,   ctx->Mp_re,ctx->Np_re,ctx->Pp_re,
                                             ctx->start_i_re,ctx->start_j_re,ctx->start_k_re,
                                             ctx->range_i_re,ctx->range_j_re,ctx->range_k_re,
                                             &rank_reI[0],&rank_reI[1],NULL,&rank_ijk_re);

      _DMDADetermineGlobalS0(2,rank_ijk_re, ctx->Mp_re,ctx->Np_re,ctx->Pp_re, ctx->range_i_re,ctx->range_j_re,ctx->range_k_re, &s0_re);

      /* local coordinates of (i,j) within the owning rank's patch */
      ii = i - ctx->start_i_re[ rank_reI[0] ];
      if (ii < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error ii");
      jj = j - ctx->start_j_re[ rank_reI[1] ];
      if (jj < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"[dmdarepart-perm2d] index error jj");

      lenI_re[0] = ctx->range_i_re[ rank_reI[0] ];
      lenI_re[1] = ctx->range_j_re[ rank_reI[1] ];
      local_ijk_re = ii + jj * lenI_re[0];
      /* column: global index under the re-partitioned layout */
      mapped_ijk = s0_re + local_ijk_re;
      MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
    }
  }
  MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);
  MatCreateMAIJ(Pscalar,ndof,&P);
  MatDestroy(&Pscalar);
  ctx->permutation = P;
  return(0);
}

/*
 Create the work vectors and the VecScatter used to move data between the
 parent operator's layout and the re-partitioned layout.

 Active ranks contribute their ownership range of the repartitioned global
 vector; inactive ranks contribute an empty stride IS, keeping the collective
 ISCreateStride/VecScatterCreate calls valid on the parent communicator.
 Results are stored on sred (xred, yred, isin, scatter, xtmp) and ctx (xp).
*/
PetscErrorCode PCTelescopeSetUp_dmda_scatters(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  Vec            xred,yred,xtmp,x,xp;
  VecScatter     scatter;
  IS             isin;
  Mat            B;
  PetscInt       m,bs,st,ed;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetOperators(pc,NULL,&B);
  MatCreateVecs(B,&x,NULL);
  MatGetBlockSize(B,&bs);
  VecDuplicate(x,&xp);
  m = 0;
  xred = NULL;
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    DMCreateGlobalVector(ctx->dmrepart,&xred);
    VecDuplicate(xred,&yred);
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,ed-st,st,1,&isin);
    VecGetLocalSize(xred,&m);
  } else {
    /* empty index set; st only provides a (harmless) first index */
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);
  /* xtmp: distributed like the repartitioned vector, typed like the parent vector */
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,((PetscObject)x)->type_name);
  VecScatterCreate(x,isin,xtmp,NULL,&scatter);
  sred->xred    = xred;
  sred->yred    = yred;
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xtmp    = xtmp;

  ctx->xp       = xp;
  VecDestroy(&x);
  return(0);
}

/*
 Top-level setup for PCTelescope with a DMDA-attached PC: allocates the
 DMDA context, builds the repartitioned DM, transfers coordinates, assembles
 the dimension-specific permutation matrix, and creates the scatters.
 1D DMDAs are not supported (PETSC_ERR_SUP).
*/
PetscErrorCode PCTelescopeSetUp_dmda(PC pc,PC_Telescope sred)
{
  PC_Telescope_DMDACtx *ctx;
  PetscInt             dim;
  DM                   dm;
  MPI_Comm             comm;
  PetscErrorCode       ierr;  /* NOTE(review): declared but unused in this listing — verify against upstream */

  PetscInfo(pc,"PCTelescope: setup (DMDA)\n");
  PetscMalloc1(1,&ctx);
  PetscMemzero(ctx,sizeof(PC_Telescope_DMDACtx));
  sred->dm_ctx = (void*)ctx;

  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetDM(pc,&dm);
  DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);

  PCTelescopeSetUp_dmda_repart(pc,sred,ctx);
  PCTelescopeSetUp_dmda_repart_coors(pc,sred,ctx);
  switch (dim) {
  case 1: SETERRQ(comm,PETSC_ERR_SUP,"Telescope: DMDA (1D) repartitioning not provided");
    break;
  case 2: PCTelescopeSetUp_dmda_permutation_2d(pc,sred,ctx);
    break;
  case 3: PCTelescopeSetUp_dmda_permutation_3d(pc,sred,ctx);
    break;
  }
  PCTelescopeSetUp_dmda_scatters(pc,sred,ctx);
  return(0);
}

/*
 Form the redundant operator explicitly when no user operator callback is
 available: permute the parent operator (B' = P^T B P), extract each active
 rank's rows as a sequential sub-matrix, and concatenate those pieces into a
 parallel matrix on the sub-communicator.  On inactive ranks *A is set to
 NULL.  Honors MAT_INITIAL_MATRIX / reuse semantics via 'reuse'.
*/
PetscErrorCode PCTelescopeMatCreate_dmda_dmactivefalse(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PetscErrorCode       ierr;  /* NOTE(review): declared but unused in this listing — verify against upstream */
  PC_Telescope_DMDACtx *ctx;
  MPI_Comm             comm,subcomm;
  Mat                  Bperm,Bred,B,P;
  PetscInt             nr,nc;
  IS                   isrow,iscol;
  Mat                  Blocal,*_Blocal;

  PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (DMDA)\n");
  PetscObjectGetComm((PetscObject)pc,&comm);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;

  PCGetOperators(pc,NULL,&B);
  MatGetSize(B,&nr,&nc);

  /* apply the repartitioning permutation: Bperm = P^T B P */
  P = ctx->permutation;
  MatPtAP(B,P,MAT_INITIAL_MATRIX,1.1,&Bperm);

  /* Get submatrices */
  isrow = sred->isin;
  ISCreateStride(comm,nc,0,1,&iscol);

  MatCreateSubMatrices(Bperm,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);
  Blocal = *_Blocal;
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) {Bred = *A;}
    MatGetSize(Blocal,&mm,NULL);
    /* MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,PETSC_DECIDE,reuse,&Bred); */
    MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);
  }
  *A = Bred;

  ISDestroy(&iscol);
  MatDestroy(&Bperm);
  MatDestroyMatrices(1,&_Blocal);
  return(0);
}

/*
 Create the operator for the sub-KSP.  Two pathways:
  - if the user supplied a KSPSetComputeOperators() callback (and it is not
    explicitly ignored), only allocate the matrix via DMCreateMatrix(); the
    sub-KSP assembles it later through the inherited callback during KSPSetUp();
  - otherwise form the redundant operator explicitly via
    PCTelescopeMatCreate_dmda_dmactivefalse().
 On inactive ranks *A is NULL.
*/
PetscErrorCode PCTelescopeMatCreate_dmda(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  DM             dm;
  PetscErrorCode (*dmksp_func)(KSP,Mat,Mat,void*);
  void           *dmksp_ctx;

  PCGetDM(pc,&dm);
  DMKSPGetComputeOperators(dm,&dmksp_func,&dmksp_ctx);
  /* We assume that dmksp_func = NULL, is equivalent to dmActive = PETSC_FALSE */
  if (dmksp_func && !sred->ignore_kspcomputeoperators) {
    DM  dmrepart;
    Mat Ak;

    *A = NULL;
    if (PCTelescope_isActiveRank(sred)) {
      KSPGetDM(sred->ksp,&dmrepart);
      if (reuse == MAT_INITIAL_MATRIX) {
        DMCreateMatrix(dmrepart,&Ak);
        *A = Ak;
      } else if (reuse == MAT_REUSE_MATRIX) {
        Ak = *A;
      }
      /*
       There is no need to explicitly assemble the operator now,
       the sub-KSP will call the method provided to KSPSetComputeOperators() during KSPSetUp()
      */
    }
  } else {
    PCTelescopeMatCreate_dmda_dmactivefalse(pc,sred,reuse,A);
  }
  return(0);
}

/*
 Re-create a (near) nullspace on the sub-communicator from 'nullspace' defined
 on the parent communicator.  Each nullspace vector is permuted into the
 re-partitioned ordering (MatMultTranspose with the permutation), scattered to
 the active ranks, and copied into a duplicate of sred->xred.  Custom remove
 callbacks / contexts cannot be propagated and raise PETSC_ERR_SUP.
 Note: the copy loop over k is executed on all ranks because the scatter is
 collective; only active ranks (sub_vecs != NULL) store the result.
*/
PetscErrorCode PCTelescopeSubNullSpaceCreate_dmda_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode       ierr;  /* NOTE(review): declared but unused in this listing — verify against upstream */
  PetscBool            has_const;
  PetscInt             i,k,n = 0;
  const Vec            *vecs;
  Vec                  *sub_vecs = NULL;
  MPI_Comm             subcomm;
  PC_Telescope_DMDACtx *ctx;

  ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
  subcomm = PetscSubcommChild(sred->psubcomm);
  MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

  if (PCTelescope_isActiveRank(sred)) {
    /* create new vectors */
    if (n) {
      VecDuplicateVecs(sred->xred,n,&sub_vecs);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* permute vector into ordering associated with re-partitioned dmda */
    MatMultTranspose(ctx->permutation,vecs[k],ctx->xp);

    /* pull in vector x->xtmp */
    VecScatterBegin(sred->scatter,ctx->xp,sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(sred->scatter,ctx->xp,sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);

    /* copy vector entries into xred */
    VecGetArrayRead(sred->xtmp,&x_array);
    if (sub_vecs) {
      if (sub_vecs[k]) {
        VecGetOwnershipRange(sub_vecs[k],&st,&ed);
        VecGetArray(sub_vecs[k],&LA_sub_vec);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        VecRestoreArray(sub_vecs[k],&LA_sub_vec);
      }
    }
    VecRestoreArrayRead(sred->xtmp,&x_array);
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);
    VecDestroyVecs(n,&sub_vecs);
    if (nullspace->remove) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callbacks not supported when propagating (near) nullspaces with PCTelescope");
    if (nullspace->rmctx) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Propagation of custom remove callback context not supported when propagating (near) nullspaces with PCTelescope");
  }
  return(0);
}

872: PetscErrorCode PCTelescopeMatNullSpaceCreate_dmda(PC pc,PC_Telescope sred,Mat sub_mat)
873: {
875:   Mat            B;

878:   PCGetOperators(pc,NULL,&B);
879:   {
880:     MatNullSpace nullspace,sub_nullspace;
881:     MatGetNullSpace(B,&nullspace);
882:     if (nullspace) {
883:       PetscInfo(pc,"PCTelescope: generating nullspace (DMDA)\n");
884:       PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc,sred,nullspace,&sub_nullspace);
885:       if (PCTelescope_isActiveRank(sred)) {
886:         MatSetNullSpace(sub_mat,sub_nullspace);
887:         MatNullSpaceDestroy(&sub_nullspace);
888:       }
889:     }
890:   }
891:   {
892:     MatNullSpace nearnullspace,sub_nearnullspace;
893:     MatGetNearNullSpace(B,&nearnullspace);
894:     if (nearnullspace) {
895:       PetscInfo(pc,"PCTelescope: generating near nullspace (DMDA)\n");
896:       PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);
897:       if (PCTelescope_isActiveRank(sred)) {
898:         MatSetNearNullSpace(sub_mat,sub_nearnullspace);
899:         MatNullSpaceDestroy(&sub_nearnullspace);
900:       }
901:     }
902:   }
903:   return(0);
904: }

906: PetscErrorCode PCApply_Telescope_dmda(PC pc,Vec x,Vec y)
907: {
908:   PC_Telescope         sred = (PC_Telescope)pc->data;
909:   PetscErrorCode       ierr;
910:   Mat                  perm;
911:   Vec                  xtmp,xp,xred,yred;
912:   PetscInt             i,st,ed;
913:   VecScatter           scatter;
914:   PetscScalar          *array;
915:   const PetscScalar    *x_array;
916:   PC_Telescope_DMDACtx *ctx;

918:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
919:   xtmp    = sred->xtmp;
920:   scatter = sred->scatter;
921:   xred    = sred->xred;
922:   yred    = sred->yred;
923:   perm    = ctx->permutation;
924:   xp      = ctx->xp;

927:   PetscCitationsRegister(citation,&cited);

929:   /* permute vector into ordering associated with re-partitioned dmda */
930:   MatMultTranspose(perm,x,xp);

932:   /* pull in vector x->xtmp */
933:   VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
934:   VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

936:   /* copy vector entires into xred */
937:   VecGetArrayRead(xtmp,&x_array);
938:   if (xred) {
939:     PetscScalar *LA_xred;
940:     VecGetOwnershipRange(xred,&st,&ed);

942:     VecGetArray(xred,&LA_xred);
943:     for (i=0; i<ed-st; i++) {
944:       LA_xred[i] = x_array[i];
945:     }
946:     VecRestoreArray(xred,&LA_xred);
947:   }
948:   VecRestoreArrayRead(xtmp,&x_array);

950:   /* solve */
951:   if (PCTelescope_isActiveRank(sred)) {
952:     KSPSolve(sred->ksp,xred,yred);
953:     KSPCheckSolve(sred->ksp,pc,yred);
954:   }

956:   /* return vector */
957:   VecGetArray(xtmp,&array);
958:   if (yred) {
959:     const PetscScalar *LA_yred;
960:     VecGetOwnershipRange(yred,&st,&ed);
961:     VecGetArrayRead(yred,&LA_yred);
962:     for (i=0; i<ed-st; i++) {
963:       array[i] = LA_yred[i];
964:     }
965:     VecRestoreArrayRead(yred,&LA_yred);
966:   }
967:   VecRestoreArray(xtmp,&array);
968:   VecScatterBegin(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
969:   VecScatterEnd(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
970:   MatMult(perm,xp,y);
971:   return(0);
972: }

974: PetscErrorCode PCApplyRichardson_Telescope_dmda(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
975: {
976:   PC_Telescope         sred = (PC_Telescope)pc->data;
977:   PetscErrorCode       ierr;
978:   Mat                  perm;
979:   Vec                  xtmp,xp,yred;
980:   PetscInt             i,st,ed;
981:   VecScatter           scatter;
982:   const PetscScalar    *x_array;
983:   PetscBool            default_init_guess_value = PETSC_FALSE;
984:   PC_Telescope_DMDACtx *ctx;

987:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
988:   xtmp    = sred->xtmp;
989:   scatter = sred->scatter;
990:   yred    = sred->yred;
991:   perm    = ctx->permutation;
992:   xp      = ctx->xp;

994:   if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope_dmda only supports max_it = 1");
995:   *reason = (PCRichardsonConvergedReason)0;

997:   if (!zeroguess) {
998:     PetscInfo(pc,"PCTelescopeDMDA: Scattering y for non-zero-initial guess\n");
999:     /* permute vector into ordering associated with re-partitioned dmda */
1000:     MatMultTranspose(perm,y,xp);

1002:     /* pull in vector x->xtmp */
1003:     VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
1004:     VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

1006:     /* copy vector entires into xred */
1007:     VecGetArrayRead(xtmp,&x_array);
1008:     if (yred) {
1009:       PetscScalar *LA_yred;
1010:       VecGetOwnershipRange(yred,&st,&ed);
1011:       VecGetArray(yred,&LA_yred);
1012:       for (i=0; i<ed-st; i++) {
1013:         LA_yred[i] = x_array[i];
1014:       }
1015:       VecRestoreArray(yred,&LA_yred);
1016:     }
1017:     VecRestoreArrayRead(xtmp,&x_array);
1018:   }

1020:   if (PCTelescope_isActiveRank(sred)) {
1021:     KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
1022:     if (!zeroguess) KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);
1023:   }

1025:   PCApply_Telescope_dmda(pc,x,y);

1027:   if (PCTelescope_isActiveRank(sred)) {
1028:     KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
1029:   }

1031:   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
1032:   *outits = 1;
1033:   return(0);
1034: }

1036: PetscErrorCode PCReset_Telescope_dmda(PC pc)
1037: {
1038:   PetscErrorCode       ierr;
1039:   PC_Telescope         sred = (PC_Telescope)pc->data;
1040:   PC_Telescope_DMDACtx *ctx;

1043:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
1044:   VecDestroy(&ctx->xp);
1045:   MatDestroy(&ctx->permutation);
1046:   DMDestroy(&ctx->dmrepart);
1047:   PetscFree(ctx->range_i_re);
1048:   PetscFree(ctx->range_j_re);
1049:   PetscFree(ctx->range_k_re);
1050:   PetscFree(ctx->start_i_re);
1051:   PetscFree(ctx->start_j_re);
1052:   PetscFree(ctx->start_k_re);
1053:   return(0);
1054: }

1056: PetscErrorCode DMView_DA_Short_3d(DM dm,PetscViewer v)
1057: {
1058:   PetscInt       M,N,P,m,n,p,ndof,nsw;
1059:   MPI_Comm       comm;
1060:   PetscMPIInt    size;
1061:   const char*    prefix;

1065:   PetscObjectGetComm((PetscObject)dm,&comm);
1066:   MPI_Comm_size(comm,&size);
1067:   DMGetOptionsPrefix(dm,&prefix);
1068:   DMDAGetInfo(dm,NULL,&M,&N,&P,&m,&n,&p,&ndof,&nsw,NULL,NULL,NULL,NULL);
1069:   if (prefix) {PetscViewerASCIIPrintf(v,"DMDA Object:    (%s)    %d MPI processes\n",prefix,size);}
1070:   else {PetscViewerASCIIPrintf(v,"DMDA Object:    %d MPI processes\n",size);}
1071:   PetscViewerASCIIPrintf(v,"  M %D N %D P %D m %D n %D p %D dof %D overlap %D\n",M,N,P,m,n,p,ndof,nsw);
1072:   return(0);
1073: }

1075: PetscErrorCode DMView_DA_Short_2d(DM dm,PetscViewer v)
1076: {
1077:   PetscInt       M,N,m,n,ndof,nsw;
1078:   MPI_Comm       comm;
1079:   PetscMPIInt    size;
1080:   const char*    prefix;

1084:   PetscObjectGetComm((PetscObject)dm,&comm);
1085:   MPI_Comm_size(comm,&size);
1086:   DMGetOptionsPrefix(dm,&prefix);
1087:   DMDAGetInfo(dm,NULL,&M,&N,NULL,&m,&n,NULL,&ndof,&nsw,NULL,NULL,NULL,NULL);
1088:   if (prefix) {PetscViewerASCIIPrintf(v,"DMDA Object:    (%s)    %d MPI processes\n",prefix,size);}
1089:   else {PetscViewerASCIIPrintf(v,"DMDA Object:    %d MPI processes\n",size);}
1090:   PetscViewerASCIIPrintf(v,"  M %D N %D m %D n %D dof %D overlap %D\n",M,N,m,n,ndof,nsw);
1091:   return(0);
1092: }

1094: PetscErrorCode DMView_DA_Short(DM dm,PetscViewer v)
1095: {
1097:   PetscInt       dim;

1100:   DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
1101:   switch (dim) {
1102:   case 2: DMView_DA_Short_2d(dm,v);
1103:     break;
1104:   case 3: DMView_DA_Short_3d(dm,v);
1105:     break;
1106:   }
1107:   return(0);
1108: }