Actual source code: dacorn.c

petsc-3.3-p7 2013-05-11
  2: /*
  3:   Code for manipulating distributed regular arrays in parallel.
  4: */

  6: #include <petsc-private/daimpl.h>    /*I   "petscdmda.h"   I*/

 10: /*@
 11:    DMDASetCoordinates - Sets into the DMDA a vector that indicates the 
 12:       coordinates of the local nodes (NOT including ghost nodes).

 14:    Collective on DMDA

 16:    Input Parameters:
 17: +  da - the distributed array
 18: -  c - coordinate vector

 20:    Note:
 21:     The coordinates should NOT include those for all ghost points

 23:   Level: intermediate

 25: .keywords: distributed array, get, corners, nodes, local indices, coordinates

 27: .seealso: DMDASetGhostedCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
 28: @*/
 29: PetscErrorCode  DMDASetCoordinates(DM da,Vec c)
 30: {
 32:   DM_DA          *dd = (DM_DA*)da->data;
 33:   PetscInt       bs;

 38:   VecGetBlockSize(c,&bs);
 39:   if (bs != dd->dim) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_INCOMP,"Block size of vector must match dimension of DMDA");
 40:   PetscObjectReference((PetscObject)c);
 41:   VecDestroy(&dd->coordinates);
 42:   dd->coordinates = c;
 43:   VecDestroy(&dd->ghosted_coordinates);
 44:   return(0);
 45: }
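
/*  Usage sketch (added by the editor, not part of dacorn.c): builds a coordinate
    vector on the coordinate DMDA and hands it to the DMDA with DMDASetCoordinates().
    The helper name SetStretchedCoordinates1d and the quadratic stretching are
    illustrative assumptions; only petsc-3.3 calls documented in this file and in
    petscdmda.h are used.  Assumes a 1d DMDA.  */
static PetscErrorCode SetStretchedCoordinates1d(DM da,PetscReal xmin,PetscReal xmax)
{
  PetscErrorCode ierr;
  DM             cda;
  Vec            c;
  PetscInt       i,xs,xm,M;
  PetscScalar    *x;

  PetscFunctionBegin;
  ierr = DMDAGetInfo(da,0,&M,0,0,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetCoordinateDA(da,&cda);CHKERRQ(ierr);            /* dof of cda equals the spatial dimension */
  ierr = DMCreateGlobalVector(cda,&c);CHKERRQ(ierr);
  ierr = DMDAGetCorners(da,&xs,0,0,&xm,0,0);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(cda,c,&x);CHKERRQ(ierr);
  for (i=xs; i<xs+xm; i++) {
    PetscReal t = (M > 1) ? ((PetscReal)i)/(M-1) : 0.0;
    x[i] = xmin + (xmax - xmin)*t*t;                            /* quadratically stretched grid */
  }
  ierr = DMDAVecRestoreArray(cda,c,&x);CHKERRQ(ierr);
  ierr = DMDASetCoordinates(da,c);CHKERRQ(ierr);                /* the DMDA takes its own reference */
  ierr = VecDestroy(&c);CHKERRQ(ierr);                          /* drop the caller's reference */
  PetscFunctionReturn(0);
}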

 49: /*@
 50:    DMDASetGhostedCoordinates - Sets into the DMDA a vector that indicates the 
 51:       coordinates of the local nodes, including ghost nodes.

 53:    Collective on DMDA

 55:    Input Parameters:
 56: +  da - the distributed array
 57: -  c - coordinate vector

 59:    Note:
 60:     The coordinates of interior ghost points can be set using DMDASetCoordinates()
 61:     followed by DMDAGetGhostedCoordinates().  This routine is intended to enable setting
 62:     the coordinates of ghost points that lie outside of the domain.

 64:     Non-ghosted coordinates, if set, are assumed still valid.

 66:   Level: intermediate

 68: .keywords: distributed array, get, corners, nodes, local indices, coordinates

 70: .seealso: DMDASetCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
 71: @*/
 72: PetscErrorCode  DMDASetGhostedCoordinates(DM da,Vec c)
 73: {
 75:   DM_DA          *dd = (DM_DA*)da->data;
 76:   PetscInt       bs;

 81:   VecGetBlockSize(c,&bs);
 82:   if (bs != dd->dim) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_INCOMP,"Block size of vector must match dimension of DMDA");
 83:   PetscObjectReference((PetscObject)c);
 84:   VecDestroy(&dd->ghosted_coordinates);
 85:   dd->ghosted_coordinates = c;
 86:   return(0);
 87: }
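
/*  Usage sketch (added by the editor, not part of dacorn.c): the pattern described in
    the note above -- build the default ghosted coordinates by scattering the
    non-ghosted ones, then overwrite the entries of ghost nodes that lie outside the
    physical domain and push the result back with DMDASetGhostedCoordinates().  The
    helper name, the 1d setting, and the periodic wrap by the domain length L are
    illustrative assumptions.  */
static PetscErrorCode WrapPeriodicGhostCoordinates1d(DM da,PetscReal L)
{
  PetscErrorCode ierr;
  DM             cda;
  Vec            c,gc;
  PetscInt       i,gxs,gxm,M;
  PetscScalar    *x;

  PetscFunctionBegin;
  ierr = DMDAGetCoordinates(da,&c);CHKERRQ(ierr);
  if (!c) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"Call DMDASetCoordinates() first");
  ierr = DMDAGetInfo(da,0,&M,0,0,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetCoordinateDA(da,&cda);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(cda,&gc);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(cda,c,INSERT_VALUES,gc);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(cda,c,INSERT_VALUES,gc);CHKERRQ(ierr);
  ierr = DMDAGetGhostCorners(da,&gxs,0,0,&gxm,0,0);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(cda,gc,&x);CHKERRQ(ierr);
  for (i=gxs; i<gxs+gxm; i++) {
    if (i < 0)  x[i] -= L;     /* ghost nodes to the left of the domain wrap around */
    if (i >= M) x[i] += L;     /* ghost nodes to the right of the domain wrap around */
  }
  ierr = DMDAVecRestoreArray(cda,gc,&x);CHKERRQ(ierr);
  ierr = DMDASetGhostedCoordinates(da,gc);CHKERRQ(ierr);        /* the DMDA takes its own reference */
  ierr = VecDestroy(&gc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}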

 91: /*@
 92:    DMDAGetCoordinates - Gets the node coordinates associated with a DMDA.

 94:    Not Collective

 96:    Input Parameter:
 97: .  da - the distributed array

 99:    Output Parameter:
100: .  c - coordinate vector

102:    Note:
103:     Each process has only the coordinates for its local nodes (it does NOT have the
104:     coordinates for the ghost nodes).

106:     For two and three dimensions the coordinates are interlaced (x_0,y_0,x_1,y_1,...)
107:     and (x_0,y_0,z_0,x_1,y_1,z_1,...).

109:   Level: intermediate

111: .keywords: distributed array, get, corners, nodes, local indices, coordinates

113: .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
114: @*/
115: PetscErrorCode  DMDAGetCoordinates(DM da,Vec *c)
116: {
117:   DM_DA          *dd = (DM_DA*)da->data;
121:   *c = dd->coordinates;
122:   return(0);
123: }
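
/*  Usage sketch (added by the editor, not part of dacorn.c): reads the coordinates of
    the local (non-ghost) nodes directly from the vector returned by
    DMDAGetCoordinates(), illustrating the interlaced storage described in the note
    above.  Assumes a 2d DMDA; the helper name is an illustrative assumption.  */
static PetscErrorCode PrintLocalNodeCoordinates2d(DM da)
{
  PetscErrorCode    ierr;
  Vec               c;
  const PetscScalar *coors;
  PetscInt          i,n;

  PetscFunctionBegin;
  ierr = DMDAGetCoordinates(da,&c);CHKERRQ(ierr);
  if (!c) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"Call DMDASetCoordinates() or DMDASetUniformCoordinates() first");
  ierr = VecGetLocalSize(c,&n);CHKERRQ(ierr);
  ierr = VecGetArrayRead(c,&coors);CHKERRQ(ierr);
  for (i=0; i<n/2; i++) {      /* interlaced: (x_0,y_0,x_1,y_1,...) */
    ierr = PetscSynchronizedPrintf(((PetscObject)da)->comm,"local node %D: (%G,%G)\n",i,PetscRealPart(coors[2*i]),PetscRealPart(coors[2*i+1]));CHKERRQ(ierr);
  }
  ierr = PetscSynchronizedFlush(((PetscObject)da)->comm);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(c,&coors);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}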

127: /*@
128:    DMDAGetCoordinateDA - Gets the DMDA that scatters between global and local DMDA coordinates

130:    Collective on DMDA

132:    Input Parameter:
133: .  da - the distributed array

135:    Output Parameter:
136: .  cda - the coordinate DMDA

138:   Level: intermediate

140: .keywords: distributed array, get, corners, nodes, local indices, coordinates

142: .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
143: @*/
144: PetscErrorCode  DMDAGetCoordinateDA(DM da,DM *cda)
145: {
146:   PetscMPIInt    size;
148:   DM_DA          *dd = (DM_DA*)da->data;

151:   if (!dd->da_coordinates) {
152:     MPI_Comm_size(((PetscObject)da)->comm,&size);
153:     if (dd->dim == 1) {
154:       PetscInt         s,m,*lc,l;
155:       DMDABoundaryType bx;
156:       DMDAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);
157:       DMDAGetCorners(da,0,0,0,&l,0,0);
158:       PetscMalloc(size*sizeof(PetscInt),&lc);
159:       MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
160:       DMDACreate1d(((PetscObject)da)->comm,bx,m,1,s,lc,&dd->da_coordinates);
161:       PetscFree(lc);
162:     } else if (dd->dim == 2) {
163:       PetscInt         i,s,m,*lc,*ld,l,k,n,M,N;
164:       DMDABoundaryType bx,by;
165:       DMDAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);
166:       DMDAGetCorners(da,0,0,0,&l,&k,0);
167:       PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);
168:       /* only first M values in lc matter */
169:       MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
170:       /* every Mth value in ld matters */
171:       MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);
172:       for ( i=0; i<N; i++) {
173:         ld[i] = ld[M*i];
174:       }
175:       DMDACreate2d(((PetscObject)da)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&dd->da_coordinates);
176:       PetscFree2(lc,ld);
177:     } else if (dd->dim == 3) {
178:       PetscInt         i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
179:       DMDABoundaryType bx,by,bz;
180:       DMDAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);
181:       DMDAGetCorners(da,0,0,0,&l,&k,&q);
182:       PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);
183:       /* only first M values in lc matter */
184:       MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
185:       /* every Mth value in ld matters */
186:       MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);
187:       for ( i=0; i<N; i++) {
188:         ld[i] = ld[M*i];
189:       }
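            /* every M*Nth value in le matters */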
190:       MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);
191:       for ( i=0; i<P; i++) {
192:         le[i] = le[M*N*i];
193:       }
194:       DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&dd->da_coordinates);
195:       PetscFree3(lc,ld,le);
196:     }
197:   }
198:   *cda = dd->da_coordinates;
199:   return(0);
200: }
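
/*  Usage sketch (added by the editor, not part of dacorn.c): the common way to use the
    coordinate DMDA returned by DMDAGetCoordinateDA() -- access the coordinate vector
    through DMDAVecGetArray() with the DMDACoor2d struct so that coordinates are
    addressed by the same (i,j) indices as the field being discretized.  Assumes a 2d
    DMDA whose coordinates have been set; the helper name is an illustrative assumption.  */
static PetscErrorCode LocalAverageSpacingX2d(DM da,PetscReal *havg)
{
  PetscErrorCode ierr;
  DM             cda;
  Vec            gc;
  DMDACoor2d     **coors;
  PetscInt       i,j,xs,ys,xm,ym,cnt = 0;
  PetscReal      sum = 0.0;

  PetscFunctionBegin;
  ierr = DMDAGetCoordinateDA(da,&cda);CHKERRQ(ierr);
  ierr = DMDAGetGhostedCoordinates(da,&gc);CHKERRQ(ierr);
  ierr = DMDAGetCorners(da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(cda,gc,&coors);CHKERRQ(ierr);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm-1; i++) {
      sum += PetscRealPart(coors[j][i+1].x - coors[j][i].x);   /* x-spacing between neighbouring local nodes */
      cnt++;
    }
  }
  ierr = DMDAVecRestoreArray(cda,gc,&coors);CHKERRQ(ierr);
  *havg = cnt ? sum/cnt : 0.0;                                 /* local (per-process) average */
  PetscFunctionReturn(0);
}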


205: /*@
206:    DMDAGetGhostedCoordinates - Gets the node coordinates associated with a DMDA, including the ghost nodes.

208:    Collective on DMDA

210:    Input Parameter:
211: .  da - the distributed array

213:    Output Parameter:
214: .  c - coordinate vector

216:    Note:
217:     Each process has only the coordinates for its local AND ghost nodes

219:     For two and three dimensions the coordinates are interlaced (x_0,y_0,x_1,y_1,...)
220:     and (x_0,y_0,z_0,x_1,y_1,z_1,...).

222:   Level: intermediate

224: .keywords: distributed array, get, corners, nodes, local indices, coordinates

226: .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetCoordinateDA()
227: @*/
228: PetscErrorCode  DMDAGetGhostedCoordinates(DM da,Vec *c)
229: {
231:   DM_DA          *dd = (DM_DA*)da->data;

236:   if (!dd->coordinates) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"You must call DMDASetCoordinates() before this call");
237:   if (!dd->ghosted_coordinates) {
238:     DM dac;
239:     DMDAGetCoordinateDA(da,&dac);
240:     DMCreateLocalVector(dac,&dd->ghosted_coordinates);
241:     DMGlobalToLocalBegin(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);
242:     DMGlobalToLocalEnd(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);
243:   }
244:   *c = dd->ghosted_coordinates;
245:   return(0);
246: }
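
/*  Usage sketch (added by the editor, not part of dacorn.c): loops over the local
    nodes INCLUDING ghost nodes, pairing DMDAGetGhostCorners() with the ghosted
    coordinate vector, e.g. to tabulate a coefficient that is also needed at ghost
    points.  Assumes a 2d DMDA with dof = 1 and that kappa_local is a local (ghosted)
    vector of da; the helper name and the coefficient formula are illustrative
    assumptions.  */
static PetscErrorCode TabulateCoefficient2d(DM da,Vec kappa_local)
{
  PetscErrorCode ierr;
  DM             cda;
  Vec            gc;
  DMDACoor2d     **coors;
  PetscScalar    **kappa;
  PetscInt       i,j,gxs,gys,gxm,gym;

  PetscFunctionBegin;
  ierr = DMDAGetCoordinateDA(da,&cda);CHKERRQ(ierr);
  ierr = DMDAGetGhostedCoordinates(da,&gc);CHKERRQ(ierr);
  ierr = DMDAGetGhostCorners(da,&gxs,&gys,0,&gxm,&gym,0);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(cda,gc,&coors);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,kappa_local,&kappa);CHKERRQ(ierr);
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      kappa[j][i] = 1.0 + coors[j][i].x*coors[j][i].x + coors[j][i].y*coors[j][i].y;
    }
  }
  ierr = DMDAVecRestoreArray(da,kappa_local,&kappa);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(cda,gc,&coors);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}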

250: /*@C
251:    DMDASetFieldName - Sets the name of an individual field component in multicomponent
252:    vectors associated with a DMDA.

254:    Not Collective

256:    Input Parameters:
257: +  da - the distributed array
258: .  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the 
259:         number of degrees of freedom per node within the DMDA
260: -  name - the name of the field (component)

262:   Level: intermediate

264: .keywords: distributed array, get, component name

266: .seealso: DMDAGetFieldName()
267: @*/
268: PetscErrorCode  DMDASetFieldName(DM da,PetscInt nf,const char name[])
269: {
271:   DM_DA          *dd = (DM_DA*)da->data;

275:   if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
276:   PetscFree(dd->fieldname[nf]);
277:   PetscStrallocpy(name,&dd->fieldname[nf]);
278:   return(0);
279: }
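
/*  Usage sketch (added by the editor, not part of dacorn.c): label the components of a
    multicomponent DMDA right after creation so that viewers can name them.  Assumes the
    DMDA was created with dof = 3; the helper name and the field names are illustrative
    assumptions.  */
static PetscErrorCode NameVelocityPressureFields(DM da)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMDASetFieldName(da,0,"u velocity");CHKERRQ(ierr);
  ierr = DMDASetFieldName(da,1,"v velocity");CHKERRQ(ierr);
  ierr = DMDASetFieldName(da,2,"pressure");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}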

283: /*@C
284:    DMDAGetFieldName - Gets the name of an individual field component in multicomponent
285:    vectors associated with a DMDA.

287:    Not Collective

289:    Input Parameters:
290: +  da - the distributed array
291: -  nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the 
292:         number of degrees of freedom per node within the DMDA

294:    Output Parameter:
295: .  name - the name of the field (component)

297:   Level: intermediate

299: .keywords: distributed array, get, component name

301: .seealso: DMDASetFieldName()
302: @*/
303: PetscErrorCode  DMDAGetFieldName(DM da,PetscInt nf,const char **name)
304: {
305:   DM_DA          *dd = (DM_DA*)da->data;

310:   if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
311:   *name = dd->fieldname[nf];
312:   return(0);
313: }
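
/*  Usage sketch (added by the editor, not part of dacorn.c): reads the field names back
    with DMDAGetFieldName().  The returned string is owned by the DMDA and must not be
    freed by the caller; the helper name is an illustrative assumption.  */
static PetscErrorCode PrintFieldNames(DM da)
{
  PetscErrorCode ierr;
  PetscInt       f,dof;
  const char     *name;

  PetscFunctionBegin;
  ierr = DMDAGetInfo(da,0,0,0,0,0,0,0,&dof,0,0,0,0,0);CHKERRQ(ierr);
  for (f=0; f<dof; f++) {
    ierr = DMDAGetFieldName(da,f,&name);CHKERRQ(ierr);
    ierr = PetscPrintf(((PetscObject)da)->comm,"field %D: %s\n",f,name ? name : "(unnamed)");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}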

317: /*@
318:    DMDAGetCorners - Returns the global (x,y,z) indices of the lower left
319:    corner of the local region, excluding ghost points.

321:    Not Collective

323:    Input Parameter:
324: .  da - the distributed array

326:    Output Parameters:
327: +  x,y,z - the corner indices (where y and z are optional; these are used
328:            for 2D and 3D problems)
329: -  m,n,p - widths in the corresponding directions (where n and p are optional;
330:            these are used for 2D and 3D problems)

332:    Note:
333:    The corner information is independent of the number of degrees of 
334:    freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and
335:    m, n, p can be thought of as coordinates on a logical grid, where each
336:    grid point has (potentially) several degrees of freedom.
337:    Any of y, z, n, and p can be passed in as PETSC_NULL if not needed.

339:   Level: beginner

341: .keywords: distributed array, get, corners, nodes, local indices

343: .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges()
344: @*/
345: PetscErrorCode  DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p)
346: {
347:   PetscInt w;
348:   DM_DA    *dd = (DM_DA*)da->data;

352:   /* since the xs, xe ... have all been multiplied by the number of degrees 
353:      of freedom per cell, w = dd->w, we divide that out before returning.*/
354:   w = dd->w;
 355:   if (x) *x = dd->xs/w; if (m) *m = (dd->xe - dd->xs)/w;
356:   /* the y and z have NOT been multiplied by w */
357:   if (y) *y = dd->ys;   if (n) *n = (dd->ye - dd->ys);
358:   if (z) *z = dd->zs;   if (p) *p = (dd->ze - dd->zs);
359:   return(0);
360: }
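
/*  Usage sketch (added by the editor, not part of dacorn.c): the standard loop over the
    locally owned part of the logical grid using the corner information returned by
    DMDAGetCorners().  Assumes a 2d DMDA with dof = 1 and that g is a global vector of
    da; the helper name is an illustrative assumption.  */
static PetscErrorCode FillWithRank2d(DM da,Vec g)
{
  PetscErrorCode ierr;
  PetscScalar    **a;
  PetscInt       i,j,xs,ys,xm,ym;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetCorners(da,&xs,&ys,0,&xm,&ym,0);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,g,&a);CHKERRQ(ierr);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) a[j][i] = (PetscScalar)rank;      /* global (i,j) indexing */
  }
  ierr = DMDAVecRestoreArray(da,g,&a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}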

364: /*@
365:    DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA.

367:    Not Collective

369:    Input Parameter:
370: .  da - the distributed array

372:    Output Parameters:
373: +  lmin - local minimum coordinates (length dim, optional)
374: -  lmax - local maximum coordinates (length dim, optional)

376:   Level: beginner

378: .keywords: distributed array, get, coordinates

380: .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetBoundingBox()
381: @*/
382: PetscErrorCode  DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[])
383: {
384:   PetscErrorCode    ierr;
385:   Vec               coords  = PETSC_NULL;
386:   PetscInt          dim,i,j;
387:   const PetscScalar *local_coords;
388:   PetscReal         min[3]={PETSC_MAX_REAL,PETSC_MAX_REAL,PETSC_MAX_REAL},max[3]={PETSC_MIN_REAL,PETSC_MIN_REAL,PETSC_MIN_REAL};
389:   PetscInt          N,Ni;
390:   DM_DA             *dd = (DM_DA*)da->data;

394:   dim = dd->dim;
395:   DMDAGetCoordinates(da,&coords);
396:   if (coords) {
397:     VecGetArrayRead(coords,&local_coords);
398:     VecGetLocalSize(coords,&N);
399:     Ni = N/dim;
400:     for (i=0; i<Ni; i++) {
401:       for (j=0; j<3; j++) {
402:         min[j] = j < dim ? PetscMin(min[j],PetscRealPart(local_coords[i*dim+j])) : 0;
403:         max[j] = j < dim ? PetscMax(max[j],PetscRealPart(local_coords[i*dim+j])) : 0;
404:       }
405:     }
406:     VecRestoreArrayRead(coords,&local_coords);
407:   } else {                      /* Just use grid indices */
408:     DMDALocalInfo info;
409:     DMDAGetLocalInfo(da,&info);
410:     min[0] = info.xs;
411:     min[1] = info.ys;
412:     min[2] = info.zs;
413:     max[0] = info.xs + info.xm-1;
414:     max[1] = info.ys + info.ym-1;
415:     max[2] = info.zs + info.zm-1;
416:   }
417:   if (lmin) {PetscMemcpy(lmin,min,dim*sizeof(PetscReal));}
418:   if (lmax) {PetscMemcpy(lmax,max,dim*sizeof(PetscReal));}
419:   return(0);
420: }
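
/*  Usage sketch (added by the editor, not part of dacorn.c): prints each process's local
    bounding box; if no coordinates have been set the box is reported in grid indices, as
    in the fallback branch above.  Assumes a 2d DMDA; the helper name is an illustrative
    assumption.  */
static PetscErrorCode PrintLocalBoundingBox2d(DM da)
{
  PetscErrorCode ierr;
  PetscReal      lmin[3],lmax[3];
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr);
  ierr = PetscSynchronizedPrintf(((PetscObject)da)->comm,"[%d] x in [%G,%G], y in [%G,%G]\n",rank,lmin[0],lmax[0],lmin[1],lmax[1]);CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(((PetscObject)da)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}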

424: /*@
425:    DMDAGetBoundingBox - Returns the global bounding box for the DMDA.

427:    Collective on DMDA

429:    Input Parameter:
430: .  da - the distributed array

432:    Output Parameters:
433: +  gmin - global minimum coordinates (length dim, optional)
434: -  gmax - global maximum coordinates (length dim, optional)

436:   Level: beginner

438: .keywords: distributed array, get, coordinates

440: .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetLocalBoundingBox()
441: @*/
442: PetscErrorCode  DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[])
443: {
445:   PetscMPIInt    count;
446:   PetscReal      lmin[3],lmax[3];
447:   DM_DA          *dd = (DM_DA*)da->data;

451:   count = PetscMPIIntCast(dd->dim);
452:   DMDAGetLocalBoundingBox(da,lmin,lmax);
453:   if (gmin) {MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPIU_MIN,((PetscObject)da)->comm);}
454:   if (gmax) {MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPIU_MAX,((PetscObject)da)->comm);}
455:   return(0);
456: }
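
/*  Usage sketch (added by the editor, not part of dacorn.c): recovers the extent of the
    whole domain from the global bounding box; collective because of the MPI_Allreduce()
    above.  Assumes a 2d DMDA with coordinates set; the helper name is an illustrative
    assumption.  */
static PetscErrorCode GetDomainSize2d(DM da,PetscReal *Lx,PetscReal *Ly)
{
  PetscErrorCode ierr;
  PetscReal      gmin[3],gmax[3];

  PetscFunctionBegin;
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
  *Lx = gmax[0] - gmin[0];
  *Ly = gmax[1] - gmin[1];
  PetscFunctionReturn(0);
}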

460: /*@
461:    DMDAGetReducedDA - Gets a DMDA with the same parallel layout but with fewer or more fields

463:    Collective on DMDA

465:    Input Parameters:
466: +  da - the distributed array
467: -  nfields - number of fields in the new DMDA

469:    Output Parameter:
470: .  nda - the new DMDA

472:   Level: intermediate

474: .keywords: distributed array, get, corners, nodes, local indices, coordinates

476: .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
477: @*/
478: PetscErrorCode  DMDAGetReducedDA(DM da,PetscInt nfields,DM *nda)
479: {
481:   DM_DA            *dd = (DM_DA*)da->data;

483:   PetscInt          s,m,n,p,M,N,P,dim;
484:   const PetscInt   *lx,*ly,*lz;
485:   DMDABoundaryType  bx,by,bz;
486:   DMDAStencilType   stencil_type;
487: 
489:   DMDAGetInfo(da,&dim,&M,&N,&P,&m,&n,&p,0,&s,&bx,&by,&bz,&stencil_type);
490:   DMDAGetOwnershipRanges(da,&lx,&ly,&lz);
491:   if (dim == 1) {
 492:     DMDACreate1d(((PetscObject)da)->comm,bx,M,nfields,s,lx,nda);
493:   } else if (dim == 2) {
494:     DMDACreate2d(((PetscObject)da)->comm,bx,by,stencil_type,M,N,m,n,nfields,s,lx,ly,nda);
495:   } else if (dim == 3) {
496:     DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,stencil_type,M,N,P,m,n,p,nfields,s,lx,ly,lz,nda);
497:   }
498:   if (dd->coordinates) {
499:     DM_DA *ndd = (DM_DA*)(*nda)->data;
500:     PetscObjectReference((PetscObject)dd->coordinates);
501:     ndd->coordinates = dd->coordinates;
502:   }
503:   return(0);
504: }
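
/*  Usage sketch (added by the editor, not part of dacorn.c): obtains a single-field DMDA
    with the same parallel layout via DMDAGetReducedDA(), e.g. to hold one scalar work
    field alongside a multicomponent problem.  The caller owns and must destroy both the
    returned DM and the vector; the helper name is an illustrative assumption.  */
static PetscErrorCode CreateScalarWorkVector(DM da,DM *sda,Vec *work)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMDAGetReducedDA(da,1,sda);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(*sda,work);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}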