Actual source code: da1.c

petsc-3.5.4 2015-05-23
  2: /*
  3:    Code for manipulating distributed regular 1d arrays in parallel.
  4:    This file was created by Peter Mell   6/30/95
  5: */

  7: #include <petsc-private/dmdaimpl.h>     /*I  "petscdmda.h"   I*/

  9: #include <petscdraw.h>
 12: PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
 13: {
 15:   PetscMPIInt    rank;
 16:   PetscBool      iascii,isdraw,isbinary;
 17:   DM_DA          *dd = (DM_DA*)da->data;
 18: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 19:   PetscBool ismatlab;
 20: #endif

 23:   MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);

 25:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 26:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
 27:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
 28: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 29:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
 30: #endif
 31:   if (iascii) {
 32:     PetscViewerFormat format;

 34:     PetscViewerGetFormat(viewer, &format);
 35:     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
 36:       DMDALocalInfo info;
 37:       DMDAGetLocalInfo(da,&info);
 38:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
 39:       PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);
 40:       PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);
 41:       PetscViewerFlush(viewer);
 42:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
 43:     } else {
 44:       DMView_DA_VTK(da, viewer);
 45:     }
 46:   } else if (isdraw) {
 47:     PetscDraw draw;
 48:     double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
 49:     PetscInt  base;
 50:     char      node[10];
 51:     PetscBool isnull;

 53:     PetscViewerDrawGetDraw(viewer,0,&draw);
 54:     PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

 56:     PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
 57:     PetscDrawSynchronizedClear(draw);

 59:     /* first processor draws all node lines */
 60:     if (!rank) {
 61:       PetscInt xmin_tmp;
 62:       ymin = 0.0; ymax = 0.3;

 64:       for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
 65:         PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
 66:       }

 68:       xmin = 0.0; xmax = dd->M - 1;
 69:       PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
 70:       PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
 71:     }

 73:     PetscDrawSynchronizedFlush(draw);
 74:     PetscDrawPause(draw);

 76:     /* draw my box */
 77:     ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
 78:     PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
 79:     PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
 80:     PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
 81:     PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

 83:     /* Put in index numbers */
 84:     base = dd->base / dd->w;
 85:     for (x=xmin; x<=xmax; x++) {
 86:       sprintf(node,"%d",(int)base++);
 87:       PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
 88:     }

 90:     PetscDrawSynchronizedFlush(draw);
 91:     PetscDrawPause(draw);
 92:   } else if (isbinary) {
 93:     DMView_DA_Binary(da,viewer);
 94: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 95:   } else if (ismatlab) {
 96:     DMView_DA_Matlab(da,viewer);
 97: #endif
 98:   }
 99:   return(0);
100: }
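
As a usage sketch (not part of da1.c; sizes are illustrative and error checking is omitted), the viewer routine above is normally reached through DMView() or the -dm_view option on a DMDA created with DMDACreate1d():

  DM da;
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,64,1,1,NULL,&da);
  DMView(da,PETSC_VIEWER_STDOUT_WORLD);   /* takes the ASCII branch above */
  /* running with -dm_view draw would exercise the PetscDraw branch instead */
  DMDestroy(&da);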


105: PetscErrorCode  DMSetUp_DA_1D(DM da)
106: {
107:   DM_DA            *dd   = (DM_DA*)da->data;
108:   const PetscInt   M     = dd->M;
109:   const PetscInt   dof   = dd->w;
110:   const PetscInt   s     = dd->s;
111:   const PetscInt   sDist = s;  /* stencil distance in points */
112:   const PetscInt   *lx   = dd->lx;
113:   DMBoundaryType   bx    = dd->bx;
114:   MPI_Comm         comm;
115:   Vec              local, global;
116:   VecScatter       gtol;
117:   IS               to, from;
118:   PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
119:   PetscMPIInt      rank, size;
120:   PetscInt         i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;
121:   PetscErrorCode   ierr;

124:   PetscObjectGetComm((PetscObject) da, &comm);
125:   MPI_Comm_size(comm,&size);
126:   MPI_Comm_rank(comm,&rank);

128:   dd->m = size;
129:   m     = dd->m;

131:   if (s > 0) {
132:     /* if not communicating data then should be ok to have nothing on some processes */
133:     if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
134:     if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
135:   }

137:   /*
138:      Determine locally owned region
139:      xs is the first local node number, x is the number of local nodes
140:   */
141:   if (!lx) {
142:     PetscMalloc1(m, &dd->lx);
143:     PetscOptionsGetBool(NULL,"-da_partition_blockcomm",&flg1,NULL);
144:     PetscOptionsGetBool(NULL,"-da_partition_nodes_at_end",&flg2,NULL);
145:     if (flg1) {      /* Block Comm type Distribution */
146:       xs = rank*M/m;
147:       x  = (rank + 1)*M/m - xs;
148:     } else if (flg2) { /* The extra nodes are evenly distributed across the last processes */
149:       x = (M + rank)/m;
150:       if (M/m == x) xs = rank*x;
151:       else          xs = rank*(x-1) + (M+rank)%(x*m);
152:     } else { /* The extra nodes are evenly distributed across the first M%m processes */
153:       /* Regular PETSc Distribution */
154:       x = M/m + ((M % m) > rank);
155:       if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
156:       else                 xs = rank * (PetscInt)(M/m) + rank;
157:     }
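    /* Illustration of the default ("Regular PETSc") distribution above:
       M = 10 grid points on m = 3 processes gives M/m = 3 and M%m = 1, so
       rank 0 gets x = 4, xs = 0;  rank 1 gets x = 3, xs = 4;  rank 2 gets
       x = 3, xs = 7, i.e. the single leftover point goes to the first rank. */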
158:     MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);
159:     for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
160:     dd->lx[m-1] = M - dd->lx[m-1];
161:   } else {
162:     x  = lx[rank];
163:     xs = 0;
164:     for (i=0; i<rank; i++) xs += lx[i];
165:     /* verify that data user provided is consistent */
166:     left = xs;
167:     for (i=rank; i<size; i++) left += lx[i];
168:     if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
169:   }

171:   /*
172:    check if the scatter requires more than one process neighbor or wraps around
173:    the domain more than once
174:   */
175:   if ((x < s) && ((M > 1) || (bx == DM_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

177:   xe  = xs + x;

179:   /* determine ghost region (Xs) and region scattered into (IXs)  */
180:   if (xs-sDist > 0) {
181:     Xs  = xs - sDist;
182:     IXs = xs - sDist;
183:   } else {
184:     if (bx) Xs = xs - sDist;
185:     else Xs = 0;
186:     IXs = 0;
187:   }
188:   if (xe+sDist <= M) {
189:     Xe  = xe + sDist;
190:     IXe = xe + sDist;
191:   } else {
192:     if (bx) Xe = xe + sDist;
193:     else Xe = M;
194:     IXe = M;
195:   }

197:   if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
198:     Xs  = xs - sDist;
199:     Xe  = xe + sDist;
200:     IXs = xs - sDist;
201:     IXe = xe + sDist;
202:   }
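  /* Illustration of the bookkeeping above: with s = 1, M = 10 and a non-periodic
     interior rank owning xs = 4, xe = 7, the ghosted range is Xs = 3, Xe = 8 and
     IXs = 3, IXe = 8.  A rank at the physical boundary (xs = 0) gets Xs = IXs = 0
     unless bx is nonzero, in which case ghost slots outside [0,M) are still allocated. */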

204:   /* allocate the base parallel and sequential vectors */
205:   dd->Nlocal = dof*x;
206:   VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global);
207:   dd->nlocal = dof*(Xe-Xs);
208:   VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local);

210:   VecGetOwnershipRange(global,&start,NULL);

212:   /* Create Global to Local Vector Scatter Context */
213:   /* global to local must retrieve ghost points */
214:   ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to);

216:   PetscMalloc1((x+2*(sDist)),&idx);
217:   PetscLogObjectMemory((PetscObject)da,(x+2*(sDist))*sizeof(PetscInt));

219:   for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for the ghosted case */

221:   nn = IXs-Xs;
222:   if (bx == DM_BOUNDARY_PERIODIC) { /* Handle the periodic case first */
223:     for (i=0; i<sDist; i++) {  /* Left ghost points */
224:       if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
225:       else                 idx[nn++] = M+(xs-sDist+i);
226:     }

228:     for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

230:     for (i=0; i<sDist; i++) { /* Right ghost points */
231:       if ((xe+i)<M) idx [nn++] =  xe+i;
232:       else          idx [nn++] = (xe+i) - M;
233:     }
234:   } else if (bx == DM_BOUNDARY_MIRROR) { /* Mirror boundary: reflect out-of-range indices back into the domain */
235:     for (i=0; i<(sDist); i++) {  /* Left ghost points */
236:       if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
237:       else                 idx[nn++] = sDist - i;
238:     }

240:     for (i=0; i<x; i++) idx [nn++] = xs + i;  /* Non-ghost points */

242:     for (i=0; i<(sDist); i++) { /* Right ghost points */
243:       if ((xe+i)<M) idx[nn++] =  xe+i;
244:       else          idx[nn++] = M - (i + 1);
245:     }
246:   } else {      /* Now do all cases with no periodicity */
247:     if (0 <= xs-sDist) {
248:       for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
249:     } else {
250:       for (i=0; i<xs; i++) idx[nn++] = i;
251:     }

253:     for (i=0; i<x; i++) idx [nn++] = xs + i;

255:     if ((xe+sDist)<=M) {
256:       for (i=0; i<sDist; i++) idx[nn++]=xe+i;
257:     } else {
258:       for (i=xe; i<M; i++) idx[nn++]=i;
259:     }
260:   }
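  /* At this point idx[IXs-Xs .. nn-1] holds the global point index from which each
     scattered-into local point is taken.  For example, with periodic boundaries,
     s = 2 and M = 10, the rank owning xs = 0 gets the wrapped left ghosts 8, 9
     followed by its own points 0, 1, ... */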

262:   ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from);
263:   VecScatterCreate(global,from,local,to,&gtol);
264:   PetscLogObjectParent((PetscObject)da,(PetscObject)gtol);
265:   ISDestroy(&to);
266:   ISDestroy(&from);
267:   VecDestroy(&local);
268:   VecDestroy(&global);

270:   dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
271:   dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

273:   dd->gtol      = gtol;
274:   dd->base      = dof*xs;
275:   da->ops->view = DMView_DA_1d;

277:   /*
278:      Set the local to global ordering in the global vector; this allows the use
279:      of VecSetValuesLocal().
280:   */
281:   for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for the ghosted case */

283:   ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap);
284:   PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap);

286:   return(0);
287: }
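
The gtol scatter assembled above is what the generic DMGlobalToLocalBegin()/DMGlobalToLocalEnd() calls drive for a DMDA. A minimal sketch of the intended use (illustrative, error checking omitted, assuming a set-up 1d DMDA da):

  Vec g,l;
  DMCreateGlobalVector(da,&g);                 /* distributed vector, dd->Nlocal = dof*x entries per rank  */
  DMCreateLocalVector(da,&l);                  /* sequential vector, dd->nlocal = dof*(Xe-Xs), with ghosts */
  /* ... fill g ... */
  DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);  /* applies the gtol VecScatter */
  DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);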


292: /*@C
293:    DMDACreate1d - Creates an object that will manage the communication of one-dimensional
294:    regular array data that is distributed across the processes in the given communicator.

296:    Collective on MPI_Comm

298:    Input Parameters:
299: +  comm - MPI communicator
300: .  bx - type of ghost cells at the boundary that the array should have, if any. Use
301:           DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, or DM_BOUNDARY_PERIODIC.
302: .  M - global dimension of the array (use -M to indicate that it may be set to a different value
303:             from the command line with -da_grid_x <M>)
304: .  dof - number of degrees of freedom per node
305: .  s - stencil width
306: -  lx - array containing the number of nodes in the X direction on each processor,
307:         or NULL. If non-NULL, it must have length equal to the number of processes in the MPI_Comm.

309:    Output Parameter:
310: .  da - the resulting distributed array object

312:    Options Database Keys:
313: +  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
314: .  -da_grid_x <nx> - number of grid points in x direction; can set if M < 0
315: .  -da_refine_x <rx> - refinement factor
316: -  -da_refine <n> - refine the DMDA n times before creating it, if M < 0

318:    Level: beginner

320:    Notes:
321:    The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
322:    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
323:    and DMCreateLocalVector(), and with calls to VecDuplicate() if more are needed.

325: .keywords: distributed array, create, one-dimensional

327: .seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
328:           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
329:           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

331: @*/
332: PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
333: {
335:   PetscMPIInt    size;

338:   DMDACreate(comm, da);
339:   DMDASetDim(*da, 1);
340:   DMDASetSizes(*da, M, 1, 1);
341:   MPI_Comm_size(comm, &size);
342:   DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);
343:   DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE);
344:   DMDASetDof(*da, dof);
345:   DMDASetStencilWidth(*da, s);
346:   DMDASetOwnershipRanges(*da, lx, NULL, NULL);
347:   /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
348:   DMSetFromOptions(*da);
349:   DMSetUp(*da);
350:   return(0);
351: }
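
For completeness, a self-contained calling sketch of DMDACreate1d() as documented above (grid size, dof, and stencil width are illustrative; error checking omitted):

  #include <petscdmda.h>

  int main(int argc,char **argv)
  {
    DM  da;
    Vec x;

    PetscInitialize(&argc,&argv,NULL,NULL);
    /* periodic 1d grid: 128 points, 2 dof per point, stencil width 1 */
    DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,128,2,1,NULL,&da);
    DMCreateGlobalVector(da,&x);
    /* ... compute with x; add -dm_view to inspect the layout ... */
    VecDestroy(&x);
    DMDestroy(&da);
    PetscFinalize();
    return 0;
  }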