Actual source code: da1.c

petsc-master 2017-11-16

/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell   6/30/95
*/

#include <petsc/private/dmdaimpl.h>

#include <petscdraw.h>
static PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
{
  PetscMPIInt    rank;
  PetscBool      iascii,isdraw,isglvis,isbinary;
  DM_DA          *dd = (DM_DA*)da->data;
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif

  MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
#endif
  if (iascii) {
    PetscViewerFormat format;

    PetscViewerGetFormat(viewer, &format);
    if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL && format != PETSC_VIEWER_ASCII_GLVIS) {
      DMDALocalInfo info;
      DMDAGetLocalInfo(da,&info);
      PetscViewerASCIIPushSynchronized(viewer);
      PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);
      PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);
      PetscViewerFlush(viewer);
      PetscViewerASCIIPopSynchronized(viewer);
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      DMView_DA_GLVis(da,viewer);
    } else {
      DMView_DA_VTK(da, viewer);
    }
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawIsNull(draw,&isnull);
    if (isnull) return(0);

    PetscDrawCheckResizedWindow(draw);
    PetscDrawClear(draw);
    PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);

    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (!rank) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;
      for (xmin_tmp=0; xmin_tmp < dd->M; xmin_tmp++) {
        PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
      }
      xmin = 0.0; xmax = dd->M - 1;
      PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
      PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
    }
    PetscDrawCollectiveEnd(draw);
    PetscDrawFlush(draw);
    PetscDrawPause(draw);

    PetscDrawCollectiveBegin(draw);
    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w) - 1;
    PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);
    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x=xmin; x<=xmax; x++) {
      PetscSNPrintf(node,sizeof(node),"%d",(int)base++);
      PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
    }
    PetscDrawCollectiveEnd(draw);
    PetscDrawFlush(draw);
    PetscDrawPause(draw);
    PetscDrawSave(draw);
  } else if (isglvis) {
    DMView_DA_GLVis(da,viewer);
  } else if (isbinary) {
    DMView_DA_Binary(da,viewer);
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    DMView_DA_Matlab(da,viewer);
#endif
  }
  return(0);
}


PetscErrorCode  DMSetUp_DA_1D(DM da)
{
  DM_DA            *dd   = (DM_DA*)da->data;
  const PetscInt   M     = dd->M;
  const PetscInt   dof   = dd->w;
  const PetscInt   s     = dd->s;
  const PetscInt   sDist = s;  /* stencil distance in points */
  const PetscInt   *lx   = dd->lx;
  DMBoundaryType   bx    = dd->bx;
  MPI_Comm         comm;
  Vec              local, global;
  VecScatter       gtol;
  IS               to, from;
  PetscBool        flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt      rank, size;
  PetscInt         i,*idx,nn,left,xs,xe,x,Xs,Xe,start,m,IXs,IXe;
  PetscErrorCode   ierr;

  PetscObjectGetComm((PetscObject) da, &comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if no data is communicated (s == 0) it is OK for some processes to own nothing, so these checks apply only when s > 0 */
    if (M < m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
    if ((M-1) < s && size > 1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    PetscMalloc1(m, &dd->lx);
    PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_blockcomm",&flg1,NULL);
    PetscOptionsGetBool(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_partition_nodes_at_end",&flg2,NULL);
    if (flg1) {      /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The leftover nodes are evenly distributed across the last processes */
      x = (M + rank)/m;
      if (M/m == x) xs = rank*x;
      else          xs = rank*(x-1) + (M+rank)%(x*m);
    } else { /* The leftover nodes are evenly distributed across the first k processes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M/m) + M % m);
      else                 xs = rank * (PetscInt)(M/m) + rank;
    }
    MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);
    for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
    dd->lx[m-1] = M - dd->lx[m-1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) xs += lx[i];
    /* verify that the user-provided data is consistent */
    left = xs;
    for (i=rank; i<size; i++) left += lx[i];
    if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
  }
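
  /*
     Example: with M = 10 global nodes on m = 3 processes under the regular
     PETSc distribution above, M/m = 3 and M % m = 1, so rank 0 gets x = 4
     nodes starting at xs = 0, rank 1 gets x = 3 starting at xs = 4, and
     rank 2 gets x = 3 starting at xs = 7; the ownership ranges are
     [0,4), [4,7) and [7,10).
  */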

  /*
     check if the scatter requires more than one process neighbor or wraps around
     the domain more than once
  */
  if ((x < s) & ((M > 1) | (bx == DM_BOUNDARY_PERIODIC))) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);

  xe  = xs + x;

  /* determine the ghost region (Xs,Xe) and the region scattered into (IXs,IXe) */
  if (xs-sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe+sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
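
  /*
     Example: with M = 10, s = sDist = 2, and bx = DM_BOUNDARY_NONE, a process
     owning [4,7) gets Xs = IXs = 2 and Xe = IXe = 9, while the last process,
     owning [7,10), gets Xs = IXs = 5 and Xe = IXe = 10 (the ghost region is
     clipped at the physical boundary). With DM_BOUNDARY_GHOSTED the first
     process keeps Xs = -2 (the local ghost slots exist) but IXs = 0 (nothing
     is scattered into them).
  */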

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof*x;
  VecCreateMPIWithArray(comm,dof,dd->Nlocal,PETSC_DECIDE,NULL,&global);
  dd->nlocal = dof*(Xe-Xs);
  VecCreateSeqWithArray(PETSC_COMM_SELF,dof,dd->nlocal,NULL,&local);

  VecGetOwnershipRange(global,&start,NULL);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ISCreateStride(comm,dof*(IXe-IXs),dof*(IXs-Xs),1,&to);
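
  /*
     Example: for the DM_BOUNDARY_GHOSTED first process above with M = 10,
     s = 2, dof = 1, owning [0,4): Xs = -2, IXs = 0, Xe = IXe = 6, so the
     local (ghosted) vector has length Xe-Xs = 8 and "to" is the stride
     2,3,...,7; scattered values land in local positions 2..7, while
     positions 0 and 1 (ghost slots outside the physical domain) are left
     untouched.
  */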

  PetscMalloc1(x+2*sDist,&idx);
  PetscLogObjectMemory((PetscObject)da,(x+2*sDist)*sizeof(PetscInt));

  for (i=0; i<IXs-Xs; i++) idx[i] = -1; /* prepend with -1s if needed for the ghosted case */

  nn = IXs-Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle the periodic case first */
    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = M+(xs-sDist+i);
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = (xe+i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle the mirror boundary case */
    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i)>=0) idx[nn++] = xs-sDist+i;
      else                 idx[nn++] = sDist - i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;  /* Non-ghost points */

    for (i=0; i<sDist; i++) { /* Right ghost points */
      if ((xe+i)<M) idx[nn++] = xe+i;
      else          idx[nn++] = M - (i + 2);
    }
  } else {      /* Now do all cases with no periodicity */
    if (0 <= xs-sDist) {
      for (i=0; i<sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i=0; i<xs; i++) idx[nn++] = i;
    }

    for (i=0; i<x; i++) idx[nn++] = xs + i;

    if ((xe+sDist)<=M) {
      for (i=0; i<sDist; i++) idx[nn++] = xe+i;
    } else {
      for (i=xe; i<M; i++) idx[nn++] = i;
    }
  }
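
  /*
     Example of the wrapped ghost indices: with M = 10 and s = 1, a periodic
     process owning [0,4) maps its left ghost (global point -1) to index 9
     and its right ghost to index 4. With DM_BOUNDARY_MIRROR on a single
     process owning [0,10), the left ghost reflects to index 1 (sDist - i
     with i = 0) and the right ghost reflects to index 8 (M - (i + 2) with
     i = 0).
  */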

  ISCreateBlock(comm,dof,nn-IXs+Xs,&idx[IXs-Xs],PETSC_USE_POINTER,&from);
  VecScatterCreate(global,from,local,to,&gtol);
  PetscLogObjectParent((PetscObject)da,(PetscObject)gtol);
  ISDestroy(&to);
  ISDestroy(&from);
  VecDestroy(&local);
  VecDestroy(&global);

  dd->xs = dof*xs; dd->xe = dof*xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
  dd->Xs = dof*Xs; dd->Xe = dof*Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof*xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector; this allows the use
     of VecSetValuesLocal().
  */
  for (i=0; i<Xe-IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for the ghosted case */
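
  /*
     Note: the -1 entries mark local ghost slots that lie outside the physical
     domain (the DM_BOUNDARY_GHOSTED case). VecSetValues() ignores negative
     indices, so values set into those slots via VecSetValuesLocal() are
     simply dropped.
  */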

  ISLocalToGlobalMappingCreate(comm,dof,nn,idx,PETSC_OWN_POINTER,&da->ltogmap);
  PetscLogObjectParent((PetscObject)da,(PetscObject)da->ltogmap);

  return(0);
}


/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, or DM_BOUNDARY_PERIODIC.
.  M - global dimension of the array (use -M to indicate that it may be set
          from the command line with -da_grid_x <M>)
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or NULL. If non-null, it must be of length equal to the number of processes in the MPI_Comm.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -dm_view - Calls DMView() at the conclusion of DMDACreate1d()
.  -da_grid_x <nx> - number of grid points in the x direction; can be set if M < 0
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the DMDA n times before creating it, if M < 0

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
   and DMCreateLocalVector(), and to VecDuplicate() if more are needed.

   You must call DMSetUp() after this call before using this DM.

   If you wish to use the options database to change values in the DMDA, call DMSetFromOptions() after this call
   but before DMSetUp().

.keywords: distributed array, create, one-dimensional

.seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
          DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMLocalToLocalBegin(), DMLocalToLocalEnd(), DMDAGetRefinementFactor(),
          DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

@*/
PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscMPIInt    size;

  DMDACreate(comm, da);
  DMSetDimension(*da, 1);
  DMDASetSizes(*da, M, 1, 1);
  MPI_Comm_size(comm, &size);
  DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);
  DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE);
  DMDASetDof(*da, dof);
  DMDASetStencilWidth(*da, s);
  DMDASetOwnershipRanges(*da, lx, NULL, NULL);
  return(0);
}
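
A minimal usage sketch (not part of da1.c; error checking is omitted for
brevity), following the Notes in the DMDACreate1d() man page above: create the
DMDA, optionally call DMSetFromOptions(), call DMSetUp(), and only then create
vectors.

#include <petscdmda.h>

int main(int argc,char **argv)
{
  DM  da;
  Vec g,l;

  PetscInitialize(&argc,&argv,NULL,NULL);
  /* 10 global points, periodic boundary, 1 dof per node, stencil width 1 */
  DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,10,1,1,NULL,&da);
  DMSetFromOptions(da);   /* optional: honor -da_grid_x etc. */
  DMSetUp(da);            /* required before the DM is used  */
  DMCreateGlobalVector(da,&g);
  DMCreateLocalVector(da,&l);
  /* ... use DMGlobalToLocalBegin/End(da,g,INSERT_VALUES,l) to fill ghosts ... */
  VecDestroy(&l);
  VecDestroy(&g);
  DMDestroy(&da);
  PetscFinalize();
  return 0;
}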