Actual source code: da1.c

  1: /*
  2:    Code for manipulating distributed regular 1d arrays in parallel.
  3:    This file was created by Peter Mell   6/30/95
  4: */

  6: #include <petsc/private/dmdaimpl.h>

  8: #include <petscdraw.h>
  9: static PetscErrorCode DMView_DA_1d(DM da, PetscViewer viewer)
 10: {
 11:   PetscMPIInt rank;
 12:   PetscBool   iascii, isdraw, isglvis, isbinary;
 13:   DM_DA      *dd = (DM_DA *)da->data;
 14: #if defined(PETSC_HAVE_MATLAB)
 15:   PetscBool ismatlab;
 16: #endif

 18:   PetscFunctionBegin;
 19:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)da), &rank));

 21:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
 22:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
 23:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
 24:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
 25: #if defined(PETSC_HAVE_MATLAB)
 26:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATLAB, &ismatlab));
 27: #endif
 28:   if (iascii) {
 29:     PetscViewerFormat format;

 31:     PetscCall(PetscViewerGetFormat(viewer, &format));
 32:     if (format == PETSC_VIEWER_LOAD_BALANCE) {
 33:       PetscInt      i, nmax = 0, nmin = PETSC_MAX_INT, navg = 0, *nz, nzlocal;
 34:       DMDALocalInfo info;
 35:       PetscMPIInt   size;
 36:       PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
 37:       PetscCall(DMDAGetLocalInfo(da, &info));
 38:       nzlocal = info.xm;
 39:       PetscCall(PetscMalloc1(size, &nz));
 40:       PetscCallMPI(MPI_Allgather(&nzlocal, 1, MPIU_INT, nz, 1, MPIU_INT, PetscObjectComm((PetscObject)da)));
 41:       for (i = 0; i < (PetscInt)size; i++) {
 42:         nmax = PetscMax(nmax, nz[i]);
 43:         nmin = PetscMin(nmin, nz[i]);
 44:         navg += nz[i];
 45:       }
 46:       PetscCall(PetscFree(nz));
 47:       navg = navg / size;
 48:       PetscCall(PetscViewerASCIIPrintf(viewer, "  Load Balance - Grid Points: Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n", nmin, navg, nmax));
 49:       PetscFunctionReturn(PETSC_SUCCESS);
 50:     }
 51:     if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
 52:       DMDALocalInfo info;
 53:       PetscCall(DMDAGetLocalInfo(da, &info));
 54:       PetscCall(PetscViewerASCIIPushSynchronized(viewer));
 55:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Processor [%d] M %" PetscInt_FMT " m %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n", rank, dd->M, dd->m, dd->w, dd->s));
 56:       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "X range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n", info.xs, info.xs + info.xm));
 57:       PetscCall(PetscViewerFlush(viewer));
 58:       PetscCall(PetscViewerASCIIPopSynchronized(viewer));
 59:     } else if (format == PETSC_VIEWER_ASCII_GLVIS) PetscCall(DMView_DA_GLVis(da, viewer));
 60:     else PetscCall(DMView_DA_VTK(da, viewer));
 61:   } else if (isdraw) {
 62:     PetscDraw draw;
 63:     double    ymin = -1, ymax = 1, xmin = -1, xmax = dd->M, x;
 64:     PetscInt  base;
 65:     char      node[10];
 66:     PetscBool isnull;

 68:     PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
 69:     PetscCall(PetscDrawIsNull(draw, &isnull));
 70:     if (isnull) PetscFunctionReturn(PETSC_SUCCESS);

 72:     PetscCall(PetscDrawCheckResizedWindow(draw));
 73:     PetscCall(PetscDrawClear(draw));
 74:     PetscCall(PetscDrawSetCoordinates(draw, xmin, ymin, xmax, ymax));

 76:     PetscDrawCollectiveBegin(draw);
 77:     /* first processor draws all node lines */
 78:     if (rank == 0) {
 79:       PetscInt xmin_tmp;
 80:       ymin = 0.0;
 81:       ymax = 0.3;
 82:       for (xmin_tmp = 0; xmin_tmp < dd->M; xmin_tmp++) PetscCall(PetscDrawLine(draw, (double)xmin_tmp, ymin, (double)xmin_tmp, ymax, PETSC_DRAW_BLACK));
 83:       xmin = 0.0;
 84:       xmax = dd->M - 1;
 85:       PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_BLACK));
 86:       PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_BLACK));
 87:     }
 88:     PetscDrawCollectiveEnd(draw);
 89:     PetscCall(PetscDrawFlush(draw));
 90:     PetscCall(PetscDrawPause(draw));

 92:     PetscDrawCollectiveBegin(draw);
 93:     /* draw my box */
 94:     ymin = 0;
 95:     ymax = 0.3;
 96:     xmin = dd->xs / dd->w;
 97:     xmax = (dd->xe / dd->w) - 1;
 98:     PetscCall(PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_RED));
 99:     PetscCall(PetscDrawLine(draw, xmin, ymin, xmin, ymax, PETSC_DRAW_RED));
100:     PetscCall(PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_RED));
101:     PetscCall(PetscDrawLine(draw, xmax, ymin, xmax, ymax, PETSC_DRAW_RED));
102:     /* Put in index numbers */
103:     base = dd->base / dd->w;
104:     for (x = xmin; x <= xmax; x++) {
105:       PetscCall(PetscSNPrintf(node, sizeof(node), "%d", (int)base++));
106:       PetscCall(PetscDrawString(draw, x, ymin, PETSC_DRAW_RED, node));
107:     }
108:     PetscDrawCollectiveEnd(draw);
109:     PetscCall(PetscDrawFlush(draw));
110:     PetscCall(PetscDrawPause(draw));
111:     PetscCall(PetscDrawSave(draw));
112:   } else if (isglvis) {
113:     PetscCall(DMView_DA_GLVis(da, viewer));
114:   } else if (isbinary) {
115:     PetscCall(DMView_DA_Binary(da, viewer));
116: #if defined(PETSC_HAVE_MATLAB)
117:   } else if (ismatlab) {
118:     PetscCall(DMView_DA_Matlab(da, viewer));
119: #endif
120:   }
121:   PetscFunctionReturn(PETSC_SUCCESS);
122: }
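
     /*
        Usage sketch (not part of this file): the PETSC_VIEWER_LOAD_BALANCE branch above can be
        reached, for example, by pushing that format on a viewer before calling DMView(); `da`
        below is assumed to be a previously created DMDA.

          PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_LOAD_BALANCE));
          PetscCall(DMView(da, PETSC_VIEWER_STDOUT_WORLD));
          PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
     */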

124: PetscErrorCode DMSetUp_DA_1D(DM da)
125: {
126:   DM_DA          *dd    = (DM_DA *)da->data;
127:   const PetscInt  M     = dd->M;
128:   const PetscInt  dof   = dd->w;
129:   const PetscInt  s     = dd->s;
130:   const PetscInt  sDist = s; /* stencil distance in points */
131:   const PetscInt *lx    = dd->lx;
132:   DMBoundaryType  bx    = dd->bx;
133:   MPI_Comm        comm;
134:   Vec             local, global;
135:   VecScatter      gtol;
136:   IS              to, from;
137:   PetscBool       flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
138:   PetscMPIInt     rank, size;
139:   PetscInt        i, *idx, nn, left, xs, xe, x, Xs, Xe, start, m, IXs, IXe;

141:   PetscFunctionBegin;
142:   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
143:   PetscCallMPI(MPI_Comm_size(comm, &size));
144:   PetscCallMPI(MPI_Comm_rank(comm, &rank));

146:   dd->p = 1;
147:   dd->n = 1;
148:   dd->m = size;
149:   m     = dd->m;

151:   if (s > 0) {
152:     /* if no data is communicated (s == 0) it is OK for some processes to own nothing, so these checks apply only when s > 0 */
153:     PetscCheck(M >= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "More processes than data points! %" PetscInt_FMT " %" PetscInt_FMT, m, M);
154:     PetscCheck((M - 1) >= s || size <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Array is too small for stencil! %" PetscInt_FMT " %" PetscInt_FMT, M - 1, s);
155:   }

157:   /*
158:      Determine locally owned region
159:      xs is the first local node number, x is the number of local nodes
160:   */
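       /*
          Worked example of the default ("Regular PETSc") distribution below (an illustration,
          not code): with M = 11 grid points on m = 3 processes, x = M/m + ((M % m) > rank)
          gives local sizes {4, 4, 3} and first local nodes xs = {0, 4, 8}.
       */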
161:   if (!lx) {
162:     PetscCall(PetscMalloc1(m, &dd->lx));
163:     PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_blockcomm", &flg1, NULL));
164:     PetscCall(PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_nodes_at_end", &flg2, NULL));
165:     if (flg1) { /* Block Comm type Distribution */
166:       xs = rank * M / m;
167:       x  = (rank + 1) * M / m - xs;
168:     } else if (flg2) { /* The leftover nodes are evenly distributed across the last processes */
169:       x = (M + rank) / m;
170:       if (M / m == x) xs = rank * x;
171:       else xs = rank * (x - 1) + (M + rank) % (x * m);
172:     } else { /* The leftover nodes are evenly distributed across the first M % m processes */
173:       /* Regular PETSc Distribution */
174:       x = M / m + ((M % m) > rank);
175:       if (rank >= (M % m)) xs = (rank * (PetscInt)(M / m) + M % m);
176:       else xs = rank * (PetscInt)(M / m) + rank;
177:     }
178:     PetscCallMPI(MPI_Allgather(&xs, 1, MPIU_INT, dd->lx, 1, MPIU_INT, comm));
179:     for (i = 0; i < m - 1; i++) dd->lx[i] = dd->lx[i + 1] - dd->lx[i];
180:     dd->lx[m - 1] = M - dd->lx[m - 1];
181:   } else {
182:     x  = lx[rank];
183:     xs = 0;
184:     for (i = 0; i < rank; i++) xs += lx[i];
185:     /* verify that the user-provided data is consistent */
186:     left = xs;
187:     for (i = rank; i < size; i++) left += lx[i];
188:     PetscCheck(left == M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Sum of lx across processors not equal to M %" PetscInt_FMT " %" PetscInt_FMT, left, M);
189:   }

191:   /*
192:    check if the scatter requires more than one process neighbor or wraps around
193:    the domain more than once
194:   */
195:   PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT, x, s);

197:   xe = xs + x;

199:   /* determine ghost region (Xs) and region scattered into (IXs) */
200:   if (xs - sDist > 0) {
201:     Xs  = xs - sDist;
202:     IXs = xs - sDist;
203:   } else {
204:     if (bx) Xs = xs - sDist;
205:     else Xs = 0;
206:     IXs = 0;
207:   }
208:   if (xe + sDist <= M) {
209:     Xe  = xe + sDist;
210:     IXe = xe + sDist;
211:   } else {
212:     if (bx) Xe = xe + sDist;
213:     else Xe = M;
214:     IXe = M;
215:   }

217:   if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
218:     Xs  = xs - sDist;
219:     Xe  = xe + sDist;
220:     IXs = xs - sDist;
221:     IXe = xe + sDist;
222:   }
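       /*
          Continuing the M = 11, m = 3, s = 1 illustration with bx = DM_BOUNDARY_NONE: rank 1
          owns xs = 4 .. xe = 8, so Xs = IXs = 3 and Xe = IXe = 9; rank 0 touches the physical
          boundary, so Xs = IXs = 0 and Xe = IXe = 5.
       */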

224:   /* allocate the base parallel and sequential vectors */
225:   dd->Nlocal = dof * x;
226:   PetscCall(VecCreateMPIWithArray(comm, dof, dd->Nlocal, PETSC_DECIDE, NULL, &global));
227:   dd->nlocal = dof * (Xe - Xs);
228:   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dof, dd->nlocal, NULL, &local));

230:   PetscCall(VecGetOwnershipRange(global, &start, NULL));

232:   /* Create Global to Local Vector Scatter Context */
233:   /* global to local must retrieve ghost points */
234:   PetscCall(ISCreateStride(comm, dof * (IXe - IXs), dof * (IXs - Xs), 1, &to));

236:   PetscCall(PetscMalloc1(x + 2 * sDist, &idx));

238:   for (i = 0; i < IXs - Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

240:   nn = IXs - Xs;
241:   if (bx == DM_BOUNDARY_PERIODIC) { /* Handle the periodic boundary case first */
242:     for (i = 0; i < sDist; i++) {   /* Left ghost points */
243:       if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
244:       else idx[nn++] = M + (xs - sDist + i);
245:     }

247:     for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

249:     for (i = 0; i < sDist; i++) { /* Right ghost points */
250:       if ((xe + i) < M) idx[nn++] = xe + i;
251:       else idx[nn++] = (xe + i) - M;
252:     }
253:   } else if (bx == DM_BOUNDARY_MIRROR) { /* Now handle the mirror boundary case */
254:     for (i = 0; i < (sDist); i++) {      /* Left ghost points */
255:       if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
256:       else idx[nn++] = sDist - i;
257:     }

259:     for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

261:     for (i = 0; i < (sDist); i++) { /* Right ghost points */
262:       if ((xe + i) < M) idx[nn++] = xe + i;
263:       else idx[nn++] = M - (i + 2);
264:     }
265:   } else { /* Now do all cases with no periodicity */
266:     if (0 <= xs - sDist) {
267:       for (i = 0; i < sDist; i++) idx[nn++] = xs - sDist + i;
268:     } else {
269:       for (i = 0; i < xs; i++) idx[nn++] = i;
270:     }

272:     for (i = 0; i < x; i++) idx[nn++] = xs + i;

274:     if ((xe + sDist) <= M) {
275:       for (i = 0; i < sDist; i++) idx[nn++] = xe + i;
276:     } else {
277:       for (i = xe; i < M; i++) idx[nn++] = i;
278:     }
279:   }
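       /*
          Periodic illustration (M = 11, m = 3, s = 1): rank 0 owns global points 0..3, so the
          ghosted index list built above is {10, 0, 1, 2, 3, 4}; the left ghost wraps around to
          the last global point.
       */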

281:   PetscCall(ISCreateBlock(comm, dof, nn - IXs + Xs, &idx[IXs - Xs], PETSC_USE_POINTER, &from));
282:   PetscCall(VecScatterCreate(global, from, local, to, &gtol));
283:   PetscCall(ISDestroy(&to));
284:   PetscCall(ISDestroy(&from));
285:   PetscCall(VecDestroy(&local));
286:   PetscCall(VecDestroy(&global));

288:   dd->xs = dof * xs;
289:   dd->xe = dof * xe;
290:   dd->ys = 0;
291:   dd->ye = 1;
292:   dd->zs = 0;
293:   dd->ze = 1;
294:   dd->Xs = dof * Xs;
295:   dd->Xe = dof * Xe;
296:   dd->Ys = 0;
297:   dd->Ye = 1;
298:   dd->Zs = 0;
299:   dd->Ze = 1;

301:   dd->gtol      = gtol;
302:   dd->base      = dof * xs;
303:   da->ops->view = DMView_DA_1d;

305:   /*
306:      Set the local to global ordering in the global vector, this allows use
307:      of VecSetValuesLocal().
308:   */
309:   for (i = 0; i < Xe - IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

311:   PetscCall(ISLocalToGlobalMappingCreate(comm, dof, nn, idx, PETSC_OWN_POINTER, &da->ltogmap));
312:   PetscFunctionReturn(PETSC_SUCCESS);
313: }
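
     /*
        Sketch of what the local-to-global mapping built above enables (illustrative only; `da`
        is assumed to be an already set up 1d DMDA and `row` a local index on this process):

          Vec         g;
          PetscInt    row = 0;
          PetscScalar v   = 1.0;

          PetscCall(DMCreateGlobalVector(da, &g));
          PetscCall(VecSetValuesLocal(g, 1, &row, &v, INSERT_VALUES));
          PetscCall(VecAssemblyBegin(g));
          PetscCall(VecAssemblyEnd(g));
          PetscCall(VecDestroy(&g));
     */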

315: /*@C
316:   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
317:   regular array data that is distributed across one or more MPI processes.

319:   Collective

321:   Input Parameters:
322: + comm - MPI communicator
323: . bx   - type of ghost cells at the boundary the array should have, if any. Use
324:          `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, or `DM_BOUNDARY_PERIODIC`.
325: . M    - global dimension of the array (that is the number of grid points)
326: . dof  - number of degrees of freedom per node
327: . s    - stencil width
328: - lx   - array containing the number of nodes in the X direction on each process,
329:          or `NULL`. If non-null, it must have length equal to the number of processes in `comm`.
330:          The sum of its entries must equal `M`

332:   Output Parameter:
333: . da - the resulting distributed array object

335:   Options Database Keys:
336: + -dm_view          - Calls `DMView()` at the conclusion of `DMDACreate1d()`
337: . -da_grid_x <nx>   - number of grid points in x direction
338: . -da_refine_x <rx> - refinement factor
339: - -da_refine <n>    - refine the `DMDA` n times before creating it

341:   Level: beginner

343:   Notes:
344:   The array data itself is NOT stored in the `DMDA`, it is stored in `Vec` objects;
345:   The appropriate vector objects can be obtained with calls to `DMCreateGlobalVector()`
346:   and `DMCreateLocalVector()` and calls to `VecDuplicate()` if more are needed.

348:   You must call `DMSetUp()` after this call before using this `DM`.

350:   If you wish to use the options database to change values in the `DMDA` call `DMSetFromOptions()` after this call
351:   but before `DMSetUp()`.
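
       Example Usage:
       A minimal sketch; the boundary type, grid size, dof, and stencil width below are arbitrary illustrative values.
     .vb
       DM da;

       PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 128, 1, 1, NULL, &da));
       PetscCall(DMSetFromOptions(da));
       PetscCall(DMSetUp(da));
       // ... create and use vectors via DMCreateGlobalVector()/DMCreateLocalVector() ...
       PetscCall(DMDestroy(&da));
     .ve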

353: .seealso: [](sec_struct), `DMDA`, `DM`, `DMDestroy()`, `DMView()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMGlobalToLocalBegin()`, `DMDASetRefinementFactor()`,
354:           `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToLocalBegin()`, `DMLocalToLocalEnd()`, `DMDAGetRefinementFactor()`,
355:           `DMDAGetInfo()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMDACreateNaturalVector()`, `DMLoad()`, `DMDAGetOwnershipRanges()`,
356:           `DMStagCreate1d()`, `DMBoundaryType`
357: @*/
358: PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
359: {
360:   PetscMPIInt size;

362:   PetscFunctionBegin;
363:   PetscCall(DMDACreate(comm, da));
364:   PetscCall(DMSetDimension(*da, 1));
365:   PetscCall(DMDASetSizes(*da, M, 1, 1));
366:   PetscCallMPI(MPI_Comm_size(comm, &size));
367:   PetscCall(DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE));
368:   PetscCall(DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
369:   PetscCall(DMDASetDof(*da, dof));
370:   PetscCall(DMDASetStencilWidth(*da, s));
371:   PetscCall(DMDASetOwnershipRanges(*da, lx, NULL, NULL));
372:   PetscFunctionReturn(PETSC_SUCCESS);
373: }