Actual source code: gr2.c

/*
   Plots vectors obtained with DMDACreate2d()
*/
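
/*
   A sketch of typical use (illustrative, not taken from this file): a global
   vector created on a 2d DMDA is viewed with

     VecView(v, PETSC_VIEWER_DRAW_WORLD);

   The runtime options handled below include -draw_contour_grid,
   -draw_contour_axis, -draw_ports, and -draw_coordinates.
*/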

#include <petsc/private/dmdaimpl.h>
#include <petsc/private/glvisvecimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petscdraw.h>

/*
        The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt        rank;
  PetscInt           m, n, dof, k;
  PetscReal          xmin, xmax, ymin, ymax, min, max;
  const PetscScalar *xy, *v;
  PetscBool          showaxis, showgrid;
  const char        *name0, *name1;
} ZoomCtx;
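
/*
   Notes on the fields, as inferred from their use below: xy holds the ghosted
   node coordinates as interleaved (x,y) pairs, so node id is at
   (xy[2*id], xy[2*id+1]); v holds the ghosted field values with dof components
   interleaved per node; k selects the component being drawn; m and n are the
   ghosted local grid dimensions.
*/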

/*
       This does the drawing for one particular field
    in one particular set of coordinates. It is a callback
    called from PetscDrawZoom()
*/
static PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw, void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx *)ctx;
  PetscInt           m, n, i, j, k, dof, id, c1, c2, c3, c4;
  PetscReal          min, max, x1, x2, x3, x4, y_1, y2, y3, y4;
  const PetscScalar *xy, *v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* Draw the contour plot patch */
  PetscDrawCollectiveBegin(draw);
  for (j = 0; j < n - 1; j++) {
    for (i = 0; i < m - 1; i++) {
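      /* corners of grid cell (i,j): with id = i + j*m for node (i,j), the four
         corners below are nodes (i,j), (i+1,j), (i+1,j+1), and (i,j+1); the
         cell is filled as two triangles whose vertex colors come from the
         field values */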
      id  = i + j * m;
      x1  = PetscRealPart(xy[2 * id]);
      y_1 = PetscRealPart(xy[2 * id + 1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1;
      x2 = PetscRealPart(xy[2 * id]);
      y2 = PetscRealPart(xy[2 * id + 1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1 + m;
      x3 = PetscRealPart(xy[2 * id]);
      y3 = PetscRealPart(xy[2 * id + 1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + m;
      x4 = PetscRealPart(xy[2 * id]);
      y4 = PetscRealPart(xy[2 * id + 1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      PetscCall(PetscDrawTriangle(draw, x1, y_1, x2, y2, x3, y3, c1, c2, c3));
      PetscCall(PetscDrawTriangle(draw, x1, y_1, x3, y3, x4, y4, c1, c3, c4));
      if (zctx->showgrid) {
        PetscCall(PetscDrawLine(draw, x1, y_1, x2, y2, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x2, y2, x3, y3, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x3, y3, x4, y4, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x4, y4, x1, y_1, PETSC_DRAW_BLACK));
      }
    }
  }
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl, yl, xr, yr, x, y;
      PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
      x  = xl + .30 * (xr - xl);
      xl = xl + .01 * (xr - xl);
      y  = yr - .30 * (yr - yl);
      yl = yl + .01 * (yr - yl);
      if (zctx->name0) PetscCall(PetscDrawString(draw, x, yl, PETSC_DRAW_BLACK, zctx->name0));
      if (zctx->name1) PetscCall(PetscDrawStringVertical(draw, xl, y, PETSC_DRAW_BLACK, zctx->name1));
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16];
      size_t    len;
      PetscReal w;
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", xmin));
      PetscCall(PetscDrawString(draw, xmin, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", xmax));
      PetscCall(PetscStrlen(value, &len));
      PetscCall(PetscDrawStringGetSize(draw, &w, NULL));
      PetscCall(PetscDrawString(draw, xmax - len * w, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", ymin));
      PetscCall(PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymin, PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", ymax));
      PetscCall(PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymax, PETSC_DRAW_BLACK, value));
    }
  }
  PetscDrawCollectiveEnd(draw);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin, PetscViewer viewer)
{
  DM                  da, dac, dag;
  PetscInt            N, s, M, w, ncoors = 4;
  const PetscInt     *lx, *ly;
  PetscReal           coors[4];
  PetscDraw           draw, popup;
  PetscBool           isnull, useports = PETSC_FALSE;
  MPI_Comm            comm;
  Vec                 xlocal, xcoor, xcoorl;
  DMBoundaryType      bx, by;
  DMDAStencilType     st;
  ZoomCtx             zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat   format;
  PetscInt           *displayfields;
  PetscInt            ndisplayfields, i, nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  if (isnull) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscViewerDrawGetBounds(viewer, &nbounds, &bounds));

  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

  PetscCall(PetscObjectGetComm((PetscObject)xin, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &zctx.rank));

  PetscCall(DMDAGetInfo(da, NULL, &M, &N, NULL, &zctx.m, &zctx.n, NULL, &w, &s, &bx, &by, NULL, &st));
  PetscCall(DMDAGetOwnershipRanges(da, &lx, &ly, NULL));

  /*
     Obtain a sequential vector that will contain the local values plus ONE layer of
     ghosted values, to draw the graphics from. We also need its corresponding DMDA (dac),
     which will update the local values plus ONE layer of ghost values.
  */
  PetscCall(PetscObjectQuery((PetscObject)da, "GraphicsGhosted", (PetscObject *)&xlocal));
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if the original da does not have stencil width one, is periodic, or does not use
         a box stencil, then create a special DMDA to handle one level of ghost points for graphics
      */
      PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, w, 1, lx, ly, &dac));
      PetscCall(DMSetUp(dac));
      PetscCall(PetscInfo(da, "Creating auxiliary DMDA for managing graphics ghost points\n"));
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    PetscCall(DMCreateLocalVector(dac, &xlocal));
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      PetscCall(PetscObjectDereference((PetscObject)dac));
    } else {
      /* remove the association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd create a reference loop, so the objects
         could never be destroyed */
      PetscCall(PetscObjectRemoveReference((PetscObject)xlocal, "__PETSc_dm"));
    }
    PetscCall(PetscObjectCompose((PetscObject)da, "GraphicsGhosted", (PetscObject)xlocal));
    PetscCall(PetscObjectDereference((PetscObject)xlocal));
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      PetscCall(VecGetDM(xlocal, &dac));
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  PetscCall(DMGlobalToLocalBegin(dac, xin, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(dac, xin, INSERT_VALUES, xlocal));
  PetscCall(VecGetArrayRead(xlocal, &zctx.v));

  /*
      Get coordinates of nodes
  */
  PetscCall(DMGetCoordinates(da, &xcoor));
  if (!xcoor) {
    PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0));
    PetscCall(DMGetCoordinates(da, &xcoor));
  }

  /*
      Determine the min and max coordinates in plot
  */
  PetscCall(VecStrideMin(xcoor, 0, NULL, &zctx.xmin));
  PetscCall(VecStrideMax(xcoor, 0, NULL, &zctx.xmax));
  PetscCall(VecStrideMin(xcoor, 1, NULL, &zctx.ymin));
  PetscCall(VecStrideMax(xcoor, 1, NULL, &zctx.ymax));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_contour_axis", &zctx.showaxis, NULL));
  if (zctx.showaxis) {
    coors[0] = zctx.xmin - .05 * (zctx.xmax - zctx.xmin);
    coors[1] = zctx.ymin - .05 * (zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05 * (zctx.xmax - zctx.xmin);
    coors[3] = zctx.ymax + .05 * (zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin;
    coors[1] = zctx.ymin;
    coors[2] = zctx.xmax;
    coors[3] = zctx.ymax;
  }
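  /* allow the user to override the plot window from the command line; an
     illustrative invocation would be -draw_coordinates xmin,ymin,xmax,ymax
     (four comma-separated reals) */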
  PetscCall(PetscOptionsGetRealArray(NULL, NULL, "-draw_coordinates", coors, &ncoors, NULL));
  PetscCall(PetscInfo(da, "Preparing DMDA 2d contour plot coordinates %g %g %g %g\n", (double)coors[0], (double)coors[1], (double)coors[2], (double)coors[3]));

  /*
      Get local ghosted version of coordinates
  */
  PetscCall(PetscObjectQuery((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject *)&xcoorl));
  if (!xcoorl) {
    /* create DMDA to get local version of graphics */
    PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, 2, 1, lx, ly, &dag));
    PetscCall(DMSetUp(dag));
    PetscCall(PetscInfo(dag, "Creating auxiliary DMDA for managing graphics coordinates ghost points\n"));
    PetscCall(DMCreateLocalVector(dag, &xcoorl));
    PetscCall(PetscObjectCompose((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject)xcoorl));
    PetscCall(PetscObjectDereference((PetscObject)dag));
    PetscCall(PetscObjectDereference((PetscObject)xcoorl));
  } else PetscCall(VecGetDM(xcoorl, &dag));
  PetscCall(DMGlobalToLocalBegin(dag, xcoor, INSERT_VALUES, xcoorl));
  PetscCall(DMGlobalToLocalEnd(dag, xcoor, INSERT_VALUES, xcoorl));
  PetscCall(VecGetArrayRead(xcoorl, &zctx.xy));
  PetscCall(DMDAGetCoordinateName(da, 0, &zctx.name0));
  PetscCall(DMDAGetCoordinateName(da, 1, &zctx.name1));

  /*
      Get information about the size of the area for which each process must draw graphics
  */
  PetscCall(DMDAGetInfo(dac, NULL, &M, &N, NULL, NULL, NULL, NULL, &zctx.dof, NULL, &bx, &by, NULL, NULL));
  PetscCall(DMDAGetGhostCorners(dac, NULL, NULL, NULL, &zctx.m, &zctx.n, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_contour_grid", &zctx.showgrid, NULL));

  PetscCall(DMDASelectFields(da, &ndisplayfields, &displayfields));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_ports", &useports, NULL));
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
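  /* with ports, all selected fields share a single window split into viewports;
     otherwise each field is drawn in its own window obtained from the viewer */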
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawViewPortsCreate(draw, ndisplayfields, &ports));
  }

  /*
      Loop over each field, drawing each in a different window
  */
  for (i = 0; i < ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    PetscCall(VecStrideMin(xin, zctx.k, NULL, &zctx.min));
    PetscCall(VecStrideMax(xin, zctx.k, NULL, &zctx.max));
    if (zctx.k < nbounds) {
      zctx.min = bounds[2 * zctx.k];
      zctx.max = bounds[2 * zctx.k + 1];
    }
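    /* a constant field would give a zero-width color range; nudge it open so
       PetscDrawRealToColor() in the zoom callback has a usable interval */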
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscCall(PetscInfo(da, "DMDA 2d contour plot min %g max %g\n", (double)zctx.min, (double)zctx.max));

    if (useports) {
      PetscCall(PetscDrawViewPortsSet(ports, i));
    } else {
      const char *title;
      PetscCall(PetscViewerDrawGetDraw(viewer, i, &draw));
      PetscCall(DMDAGetFieldName(da, zctx.k, &title));
      if (title) PetscCall(PetscDrawSetTitle(draw, title));
    }

    PetscCall(PetscDrawGetPopup(draw, &popup));
    PetscCall(PetscDrawScalePopup(popup, zctx.min, zctx.max));
    PetscCall(PetscDrawSetCoordinates(draw, coors[0], coors[1], coors[2], coors[3]));
    PetscCall(PetscDrawZoom(draw, VecView_MPI_Draw_DA2d_Zoom, &zctx));
    if (!useports) PetscCall(PetscDrawSave(draw));
  }
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawSave(draw));
  }

  PetscCall(PetscDrawViewPortsDestroy(ports));
  PetscCall(PetscFree(displayfields));
  PetscCall(VecRestoreArrayRead(xcoorl, &zctx.xy));
  PetscCall(VecRestoreArrayRead(xlocal, &zctx.v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar) * da->M * da->N * da->P * da->w;
  hsize_t     avg_local_vec_size, KiB = 1024, MiB = KiB * KiB, GiB = MiB * KiB, min_size = MiB;
  hsize_t     max_chunks     = 64 * KiB; /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4 * GiB;  /* HDF5 internal limitation */
  PetscInt    zslices = da->p, yslices = da->n, xslices = da->m;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size));
  avg_local_vec_size = (hsize_t)PetscCeilInt(vec_size, comm_size); /* we will attempt to use this as the chunk size */

  target_size = (hsize_t)PetscMin((PetscInt64)vec_size, PetscMin((PetscInt64)max_chunk_size, PetscMax((PetscInt64)avg_local_vec_size, PetscMax(PetscCeilInt64(vec_size, max_chunks), (PetscInt64)min_size))));
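  /* worked example with illustrative numbers: for an 8 GiB vector on 64 ranks,
     avg_local_vec_size = 128 MiB and vec_size/max_chunks = 128 KiB, so the
     inner PetscMax() yields min_size = 1 MiB and target_size =
     min(8 GiB, min(4 GiB, max(128 MiB, 1 MiB))) = 128 MiB, i.e. roughly one
     chunk per rank */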
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter
   what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. In a BG, for example, the optimal solution is probably to make # chunks = #
   IO nodes involved, but this author has no access to a BG to figure out how to
   reliably find the right number. And even then it may or may not be enough.
   */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = PetscCeilInt(vec_size, da->p * max_chunk_size) * zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = PetscCeilInt(vec_size, zslices * da->n * max_chunk_size) * yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = PetscCeilInt(vec_size, zslices * yslices * da->m * max_chunk_size) * xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) ++dim;
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t)da->P / zslices;
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    }
    chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) ++dim;
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size / da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->P, da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->P, da->p);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size / da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->N, da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      }
      chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecView_MPI_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                 dm;
  DM_DA             *da;
  hid_t              filespace;  /* file dataspace identifier */
  hid_t              chunkspace; /* chunk dataset property identifier */
  hid_t              dset_id;    /* dataset identifier */
  hid_t              memspace;   /* memory dataspace identifier */
  hid_t              file_id;
  hid_t              group;
  hid_t              memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t              filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t            dim;
  hsize_t            maxDims[6] = {0}, dims[6] = {0}, chunkDims[6] = {0}, count[6] = {0}, offset[6] = {0}; /* we depend on these being sane later on */
  PetscBool          timestepping = PETSC_FALSE, dim2, spoutput;
  PetscInt           timestep     = PETSC_MIN_INT, dimension;
  const PetscScalar *x;
  const char        *vecname;

  PetscFunctionBegin;
  PetscCall(PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
  PetscCall(PetscViewerHDF5GetBaseDimension2(viewer, &dim2));
  PetscCall(PetscViewerHDF5GetSPOutput(viewer, &spoutput));

  PetscCall(VecGetDM(xin, &dm));
  PetscCheck(dm, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  da = (DM_DA *)dm->data;
  PetscCall(DMGetDimension(dm, &dimension));

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
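  /* for example (assuming a 3d DMDA with dof > 1, timestepping on, and real
     scalars) the code below builds dims[] = {timestep + 1, P, N, M, dof};
     complex scalars append a trailing dimension of 2 */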
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep + 1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(da->P, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(da->N, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(da->M, dims + dim));
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscCall(PetscHDF5IntCast(da->w, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  #if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  #endif

  PetscCall(VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims));

  PetscCallHDF5Return(filespace, H5Screate_simple, (dim, dims, maxDims));

  #if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
  #elif defined(PETSC_USE_REAL___FLOAT128)
    #error "HDF5 output with 128 bit floats not supported."
  #elif defined(PETSC_USE_REAL___FP16)
    #error "HDF5 output with 16 bit floats not supported."
  #else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
  #endif

  /* Create the dataset with default properties and close filespace */
  PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscCallHDF5Return(chunkspace, H5Pcreate, (H5P_DATASET_CREATE));
    PetscCallHDF5(H5Pset_chunk, (chunkspace, dim, chunkDims));

    PetscCallHDF5Return(dset_id, H5Dcreate2, (group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
  } else {
    PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));
    PetscCallHDF5(H5Dset_extent, (dset_id, dims));
  }
  PetscCallHDF5(H5Sclose, (filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->zs, offset + dim++));
  if (dimension > 1) PetscCall(PetscHDF5IntCast(da->ys, offset + dim++));
  PetscCall(PetscHDF5IntCast(da->xs / da->w, offset + dim++));
  if (da->w > 1 || dim2) offset[dim++] = 0;
  #if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
  #endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->ze - da->zs, count + dim++));
  if (dimension > 1) PetscCall(PetscHDF5IntCast(da->ye - da->ys, count + dim++));
  PetscCall(PetscHDF5IntCast((da->xe - da->xs) / da->w, count + dim++));
  if (da->w > 1 || dim2) PetscCall(PetscHDF5IntCast(da->w, count + dim++));
  #if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
  #endif
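  /* each rank thus writes exactly the subgrid it owns: in the 3d, dof > 1,
     timestepped case the hyperslab starts at offset (timestep, zs, ys, xs/w, 0)
     with count (1, ze-zs, ye-ys, (xe-xs)/w, w) */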
  PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL));
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArrayRead(xin, &x));
  PetscCallHDF5(H5Dwrite, (dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscCallHDF5(H5Fflush, (file_id, H5F_SCOPE_GLOBAL));
  PetscCall(VecRestoreArrayRead(xin, &x));

  #if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "complex", PETSC_BOOL, &tru));
  }
  #endif
  if (timestepping) PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "timestepping", PETSC_BOOL, &timestepping));

  /* Close/release resources */
  if (group != file_id) PetscCallHDF5(H5Gclose, (group));
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  PetscCall(PetscInfo(xin, "Wrote Vec object with name %s\n", vecname));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec, PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode DMDAArrayMPIIO(DM da, PetscViewer viewer, Vec xin, PetscBool write)
{
  MPI_File           mfdes;
  PetscMPIInt        gsizes[4], lsizes[4], lstarts[4], asiz, dof;
  MPI_Datatype       view;
  const PetscScalar *array;
  MPI_Offset         off;
  MPI_Aint           ub, ul;
  PetscInt           type, rows, vecrows, tr[2];
  DM_DA             *dd = (DM_DA *)da->data;
  PetscBool          skipheader;

  PetscFunctionBegin;
  PetscCall(VecGetSize(xin, &vecrows));
  PetscCall(PetscViewerBinaryGetSkipHeader(viewer, &skipheader));
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      PetscCall(PetscViewerBinaryRead(viewer, tr, 2, NULL, PETSC_INT));
      type = tr[0];
      rows = tr[1];
      PetscCheck(type == VEC_FILE_CLASSID, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Not vector next in file");
      PetscCheck(rows == vecrows, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Vector in file not same size as DMDA vector");
    }
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) PetscCall(PetscViewerBinaryWrite(viewer, tr, 2, PETSC_INT));
  }

  PetscCall(PetscMPIIntCast(dd->w, &dof));
  gsizes[0] = dof;
  PetscCall(PetscMPIIntCast(dd->M, gsizes + 1));
  PetscCall(PetscMPIIntCast(dd->N, gsizes + 2));
  PetscCall(PetscMPIIntCast(dd->P, gsizes + 3));
  lsizes[0] = dof;
  PetscCall(PetscMPIIntCast((dd->xe - dd->xs) / dof, lsizes + 1));
  PetscCall(PetscMPIIntCast(dd->ye - dd->ys, lsizes + 2));
  PetscCall(PetscMPIIntCast(dd->ze - dd->zs, lsizes + 3));
  lstarts[0] = 0;
  PetscCall(PetscMPIIntCast(dd->xs / dof, lstarts + 1));
  PetscCall(PetscMPIIntCast(dd->ys, lstarts + 2));
  PetscCall(PetscMPIIntCast(dd->zs, lstarts + 3));
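  /* MPI_ORDER_FORTRAN makes the first array dimension (dof) vary fastest,
     matching PETSc's interleaved per-node storage of the dof components */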
  PetscCallMPI(MPI_Type_create_subarray(da->dim + 1, gsizes, lsizes, lstarts, MPI_ORDER_FORTRAN, MPIU_SCALAR, &view));
  PetscCallMPI(MPI_Type_commit(&view));

  PetscCall(PetscViewerBinaryGetMPIIODescriptor(viewer, &mfdes));
  PetscCall(PetscViewerBinaryGetMPIIOOffset(viewer, &off));
  PetscCallMPI(MPI_File_set_view(mfdes, off, MPIU_SCALAR, view, (char *)"native", MPI_INFO_NULL));
  PetscCall(VecGetArrayRead(xin, &array));
  asiz = lsizes[1] * (lsizes[2] > 0 ? lsizes[2] : 1) * (lsizes[3] > 0 ? lsizes[3] : 1) * dof;
  if (write) PetscCall(MPIU_File_write_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE));
  else PetscCall(MPIU_File_read_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE));
  PetscCallMPI(MPI_Type_get_extent(view, &ul, &ub));
  PetscCall(PetscViewerBinaryAddMPIIOOffset(viewer, ub));
  PetscCall(VecRestoreArrayRead(xin, &array));
  PetscCallMPI(MPI_Type_free(&view));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

PetscErrorCode VecView_MPI_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscInt  dim;
  Vec       natural;
  PetscBool isdraw, isvtk, isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif
  const char       *prefix, *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
  if (isdraw) {
    PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
    if (dim == 1) {
      PetscCall(VecView_MPI_Draw_DA1d(xin, viewer));
    } else if (dim == 2) {
      PetscCall(VecView_MPI_Draw_DA2d(xin, viewer));
    } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Cannot graphically view vector associated with this dimensional DMDA %" PetscInt_FMT, dim);
  } else if (isvtk) { /* Duplicate the Vec */
    Vec Y;
    PetscCall(VecDuplicate(xin, &Y));
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscCall(PetscObjectSetName((PetscObject)Y, ((PetscObject)xin)->name));
    }
    PetscCall(VecCopy(xin, Y));
    {
      PetscObject dmvtk;
      PetscBool   compatible, compatibleSet;
      PetscCall(PetscViewerVTKGetDM(viewer, &dmvtk));
      if (dmvtk) {
        PetscCall(DMGetCompatibility(da, (DM)dmvtk, &compatible, &compatibleSet));
        PetscCheck(compatibleSet && compatible, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      PetscCall(PetscViewerVTKAddField(viewer, (PetscObject)da, DMDAVTKWriteAll, PETSC_DEFAULT, PETSC_VTK_POINT_FIELD, PETSC_FALSE, (PetscObject)Y));
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecView_MPI_HDF5_DA(xin, viewer));
#endif
  } else if (isglvis) {
    PetscCall(VecView_GLVis(xin, viewer));
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary, isMPIIO;

    PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
    if (isbinary) {
      PetscCall(PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO));
      if (isMPIIO) {
        PetscCall(DMDAArrayMPIIO(da, viewer, xin, PETSC_TRUE));
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
#endif

    /* call viewer on natural ordering */
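    /* (the natural ordering is the lexicographic numbering of the grid,
       independent of the parallel decomposition, so the file contents do not
       depend on the number of ranks used to write them) */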
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix));
    PetscCall(DMDACreateNaturalVector(da, &natural));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, prefix));
    PetscCall(DMDAGlobalToNaturalBegin(da, xin, INSERT_VALUES, natural));
    PetscCall(DMDAGlobalToNaturalEnd(da, xin, INSERT_VALUES, natural));
    PetscCall(PetscObjectGetName((PetscObject)xin, &name));
    PetscCall(PetscObjectSetName((PetscObject)natural, name));

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_DEFAULT));
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    PetscCall(VecView(natural, viewer));
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm    comm;
      FILE       *info;
      const char *fieldname;
      char        fieldbuf[256];
      PetscInt    dim, ni, nj, nk, pi, pj, pk, dof, n;

      /* set the viewer format back into the viewer */
      PetscCall(PetscViewerPopFormat(viewer));
      PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
      PetscCall(PetscViewerBinaryGetInfoPointer(viewer, &info));
      PetscCall(DMDAGetInfo(da, &dim, &ni, &nj, &nk, &pi, &pj, &pk, &dof, NULL, NULL, NULL, NULL, NULL));
      PetscCall(PetscFPrintf(comm, info, "#--- begin code written by PetscViewerBinary for MATLAB format ---#\n"));
      PetscCall(PetscFPrintf(comm, info, "#$$ tmp = PetscBinaryRead(fd); \n"));
      if (dim == 1) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni));
      if (dim == 2) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj));
      if (dim == 3) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj, nk));

      for (n = 0; n < dof; n++) {
        PetscCall(DMDAGetFieldName(da, n, &fieldname));
        if (!fieldname || !fieldname[0]) {
          PetscCall(PetscSNPrintf(fieldbuf, sizeof fieldbuf, "field%" PetscInt_FMT, n));
          fieldname = fieldbuf;
        }
        if (dim == 1) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:))';\n", name, fieldname, n + 1));
        if (dim == 2) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:,:))';\n", name, fieldname, n + 1));
        if (dim == 3) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = permute(squeeze(tmp(%" PetscInt_FMT ",:,:,:)),[2 1 3]);\n", name, fieldname, n + 1));
      }
      PetscCall(PetscFPrintf(comm, info, "#$$ clear tmp; \n"));
      PetscCall(PetscFPrintf(comm, info, "#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n"));
    }

    PetscCall(VecDestroy(&natural));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                da;
  int               dim, rdim;
  hsize_t           dims[6] = {0}, count[6] = {0}, offset[6] = {0};
  PetscBool         dim2 = PETSC_FALSE, timestepping = PETSC_FALSE;
  PetscInt          dimension, timestep = PETSC_MIN_INT, dofInd;
  PetscScalar      *x;
  const char       *vecname;
  hid_t             filespace; /* file dataspace identifier */
  hid_t             dset_id;   /* dataset identifier */
  hid_t             memspace;  /* memory dataspace identifier */
  hid_t             file_id, group;
  hid_t             scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA            *dd;

  PetscFunctionBegin;
  #if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
  #elif defined(PETSC_USE_REAL___FLOAT128)
    #error "HDF5 output with 128 bit floats not supported."
  #elif defined(PETSC_USE_REAL___FP16)
    #error "HDF5 output with 16 bit floats not supported."
  #else
  scalartype = H5T_NATIVE_DOUBLE;
  #endif

  PetscCall(PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group));
  PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));
  PetscCall(PetscViewerHDF5CheckTimestepping_Internal(viewer, vecname));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
  PetscCall(VecGetDM(xin, &da));
  dd = (DM_DA *)da->data;
  PetscCall(DMGetDimension(da, &dimension));

  /* Open dataset */
  PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));

  /* Retrieve the dataspace for the dataset */
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5Return(rdim, H5Sget_simple_extent_dims, (filespace, dims, NULL));

  /* Expected dimension for holding the dof's */
  #if defined(PETSC_USE_COMPLEX)
  dofInd = rdim - 2;
  #else
  dofInd = rdim - 1;
  #endif

  /* The expected number of dimensions, assuming basedimension2 = false */
  dim = dimension;
  if (dd->w > 1) ++dim;
  if (timestep >= 0) ++dim;
  #if defined(PETSC_USE_COMPLEX)
  ++dim;
  #endif
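  /* e.g. a 3d, dof > 1, timestepped, complex-valued dataset is expected to
     have rdim = 3 + 1 + 1 + 1 = 6 dimensions */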

  /* In this case the input dataset has one extra, unexpected dimension. */
  if (rdim == dim + 1) {
    /* In this case the block size is unity */
    if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;

    /* Special error message for the case where dof does not match the input file */
    else PetscCheck(dd->w == (PetscInt)dims[dofInd], PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %" PetscInt_FMT ", not %" PetscInt_FMT " as expected", (PetscInt)dims[dofInd], dd->w);

    /* Other cases where rdim != dim cannot be handled currently */
  } else PetscCheck(rdim == dim, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %" PetscInt_FMT, rdim, dim, dd->w);

  /* Set up the hyperslab size */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    count[dim]  = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(dd->zs, offset + dim));
    PetscCall(PetscHDF5IntCast(dd->ze - dd->zs, count + dim));
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(dd->ys, offset + dim));
    PetscCall(PetscHDF5IntCast(dd->ye - dd->ys, count + dim));
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(dd->xs / dd->w, offset + dim));
  PetscCall(PetscHDF5IntCast((dd->xe - dd->xs) / dd->w, count + dim));
  ++dim;
  if (dd->w > 1 || dim2) {
    offset[dim] = 0;
    PetscCall(PetscHDF5IntCast(dd->w, count + dim));
    ++dim;
  }
  #if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  count[dim]  = 2;
  ++dim;
  #endif

  /* Create the memory and filespace */
  PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArray(xin, &x));
  PetscCallHDF5(H5Dread, (dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscCall(VecRestoreArray(xin, &x));

  /* Close/release resources */
  if (group != file_id) PetscCallHDF5(H5Gclose, (group));
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

static PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM          da;
  Vec         natural;
  const char *prefix;
  PetscInt    bs;
  PetscBool   flag;
  DM_DA      *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool isMPIIO;
#endif

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MPIIO)
  PetscCall(PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO));
  if (isMPIIO) {
    PetscCall(DMDAArrayMPIIO(da, viewer, xin, PETSC_FALSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
#endif

  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix));
  PetscCall(DMDACreateNaturalVector(da, &natural));
  PetscCall(PetscObjectSetName((PetscObject)natural, ((PetscObject)xin)->name));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, prefix));
  PetscCall(VecLoad(natural, viewer));
  PetscCall(DMDANaturalToGlobalBegin(da, natural, INSERT_VALUES, xin));
  PetscCall(DMDANaturalToGlobalEnd(da, natural, INSERT_VALUES, xin));
  PetscCall(VecDestroy(&natural));
  PetscCall(PetscInfo(xin, "Loading vector from natural ordering into DMDA\n"));
  PetscCall(PetscOptionsGetInt(NULL, ((PetscObject)xin)->prefix, "-vecload_block_size", &bs, &flag));
  if (flag && bs != dd->w) PetscCall(PetscInfo(xin, "Block size in file %" PetscInt_FMT " not equal to DMDA's dof %" PetscInt_FMT "\n", bs, dd->w));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));

  if (isbinary) {
    PetscCall(VecLoad_Binary_DA(xin, viewer));
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecLoad_HDF5_DA(xin, viewer));
#endif
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}