Actual source code: gr2.c

/*
   Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h>
#include <petsc/private/glvisvecimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petscdraw.h>

/*
        The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt        rank;
  PetscInt           m, n, dof, k;
  PetscReal          xmin, xmax, ymin, ymax, min, max;
  const PetscScalar *xy, *v;
  PetscBool          showaxis, showgrid;
  const char        *name0, *name1;
} ZoomCtx;
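
/*
   Layout notes (derived from the callback below): xy holds interleaved (x,y)
   coordinate pairs, so node id has coordinates xy[2*id] and xy[2*id+1]; v
   holds dof-interleaved field values, so field k at node id is v[k + dof*id].
   min/max give the color-scale range for field k.
*/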

/*
       This does the drawing for one particular field
    in one particular set of coordinates. It is a callback
    called from PetscDrawZoom()
*/
static PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw, void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx *)ctx;
  PetscInt           m, n, i, j, k, dof, id;
  int                c1, c2, c3, c4;
  PetscReal          min, max, x1, x2, x3, x4, y_1, y2, y3, y4;
  const PetscScalar *xy, *v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* Draw the contour plot patch */
  PetscDrawCollectiveBegin(draw);
  for (j = 0; j < n - 1; j++) {
    for (i = 0; i < m - 1; i++) {
      id  = i + j * m;
      x1  = PetscRealPart(xy[2 * id]);
      y_1 = PetscRealPart(xy[2 * id + 1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1;
      x2 = PetscRealPart(xy[2 * id]);
      y2 = PetscRealPart(xy[2 * id + 1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1 + m;
      x3 = PetscRealPart(xy[2 * id]);
      y3 = PetscRealPart(xy[2 * id + 1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + m;
      x4 = PetscRealPart(xy[2 * id]);
      y4 = PetscRealPart(xy[2 * id + 1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      PetscCall(PetscDrawTriangle(draw, x1, y_1, x2, y2, x3, y3, c1, c2, c3));
      PetscCall(PetscDrawTriangle(draw, x1, y_1, x3, y3, x4, y4, c1, c3, c4));
      if (zctx->showgrid) {
        PetscCall(PetscDrawLine(draw, x1, y_1, x2, y2, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x2, y2, x3, y3, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x3, y3, x4, y4, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x4, y4, x1, y_1, PETSC_DRAW_BLACK));
      }
    }
  }
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl, yl, xr, yr, x, y;
      PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
      x  = xl + .30 * (xr - xl);
      xl = xl + .01 * (xr - xl);
      y  = yr - .30 * (yr - yl);
      yl = yl + .01 * (yr - yl);
      if (zctx->name0) PetscCall(PetscDrawString(draw, x, yl, PETSC_DRAW_BLACK, zctx->name0));
      if (zctx->name1) PetscCall(PetscDrawStringVertical(draw, xl, y, PETSC_DRAW_BLACK, zctx->name1));
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16];
      size_t    len;
      PetscReal w;
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", xmin));
      PetscCall(PetscDrawString(draw, xmin, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", xmax));
      PetscCall(PetscStrlen(value, &len));
      PetscCall(PetscDrawStringGetSize(draw, &w, NULL));
      PetscCall(PetscDrawString(draw, xmax - len * w, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", ymin));
      PetscCall(PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymin, PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", ymax));
      PetscCall(PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymax, PETSC_DRAW_BLACK, value));
    }
  }
  PetscDrawCollectiveEnd(draw);
  PetscFunctionReturn(PETSC_SUCCESS);
}
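
/*
   A minimal sketch (not code from this file) of how such a zoom callback is
   driven; VecView_MPI_Draw_DA2d() below does the equivalent of

     PetscCall(PetscDrawSetCoordinates(draw, coors[0], coors[1], coors[2], coors[3]));
     PetscCall(PetscDrawZoom(draw, VecView_MPI_Draw_DA2d_Zoom, &zctx));

   PetscDrawZoom() clears the window, invokes the callback to draw, and
   re-invokes it with updated coordinates when the user zooms interactively.
*/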

static PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin, PetscViewer viewer)
{
  DM                  da, dac, dag;
  PetscInt            N, s, M, w, ncoors = 4;
  const PetscInt     *lx, *ly;
  PetscReal           coors[4];
  PetscDraw           draw, popup;
  PetscBool           isnull, useports = PETSC_FALSE;
  MPI_Comm            comm;
  Vec                 xlocal, xcoor, xcoorl;
  DMBoundaryType      bx, by;
  DMDAStencilType     st;
  ZoomCtx             zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat   format;
  PetscInt           *displayfields;
  PetscInt            ndisplayfields, i, nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  if (isnull) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscViewerDrawGetBounds(viewer, &nbounds, &bounds));

  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

  PetscCall(PetscObjectGetComm((PetscObject)xin, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &zctx.rank));

  PetscCall(DMDAGetInfo(da, NULL, &M, &N, NULL, &zctx.m, &zctx.n, NULL, &w, &s, &bx, &by, NULL, &st));
  PetscCall(DMDAGetOwnershipRanges(da, &lx, &ly, NULL));

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  PetscCall(PetscObjectQuery((PetscObject)da, "GraphicsGhosted", (PetscObject *)&xlocal));
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         If the original da does not have stencil width one, is periodic, or does not use a box
         stencil, then create a special DMDA to handle one level of ghost points for graphics
      */
      PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, w, 1, lx, ly, &dac));
      PetscCall(DMSetUp(dac));
      PetscCall(PetscInfo(da, "Creating auxiliary DMDA for managing graphics ghost points\n"));
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    PetscCall(DMCreateLocalVector(dac, &xlocal));
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      PetscCall(PetscObjectDereference((PetscObject)dac));
    } else {
      /* remove the association between xlocal and da, because below we compose in the opposite
         direction; if we left this connection we would create a reference loop and the objects
         could never be destroyed */
      PetscCall(PetscObjectRemoveReference((PetscObject)xlocal, "__PETSc_dm"));
    }
    PetscCall(PetscObjectCompose((PetscObject)da, "GraphicsGhosted", (PetscObject)xlocal));
    PetscCall(PetscObjectDereference((PetscObject)xlocal));
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      PetscCall(VecGetDM(xlocal, &dac));
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  PetscCall(DMGlobalToLocalBegin(dac, xin, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(dac, xin, INSERT_VALUES, xlocal));
  PetscCall(VecGetArrayRead(xlocal, &zctx.v));

  /*
      Get coordinates of nodes
  */
  PetscCall(DMGetCoordinates(da, &xcoor));
  if (!xcoor) {
    PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0));
    PetscCall(DMGetCoordinates(da, &xcoor));
  }

  /*
      Determine the min and max coordinates in plot
  */
  PetscCall(VecStrideMin(xcoor, 0, NULL, &zctx.xmin));
  PetscCall(VecStrideMax(xcoor, 0, NULL, &zctx.xmax));
  PetscCall(VecStrideMin(xcoor, 1, NULL, &zctx.ymin));
  PetscCall(VecStrideMax(xcoor, 1, NULL, &zctx.ymax));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_contour_axis", &zctx.showaxis, NULL));
  if (zctx.showaxis) {
    coors[0] = zctx.xmin - .05 * (zctx.xmax - zctx.xmin);
    coors[1] = zctx.ymin - .05 * (zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05 * (zctx.xmax - zctx.xmin);
    coors[3] = zctx.ymax + .05 * (zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin;
    coors[1] = zctx.ymin;
    coors[2] = zctx.xmax;
    coors[3] = zctx.ymax;
  }
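  /* the 5% padding leaves room for the coordinate-limit labels that the zoom callback above draws just outside the [min,max] range */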
  PetscCall(PetscOptionsGetRealArray(NULL, NULL, "-draw_coordinates", coors, &ncoors, NULL));
  PetscCall(PetscInfo(da, "Preparing DMDA 2d contour plot coordinates %g %g %g %g\n", (double)coors[0], (double)coors[1], (double)coors[2], (double)coors[3]));

  /*
      Get local ghosted version of coordinates
  */
  PetscCall(PetscObjectQuery((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject *)&xcoorl));
  if (!xcoorl) {
    /* create a DMDA used to get a ghosted local version of the coordinates */
    PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, 2, 1, lx, ly, &dag));
    PetscCall(DMSetUp(dag));
    PetscCall(PetscInfo(dag, "Creating auxiliary DMDA for managing graphics coordinates ghost points\n"));
    PetscCall(DMCreateLocalVector(dag, &xcoorl));
    PetscCall(PetscObjectCompose((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject)xcoorl));
    PetscCall(PetscObjectDereference((PetscObject)dag));
    PetscCall(PetscObjectDereference((PetscObject)xcoorl));
  } else PetscCall(VecGetDM(xcoorl, &dag));
  PetscCall(DMGlobalToLocalBegin(dag, xcoor, INSERT_VALUES, xcoorl));
  PetscCall(DMGlobalToLocalEnd(dag, xcoor, INSERT_VALUES, xcoorl));
  PetscCall(VecGetArrayRead(xcoorl, &zctx.xy));
  PetscCall(DMDAGetCoordinateName(da, 0, &zctx.name0));
  PetscCall(DMDAGetCoordinateName(da, 1, &zctx.name1));

  /*
      Get information about the size of the area for which each process must draw graphics
  */
  PetscCall(DMDAGetInfo(dac, NULL, &M, &N, NULL, NULL, NULL, NULL, &zctx.dof, NULL, &bx, &by, NULL, NULL));
  PetscCall(DMDAGetGhostCorners(dac, NULL, NULL, NULL, &zctx.m, &zctx.n, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_contour_grid", &zctx.showgrid, NULL));

  PetscCall(DMDASelectFields(da, &ndisplayfields, &displayfields));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_ports", &useports, NULL));
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawViewPortsCreate(draw, ndisplayfields, &ports));
  }

  /*
      Loop over each field; drawing each in a different window
  */
  for (i = 0; i < ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    PetscCall(VecStrideMin(xin, zctx.k, NULL, &zctx.min));
    PetscCall(VecStrideMax(xin, zctx.k, NULL, &zctx.max));
    if (zctx.k < nbounds) {
      zctx.min = bounds[2 * zctx.k];
      zctx.max = bounds[2 * zctx.k + 1];
    }
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscCall(PetscInfo(da, "DMDA 2d contour plot min %g max %g\n", (double)zctx.min, (double)zctx.max));

    if (useports) {
      PetscCall(PetscDrawViewPortsSet(ports, i));
    } else {
      const char *title;
      PetscCall(PetscViewerDrawGetDraw(viewer, i, &draw));
      PetscCall(DMDAGetFieldName(da, zctx.k, &title));
      if (title) PetscCall(PetscDrawSetTitle(draw, title));
    }

    PetscCall(PetscDrawGetPopup(draw, &popup));
    PetscCall(PetscDrawScalePopup(popup, zctx.min, zctx.max));
    PetscCall(PetscDrawSetCoordinates(draw, coors[0], coors[1], coors[2], coors[3]));
    PetscCall(PetscDrawZoom(draw, VecView_MPI_Draw_DA2d_Zoom, &zctx));
    if (!useports) PetscCall(PetscDrawSave(draw));
  }
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawSave(draw));
  }

  PetscCall(PetscDrawViewPortsDestroy(ports));
  PetscCall(PetscFree(displayfields));
  PetscCall(VecRestoreArrayRead(xcoorl, &zctx.xy));
  PetscCall(VecRestoreArrayRead(xlocal, &zctx.v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
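
/*
   Example usage (a hedged sketch, not part of this file): the routine above is
   reached through VecView() on a vector obtained from a 2d DMDA, e.g.

     DM  da;
     Vec x;
     PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, 10, 10, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
     PetscCall(DMSetUp(da));
     PetscCall(DMCreateGlobalVector(da, &x));
     PetscCall(VecSet(x, 1.0));
     PetscCall(VecView(x, PETSC_VIEWER_DRAW_WORLD));

   with the options -draw_contour_grid, -draw_contour_axis, -draw_coordinates,
   and -draw_ports (all read above) controlling the plot.
*/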

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar) * da->M * da->N * da->P * da->w;
  hsize_t     avg_local_vec_size, KiB = 1024, MiB = KiB * KiB, GiB = MiB * KiB, min_size = MiB;
  hsize_t     max_chunks     = 64 * KiB; /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4 * GiB;  /* HDF5 internal limitation */
  hsize_t     zslices = da->p, yslices = da->n, xslices = da->m;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size));
  avg_local_vec_size = (hsize_t)PetscCeilInt64(vec_size, comm_size); /* we will attempt to use this as the chunk size */

  target_size = (hsize_t)PetscMin((PetscInt64)vec_size, PetscMin((PetscInt64)max_chunk_size, PetscMax((PetscInt64)avg_local_vec_size, PetscMax(PetscCeilInt64(vec_size, max_chunks), (PetscInt64)min_size))));
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter
   what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. On a BlueGene, for example, the optimal solution is probably to make
   # chunks = # IO nodes involved, but this author has no access to a BlueGene to
   figure out how to reliably find the right number. And even then it may or may not be enough.
   */
  if (avg_local_vec_size > max_chunk_size) {
    hsize_t zslices_full = da->P;
    hsize_t yslices_full = da->N;

    /* check if we can just split local z-axis: is that enough? */
    zslices = PetscCeilInt64(vec_size, da->p * max_chunk_size) * zslices;
    if (zslices > zslices_full) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = zslices_full;
      yslices = PetscCeilInt64(vec_size, zslices * da->n * max_chunk_size) * yslices;
      if (yslices > yslices_full) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = yslices_full;
        xslices = PetscCeilInt64(vec_size, zslices * yslices * da->m * max_chunk_size) * xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) ++dim;
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t)da->P / zslices;
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    }
    chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) ++dim;
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size / da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->P, da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->P, da->p);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size / da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->N, da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      }
      chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
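
/*
   A worked example of the sizing logic above (an illustration, not code): for a
   1024 x 1024 x 1024 grid with dof = 1 and 8-byte scalars, vec_size is 8 GiB.
   With 64 ranks, avg_local_vec_size is 128 MiB, which lies between min_size
   (1 MiB) and max_chunk_size (4 GiB) and below vec_size, so target_size, and
   hence the chunk size, becomes 128 MiB. Only when avg_local_vec_size exceeds
   4 GiB does the branch above start slicing along z, then y, then x.
*/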

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecView_MPI_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                 dm;
  DM_DA             *da;
  hid_t              filespace;  /* file dataspace identifier */
  hid_t              chunkspace; /* chunk dataset property identifier */
  hid_t              dset_id;    /* dataset identifier */
  hid_t              memspace;   /* memory dataspace identifier */
  hid_t              file_id;
  hid_t              group;
  hid_t              memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t              filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t            dim;
  hsize_t            maxDims[6] = {0}, dims[6] = {0}, chunkDims[6] = {0}, count[6] = {0}, offset[6] = {0}; /* we depend on these being sane later on */
  PetscBool          timestepping = PETSC_FALSE, dim2, spoutput;
  PetscInt           timestep     = PETSC_INT_MIN, dimension;
  const PetscScalar *x;
  const char        *vecname;

  PetscFunctionBegin;
  PetscCall(PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
  PetscCall(PetscViewerHDF5GetBaseDimension2(viewer, &dim2));
  PetscCall(PetscViewerHDF5GetSPOutput(viewer, &spoutput));

  PetscCall(VecGetDM(xin, &dm));
  PetscCheck(dm, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  da = (DM_DA *)dm->data;
  PetscCall(DMGetDimension(dm, &dimension));

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep + 1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(da->P, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(da->N, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(da->M, dims + dim));
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscCall(PetscHDF5IntCast(da->w, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  #if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  #endif

  PetscCall(VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims));

  PetscCallHDF5Return(filespace, H5Screate_simple, ((int)dim, dims, maxDims));

  #if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
  #elif defined(PETSC_USE_REAL___FLOAT128)
    #error "HDF5 output with 128 bit floats not supported."
  #elif defined(PETSC_USE_REAL___FP16)
    #error "HDF5 output with 16 bit floats not supported."
  #else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
  #endif

  /* Create the dataset with default properties and close filespace */
  PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscCallHDF5Return(chunkspace, H5Pcreate, (H5P_DATASET_CREATE));
    PetscCallHDF5(H5Pset_chunk, (chunkspace, (int)dim, chunkDims));

    PetscCallHDF5Return(dset_id, H5Dcreate2, (group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
  } else {
    PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));
    PetscCallHDF5(H5Dset_extent, (dset_id, dims));
  }
  PetscCallHDF5(H5Sclose, (filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->zs, offset + dim++));
  if (dimension > 1) PetscCall(PetscHDF5IntCast(da->ys, offset + dim++));
  PetscCall(PetscHDF5IntCast(da->xs / da->w, offset + dim++));
  if (da->w > 1 || dim2) offset[dim++] = 0;
  #if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
  #endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->ze - da->zs, count + dim++));
  if (dimension > 1) PetscCall(PetscHDF5IntCast(da->ye - da->ys, count + dim++));
  PetscCall(PetscHDF5IntCast((da->xe - da->xs) / da->w, count + dim++));
  if (da->w > 1 || dim2) PetscCall(PetscHDF5IntCast(da->w, count + dim++));
  #if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
  #endif
  PetscCallHDF5Return(memspace, H5Screate_simple, ((int)dim, count, NULL));
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArrayRead(xin, &x));
  PetscCallHDF5(H5Dwrite, (dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscCallHDF5(H5Fflush, (file_id, H5F_SCOPE_GLOBAL));
  PetscCall(VecRestoreArrayRead(xin, &x));

  #if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "complex", PETSC_BOOL, &tru));
  }
  #endif
  if (timestepping) PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "timestepping", PETSC_BOOL, &timestepping));

  /* Close/release resources */
  if (group != file_id) PetscCallHDF5(H5Gclose, (group));
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  PetscCall(PetscInfo(xin, "Wrote Vec object with name %s\n", vecname));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
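
/*
   Example usage (a hedged sketch, not part of this file): writing a DMDA
   vector through the HDF5 viewer, which dispatches to VecView_MPI_HDF5_DA()
   above; the name set on the Vec becomes the dataset name:

     PetscViewer viewer;
     PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, "x.h5", FILE_MODE_WRITE, &viewer));
     PetscCall(PetscObjectSetName((PetscObject)x, "x"));
     PetscCall(VecView(x, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
*/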

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec, PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode DMDAArrayMPIIO(DM da, PetscViewer viewer, Vec xin, PetscBool write)
{
  MPI_File           mfdes;
  PetscMPIInt        gsizes[4], lsizes[4], lstarts[4], asiz, dof;
  MPI_Datatype       view;
  const PetscScalar *array;
  MPI_Offset         off;
  MPI_Aint           ub, ul;
  PetscInt           type, rows, vecrows, tr[2];
  DM_DA             *dd = (DM_DA *)da->data;
  PetscBool          skipheader;

  PetscFunctionBegin;
  PetscCall(VecGetSize(xin, &vecrows));
  PetscCall(PetscViewerBinaryGetSkipHeader(viewer, &skipheader));
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      PetscCall(PetscViewerBinaryRead(viewer, tr, 2, NULL, PETSC_INT));
      type = tr[0];
      rows = tr[1];
      PetscCheck(type == VEC_FILE_CLASSID, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Not a vector next in file");
      PetscCheck(rows == vecrows, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Vector in file not same size as DMDA vector");
    }
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) PetscCall(PetscViewerBinaryWrite(viewer, tr, 2, PETSC_INT));
  }

  PetscCall(PetscMPIIntCast(dd->w, &dof));
  gsizes[0] = dof;
  PetscCall(PetscMPIIntCast(dd->M, gsizes + 1));
  PetscCall(PetscMPIIntCast(dd->N, gsizes + 2));
  PetscCall(PetscMPIIntCast(dd->P, gsizes + 3));
  lsizes[0] = dof;
  PetscCall(PetscMPIIntCast((dd->xe - dd->xs) / dof, lsizes + 1));
  PetscCall(PetscMPIIntCast(dd->ye - dd->ys, lsizes + 2));
  PetscCall(PetscMPIIntCast(dd->ze - dd->zs, lsizes + 3));
  lstarts[0] = 0;
  PetscCall(PetscMPIIntCast(dd->xs / dof, lstarts + 1));
  PetscCall(PetscMPIIntCast(dd->ys, lstarts + 2));
  PetscCall(PetscMPIIntCast(dd->zs, lstarts + 3));
  PetscCallMPI(MPI_Type_create_subarray((PetscMPIInt)(da->dim + 1), gsizes, lsizes, lstarts, MPI_ORDER_FORTRAN, MPIU_SCALAR, &view));
  PetscCallMPI(MPI_Type_commit(&view));

  PetscCall(PetscViewerBinaryGetMPIIODescriptor(viewer, &mfdes));
  PetscCall(PetscViewerBinaryGetMPIIOOffset(viewer, &off));
  PetscCallMPI(MPI_File_set_view(mfdes, off, MPIU_SCALAR, view, (char *)"native", MPI_INFO_NULL));
  PetscCall(VecGetArrayRead(xin, &array));
  asiz = lsizes[1] * (lsizes[2] > 0 ? lsizes[2] : 1) * (lsizes[3] > 0 ? lsizes[3] : 1) * dof;
  if (write) PetscCall(MPIU_File_write_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE));
  else PetscCall(MPIU_File_read_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE));
  PetscCallMPI(MPI_Type_get_extent(view, &ul, &ub));
  PetscCall(PetscViewerBinaryAddMPIIOOffset(viewer, ub));
  PetscCall(VecRestoreArrayRead(xin, &array));
  PetscCallMPI(MPI_Type_free(&view));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
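
/*
   Note on the MPI-IO path above: each process describes its block of the
   global array with MPI_Type_create_subarray() in Fortran (column-major)
   order, so a single collective MPIU_File_write_all()/MPIU_File_read_all()
   moves the data directly, without the intermediate natural-ordering vector
   used by the non-MPI-IO binary path below.
*/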

PetscErrorCode VecView_MPI_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscInt  dim;
  Vec       natural;
  PetscBool isdraw, isvtk, isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif
  const char       *prefix, *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
  if (isdraw) {
    PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
    if (dim == 1) {
      PetscCall(VecView_MPI_Draw_DA1d(xin, viewer));
    } else if (dim == 2) {
      PetscCall(VecView_MPI_Draw_DA2d(xin, viewer));
    } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Cannot graphically view vector associated with a DMDA of dimension %" PetscInt_FMT, dim);
  } else if (isvtk) { /* Duplicate the Vec */
    Vec Y;
    PetscCall(VecDuplicate(xin, &Y));
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscCall(PetscObjectSetName((PetscObject)Y, ((PetscObject)xin)->name));
    }
    PetscCall(VecCopy(xin, Y));
    {
      PetscObject dmvtk;
      PetscBool   compatible, compatibleSet;
      PetscCall(PetscViewerVTKGetDM(viewer, &dmvtk));
      if (dmvtk) {
        PetscCall(DMGetCompatibility(da, (DM)dmvtk, &compatible, &compatibleSet));
        PetscCheck(compatibleSet && compatible, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      PetscCall(PetscViewerVTKAddField(viewer, (PetscObject)da, DMDAVTKWriteAll, PETSC_DEFAULT, PETSC_VTK_POINT_FIELD, PETSC_FALSE, (PetscObject)Y));
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecView_MPI_HDF5_DA(xin, viewer));
#endif
  } else if (isglvis) {
    PetscCall(VecView_GLVis(xin, viewer));
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary, isMPIIO;

    PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
    if (isbinary) {
      PetscCall(PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO));
      if (isMPIIO) {
        PetscCall(DMDAArrayMPIIO(da, viewer, xin, PETSC_TRUE));
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
#endif

    /* call viewer on natural ordering */
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix));
    PetscCall(DMDACreateNaturalVector(da, &natural));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, prefix));
    PetscCall(DMDAGlobalToNaturalBegin(da, xin, INSERT_VALUES, natural));
    PetscCall(DMDAGlobalToNaturalEnd(da, xin, INSERT_VALUES, natural));
    PetscCall(PetscObjectGetName((PetscObject)xin, &name));
    PetscCall(PetscObjectSetName((PetscObject)natural, name));

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_DEFAULT));
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    PetscCall(VecView(natural, viewer));
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm    comm;
      FILE       *info;
      const char *fieldname;
      char        fieldbuf[256];
      PetscInt    dim, ni, nj, nk, pi, pj, pk, dof, n;

      /* set the viewer format back into the viewer */
      PetscCall(PetscViewerPopFormat(viewer));
      PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
      PetscCall(PetscViewerBinaryGetInfoPointer(viewer, &info));
      PetscCall(DMDAGetInfo(da, &dim, &ni, &nj, &nk, &pi, &pj, &pk, &dof, NULL, NULL, NULL, NULL, NULL));
      PetscCall(PetscFPrintf(comm, info, "#--- begin code written by PetscViewerBinary for MATLAB format ---#\n"));
      PetscCall(PetscFPrintf(comm, info, "#$$ tmp = PetscBinaryRead(fd); \n"));
      if (dim == 1) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni));
      if (dim == 2) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj));
      if (dim == 3) PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj, nk));

      for (n = 0; n < dof; n++) {
        PetscCall(DMDAGetFieldName(da, n, &fieldname));
        if (!fieldname || !fieldname[0]) {
          PetscCall(PetscSNPrintf(fieldbuf, sizeof fieldbuf, "field%" PetscInt_FMT, n));
          fieldname = fieldbuf;
        }
        if (dim == 1) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:))';\n", name, fieldname, n + 1));
        if (dim == 2) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:,:))';\n", name, fieldname, n + 1));
        if (dim == 3) PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = permute(squeeze(tmp(%" PetscInt_FMT ",:,:,:)),[2 1 3]);\n", name, fieldname, n + 1));
      }
      PetscCall(PetscFPrintf(comm, info, "#$$ clear tmp; \n"));
      PetscCall(PetscFPrintf(comm, info, "#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n"));
    }

    PetscCall(VecDestroy(&natural));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
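
/*
   Example usage (a hedged sketch, not part of this file): saving in the
   MATLAB-annotated binary format handled above:

     PetscViewer viewer;
     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "x.dat", FILE_MODE_WRITE, &viewer));
     PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_BINARY_MATLAB));
     PetscCall(VecView(x, viewer));
     PetscCall(PetscViewerPopFormat(viewer));
     PetscCall(PetscViewerDestroy(&viewer));

   The accompanying .info file then contains MATLAB code that reshapes the
   vector into (dof, ni[, nj[, nk]]) form with one struct entry per field.
*/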

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                da;
  int               dim, rdim;
  hsize_t           dims[6] = {0}, count[6] = {0}, offset[6] = {0};
  PetscBool         dim2 = PETSC_FALSE, timestepping = PETSC_FALSE;
  PetscInt          dimension, timestep = PETSC_INT_MIN, dofInd;
  PetscScalar      *x;
  const char       *vecname;
  hid_t             filespace; /* file dataspace identifier */
  hid_t             dset_id;   /* dataset identifier */
  hid_t             memspace;  /* memory dataspace identifier */
  hid_t             file_id, group;
  hid_t             scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA            *dd;

  PetscFunctionBegin;
  #if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
  #elif defined(PETSC_USE_REAL___FLOAT128)
    #error "HDF5 input with 128 bit floats not supported."
  #elif defined(PETSC_USE_REAL___FP16)
    #error "HDF5 input with 16 bit floats not supported."
  #else
  scalartype = H5T_NATIVE_DOUBLE;
  #endif

  PetscCall(PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group));
  PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));
  PetscCall(PetscViewerHDF5CheckTimestepping_Internal(viewer, vecname));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep));
  PetscCall(VecGetDM(xin, &da));
  dd = (DM_DA *)da->data;
  PetscCall(DMGetDimension(da, &dimension));

  /* Open dataset */
  PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));

  /* Retrieve the dataspace for the dataset */
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5Return(rdim, H5Sget_simple_extent_dims, (filespace, dims, NULL));

  /* Expected dimension for holding the dof's */
  #if defined(PETSC_USE_COMPLEX)
  dofInd = rdim - 2;
  #else
  dofInd = rdim - 1;
  #endif

  /* The expected number of dimensions, assuming basedimension2 = false */
  dim = (int)dimension;
  if (dd->w > 1) ++dim;
  if (timestep >= 0) ++dim;
  #if defined(PETSC_USE_COMPLEX)
  ++dim;
  #endif

  /* In this case the input dataset has one extra, unexpected dimension. */
  if (rdim == dim + 1) {
    /* In this case the block size is unity */
    if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;

    /* Special error message for the case where dof does not match the input file */
    else PetscCheck(dd->w == (PetscInt)dims[dofInd], PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %" PetscInt_FMT ", not %" PetscInt_FMT " as expected", (PetscInt)dims[dofInd], dd->w);

    /* Other cases where rdim != dim cannot be handled currently */
  } else PetscCheck(rdim == dim, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %" PetscInt_FMT, rdim, dim, dd->w);

  /* Set up the hyperslab size */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    count[dim]  = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(dd->zs, offset + dim));
    PetscCall(PetscHDF5IntCast(dd->ze - dd->zs, count + dim));
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(dd->ys, offset + dim));
    PetscCall(PetscHDF5IntCast(dd->ye - dd->ys, count + dim));
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(dd->xs / dd->w, offset + dim));
  PetscCall(PetscHDF5IntCast((dd->xe - dd->xs) / dd->w, count + dim));
  ++dim;
  if (dd->w > 1 || dim2) {
    offset[dim] = 0;
    PetscCall(PetscHDF5IntCast(dd->w, count + dim));
    ++dim;
  }
  #if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  count[dim]  = 2;
  ++dim;
  #endif

  /* Create the memory and filespace */
  PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArray(xin, &x));
  PetscCallHDF5(H5Dread, (dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscCall(VecRestoreArray(xin, &x));

  /* Close/release resources */
  if (group != file_id) PetscCallHDF5(H5Gclose, (group));
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
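
/*
   Example usage (a hedged sketch, not part of this file): loading the vector
   back from HDF5; the name set on the Vec selects the dataset that
   VecLoad_HDF5_DA() above reads:

     PetscViewer viewer;
     PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, "x.h5", FILE_MODE_READ, &viewer));
     PetscCall(PetscObjectSetName((PetscObject)x, "x"));
     PetscCall(VecLoad(x, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
*/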

static PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM          da;
  Vec         natural;
  const char *prefix;
  PetscInt    bs;
  PetscBool   flag;
  DM_DA      *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool isMPIIO;
#endif

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MPIIO)
  PetscCall(PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO));
  if (isMPIIO) {
    PetscCall(DMDAArrayMPIIO(da, viewer, xin, PETSC_FALSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
#endif

  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix));
  PetscCall(DMDACreateNaturalVector(da, &natural));
  PetscCall(PetscObjectSetName((PetscObject)natural, ((PetscObject)xin)->name));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, prefix));
  PetscCall(VecLoad(natural, viewer));
  PetscCall(DMDANaturalToGlobalBegin(da, natural, INSERT_VALUES, xin));
  PetscCall(DMDANaturalToGlobalEnd(da, natural, INSERT_VALUES, xin));
  PetscCall(VecDestroy(&natural));
  PetscCall(PetscInfo(xin, "Loading vector from natural ordering into DMDA\n"));
  PetscCall(PetscOptionsGetInt(NULL, ((PetscObject)xin)->prefix, "-vecload_block_size", &bs, &flag));
  if (flag && bs != dd->w) PetscCall(PetscInfo(xin, "Block size in file %" PetscInt_FMT " not equal to DMDA's dof %" PetscInt_FMT "\n", bs, dd->w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
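
/*
   Note: binary files thus always hold the vector in the natural (grid-wise)
   ordering rather than the per-process PETSc ordering, so a file written on
   one number of ranks can be read back on a different number of ranks.
*/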

PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));

  if (isbinary) {
    PetscCall(VecLoad_Binary_DA(xin, viewer));
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecLoad_HDF5_DA(xin, viewer));
#endif
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}