Actual source code: gr2.c


/*
   Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h>
#include <petsc/private/glvisvecimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petscdraw.h>

/*
   The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt        rank;
  PetscInt           m, n, dof, k;
  PetscReal          xmin, xmax, ymin, ymax, min, max;
  const PetscScalar *xy, *v;
  PetscBool          showaxis, showgrid;
  const char        *name0, *name1;
} ZoomCtx;

/*
   This does the drawing for one particular field
   in one particular set of coordinates. It is a callback
   called from PetscDrawZoom()
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw, void *ctx)
{
  ZoomCtx           *zctx = (ZoomCtx *)ctx;
  PetscInt           m, n, i, j, k, dof, id, c1, c2, c3, c4;
  PetscReal          min, max, x1, x2, x3, x4, y_1, y2, y3, y4;
  const PetscScalar *xy, *v;

  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* Draw the contour plot patch */
  PetscDrawCollectiveBegin(draw);
  for (j = 0; j < n - 1; j++) {
    for (i = 0; i < m - 1; i++) {
      id  = i + j * m;
      x1  = PetscRealPart(xy[2 * id]);
      y_1 = PetscRealPart(xy[2 * id + 1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1;
      x2 = PetscRealPart(xy[2 * id]);
      y2 = PetscRealPart(xy[2 * id + 1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1 + m;
      x3 = PetscRealPart(xy[2 * id]);
      y3 = PetscRealPart(xy[2 * id + 1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + m;
      x4 = PetscRealPart(xy[2 * id]);
      y4 = PetscRealPart(xy[2 * id + 1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      PetscDrawTriangle(draw, x1, y_1, x2, y2, x3, y3, c1, c2, c3);
      PetscDrawTriangle(draw, x1, y_1, x3, y3, x4, y4, c1, c3, c4);
      if (zctx->showgrid) {
        PetscDrawLine(draw, x1, y_1, x2, y2, PETSC_DRAW_BLACK);
        PetscDrawLine(draw, x2, y2, x3, y3, PETSC_DRAW_BLACK);
        PetscDrawLine(draw, x3, y3, x4, y4, PETSC_DRAW_BLACK);
        PetscDrawLine(draw, x4, y4, x1, y_1, PETSC_DRAW_BLACK);
      }
    }
  }
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl, yl, xr, yr, x, y;
      PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr);
      x  = xl + .30 * (xr - xl);
      xl = xl + .01 * (xr - xl);
      y  = yr - .30 * (yr - yl);
      yl = yl + .01 * (yr - yl);
      if (zctx->name0) PetscDrawString(draw, x, yl, PETSC_DRAW_BLACK, zctx->name0);
      if (zctx->name1) PetscDrawStringVertical(draw, xl, y, PETSC_DRAW_BLACK, zctx->name1);
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16];
      size_t    len;
      PetscReal w;
      PetscSNPrintf(value, 16, "%0.2e", xmin);
      PetscDrawString(draw, xmin, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value);
      PetscSNPrintf(value, 16, "%0.2e", xmax);
      PetscStrlen(value, &len);
      PetscDrawStringGetSize(draw, &w, NULL);
      PetscDrawString(draw, xmax - len * w, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value);
      PetscSNPrintf(value, 16, "%0.2e", ymin);
      PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymin, PETSC_DRAW_BLACK, value);
      PetscSNPrintf(value, 16, "%0.2e", ymax);
      PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymax, PETSC_DRAW_BLACK, value);
    }
  }
  PetscDrawCollectiveEnd(draw);
  return 0;
}
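
/*
   PetscDrawZoom() drives the callback above: it draws the image by invoking the
   callback and, when the user zooms with the mouse, re-invokes it with updated
   coordinates. A minimal sketch of a custom callback (hypothetical names, not
   part of PETSc):

     static PetscErrorCode MyZoomFn(PetscDraw draw, void *dummy)
     {
       PetscDrawLine(draw, 0.0, 0.0, 1.0, 1.0, PETSC_DRAW_RED);
       return 0;
     }

     PetscDrawZoom(draw, MyZoomFn, NULL);
*/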

PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin, PetscViewer viewer)
{
  DM                  da, dac, dag;
  PetscInt            N, s, M, w, ncoors = 4;
  const PetscInt     *lx, *ly;
  PetscReal           coors[4];
  PetscDraw           draw, popup;
  PetscBool           isnull, useports = PETSC_FALSE;
  MPI_Comm            comm;
  Vec                 xlocal, xcoor, xcoorl;
  DMBoundaryType      bx, by;
  DMDAStencilType     st;
  ZoomCtx             zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat   format;
  PetscInt           *displayfields;
  PetscInt            ndisplayfields, i, nbounds;
  const PetscReal    *bounds;

  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscViewerDrawGetDraw(viewer, 0, &draw);
  PetscDrawIsNull(draw, &isnull);
  if (isnull) return 0;

  PetscViewerDrawGetBounds(viewer, &nbounds, &bounds);

  VecGetDM(xin, &da);
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

  PetscObjectGetComm((PetscObject)xin, &comm);
  MPI_Comm_rank(comm, &zctx.rank);

  DMDAGetInfo(da, NULL, &M, &N, NULL, &zctx.m, &zctx.n, NULL, &w, &s, &bx, &by, NULL, &st);
  DMDAGetOwnershipRanges(da, &lx, &ly, NULL);

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  PetscObjectQuery((PetscObject)da, "GraphicsGhosted", (PetscObject *)&xlocal);
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if the original da does not have stencil width one, is periodic, or does not use a box
         stencil, then create a special DMDA to handle one level of ghost points for graphics
      */
      DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, w, 1, lx, ly, &dac);
      DMSetUp(dac);
      PetscInfo(da, "Creating auxiliary DMDA for managing graphics ghost points\n");
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    DMCreateLocalVector(dac, &xlocal);
    if (dac != da) {
      /* don't keep any public reference to this DMDA; it is only available through xlocal */
      PetscObjectDereference((PetscObject)dac);
    } else {
      /* remove the association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd create a reference loop, so the objects
         could never be destroyed */
      PetscObjectRemoveReference((PetscObject)xlocal, "__PETSc_dm");
    }
    PetscObjectCompose((PetscObject)da, "GraphicsGhosted", (PetscObject)xlocal);
    PetscObjectDereference((PetscObject)xlocal);
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      VecGetDM(xlocal, &dac);
    } else {
      dac = da;
    }
  }

  /*
      Get local (ghosted) values of vector
  */
  DMGlobalToLocalBegin(dac, xin, INSERT_VALUES, xlocal);
  DMGlobalToLocalEnd(dac, xin, INSERT_VALUES, xlocal);
  VecGetArrayRead(xlocal, &zctx.v);

  /*
      Get coordinates of nodes
  */
  DMGetCoordinates(da, &xcoor);
  if (!xcoor) {
    DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0);
    DMGetCoordinates(da, &xcoor);
  }

  /*
      Determine the min and max coordinates in plot
  */
  VecStrideMin(xcoor, 0, NULL, &zctx.xmin);
  VecStrideMax(xcoor, 0, NULL, &zctx.xmax);
  VecStrideMin(xcoor, 1, NULL, &zctx.ymin);
  VecStrideMax(xcoor, 1, NULL, &zctx.ymax);
  PetscOptionsGetBool(NULL, NULL, "-draw_contour_axis", &zctx.showaxis, NULL);
  if (zctx.showaxis) {
    coors[0] = zctx.xmin - .05 * (zctx.xmax - zctx.xmin);
    coors[1] = zctx.ymin - .05 * (zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05 * (zctx.xmax - zctx.xmin);
    coors[3] = zctx.ymax + .05 * (zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin;
    coors[1] = zctx.ymin;
    coors[2] = zctx.xmax;
    coors[3] = zctx.ymax;
  }
  PetscOptionsGetRealArray(NULL, NULL, "-draw_coordinates", coors, &ncoors, NULL);
  PetscInfo(da, "Preparing DMDA 2d contour plot coordinates %g %g %g %g\n", (double)coors[0], (double)coors[1], (double)coors[2], (double)coors[3]);

  /*
      Get local ghosted version of coordinates
  */
  PetscObjectQuery((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject *)&xcoorl);
  if (!xcoorl) {
    /* create a DMDA to get a ghosted local version of the coordinates */
    DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, 2, 1, lx, ly, &dag);
    DMSetUp(dag);
    PetscInfo(dag, "Creating auxiliary DMDA for managing graphics coordinates ghost points\n");
    DMCreateLocalVector(dag, &xcoorl);
    PetscObjectCompose((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject)xcoorl);
    PetscObjectDereference((PetscObject)dag);
    PetscObjectDereference((PetscObject)xcoorl);
  } else VecGetDM(xcoorl, &dag);
  DMGlobalToLocalBegin(dag, xcoor, INSERT_VALUES, xcoorl);
  DMGlobalToLocalEnd(dag, xcoor, INSERT_VALUES, xcoorl);
  VecGetArrayRead(xcoorl, &zctx.xy);
  DMDAGetCoordinateName(da, 0, &zctx.name0);
  DMDAGetCoordinateName(da, 1, &zctx.name1);

  /*
      Get information about the size of the area each process must draw
  */
  DMDAGetInfo(dac, NULL, &M, &N, NULL, NULL, NULL, NULL, &zctx.dof, NULL, &bx, &by, NULL, NULL);
  DMDAGetGhostCorners(dac, NULL, NULL, NULL, &zctx.m, &zctx.n, NULL);
  PetscOptionsGetBool(NULL, NULL, "-draw_contour_grid", &zctx.showgrid, NULL);

  DMDASelectFields(da, &ndisplayfields, &displayfields);
  PetscViewerGetFormat(viewer, &format);
  PetscOptionsGetBool(NULL, NULL, "-draw_ports", &useports, NULL);
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    PetscViewerDrawGetDraw(viewer, 0, &draw);
    PetscDrawCheckResizedWindow(draw);
    PetscDrawClear(draw);
    PetscDrawViewPortsCreate(draw, ndisplayfields, &ports);
  }

  /*
      Loop over each field, drawing each in a different window
  */
  for (i = 0; i < ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    VecStrideMin(xin, zctx.k, NULL, &zctx.min);
    VecStrideMax(xin, zctx.k, NULL, &zctx.max);
    if (zctx.k < nbounds) {
      zctx.min = bounds[2 * zctx.k];
      zctx.max = bounds[2 * zctx.k + 1];
    }
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscInfo(da, "DMDA 2d contour plot min %g max %g\n", (double)zctx.min, (double)zctx.max);

    if (useports) {
      PetscDrawViewPortsSet(ports, i);
    } else {
      const char *title;
      PetscViewerDrawGetDraw(viewer, i, &draw);
      DMDAGetFieldName(da, zctx.k, &title);
      if (title) PetscDrawSetTitle(draw, title);
    }

    PetscDrawGetPopup(draw, &popup);
    PetscDrawScalePopup(popup, zctx.min, zctx.max);
    PetscDrawSetCoordinates(draw, coors[0], coors[1], coors[2], coors[3]);
    PetscDrawZoom(draw, VecView_MPI_Draw_DA2d_Zoom, &zctx);
    if (!useports) PetscDrawSave(draw);
  }
  if (useports) {
    PetscViewerDrawGetDraw(viewer, 0, &draw);
    PetscDrawSave(draw);
  }

  PetscDrawViewPortsDestroy(ports);
  PetscFree(displayfields);
  VecRestoreArrayRead(xcoorl, &zctx.xy);
  VecRestoreArrayRead(xlocal, &zctx.v);
  return 0;
}
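
/*
   The routine above is reached through VecView() on a vector obtained from a 2d
   DMDA when the viewer is a draw viewer; see VecView_MPI_DA() below. A minimal
   sketch (assuming da is an already set up 2d DMDA):

     Vec x;
     DMCreateGlobalVector(da, &x);
     VecSet(x, 1.0);
     VecView(x, PETSC_VIEWER_DRAW_WORLD);

   The options consulted above are -draw_contour_axis, -draw_contour_grid,
   -draw_coordinates, and -draw_ports.
*/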

#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims)
{
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar) * da->M * da->N * da->P * da->w;
  hsize_t     avg_local_vec_size, KiB = 1024, MiB = KiB * KiB, GiB = MiB * KiB, min_size = MiB;
  hsize_t     max_chunks     = 64 * KiB; /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4 * GiB;  /* HDF5 internal limitation */
  PetscInt    zslices = da->p, yslices = da->n, xslices = da->m;

  MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size);
  avg_local_vec_size = (hsize_t)PetscCeilInt(vec_size, comm_size); /* we will attempt to use this as the chunk size */

  target_size = (hsize_t)PetscMin((PetscInt64)vec_size, PetscMin((PetscInt64)max_chunk_size, PetscMax((PetscInt64)avg_local_vec_size, PetscMax(PetscCeilInt64(vec_size, max_chunks), (PetscInt64)min_size))));
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(PetscReal);

  /*
   if size/rank > max_chunk_size, we need radical measures: even going down to
   avg_local_vec_size is not enough, so we simply use a chunk size of 4 GiB no matter
   what, composed in the most efficient way possible.
   N.B. this minimises the number of chunks, which may or may not be the optimal
   solution. On a BlueGene, for example, the optimal solution is probably to make
   # chunks = # IO nodes involved, but this author has no access to a BlueGene to
   figure out how to reliably find the right number. And even then it may or may
   not be enough.
   */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = PetscCeilInt(vec_size, da->p * max_chunk_size) * zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = PetscCeilInt(vec_size, zslices * da->n * max_chunk_size) * yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = PetscCeilInt(vec_size, zslices * yslices * da->m * max_chunk_size) * xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) ++dim;
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t)da->P / zslices;
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    }
    chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) ++dim;
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis into rank-sized bits, i.e. divide the chunk size by the number of ranks in the z-direction */
        if (target_size >= chunk_size / da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->P, da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->P, da->p);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size / da->n) {
          /* just make chunks the size of <local_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->N, da->n);
        } else {
          /* oops, just splitting the y-axis is NOT ENOUGH, need to split more; let's be
           radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      }
      chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  return 0;
}
#endif
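
/*
   A worked example of the sizing logic above (illustrative numbers, not from the
   PETSc source): a 1024x1024x1024 DMDA with dof = 1 in double precision gives
   vec_size = 8 * 1024^3 bytes = 8 GiB. On 64 ranks avg_local_vec_size = 128 MiB, so

     target_size = min(8 GiB, min(4 GiB, max(128 MiB, max(ceil(8 GiB / 65536), 1 MiB))))
                 = 128 MiB,

   far below the 8 GiB default whole-array chunk, so the chunk is shrunk, splitting
   the z-axis first and falling back to y and x as needed.
*/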

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                 dm;
  DM_DA             *da;
  hid_t              filespace;  /* file dataspace identifier */
  hid_t              chunkspace; /* chunk dataset property identifier */
  hid_t              dset_id;    /* dataset identifier */
  hid_t              memspace;   /* memory dataspace identifier */
  hid_t              file_id;
  hid_t              group;
  hid_t              memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t              filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t            dim;
  hsize_t            maxDims[6] = {0}, dims[6] = {0}, chunkDims[6] = {0}, count[6] = {0}, offset[6] = {0}; /* we depend on these being sane later on */
  PetscBool          timestepping = PETSC_FALSE, dim2, spoutput;
  PetscInt           timestep     = PETSC_MIN_INT, dimension;
  const PetscScalar *x;
  const char        *vecname;

  PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group);
  PetscViewerHDF5IsTimestepping(viewer, &timestepping);
  if (timestepping) PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscViewerHDF5GetBaseDimension2(viewer, &dim2);
  PetscViewerHDF5GetSPOutput(viewer, &spoutput);

  VecGetDM(xin, &dm);
  PetscCheck(dm, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  da = (DM_DA *)dm->data;
  DMGetDimension(dm, &dimension);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep + 1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscHDF5IntCast(da->P, dims + dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscHDF5IntCast(da->N, dims + dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscHDF5IntCast(da->M, dims + dim);
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscHDF5IntCast(da->w, dims + dim);
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  #if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  #endif

  VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims);

  PetscCallHDF5Return(filespace, H5Screate_simple, (dim, dims, maxDims));

  #if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
  #elif defined(PETSC_USE_REAL___FLOAT128)
    #error "HDF5 output with 128 bit floats not supported."
  #elif defined(PETSC_USE_REAL___FP16)
    #error "HDF5 output with 16 bit floats not supported."
  #else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
  #endif

  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject)xin, &vecname);
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscCallHDF5Return(chunkspace, H5Pcreate, (H5P_DATASET_CREATE));
    PetscCallHDF5(H5Pset_chunk, (chunkspace, dim, chunkDims));

    PetscCallHDF5Return(dset_id, H5Dcreate2, (group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
  } else {
    PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));
    PetscCallHDF5(H5Dset_extent, (dset_id, dims));
  }
  PetscCallHDF5(H5Sclose, (filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) PetscHDF5IntCast(da->zs, offset + dim++);
  if (dimension > 1) PetscHDF5IntCast(da->ys, offset + dim++);
  PetscHDF5IntCast(da->xs / da->w, offset + dim++);
  if (da->w > 1 || dim2) offset[dim++] = 0;
  #if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
  #endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) PetscHDF5IntCast(da->ze - da->zs, count + dim++);
  if (dimension > 1) PetscHDF5IntCast(da->ye - da->ys, count + dim++);
  PetscHDF5IntCast((da->xe - da->xs) / da->w, count + dim++);
  if (da->w > 1 || dim2) PetscHDF5IntCast(da->w, count + dim++);
  #if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
  #endif
  PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL));
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  VecGetArrayRead(xin, &x);
  PetscCallHDF5(H5Dwrite, (dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscCallHDF5(H5Fflush, (file_id, H5F_SCOPE_GLOBAL));
  VecRestoreArrayRead(xin, &x);

  #if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "complex", PETSC_BOOL, &tru);
  }
  #endif
  if (timestepping) PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "timestepping", PETSC_BOOL, &timestepping);

  /* Close/release resources */
  if (group != file_id) PetscCallHDF5(H5Gclose, (group));
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  PetscInfo(xin, "Wrote Vec object with name %s\n", vecname);
  return 0;
}
#endif
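
/*
   A minimal sketch of writing a DMDA vector x with the HDF5 viewer, which reaches
   VecView_MPI_HDF5_DA() above (hypothetical file name; assumes PETSc was configured
   with HDF5):

     PetscViewer viewer;
     PetscViewerHDF5Open(PETSC_COMM_WORLD, "x.h5", FILE_MODE_WRITE, &viewer);
     PetscObjectSetName((PetscObject)x, "solution");
     VecView(x, viewer);
     PetscViewerDestroy(&viewer);
*/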

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec, PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode DMDAArrayMPIIO(DM da, PetscViewer viewer, Vec xin, PetscBool write)
{
  MPI_File           mfdes;
  PetscMPIInt        gsizes[4], lsizes[4], lstarts[4], asiz, dof;
  MPI_Datatype       view;
  const PetscScalar *array;
  MPI_Offset         off;
  MPI_Aint           ub, ul;
  PetscInt           type, rows, vecrows, tr[2];
  DM_DA             *dd = (DM_DA *)da->data;
  PetscBool          skipheader;

  VecGetSize(xin, &vecrows);
  PetscViewerBinaryGetSkipHeader(viewer, &skipheader);
  if (!write) {
    /* Read vector header. */
    if (!skipheader) {
      PetscViewerBinaryRead(viewer, tr, 2, NULL, PETSC_INT);
      type = tr[0];
      rows = tr[1];
      PetscCheck(type == VEC_FILE_CLASSID, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Not vector next in file");
      PetscCheck(rows == vecrows, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Vector in file not same size as DMDA vector");
    }
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) PetscViewerBinaryWrite(viewer, tr, 2, PETSC_INT);
  }

  PetscMPIIntCast(dd->w, &dof);
  gsizes[0] = dof;
  PetscMPIIntCast(dd->M, gsizes + 1);
  PetscMPIIntCast(dd->N, gsizes + 2);
  PetscMPIIntCast(dd->P, gsizes + 3);
  lsizes[0] = dof;
  PetscMPIIntCast((dd->xe - dd->xs) / dof, lsizes + 1);
  PetscMPIIntCast(dd->ye - dd->ys, lsizes + 2);
  PetscMPIIntCast(dd->ze - dd->zs, lsizes + 3);
  lstarts[0] = 0;
  PetscMPIIntCast(dd->xs / dof, lstarts + 1);
  PetscMPIIntCast(dd->ys, lstarts + 2);
  PetscMPIIntCast(dd->zs, lstarts + 3);
  MPI_Type_create_subarray(da->dim + 1, gsizes, lsizes, lstarts, MPI_ORDER_FORTRAN, MPIU_SCALAR, &view);
  MPI_Type_commit(&view);

  PetscViewerBinaryGetMPIIODescriptor(viewer, &mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer, &off);
  MPI_File_set_view(mfdes, off, MPIU_SCALAR, view, (char *)"native", MPI_INFO_NULL);
  VecGetArrayRead(xin, &array);
  asiz = lsizes[1] * (lsizes[2] > 0 ? lsizes[2] : 1) * (lsizes[3] > 0 ? lsizes[3] : 1) * dof;
  if (write) MPIU_File_write_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE);
  else MPIU_File_read_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE);
  MPI_Type_get_extent(view, &ul, &ub);
  PetscViewerBinaryAddMPIIOOffset(viewer, ub);
  VecRestoreArrayRead(xin, &array);
  MPI_Type_free(&view);
  return 0;
}
#endif
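
/*
   A minimal sketch of triggering the MPI-IO path above for a DMDA vector x
   (hypothetical file name); the same switch can be flipped from the command line
   with -viewer_binary_mpiio:

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD, "x.dat", FILE_MODE_WRITE, &viewer);
     PetscViewerBinarySetUseMPIIO(viewer, PETSC_TRUE);
     VecView(x, viewer);
     PetscViewerDestroy(&viewer);
*/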

PetscErrorCode VecView_MPI_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscInt  dim;
  Vec       natural;
  PetscBool isdraw, isvtk, isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif
  const char       *prefix, *name;
  PetscViewerFormat format;

  VecGetDM(xin, &da);
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw);
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5);
#endif
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis);
  if (isdraw) {
    DMDAGetInfo(da, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
    if (dim == 1) {
      VecView_MPI_Draw_DA1d(xin, viewer);
    } else if (dim == 2) {
      VecView_MPI_Draw_DA2d(xin, viewer);
    } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Cannot graphically view vector associated with this dimensional DMDA %" PetscInt_FMT, dim);
  } else if (isvtk) { /* Duplicate the Vec */
    Vec Y;
    VecDuplicate(xin, &Y);
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y. The duplicate names are safe because nobody else will ever see Y. */
      PetscObjectSetName((PetscObject)Y, ((PetscObject)xin)->name);
    }
    VecCopy(xin, Y);
    {
      PetscObject dmvtk;
      PetscBool   compatible, compatibleSet;
      PetscViewerVTKGetDM(viewer, &dmvtk);
      if (dmvtk) {
        PetscCheckSameComm((PetscObject)da, 1, dmvtk, 2);
        DMGetCompatibility(da, (DM)dmvtk, &compatible, &compatibleSet);
        PetscCheck(compatibleSet && compatible, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      PetscViewerVTKAddField(viewer, (PetscObject)da, DMDAVTKWriteAll, PETSC_DEFAULT, PETSC_VTK_POINT_FIELD, PETSC_FALSE, (PetscObject)Y);
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecView_MPI_HDF5_DA(xin, viewer);
#endif
  } else if (isglvis) {
    VecView_GLVis(xin, viewer);
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary, isMPIIO;

    PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary);
    if (isbinary) {
      PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO);
      if (isMPIIO) {
        DMDAArrayMPIIO(da, viewer, xin, PETSC_TRUE);
        return 0;
      }
    }
#endif

    /* call viewer on natural ordering */
    PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix);
    DMDACreateNaturalVector(da, &natural);
    PetscObjectSetOptionsPrefix((PetscObject)natural, prefix);
    DMDAGlobalToNaturalBegin(da, xin, INSERT_VALUES, natural);
    DMDAGlobalToNaturalEnd(da, xin, INSERT_VALUES, natural);
    PetscObjectGetName((PetscObject)xin, &name);
    PetscObjectSetName((PetscObject)natural, name);

    PetscViewerGetFormat(viewer, &format);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscViewerPushFormat(viewer, PETSC_VIEWER_DEFAULT);
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    VecView(natural, viewer);
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm    comm;
      FILE       *info;
      const char *fieldname;
      char        fieldbuf[256];
      PetscInt    dim, ni, nj, nk, pi, pj, pk, dof, n;

      /* set the viewer format back into the viewer */
      PetscViewerPopFormat(viewer);
      PetscObjectGetComm((PetscObject)viewer, &comm);
      PetscViewerBinaryGetInfoPointer(viewer, &info);
      DMDAGetInfo(da, &dim, &ni, &nj, &nk, &pi, &pj, &pk, &dof, NULL, NULL, NULL, NULL, NULL);
      PetscFPrintf(comm, info, "#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
      PetscFPrintf(comm, info, "#$$ tmp = PetscBinaryRead(fd); \n");
      if (dim == 1) PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni);
      if (dim == 2) PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj);
      if (dim == 3) PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj, nk);

      for (n = 0; n < dof; n++) {
        DMDAGetFieldName(da, n, &fieldname);
        if (!fieldname || !fieldname[0]) {
          PetscSNPrintf(fieldbuf, sizeof fieldbuf, "field%" PetscInt_FMT, n);
          fieldname = fieldbuf;
        }
        if (dim == 1) PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:))';\n", name, fieldname, n + 1);
        if (dim == 2) PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:,:))';\n", name, fieldname, n + 1);
        if (dim == 3) PetscFPrintf(comm, info, "#$$ Set.%s.%s = permute(squeeze(tmp(%" PetscInt_FMT ",:,:,:)),[2 1 3]);\n", name, fieldname, n + 1);
      }
      PetscFPrintf(comm, info, "#$$ clear tmp; \n");
      PetscFPrintf(comm, info, "#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
    }

    VecDestroy(&natural);
  }
  return 0;
}
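
/*
   A sketch of producing the MATLAB-readable info file written above (hypothetical
   file name): push the format before viewing, then read the resulting .info file
   from MATLAB with the scripts in $PETSC_DIR/share/petsc/matlab.

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD, "x.dat", FILE_MODE_WRITE, &viewer);
     PetscViewerPushFormat(viewer, PETSC_VIEWER_BINARY_MATLAB);
     VecView(x, viewer);
     PetscViewerPopFormat(viewer);
     PetscViewerDestroy(&viewer);
*/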

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                da;
  int               dim, rdim;
  hsize_t           dims[6] = {0}, count[6] = {0}, offset[6] = {0};
  PetscBool         dim2 = PETSC_FALSE, timestepping = PETSC_FALSE;
  PetscInt          dimension, timestep = PETSC_MIN_INT, dofInd;
  PetscScalar      *x;
  const char       *vecname;
  hid_t             filespace; /* file dataspace identifier */
  hid_t             dset_id;   /* dataset identifier */
  hid_t             memspace;  /* memory dataspace identifier */
  hid_t             file_id, group;
  hid_t             scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA            *dd;

  #if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
  #elif defined(PETSC_USE_REAL___FLOAT128)
    #error "HDF5 output with 128 bit floats not supported."
  #elif defined(PETSC_USE_REAL___FP16)
    #error "HDF5 output with 16 bit floats not supported."
  #else
  scalartype = H5T_NATIVE_DOUBLE;
  #endif

  PetscViewerHDF5OpenGroup(viewer, NULL, &file_id, &group);
  PetscObjectGetName((PetscObject)xin, &vecname);
  PetscViewerHDF5CheckTimestepping_Internal(viewer, vecname);
  PetscViewerHDF5IsTimestepping(viewer, &timestepping);
  if (timestepping) PetscViewerHDF5GetTimestep(viewer, &timestep);
  VecGetDM(xin, &da);
  dd = (DM_DA *)da->data;
  DMGetDimension(da, &dimension);

  /* Open dataset */
  PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));

  /* Retrieve the dataspace for the dataset */
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5Return(rdim, H5Sget_simple_extent_dims, (filespace, dims, NULL));

  /* Expected dimension for holding the dof's */
  #if defined(PETSC_USE_COMPLEX)
  dofInd = rdim - 2;
  #else
  dofInd = rdim - 1;
  #endif

  /* The expected number of dimensions, assuming basedimension2 = false */
  dim = dimension;
  if (dd->w > 1) ++dim;
  if (timestep >= 0) ++dim;
  #if defined(PETSC_USE_COMPLEX)
  ++dim;
  #endif

  /* In this case the input dataset has one extra, unexpected dimension. */
  if (rdim == dim + 1) {
    /* In this case the block size is unity */
    if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE;

    /* Special error message for the case where the dof does not match the input file */
    else PetscCheck(dd->w == (PetscInt)dims[dofInd], PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %" PetscInt_FMT ", not %" PetscInt_FMT " as expected", (PetscInt)dims[dofInd], dd->w);

    /* Other cases where rdim != dim cannot be handled currently */
  } else PetscCheck(rdim == dim, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %" PetscInt_FMT, rdim, dim, dd->w);
  /* Set up the hyperslab size */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    count[dim]  = 1;
    ++dim;
  }
  if (dimension == 3) {
    PetscHDF5IntCast(dd->zs, offset + dim);
    PetscHDF5IntCast(dd->ze - dd->zs, count + dim);
    ++dim;
  }
  if (dimension > 1) {
    PetscHDF5IntCast(dd->ys, offset + dim);
    PetscHDF5IntCast(dd->ye - dd->ys, count + dim);
    ++dim;
  }
  PetscHDF5IntCast(dd->xs / dd->w, offset + dim);
  PetscHDF5IntCast((dd->xe - dd->xs) / dd->w, count + dim);
  ++dim;
  if (dd->w > 1 || dim2) {
    offset[dim] = 0;
    PetscHDF5IntCast(dd->w, count + dim);
    ++dim;
  }
  #if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  count[dim]  = 2;
  ++dim;
  #endif

  /* Create the memory and filespace */
  PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  VecGetArray(xin, &x);
  PetscCallHDF5(H5Dread, (dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x));
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) PetscCallHDF5(H5Gclose, (group));
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  return 0;
}
#endif

PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer)
{
  DM          da;
  Vec         natural;
  const char *prefix;
  PetscInt    bs;
  PetscBool   flag;
  DM_DA      *dd;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool isMPIIO;
#endif

  VecGetDM(xin, &da);
  dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO);
  if (isMPIIO) {
    DMDAArrayMPIIO(da, viewer, xin, PETSC_FALSE);
    return 0;
  }
#endif

  PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix);
  DMDACreateNaturalVector(da, &natural);
  PetscObjectSetName((PetscObject)natural, ((PetscObject)xin)->name);
  PetscObjectSetOptionsPrefix((PetscObject)natural, prefix);
  VecLoad(natural, viewer);
  DMDANaturalToGlobalBegin(da, natural, INSERT_VALUES, xin);
  DMDANaturalToGlobalEnd(da, natural, INSERT_VALUES, xin);
  VecDestroy(&natural);
  PetscInfo(xin, "Loading vector from natural ordering into DMDA\n");
  PetscOptionsGetInt(NULL, ((PetscObject)xin)->prefix, "-vecload_block_size", &bs, &flag);
  if (flag && bs != dd->w) PetscInfo(xin, "Block size in file %" PetscInt_FMT " not equal to DMDA's dof %" PetscInt_FMT "\n", bs, dd->w);
  return 0;
}
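
/*
   A minimal sketch of the corresponding load (hypothetical file name); the vector
   must come from the DMDA so the natural-to-global scatter above can restore the
   parallel layout:

     Vec x;
     PetscViewer viewer;
     DMCreateGlobalVector(da, &x);
     PetscViewerBinaryOpen(PETSC_COMM_WORLD, "x.dat", FILE_MODE_READ, &viewer);
     VecLoad(x, viewer);
     PetscViewerDestroy(&viewer);
*/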

PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer)
{
  DM        da;
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  VecGetDM(xin, &da);
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5);
#endif
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary);

  if (isbinary) {
    VecLoad_Binary_DA(xin, viewer);
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecLoad_HDF5_DA(xin, viewer);
#endif
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
  return 0;
}