Actual source code: grvtk.c
#include <petsc/private/dmdaimpl.h>
/*
  Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
  including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

/* Helper function which determines if any DMDA fields are named. This is used
   as a proxy for the user's intention to use DMDA fields as distinct
   scalar-valued fields as opposed to a single vector-valued field */
static PetscErrorCode DMDAGetFieldsNamed(DM da, PetscBool *fieldsnamed)
{
  PetscInt f, bs;

  *fieldsnamed = PETSC_FALSE;
  DMDAGetDof(da, &bs);
  for (f = 0; f < bs; ++f) {
    const char *fieldname;
    DMDAGetFieldName(da, f, &fieldname);
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  return 0;
}
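
/* For example (informal, based on the writers below): if the caller has named any component
   with DMDASetFieldName(), each component of a queued vector is emitted as its own scalar
   DataArray called "<vecname>.<fieldname>"; if no component is named, one vector-valued
   DataArray with NumberOfComponents equal to the block size is emitted instead. */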

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm comm;
  Vec Coords;
  PetscViewer_VTK *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE *fp;
  PetscMPIInt rank, size, tag;
  DMDALocalInfo info;
  PetscInt dim, mx, my, mz, cdim, bs, maxnnodes, maxbs, i, j, k, r;
  PetscInt64 boffset;
  PetscInt rloc[6], (*grloc)[6] = NULL;
  PetscScalar *array, *array2;

  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);
  DMDAGetLocalInfo(da, &info);
  DMGetCoordinates(da, &Coords);
  if (Coords) {
    PetscInt csize;
    VecGetSize(Coords, &csize);
    cdim = csize / (mx * my * mz);
  } else {
    cdim = dim;
  }

  PetscFOpen(comm, vtk->filename, "wb", &fp);
  PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n");
  PetscFPrintf(comm, fp, "<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n", byte_order);
  PetscFPrintf(comm, fp, " <StructuredGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1);

  if (rank == 0) PetscMalloc1(size * 6, &grloc);
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc, 6, MPIU_INT, &grloc[0][0], 6, MPIU_INT, 0, comm);

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs = 0;     /* Used for the temporary array size on rank 0 */
  boffset = 0;   /* Offset into binary file */
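  /* Note: every "offset" attribute written below points into the raw <AppendedData> block at
     the end of the file. Each appended array is preceded by an int holding its byte count,
     which is why every boffset increment also adds sizeof(int). */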
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscFPrintf(comm, fp, " <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1);
    PetscFPrintf(comm, fp, " <Points>\n");
    PetscFPrintf(comm, fp, " <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset);
    boffset += 3 * nnodes * sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm, fp, " </Points>\n");

    PetscFPrintf(comm, fp, " <PointData Scalars=\"ScalarPointData\">\n");
    for (link = vtk->link; link; link = link->next) {
      Vec X = (Vec)link->vec;
      PetscInt bs, f;
      DM daCurr;
      PetscBool fieldsnamed;
      const char *vecname = "Unnamed";

      VecGetDM(X, &daCurr);
      DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);
      maxbs = PetscMax(maxbs, bs);
      if (((PetscObject)X)->name || link != vtk->link) PetscObjectGetName((PetscObject)X, &vecname);

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      DMDAGetFieldsNamed(daCurr, &fieldsnamed);
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char buf[256];
          const char *fieldname;
          DMDAGetFieldName(daCurr, f, &fieldname);
          if (!fieldname) {
            PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f);
            fieldname = buf;
          }
          PetscFPrintf(comm, fp, " <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, fieldname, boffset);
          boffset += nnodes * sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscFPrintf(comm, fp, " <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, bs, boffset);
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm, fp, " </PointData>\n");
    PetscFPrintf(comm, fp, " </Piece>\n");
  }
  PetscFPrintf(comm, fp, " </StructuredGrid>\n");
  PetscFPrintf(comm, fp, " <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm, fp, "_");
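  /* Everything after the "_" marker is raw binary. The appended blocks must be written in
     exactly the order in which their offsets were declared above: for each piece, the
     coordinates first, then the data of every queued vector. */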

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscMalloc2(maxnnodes * PetscMax(3, maxbs), &array, maxnnodes * PetscMax(3, maxbs), &array2);
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      VecGetArrayRead(Coords, &coords);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array, nnodes * cdim, MPIU_SCALAR, r, tag, comm, &status);
          MPI_Get_count(&status, MPIU_SCALAR, &nn);
        } else PetscArraycpy(array, coords, nnodes * cdim);
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k = 0; k < zm; k++) {
          for (j = 0; j < ym; j++) {
            for (i = 0; i < xm; i++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array2[Iloc * 3 + 0] = array[Iloc * cdim + 0];
              array2[Iloc * 3 + 1] = cdim > 1 ? array[Iloc * cdim + 1] : 0.0;
              array2[Iloc * 3 + 2] = cdim > 2 ? array[Iloc * cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        MPI_Send((void *)coords, nnodes * cdim, MPIU_SCALAR, 0, tag, comm);
      }
      VecRestoreArrayRead(Coords, &coords);
    } else { /* Fabricate some coordinates using grid index */
      for (k = 0; k < zm; k++) {
        for (j = 0; j < ym; j++) {
          for (i = 0; i < xm; i++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[Iloc * 3 + 0] = xs + i;
            array2[Iloc * 3 + 1] = ys + j;
            array2[Iloc * 3 + 2] = zs + k;
          }
        }
      }
    }
    PetscViewerVTKFWrite(viewer, fp, array2, nnodes * 3, MPIU_SCALAR);

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt bs, f;
      DM daCurr;
      PetscBool fieldsnamed;
      VecGetDM(X, &daCurr);
      DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);
      VecGetArrayRead(X, &x);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status);
          MPI_Get_count(&status, MPIU_SCALAR, &nn);
        } else PetscArraycpy(array, x, nnodes * bs);

        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        DMDAGetFieldsNamed(daCurr, &fieldsnamed);
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc] = array[Iloc * bs + f];
                }
              }
            }
            PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR);
          }
        } else PetscViewerVTKFWrite(viewer, fp, array, bs * nnodes, MPIU_SCALAR);
      } else if (r == rank) MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm);
      VecRestoreArrayRead(X, &x);
    }
  }
  PetscFree2(array, array2);
  PetscFree(grloc);

  PetscFPrintf(comm, fp, "\n </AppendedData>\n");
  PetscFPrintf(comm, fp, "</VTKFile>\n");
  PetscFClose(comm, fp);
  return 0;
}

static PetscErrorCode DMDAVTKWriteAll_VTR(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm comm;
  PetscViewer_VTK *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE *fp;
  PetscMPIInt rank, size, tag;
  DMDALocalInfo info;
  PetscInt dim, mx, my, mz, maxnnodes, maxbs, i, j, k, r;
  PetscInt64 boffset;
  PetscInt rloc[6], (*grloc)[6] = NULL;
  PetscScalar *array, *array2;

  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
  DMDAGetLocalInfo(da, &info);
  PetscFOpen(comm, vtk->filename, "wb", &fp);
  PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n");
  PetscFPrintf(comm, fp, "<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n", byte_order);
  PetscFPrintf(comm, fp, " <RectilinearGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1);

  if (rank == 0) PetscMalloc1(size * 6, &grloc);
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc, 6, MPIU_INT, &grloc[0][0], 6, MPIU_INT, 0, comm);

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs = 0;     /* Used for the temporary array size on rank 0 */
  boffset = 0;   /* Offset into binary file */
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscFPrintf(comm, fp, " <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1);
    PetscFPrintf(comm, fp, " <Coordinates>\n");
    PetscFPrintf(comm, fp, " <DataArray type=\"%s\" Name=\"Xcoord\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset);
    boffset += xm * sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm, fp, " <DataArray type=\"%s\" Name=\"Ycoord\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset);
    boffset += ym * sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm, fp, " <DataArray type=\"%s\" Name=\"Zcoord\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset);
    boffset += zm * sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm, fp, " </Coordinates>\n");
    PetscFPrintf(comm, fp, " <PointData Scalars=\"ScalarPointData\">\n");
    for (link = vtk->link; link; link = link->next) {
      Vec X = (Vec)link->vec;
      PetscInt bs, f;
      DM daCurr;
      PetscBool fieldsnamed;
      const char *vecname = "Unnamed";

      VecGetDM(X, &daCurr);
      DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);
      maxbs = PetscMax(maxbs, bs);
      if (((PetscObject)X)->name || link != vtk->link) PetscObjectGetName((PetscObject)X, &vecname);

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      DMDAGetFieldsNamed(daCurr, &fieldsnamed);
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char buf[256];
          const char *fieldname;
          DMDAGetFieldName(daCurr, f, &fieldname);
          if (!fieldname) {
            PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f);
            fieldname = buf;
          }
          PetscFPrintf(comm, fp, " <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, fieldname, boffset);
          boffset += nnodes * sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscFPrintf(comm, fp, " <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, bs, boffset);
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm, fp, " </PointData>\n");
    PetscFPrintf(comm, fp, " </Piece>\n");
  }
  PetscFPrintf(comm, fp, " </RectilinearGrid>\n");
  PetscFPrintf(comm, fp, " <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm, fp, "_");

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscMalloc2(PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array, PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array2);
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }
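    /* A rectilinear (.vtr) grid stores one 1-D coordinate array per axis, so the block below
       samples only an x-row (j = k = 0), a y-column (i = k = 0), and a z-pillar (i = j = 0)
       of the DMDA coordinate vector rather than all nnodes points. */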
    { /* Write the coordinates */
      Vec Coords;

      DMGetCoordinates(da, &Coords);
      if (Coords) {
        const PetscScalar *coords;
        VecGetArrayRead(Coords, &coords);
        if (rank == 0) {
          if (r) {
            PetscMPIInt nn;
            MPI_Recv(array, xm + ym + zm, MPIU_SCALAR, r, tag, comm, &status);
            MPI_Get_count(&status, MPIU_SCALAR, &nn);
          } else {
            /* x coordinates */
            for (j = 0, k = 0, i = 0; i < xm; i++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[i] = coords[Iloc * dim + 0];
            }
            /* y coordinates */
            for (i = 0, k = 0, j = 0; j < ym; j++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
            }
            /* z coordinates */
            for (i = 0, j = 0, k = 0; k < zm; k++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j = 0, k = 0, i = 0; i < xm; i++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[i] = coords[Iloc * dim + 0];
          }
          for (i = 0, k = 0, j = 0; j < ym; j++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
          }
          for (i = 0, j = 0, k = 0; k < zm; k++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
          }
          MPI_Send((void *)array2, xm + ym + zm, MPIU_SCALAR, 0, tag, comm);
        }
        VecRestoreArrayRead(Coords, &coords);
      } else { /* Fabricate some coordinates using grid index */
        for (i = 0; i < xm; i++) array[i] = xs + i;
        for (j = 0; j < ym; j++) array[j + xm] = ys + j;
        for (k = 0; k < zm; k++) array[k + xm + ym] = zs + k;
      }
      if (rank == 0) {
        PetscViewerVTKFWrite(viewer, fp, &(array[0]), xm, MPIU_SCALAR);
        PetscViewerVTKFWrite(viewer, fp, &(array[xm]), ym, MPIU_SCALAR);
        PetscViewerVTKFWrite(viewer, fp, &(array[xm + ym]), zm, MPIU_SCALAR);
      }
    }

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt bs, f;
      DM daCurr;
      PetscBool fieldsnamed;
      VecGetDM(X, &daCurr);
      DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL);

      VecGetArrayRead(X, &x);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status);
          MPI_Get_count(&status, MPIU_SCALAR, &nn);
        } else PetscArraycpy(array, x, nnodes * bs);
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        DMDAGetFieldsNamed(daCurr, &fieldsnamed);
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc] = array[Iloc * bs + f];
                }
              }
            }
            PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR);
          }
        } else PetscViewerVTKFWrite(viewer, fp, array, nnodes * bs, MPIU_SCALAR);
      } else if (r == rank) {
        MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm);
      }
      VecRestoreArrayRead(X, &x);
    }
  }
  PetscFree2(array, array2);
  PetscFree(grloc);

  PetscFPrintf(comm, fp, "\n </AppendedData>\n");
  PetscFPrintf(comm, fp, "</VTKFile>\n");
  PetscFClose(comm, fp);
  return 0;
}

/*@C
  DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

  Collective

  Input Parameters:
+ odm    - `DMDA` specifying the grid layout, passed as a `PetscObject`
- viewer - viewer of type `PETSCVIEWERVTK`

  Level: developer

  Notes:
  This function is a callback used by the VTK viewer to actually write the file.
  The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
  Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

  If any fields have been named (see e.g. `DMDASetFieldName()`), then individual scalar
  fields are written. Otherwise, a single multi-dof (vector) field is written.

.seealso: `DMDA`, `DM`, `PETSCVIEWERVTK`
@*/
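
/* Minimal usage sketch (illustrative, not part of this file): DMDAVTKWriteAll() is not called
   directly by users; the VTK viewer invokes it when the viewer is flushed or destroyed, e.g.

     PetscViewer viewer;
     PetscViewerVTKOpen(PETSC_COMM_WORLD, "solution.vts", FILE_MODE_WRITE, &viewer);
     VecView(X, viewer);          -- X created from a DMDA; this only queues X on the viewer
     PetscViewerDestroy(&viewer); -- flushing the viewer triggers this write-all callback

   where the ".vts" extension selects the structured-grid writer above and ".vtr" the
   rectilinear one. */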
PetscErrorCode DMDAVTKWriteAll(PetscObject odm, PetscViewer viewer)
{
  DM dm = (DM)odm;
  PetscBool isvtk;

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    DMDAVTKWriteAll_VTS(dm, viewer);
    break;
  case PETSC_VIEWER_VTK_VTR:
    DMDAVTKWriteAll_VTR(dm, viewer);
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No support for format '%s'", PetscViewerFormats[viewer->format]);
  }
  return 0;
}