Actual source code: ex18.c
static char help[] = "Tests for parallel mesh loading and parallel topological interpolation\n\n";

#include <petsc/private/dmpleximpl.h>
/* List of test meshes

Network
-------
Test 0 (2 ranks):

network=0:
---------
    cell 0   cell 1   cell 2          nCells-1       (edge)
  0 ------ 1 ------ 2 ------ 3 -- -- v -- -- nCells  (vertex)

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3 ... nCells
  cell(edge) distribution:
    rank 0: 0 1
    rank 1: 2 ... nCells-1

network=1:
---------
                 v2
                 ^
                 |
               cell 2
                 |
  v0 --cell 0--> v3--cell 1--> v1

  vertex distribution:
    rank 0: 0 1 3
    rank 1: 2
  cell(edge) distribution:
    rank 0: 0 1
    rank 1: 2
example:
  mpiexec -n 2 ./ex18 -distribute 1 -dim 1 -orig_dm_view -dist_dm_view -petscpartitioner_type parmetis -ncells 50
Triangle
--------
Test 0 (2 ranks):
Two triangles sharing a face

          2
        / | \
       /  |  \
      /   |   \
     0  0 | 1  3
      \   |   /
       \  |  /
        \ | /
          1

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3
  cell distribution:
    rank 0: 0
    rank 1: 1
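  example (one possible invocation, mirroring test 1_tri_dist1 in the TEST section below):
    mpiexec -n 2 ./ex18 -distribute 1 -interpolate after_distribute -dm_plex_check_all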
Test 1 (3 ranks):
Four triangles partitioned across 3 ranks

   0 _______ 3
   | \     / |
   |  \ 1 /  |
   |   \ /   |
   | 0  2  2 |
   |   / \   |
   |  / 3 \  |
   | /     \ |
   1 ------- 4

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3
    rank 2: 4
  cell distribution:
    rank 0: 0
    rank 1: 1
    rank 2: 2 3
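  example (one possible invocation, mirroring the suffix-2 tests in the TEST section below):
    mpiexec -n 3 ./ex18 -testnum 1 -interpolate create -dm_plex_check_all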
Test 2 (3 ranks):
Four triangles partitioned across 3 ranks

   1 _______ 3
   | \     / |
   |  \ 1 /  |
   |   \ /   |
   | 0  0  2 |
   |   / \   |
   |  / 3 \  |
   | /     \ |
   2 ------- 4

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3
    rank 2: 4
  cell distribution:
    rank 0: 0
    rank 1: 1
    rank 2: 2 3
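  example (a possible invocation; -testnum 2 selects this mesh):
    mpiexec -n 3 ./ex18 -testnum 2 -interpolate create -dm_plex_check_all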
Tetrahedron
-----------
Test 0:
Two tets sharing a face

 cell   3 _______ cell
 0    / | \     \ 1
     /  |  \     \
    /   |   \     \
   0----|----4-----2
    \   |   /     /
     \  |  /     /
      \ | /     /
        1-------
   y
   | x
   |/
   *----z

  vertex distribution:
    rank 0: 0 1
    rank 1: 2 3 4
  cell distribution:
    rank 0: 0
    rank 1: 1
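  example (one possible invocation, mirroring test 4_tet_dist1 in the TEST section below):
    mpiexec -n 2 ./ex18 -dim 3 -distribute 1 -interpolate after_distribute -dm_plex_check_all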
Quadrilateral
-------------
Test 0 (2 ranks):
Two quads sharing a face

   3-------2-------5
   |       |       |
   |   0   |   1   |
   |       |       |
   0-------1-------4

  vertex distribution:
    rank 0: 0 1 2
    rank 1: 3 4 5
  cell distribution:
    rank 0: 0
    rank 1: 1
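  example (one possible invocation, mirroring test 1_quad_dist1 in the TEST section below):
    mpiexec -n 2 ./ex18 -cell_simplex 0 -distribute 1 -interpolate after_distribute -dm_plex_check_all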
TODO Test 1:
A quad and a triangle sharing a face

   5-------4
   |       | \
   |   0   |  \
   |       | 1 \
   2-------3----6
Hexahedron
----------
Test 0 (2 ranks):
Two hexes sharing a face

cell   7-------------6-------------11  cell
0     /|            /|            /|   1
     / |   F1      / |   F7      / |
    /  |          /  |          /  |
   4-------------5-------------10  |
   |   |     F4  |   |     F10 |   |
   |   |         |   |         |   |
   |F5 |         |F3 |         |F9 |
   |   |  F2     |   |  F8     |   |
   |   3---------|---2---------|---9
   |  /          |  /          |  /
   | /   F0      | /   F6      | /
   |/            |/            |/
   0-------------1-------------8

  vertex distribution:
    rank 0: 0 1 2 3 4 5
    rank 1: 6 7 8 9 10 11
  cell distribution:
    rank 0: 0
    rank 1: 1
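  example (one possible invocation, mirroring test 4_hex_dist1 in the TEST section below):
    mpiexec -n 2 ./ex18 -dim 3 -cell_simplex 0 -distribute 1 -interpolate after_distribute -dm_plex_check_all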
*/

typedef enum {
  NONE,
  CREATE,
  AFTER_CREATE,
  AFTER_DISTRIBUTE
} InterpType;

typedef struct {
  PetscInt    debug;        /* The debugging level */
  PetscInt    testNum;      /* Indicates the mesh to create */
  PetscInt    dim;          /* The topological mesh dimension */
  PetscBool   cellSimplex;  /* Use simplices or hexes */
  PetscBool   distribute;   /* Distribute the mesh */
  InterpType  interpolate;  /* Interpolate the mesh before or after DMPlexDistribute() */
  PetscBool   useGenerator; /* Construct mesh with a mesh generator */
  PetscBool   testOrientIF; /* Test for different original interface orientations */
  PetscBool   testHeavy;    /* Run the heavy PointSF test */
  PetscBool   customView;   /* Show results of DMPlexIsInterpolated() etc. */
  PetscInt    ornt[2];      /* Orientation of interface on rank 0 and rank 1 */
  PetscInt    faces[3];     /* Number of faces per dimension for generator */
  PetscScalar coords[128];
  PetscReal   coordsTol;
  PetscInt    ncoords;
  PetscInt    pointsToExpand[128];
  PetscInt    nPointsToExpand;
  PetscBool   testExpandPointsEmpty;
  char        filename[PETSC_MAX_PATH_LEN]; /* Import mesh from file */
} AppCtx;
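/* Partition-independent description of the whole-domain boundary: coordinates of the expanded
   boundary vertices, plus one PetscSection per depth describing how those vertices group into
   cones (built by DMPlexGetExpandedBoundary_Private() below) */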
struct _n_PortableBoundary {
  Vec           coordinates;
  PetscInt      depth;
  PetscSection *sections;
};
typedef struct _n_PortableBoundary *PortableBoundary;

#if defined(PETSC_USE_LOG)
static PetscLogStage stage[3];
#endif

static PetscErrorCode DMPlexCheckPointSFHeavy(DM, PortableBoundary);
static PetscErrorCode DMPlexSetOrientInterface_Private(DM, PetscBool);
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM, PortableBoundary *);
static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM, IS, PetscSection, IS *);

static PetscErrorCode PortableBoundaryDestroy(PortableBoundary *bnd)
{
  PetscInt d;

  if (!*bnd) return 0;
  VecDestroy(&(*bnd)->coordinates);
  for (d = 0; d < (*bnd)->depth; d++) PetscSectionDestroy(&(*bnd)->sections[d]);
  PetscFree((*bnd)->sections);
  PetscFree(*bnd);
  return 0;
}
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char *interpTypes[4] = {"none", "create", "after_create", "after_distribute"};
  PetscInt    interp         = NONE, dim;
  PetscBool   flg1, flg2;

  options->debug                 = 0;
  options->testNum               = 0;
  options->dim                   = 2;
  options->cellSimplex           = PETSC_TRUE;
  options->distribute            = PETSC_FALSE;
  options->interpolate           = NONE;
  options->useGenerator          = PETSC_FALSE;
  options->testOrientIF          = PETSC_FALSE;
  options->testHeavy             = PETSC_TRUE;
  options->customView            = PETSC_FALSE;
  options->testExpandPointsEmpty = PETSC_FALSE;
  options->ornt[0]               = 0;
  options->ornt[1]               = 0;
  options->faces[0]              = 2;
  options->faces[1]              = 2;
  options->faces[2]              = 2;
  options->filename[0]           = '\0';
  options->coordsTol             = PETSC_DEFAULT;

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscOptionsBoundedInt("-debug", "The debugging level", "ex18.c", options->debug, &options->debug, NULL, 0);
  PetscOptionsBoundedInt("-testnum", "The mesh to create", "ex18.c", options->testNum, &options->testNum, NULL, 0);
  PetscOptionsBool("-cell_simplex", "Generate simplices if true, otherwise hexes", "ex18.c", options->cellSimplex, &options->cellSimplex, NULL);
  PetscOptionsBool("-distribute", "Distribute the mesh", "ex18.c", options->distribute, &options->distribute, NULL);
  PetscOptionsEList("-interpolate", "Type of mesh interpolation (none, create, after_create, after_distribute)", "ex18.c", interpTypes, 4, interpTypes[options->interpolate], &interp, NULL);
  options->interpolate = (InterpType)interp;

  PetscOptionsBool("-use_generator", "Use a mesh generator to build the mesh", "ex18.c", options->useGenerator, &options->useGenerator, NULL);
  options->ncoords = 128;
  PetscOptionsScalarArray("-view_vertices_from_coords", "Print DAG points corresponding to vertices with given coordinates", "ex18.c", options->coords, &options->ncoords, NULL);
  PetscOptionsReal("-view_vertices_from_coords_tol", "Tolerance for -view_vertices_from_coords", "ex18.c", options->coordsTol, &options->coordsTol, NULL);
  options->nPointsToExpand = 128;
  PetscOptionsIntArray("-test_expand_points", "Expand given array of DAG points using DMPlexGetConeRecursive() and print results", "ex18.c", options->pointsToExpand, &options->nPointsToExpand, NULL);
  if (options->nPointsToExpand) PetscOptionsBool("-test_expand_points_empty", "For -test_expand_points, rank 0 will have empty input array", "ex18.c", options->testExpandPointsEmpty, &options->testExpandPointsEmpty, NULL);
  PetscOptionsBool("-test_heavy", "Run the heavy PointSF test", "ex18.c", options->testHeavy, &options->testHeavy, NULL);
  PetscOptionsBool("-custom_view", "Custom DMPlex view", "ex18.c", options->customView, &options->customView, NULL);
  PetscOptionsRangeInt("-dim", "The topological mesh dimension", "ex18.c", options->dim, &options->dim, &flg1, 1, 3);
  dim = 3;
  PetscOptionsIntArray("-faces", "Number of faces per dimension", "ex18.c", options->faces, &dim, &flg2);
  if (flg2) {
    options->dim = dim;
  }
  PetscOptionsString("-filename", "The mesh file", "ex18.c", options->filename, options->filename, sizeof(options->filename), NULL);
  PetscOptionsBoundedInt("-rotate_interface_0", "Rotation (relative orientation) of interface on rank 0; implies -interpolate create -distribute 0", "ex18.c", options->ornt[0], &options->ornt[0], &options->testOrientIF, 0);
  PetscOptionsBoundedInt("-rotate_interface_1", "Rotation (relative orientation) of interface on rank 1; implies -interpolate create -distribute 0", "ex18.c", options->ornt[1], &options->ornt[1], &flg2, 0);

  if (options->testOrientIF) {
    PetscInt i;
    for (i = 0; i < 2; i++) {
      if (options->ornt[i] >= 10) options->ornt[i] = -(options->ornt[i] - 10); /* 11 12 13 become -1 -2 -3 */
    }
    options->filename[0]  = 0;
    options->useGenerator = PETSC_FALSE;
    options->dim          = 3;
    options->cellSimplex  = PETSC_TRUE;
    options->interpolate  = CREATE;
    options->distribute   = PETSC_FALSE;
  }
  PetscOptionsEnd();
  return 0;
}
static PetscErrorCode CreateMesh_1D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum;
  PetscMPIInt rank, size;
  PetscInt    numCorners = 2, i;
  PetscInt    numCells, numVertices, network;
  PetscInt   *cells;
  PetscReal  *coords;

  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);

  numCells = 3;
  PetscOptionsGetInt(NULL, NULL, "-ncells", &numCells, NULL);

  if (size == 1) {
    numVertices = numCells + 1;
    PetscMalloc2(2 * numCells, &cells, 2 * numVertices, &coords);
    for (i = 0; i < numCells; i++) {
      cells[2 * i]      = i;
      cells[2 * i + 1]  = i + 1;
      coords[2 * i]     = i;
      coords[2 * i + 1] = i + 1;
    }

    DMPlexCreateFromCellListPetsc(comm, user->dim, numCells, numVertices, numCorners, PETSC_FALSE, cells, user->dim, coords, dm);
    PetscFree2(cells, coords);
    return 0;
  }

  network = 0;
  PetscOptionsGetInt(NULL, NULL, "-network_case", &network, NULL);
  if (network == 0) {
    switch (rank) {
    case 0: {
      numCells    = 2;
      numVertices = numCells;
      PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords);
      cells[0]  = 0;
      cells[1]  = 1;
      cells[2]  = 1;
      cells[3]  = 2;
      coords[0] = 0.;
      coords[1] = 1.;
      coords[2] = 1.;
      coords[3] = 2.;
    } break;
    case 1: {
      numCells -= 2;
      numVertices = numCells + 1;
      PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords);
      for (i = 0; i < numCells; i++) {
        cells[2 * i]      = 2 + i;
        cells[2 * i + 1]  = 2 + i + 1;
        coords[2 * i]     = 2 + i;
        coords[2 * i + 1] = 2 + i + 1;
      }
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
  } else { /* network_case = 1 */
    /* ----------------------- */
    switch (rank) {
    case 0: {
      numCells    = 2;
      numVertices = 3;
      PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords);
      cells[0] = 0;
      cells[1] = 3;
      cells[2] = 3;
      cells[3] = 1;
    } break;
    case 1: {
      numCells    = 1;
      numVertices = 1;
      PetscMalloc2(2 * numCells, &cells, 2 * numCells, &coords);
      cells[0] = 3;
      cells[1] = 2;
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
  }
  DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, PETSC_FALSE, cells, user->dim, coords, NULL, NULL, dm);
  PetscFree2(cells, coords);
  return 0;
}

static PetscErrorCode CreateSimplex_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);
  switch (testNum) {
  case 0:

    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 1, 2};
      PetscReal      coords[4]       = {-0.5, 0.5, 0.0, 0.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 3; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 3, 2};
      PetscReal      coords[4]       = {0.0, 1.0, 0.5, 0.5};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 3; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 1:

    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 1, 2};
      PetscReal      coords[4]       = {0.0, 1.0, 0.0, 0.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 3; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {0, 2, 3};
      PetscReal      coords[4]       = {0.5, 0.5, 1.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 3; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    case 2: {
      const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
      const PetscInt cells[6]         = {2, 4, 3, 2, 1, 4};
      PetscReal      coords[2]        = {1.0, 0.0};
      PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 3; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  case 2:

    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 2, 0};
      PetscReal      coords[4]       = {0.5, 0.5, 0.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 3; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 3;
      const PetscInt cells[3]        = {1, 0, 3};
      PetscReal      coords[4]       = {0.0, 0.0, 1.0, 1.0};
      PetscInt       markerPoints[6] = {1, 1, 2, 1, 3, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 3; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    case 2: {
      const PetscInt numCells = 2, numVertices = 1, numCorners = 3;
      const PetscInt cells[6]         = {0, 4, 3, 0, 2, 4};
      PetscReal      coords[2]        = {1.0, 0.0};
      PetscInt       markerPoints[10] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 3; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  return 0;
}
static PetscErrorCode CreateSimplex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);
  switch (testNum) {
  case 0:

    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 2, numCorners = 4;
      const PetscInt cells[4]        = {0, 2, 1, 3};
      PetscReal      coords[6]       = {0.0, 0.0, -0.5, 0.0, -0.5, 0.0};
      PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 4; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]        = {1, 2, 4, 3};
      PetscReal      coords[9]       = {1.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5};
      PetscInt       markerPoints[8] = {1, 1, 2, 1, 3, 1, 4, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 4; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  if (user->testOrientIF) {
    PetscInt ifp[] = {8, 6};

    PetscObjectSetName((PetscObject)*dm, "Mesh before orientation");
    DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view");
    /* rotate interface face ifp[rank] by given orientation ornt[rank] */
    DMPlexOrientPoint(*dm, ifp[rank], user->ornt[rank]);
    DMViewFromOptions(*dm, NULL, "-before_orientation_dm_view");
    DMPlexCheckFaces(*dm, 0);
    DMPlexOrientInterface_Internal(*dm);
    PetscPrintf(comm, "Orientation test PASSED\n");
  }
  return 0;
}

static PetscErrorCode CreateQuad_2D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);
  switch (testNum) {
  case 0:

    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]            = {0, 1, 2, 3};
      PetscReal      coords[6]           = {-0.5, 0.0, 0.0, 0.0, 0.0, 1.0};
      PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 4; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 3, numCorners = 4;
      const PetscInt cells[4]            = {1, 4, 5, 2};
      PetscReal      coords[6]           = {-0.5, 1.0, 0.5, 0.0, 0.5, 1.0};
      PetscInt       markerPoints[4 * 2] = {1, 1, 2, 1, 3, 1, 4, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 4; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  return 0;
}

static PetscErrorCode CreateHex_3D(MPI_Comm comm, PetscBool interpolate, AppCtx *user, DM *dm)
{
  PetscInt    testNum = user->testNum, p;
  PetscMPIInt rank, size;

  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);
  switch (testNum) {
  case 0:

    switch (rank) {
    case 0: {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]            = {0, 3, 2, 1, 4, 5, 6, 7};
      PetscReal      coords[6 * 3]       = {-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, -0.5, 1.0, 0.0, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0};
      PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 4; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    case 1: {
      const PetscInt numCells = 1, numVertices = 6, numCorners = 8;
      const PetscInt cells[8]            = {1, 2, 9, 8, 5, 10, 11, 6};
      PetscReal      coords[6 * 3]       = {0.0, 1.0, 1.0, -0.5, 1.0, 1.0, 0.5, 0.0, 0.0, 0.5, 1.0, 0.0, 0.5, 0.0, 1.0, 0.5, 1.0, 1.0};
      PetscInt       markerPoints[8 * 2] = {2, 1, 3, 1, 4, 1, 5, 1, 6, 1, 7, 1, 8, 1, 9, 1};

      DMPlexCreateFromCellListParallelPetsc(comm, user->dim, numCells, numVertices, PETSC_DECIDE, numCorners, interpolate, cells, user->dim, coords, NULL, NULL, dm);
      for (p = 0; p < 4; ++p) DMSetLabelValue(*dm, "marker", markerPoints[p * 2], markerPoints[p * 2 + 1]);
    } break;
    default:
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh for rank %d", rank);
    }
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "No test mesh %" PetscInt_FMT, testNum);
  }
  return 0;
}
static PetscErrorCode CustomView(DM dm, PetscViewer v)
{
  DMPlexInterpolatedFlag interpolated;
  PetscBool              distributed;

  DMPlexIsDistributed(dm, &distributed);
  DMPlexIsInterpolatedCollective(dm, &interpolated);
  PetscViewerASCIIPrintf(v, "DMPlexIsDistributed: %s\n", PetscBools[distributed]);
  PetscViewerASCIIPrintf(v, "DMPlexIsInterpolatedCollective: %s\n", DMPlexInterpolatedFlags[interpolated]);
  return 0;
}

static PetscErrorCode CreateMeshFromFile(MPI_Comm comm, AppCtx *user, DM *dm, DM *serialDM)
{
  const char *filename     = user->filename;
  PetscBool   testHeavy    = user->testHeavy;
  PetscBool   interpCreate = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool   distributed  = PETSC_FALSE;

  *serialDM = NULL;
  if (testHeavy && interpCreate) DMPlexSetOrientInterface_Private(NULL, PETSC_FALSE);
  PetscLogStagePush(stage[0]);
  DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, dm); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
  PetscLogStagePop();
  if (testHeavy && interpCreate) DMPlexSetOrientInterface_Private(NULL, PETSC_TRUE);
  DMPlexIsDistributed(*dm, &distributed);
  PetscPrintf(comm, "DMPlexCreateFromFile produced %s mesh.\n", distributed ? "distributed" : "serial");
  if (testHeavy && distributed) {
    PetscOptionsSetValue(NULL, "-dm_plex_hdf5_force_sequential", NULL);
    DMPlexCreateFromFile(comm, filename, "ex18_plex", interpCreate, serialDM);
    DMPlexIsDistributed(*serialDM, &distributed);
  }
  DMGetDimension(*dm, &user->dim);
  return 0;
}

static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscPartitioner part;
  PortableBoundary boundary       = NULL;
  DM               serialDM       = NULL;
  PetscBool        cellSimplex    = user->cellSimplex;
  PetscBool        useGenerator   = user->useGenerator;
  PetscBool        interpCreate   = user->interpolate == CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        interpSerial   = user->interpolate == AFTER_CREATE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        interpParallel = user->interpolate == AFTER_DISTRIBUTE ? PETSC_TRUE : PETSC_FALSE;
  PetscBool        testHeavy      = user->testHeavy;
  PetscMPIInt      rank;

  MPI_Comm_rank(comm, &rank);
  if (user->filename[0]) {
    CreateMeshFromFile(comm, user, dm, &serialDM);
  } else if (useGenerator) {
    PetscLogStagePush(stage[0]);
    DMPlexCreateBoxMesh(comm, user->dim, cellSimplex, user->faces, NULL, NULL, NULL, interpCreate, dm);
    PetscLogStagePop();
  } else {
    PetscLogStagePush(stage[0]);
    switch (user->dim) {
    case 1:
      CreateMesh_1D(comm, interpCreate, user, dm);
      break;
    case 2:
      if (cellSimplex) {
        CreateSimplex_2D(comm, interpCreate, user, dm);
      } else {
        CreateQuad_2D(comm, interpCreate, user, dm);
      }
      break;
    case 3:
      if (cellSimplex) {
        CreateSimplex_3D(comm, interpCreate, user, dm);
      } else {
        CreateHex_3D(comm, interpCreate, user, dm);
      }
      break;
    default:
      SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot make meshes for dimension %" PetscInt_FMT, user->dim);
    }
    PetscLogStagePop();
  }

  PetscObjectSetName((PetscObject)*dm, "Original Mesh");
  DMViewFromOptions(*dm, NULL, "-orig_dm_view");

  if (interpSerial) {
    DM idm;

    if (testHeavy) DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE);
    PetscLogStagePush(stage[2]);
    DMPlexInterpolate(*dm, &idm); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
    PetscLogStagePop();
    if (testHeavy) DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE);
    DMDestroy(dm);
    *dm = idm;
    PetscObjectSetName((PetscObject)*dm, "Interpolated Mesh");
    DMViewFromOptions(*dm, NULL, "-intp_dm_view");
  }

  /* Set partitioner options */
  DMPlexGetPartitioner(*dm, &part);
  if (part) {
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    PetscPartitionerSetFromOptions(part);
  }

  if (user->customView) CustomView(*dm, PETSC_VIEWER_STDOUT_(comm));
  if (testHeavy) {
    PetscBool distributed;

    DMPlexIsDistributed(*dm, &distributed);
    if (!serialDM && !distributed) {
      serialDM = *dm;
      PetscObjectReference((PetscObject)*dm);
    }
    if (serialDM) DMPlexGetExpandedBoundary_Private(serialDM, &boundary);
    if (boundary) {
      /* check DM which has been created in parallel and already interpolated */
      DMPlexCheckPointSFHeavy(*dm, boundary);
    }
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    DMPlexOrientInterface_Internal(*dm);
  }
  if (user->distribute) {
    DM pdm = NULL;

    /* Redistribute mesh over processes using that partitioner */
    PetscLogStagePush(stage[1]);
    DMPlexDistribute(*dm, 0, NULL, &pdm);
    PetscLogStagePop();
    if (pdm) {
      DMDestroy(dm);
      *dm = pdm;
      PetscObjectSetName((PetscObject)*dm, "Redistributed Mesh");
      DMViewFromOptions(*dm, NULL, "-dist_dm_view");
    }

    if (interpParallel) {
      DM idm;

      if (testHeavy) DMPlexSetOrientInterface_Private(*dm, PETSC_FALSE);
      PetscLogStagePush(stage[2]);
      DMPlexInterpolate(*dm, &idm); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexCheckPointSFHeavy() */
      PetscLogStagePop();
      if (testHeavy) DMPlexSetOrientInterface_Private(*dm, PETSC_TRUE);
      DMDestroy(dm);
      *dm = idm;
      PetscObjectSetName((PetscObject)*dm, "Interpolated Redistributed Mesh");
      DMViewFromOptions(*dm, NULL, "-intp_dm_view");
    }
  }
  if (testHeavy) {
    if (boundary) DMPlexCheckPointSFHeavy(*dm, boundary);
    /* Orient interface because it could be deliberately skipped above. It is idempotent. */
    DMPlexOrientInterface_Internal(*dm);
  }

  PetscObjectSetName((PetscObject)*dm, "Parallel Mesh");
  DMPlexDistributeSetDefault(*dm, PETSC_FALSE);
  DMSetFromOptions(*dm);
  DMViewFromOptions(*dm, NULL, "-dm_view");

  if (user->customView) CustomView(*dm, PETSC_VIEWER_STDOUT_(comm));
  DMDestroy(&serialDM);
  PortableBoundaryDestroy(&boundary);
  return 0;
}
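/* helper for printing coordinates: casts a PetscScalar to a plain double for PetscSNPrintf(),
   dropping any imaginary part in complex builds */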
#define ps2d(number) ((double)PetscRealPart(number))
static inline PetscErrorCode coord2str(char buf[], size_t len, PetscInt dim, const PetscScalar coords[], PetscReal tol)
{
  /* each case must break; falling through would overwrite buf and read past coords[] */
  if (tol >= 1e-3) {
    switch (dim) {
    case 1:
      PetscSNPrintf(buf, len, "(%12.3f)", ps2d(coords[0]));
      break;
    case 2:
      PetscSNPrintf(buf, len, "(%12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]));
      break;
    case 3:
      PetscSNPrintf(buf, len, "(%12.3f, %12.3f, %12.3f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2]));
      break;
    }
  } else {
    switch (dim) {
    case 1:
      PetscSNPrintf(buf, len, "(%12.6f)", ps2d(coords[0]));
      break;
    case 2:
      PetscSNPrintf(buf, len, "(%12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]));
      break;
    case 3:
      PetscSNPrintf(buf, len, "(%12.6f, %12.6f, %12.6f)", ps2d(coords[0]), ps2d(coords[1]), ps2d(coords[2]));
      break;
    }
  }
  return 0;
}
static PetscErrorCode ViewVerticesFromCoords(DM dm, Vec coordsVec, PetscReal tol, PetscViewer viewer)
{
  PetscInt           dim, i, npoints;
  IS                 pointsIS;
  const PetscInt    *points;
  const PetscScalar *coords;
  char               coordstr[128];
  MPI_Comm           comm;
  PetscMPIInt        rank;

  PetscObjectGetComm((PetscObject)dm, &comm);
  MPI_Comm_rank(comm, &rank);
  DMGetDimension(dm, &dim);
  PetscViewerASCIIPushSynchronized(viewer);
  DMPlexFindVertices(dm, coordsVec, tol, &pointsIS);
  ISGetIndices(pointsIS, &points);
  ISGetLocalSize(pointsIS, &npoints);
  VecGetArrayRead(coordsVec, &coords);
  for (i = 0; i < npoints; i++) {
    coord2str(coordstr, sizeof(coordstr), dim, &coords[i * dim], tol);
    if (rank == 0 && i) PetscViewerASCIISynchronizedPrintf(viewer, "-----\n");
    PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %s --> points[%" PetscInt_FMT "] = %" PetscInt_FMT "\n", rank, coordstr, i, points[i]);
    PetscViewerFlush(viewer);
  }
  PetscViewerASCIIPopSynchronized(viewer);
  VecRestoreArrayRead(coordsVec, &coords);
  ISRestoreIndices(pointsIS, &points);
  ISDestroy(&pointsIS);
  return 0;
}

static PetscErrorCode TestExpandPoints(DM dm, AppCtx *user)
{
  IS            is;
  PetscSection *sects;
  IS           *iss;
  PetscInt      d, depth;
  PetscMPIInt   rank;
  PetscViewer   viewer = PETSC_VIEWER_STDOUT_WORLD, sviewer;

  MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank);
  if (user->testExpandPointsEmpty && rank == 0) {
    ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &is);
  } else {
    ISCreateGeneral(PETSC_COMM_SELF, user->nPointsToExpand, user->pointsToExpand, PETSC_USE_POINTER, &is);
  }
  DMPlexGetConeRecursive(dm, is, &depth, &iss, &sects);
  PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer);
  PetscViewerASCIIPrintf(sviewer, "[%d] ==========================\n", rank);
  for (d = depth - 1; d >= 0; d--) {
    IS        checkIS;
    PetscBool flg;

    PetscViewerASCIIPrintf(sviewer, "depth %" PetscInt_FMT " ---------------\n", d);
    PetscSectionView(sects[d], sviewer);
    ISView(iss[d], sviewer);
    /* check reverse operation */
    if (d < depth - 1) {
      DMPlexExpandedConesToFaces_Private(dm, iss[d], sects[d], &checkIS);
      ISEqualUnsorted(checkIS, iss[d + 1], &flg);
      ISDestroy(&checkIS);
    }
  }
  PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer);
  PetscViewerFlush(viewer);
  DMPlexRestoreConeRecursive(dm, is, &depth, &iss, &sects);
  ISDestroy(&is);
  return 0;
}

static PetscErrorCode DMPlexExpandedConesToFaces_Private(DM dm, IS is, PetscSection section, IS *newis)
{
  PetscInt        n, n1, ncone, numCoveredPoints, o, p, q, start, end;
  const PetscInt *coveredPoints;
  const PetscInt *arr, *cone;
  PetscInt       *newarr;

  ISGetLocalSize(is, &n);
  PetscSectionGetStorageSize(section, &n1);
  PetscSectionGetChart(section, &start, &end);

  ISGetIndices(is, &arr);
  PetscMalloc1(end - start, &newarr);
  for (q = start; q < end; q++) {
    PetscSectionGetDof(section, q, &ncone);
    PetscSectionGetOffset(section, q, &o);
    cone = &arr[o];
    if (ncone == 1) {
      numCoveredPoints = 1;
      p                = cone[0];
    } else {
      PetscInt i;
      p = PETSC_MAX_INT;
      for (i = 0; i < ncone; i++)
        if (cone[i] < 0) {
          p = -1;
          break;
        }
      if (p >= 0) {
        DMPlexGetJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints);
        if (numCoveredPoints) p = coveredPoints[0];
        else p = -2;
        DMPlexRestoreJoin(dm, ncone, cone, &numCoveredPoints, &coveredPoints);
      }
    }
    newarr[q - start] = p;
  }
  ISRestoreIndices(is, &arr);
  ISCreateGeneral(PETSC_COMM_SELF, end - start, newarr, PETSC_OWN_POINTER, newis);
  return 0;
}

static PetscErrorCode DMPlexExpandedVerticesToFaces_Private(DM dm, IS boundary_expanded_is, PetscInt depth, PetscSection sections[], IS *boundary_is)
{
  PetscInt d;
  IS       is, newis;

  is = boundary_expanded_is;
  PetscObjectReference((PetscObject)is);
  for (d = 0; d < depth - 1; ++d) {
    DMPlexExpandedConesToFaces_Private(dm, is, sections[d], &newis);
    ISDestroy(&is);
    is = newis;
  }
  *boundary_is = is;
  return 0;
}
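/* on error, clear the reentrancy guard (incall) instead of returning, so the viewer helper
   below can be entered again on a subsequent call */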
#define CHKERRQI(incall, ierr) \
  if (ierr) incall = PETSC_FALSE;

static PetscErrorCode DMLabelViewFromOptionsOnComm_Private(DMLabel label, const char optionname[], MPI_Comm comm)
{
  PetscViewer       viewer;
  PetscBool         flg;
  static PetscBool  incall = PETSC_FALSE;
  PetscViewerFormat format;

  if (incall) return 0;
  incall = PETSC_TRUE;
  CHKERRQI(incall, PetscOptionsGetViewer(comm, ((PetscObject)label)->options, ((PetscObject)label)->prefix, optionname, &viewer, &format, &flg));
  if (flg) {
    CHKERRQI(incall, PetscViewerPushFormat(viewer, format));
    CHKERRQI(incall, DMLabelView(label, viewer));
    CHKERRQI(incall, PetscViewerPopFormat(viewer));
    CHKERRQI(incall, PetscViewerDestroy(&viewer));
  }
  incall = PETSC_FALSE;
  return 0;
}

/* TODO: this is hotfixing DMLabelGetStratumIS() - it should be fixed systematically instead */
static inline PetscErrorCode DMLabelGetStratumISOnComm_Private(DMLabel label, PetscInt value, MPI_Comm comm, IS *is)
{
  IS tmpis;

  DMLabelGetStratumIS(label, value, &tmpis);
  if (!tmpis) ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_USE_POINTER, &tmpis);
  ISOnComm(tmpis, comm, PETSC_COPY_VALUES, is);
  ISDestroy(&tmpis);
  return 0;
}

/* currently only for simple PetscSection without fields or constraints */
static PetscErrorCode PetscSectionReplicate_Private(MPI_Comm comm, PetscMPIInt rootrank, PetscSection sec0, PetscSection *secout)
{
  PetscSection sec;
  PetscInt     chart[2], p;
  PetscInt    *dofarr;
  PetscMPIInt  rank;

  MPI_Comm_rank(comm, &rank);
  if (rank == rootrank) PetscSectionGetChart(sec0, &chart[0], &chart[1]);
  MPI_Bcast(chart, 2, MPIU_INT, rootrank, comm);
  PetscMalloc1(chart[1] - chart[0], &dofarr);
  if (rank == rootrank) {
    for (p = chart[0]; p < chart[1]; p++) PetscSectionGetDof(sec0, p, &dofarr[p - chart[0]]);
  }
  MPI_Bcast(dofarr, chart[1] - chart[0], MPIU_INT, rootrank, comm);
  PetscSectionCreate(comm, &sec);
  PetscSectionSetChart(sec, chart[0], chart[1]);
  for (p = chart[0]; p < chart[1]; p++) PetscSectionSetDof(sec, p, dofarr[p - chart[0]]);
  PetscSectionSetUp(sec);
  PetscFree(dofarr);
  *secout = sec;
  return 0;
}

static PetscErrorCode DMPlexExpandedVerticesCoordinatesToFaces_Private(DM ipdm, PortableBoundary bnd, IS *face_is)
{
  IS faces_expanded_is;

  DMPlexFindVertices(ipdm, bnd->coordinates, 0.0, &faces_expanded_is);
  DMPlexExpandedVerticesToFaces_Private(ipdm, faces_expanded_is, bnd->depth, bnd->sections, face_is);
  ISDestroy(&faces_expanded_is);
  return 0;
}
/* hack disabling DMPlexOrientInterface() call in DMPlexInterpolate() via -dm_plex_interpolate_orient_interfaces option */
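/* A minimal sketch of the pattern used throughout this file (see e.g. DMPlexCheckPointSFHeavy()):

     DMPlexSetOrientInterface_Private(dm, PETSC_FALSE);  // skip orientation inside DMPlexInterpolate()
     DMPlexInterpolate(dm, &idm);
     DMPlexSetOrientInterface_Private(dm, PETSC_TRUE);   // restore the previous setting
*/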
static PetscErrorCode DMPlexSetOrientInterface_Private(DM dm, PetscBool enable)
{
  PetscOptions     options = NULL;
  const char      *prefix  = NULL;
  const char       opt[]   = "-dm_plex_interpolate_orient_interfaces";
  char             prefix_opt[512];
  PetscBool        flg, set;
  static PetscBool wasSetTrue = PETSC_FALSE;

  if (dm) {
    PetscObjectGetOptionsPrefix((PetscObject)dm, &prefix);
    options = ((PetscObject)dm)->options;
  }
  PetscStrcpy(prefix_opt, "-");
  PetscStrlcat(prefix_opt, prefix, sizeof(prefix_opt));
  PetscStrlcat(prefix_opt, &opt[1], sizeof(prefix_opt));
  PetscOptionsGetBool(options, prefix, opt, &flg, &set);
  if (!enable) {
    if (set && flg) wasSetTrue = PETSC_TRUE;
    PetscOptionsSetValue(options, prefix_opt, "0");
  } else if (set && !flg) {
    if (wasSetTrue) {
      PetscOptionsSetValue(options, prefix_opt, "1");
    } else {
      /* default is PETSC_TRUE */
      PetscOptionsClearValue(options, prefix_opt);
    }
    wasSetTrue = PETSC_FALSE;
  }
  if (PetscDefined(USE_DEBUG)) {
    PetscOptionsGetBool(options, prefix, opt, &flg, &set);
  }
  return 0;
}

/* get coordinate description of the whole-domain boundary */
static PetscErrorCode DMPlexGetExpandedBoundary_Private(DM dm, PortableBoundary *boundary)
{
  PortableBoundary       bnd0, bnd;
  MPI_Comm               comm;
  DM                     idm;
  DMLabel                label;
  PetscInt               d;
  const char             boundaryName[] = "DMPlexDistributeInterpolateMarkInterface_boundary";
  IS                     boundary_is;
  IS                    *boundary_expanded_iss;
  PetscMPIInt            rootrank = 0;
  PetscMPIInt            rank, size;
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;
  PetscBool              flg;

  PetscNew(&bnd);
  PetscObjectGetComm((PetscObject)dm, &comm);
  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);
  DMPlexIsDistributed(dm, &flg);

  /* interpolate serial DM if not yet interpolated */
  DMPlexIsInterpolatedCollective(dm, &intp);
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    idm = dm;
    PetscObjectReference((PetscObject)dm);
  } else {
    DMPlexInterpolate(dm, &idm);
    DMViewFromOptions(idm, NULL, "-idm_view");
  }

  /* mark whole-domain boundary of the serial DM */
  DMLabelCreate(PETSC_COMM_SELF, boundaryName, &label);
  DMAddLabel(idm, label);
  DMPlexMarkBoundaryFaces(idm, value, label);
  DMLabelViewFromOptionsOnComm_Private(label, "-idm_boundary_view", comm);
  DMLabelGetStratumIS(label, value, &boundary_is);

  /* translate to coordinates */
  PetscNew(&bnd0);
  DMGetCoordinatesLocalSetUp(idm);
  if (rank == rootrank) {
    DMPlexGetConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections);
    DMGetCoordinatesLocalTuple(dm, boundary_expanded_iss[0], NULL, &bnd0->coordinates);
    /* self-check */
    {
      IS is0;
      DMPlexExpandedVerticesCoordinatesToFaces_Private(idm, bnd0, &is0);
      ISEqual(is0, boundary_is, &flg);
      ISDestroy(&is0);
    }
  } else {
    VecCreateSeq(PETSC_COMM_SELF, 0, &bnd0->coordinates);
  }

  {
    Vec        tmp;
    VecScatter sc;
    IS         xis;
    PetscInt   n;

    /* just convert seq vectors to mpi vector */
    VecGetLocalSize(bnd0->coordinates, &n);
    MPI_Bcast(&n, 1, MPIU_INT, rootrank, comm);
    if (rank == rootrank) {
      VecCreateMPI(comm, n, n, &tmp);
    } else {
      VecCreateMPI(comm, 0, n, &tmp);
    }
    VecCopy(bnd0->coordinates, tmp);
    VecDestroy(&bnd0->coordinates);
    bnd0->coordinates = tmp;

    /* replicate coordinates from root rank to all ranks */
    VecCreateMPI(comm, n, n * size, &bnd->coordinates);
    ISCreateStride(comm, n, 0, 1, &xis);
    VecScatterCreate(bnd0->coordinates, xis, bnd->coordinates, NULL, &sc);
    VecScatterBegin(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd(sc, bnd0->coordinates, bnd->coordinates, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterDestroy(&sc);
    ISDestroy(&xis);
  }
  bnd->depth = bnd0->depth;
  MPI_Bcast(&bnd->depth, 1, MPIU_INT, rootrank, comm);
  PetscMalloc1(bnd->depth, &bnd->sections);
  for (d = 0; d < bnd->depth; d++) PetscSectionReplicate_Private(comm, rootrank, (rank == rootrank) ? bnd0->sections[d] : NULL, &bnd->sections[d]);

  if (rank == rootrank) DMPlexRestoreConeRecursive(idm, boundary_is, &bnd0->depth, &boundary_expanded_iss, &bnd0->sections);
  PortableBoundaryDestroy(&bnd0);
  DMRemoveLabelBySelf(idm, &label, PETSC_TRUE);
  DMLabelDestroy(&label);
  ISDestroy(&boundary_is);
  DMDestroy(&idm);
  *boundary = bnd;
  return 0;
}

/* get faces of inter-partition interface */
static PetscErrorCode DMPlexGetInterfaceFaces_Private(DM ipdm, IS boundary_faces_is, IS *interface_faces_is)
{
  MPI_Comm               comm;
  DMLabel                label;
  IS                     part_boundary_faces_is;
  const char             partBoundaryName[] = "DMPlexDistributeInterpolateMarkInterface_partBoundary";
  PetscInt               value = 1;
  DMPlexInterpolatedFlag intp;

  PetscObjectGetComm((PetscObject)ipdm, &comm);
  DMPlexIsInterpolatedCollective(ipdm, &intp);

  /* get ipdm partition boundary (partBoundary) */
  DMLabelCreate(PETSC_COMM_SELF, partBoundaryName, &label);
  DMAddLabel(ipdm, label);
  DMPlexMarkBoundaryFaces(ipdm, value, label);
  DMLabelViewFromOptionsOnComm_Private(label, "-ipdm_part_boundary_view", comm);
  DMLabelGetStratumISOnComm_Private(label, value, comm, &part_boundary_faces_is);
  DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE);
  DMLabelDestroy(&label);

  /* remove ipdm whole-domain boundary (boundary_faces_is) from ipdm partition boundary (part_boundary_faces_is), resulting just in inter-partition interface */
  ISDifference(part_boundary_faces_is, boundary_faces_is, interface_faces_is);
  ISDestroy(&part_boundary_faces_is);
  return 0;
}

/* compute inter-partition interface including edges and vertices */
static PetscErrorCode DMPlexComputeCompleteInterface_Private(DM ipdm, IS interface_faces_is, IS *interface_is)
{
  DMLabel                label;
  PetscInt               value = 1;
  const char             interfaceName[] = "DMPlexDistributeInterpolateMarkInterface_interface";
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscObjectGetComm((PetscObject)ipdm, &comm);
  DMPlexIsInterpolatedCollective(ipdm, &intp);

  DMLabelCreate(PETSC_COMM_SELF, interfaceName, &label);
  DMAddLabel(ipdm, label);
  DMLabelSetStratumIS(label, value, interface_faces_is);
  DMLabelViewFromOptionsOnComm_Private(label, "-interface_faces_view", comm);
  DMPlexLabelComplete(ipdm, label);
  DMLabelViewFromOptionsOnComm_Private(label, "-interface_view", comm);
  DMLabelGetStratumISOnComm_Private(label, value, comm, interface_is);
  PetscObjectSetName((PetscObject)*interface_is, "interface_is");
  ISViewFromOptions(*interface_is, NULL, "-interface_is_view");
  DMRemoveLabelBySelf(ipdm, &label, PETSC_TRUE);
  DMLabelDestroy(&label);
  return 0;
}

static PetscErrorCode PointSFGetOutwardInterfacePoints(PetscSF sf, IS *is)
{
  PetscInt        n;
  const PetscInt *arr;

  PetscSFGetGraph(sf, NULL, &n, &arr, NULL);
  ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_USE_POINTER, is);
  return 0;
}

static PetscErrorCode PointSFGetInwardInterfacePoints(PetscSF sf, IS *is)
{
  PetscInt        n;
  const PetscInt *rootdegree;
  PetscInt       *arr;

  PetscSFSetUp(sf);
  PetscSFComputeDegreeBegin(sf, &rootdegree);
  PetscSFComputeDegreeEnd(sf, &rootdegree);
  PetscSFComputeMultiRootOriginalNumbering(sf, rootdegree, &n, &arr);
  ISCreateGeneral(PetscObjectComm((PetscObject)sf), n, arr, PETSC_OWN_POINTER, is);
  return 0;
}

static PetscErrorCode PointSFGetInterfacePoints_Private(PetscSF pointSF, IS *is)
{
  IS pointSF_out_is, pointSF_in_is;

  PointSFGetOutwardInterfacePoints(pointSF, &pointSF_out_is);
  PointSFGetInwardInterfacePoints(pointSF, &pointSF_in_is);
  ISExpand(pointSF_out_is, pointSF_in_is, is);
  ISDestroy(&pointSF_out_is);
  ISDestroy(&pointSF_in_is);
  return 0;
}

static PetscErrorCode ViewPointsWithType_Internal(DM dm, IS pointsIS, PetscViewer v)
{
  DMLabel         label;
  PetscSection    coordsSection;
  Vec             coordsVec;
  PetscScalar    *coordsScalar;
  PetscInt        coneSize, depth, dim, i, p, npoints;
  const PetscInt *points;

  DMGetDimension(dm, &dim);
  DMGetCoordinateSection(dm, &coordsSection);
  DMGetCoordinatesLocal(dm, &coordsVec);
  VecGetArray(coordsVec, &coordsScalar);
  ISGetLocalSize(pointsIS, &npoints);
  ISGetIndices(pointsIS, &points);
  DMPlexGetDepthLabel(dm, &label);
  PetscViewerASCIIPushTab(v);
  for (i = 0; i < npoints; i++) {
    p = points[i];
    DMLabelGetValue(label, p, &depth);
    if (!depth) {
      PetscInt n, o;
      char     coordstr[128];

      PetscSectionGetDof(coordsSection, p, &n);
      PetscSectionGetOffset(coordsSection, p, &o);
      coord2str(coordstr, sizeof(coordstr), n, &coordsScalar[o], 1.0);
      PetscViewerASCIISynchronizedPrintf(v, "vertex %" PetscInt_FMT " w/ coordinates %s\n", p, coordstr);
    } else {
      char entityType[16];

      switch (depth) {
      case 1:
        PetscStrcpy(entityType, "edge");
        break;
      case 2:
        PetscStrcpy(entityType, "face");
        break;
      case 3:
        PetscStrcpy(entityType, "cell");
        break;
      default:
        SETERRQ(PetscObjectComm((PetscObject)v), PETSC_ERR_SUP, "Only for depth <= 3");
      }
      if (depth == dim && dim < 3) PetscStrlcat(entityType, " (cell)", sizeof(entityType));
      PetscViewerASCIISynchronizedPrintf(v, "%s %" PetscInt_FMT "\n", entityType, p);
    }
    DMPlexGetConeSize(dm, p, &coneSize);
    if (coneSize) {
      const PetscInt *cone;
      IS              coneIS;

      DMPlexGetCone(dm, p, &cone);
      ISCreateGeneral(PETSC_COMM_SELF, coneSize, cone, PETSC_USE_POINTER, &coneIS);
      ViewPointsWithType_Internal(dm, coneIS, v);
      ISDestroy(&coneIS);
    }
  }
  PetscViewerASCIIPopTab(v);
  VecRestoreArray(coordsVec, &coordsScalar);
  ISRestoreIndices(pointsIS, &points);
  return 0;
}

static PetscErrorCode ViewPointsWithType(DM dm, IS points, PetscViewer v)
{
  PetscBool   flg;
  PetscInt    npoints;
  PetscMPIInt rank;

  PetscObjectTypeCompare((PetscObject)v, PETSCVIEWERASCII, &flg);
  MPI_Comm_rank(PetscObjectComm((PetscObject)v), &rank);
  PetscViewerASCIIPushSynchronized(v);
  ISGetLocalSize(points, &npoints);
  if (npoints) {
    PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", rank);
    ViewPointsWithType_Internal(dm, points, v);
  }
  PetscViewerFlush(v);
  PetscViewerASCIIPopSynchronized(v);
  return 0;
}
static PetscErrorCode DMPlexComparePointSFWithInterface_Private(DM ipdm, IS interface_is)
{
  PetscSF     pointsf;
  IS          pointsf_is;
  PetscBool   flg;
  MPI_Comm    comm;
  PetscMPIInt size;

  PetscObjectGetComm((PetscObject)ipdm, &comm);
  MPI_Comm_size(comm, &size);
  DMGetPointSF(ipdm, &pointsf);
  if (pointsf) {
    PetscInt nroots;
    PetscSFGetGraph(pointsf, &nroots, NULL, NULL, NULL);
    if (nroots < 0) pointsf = NULL; /* uninitialized SF */
  }
  if (!pointsf) {
    PetscInt N = 0;
    if (interface_is) ISGetSize(interface_is, &N);
    return 0;
  }

  /* get PointSF points as IS pointsf_is */
  PointSFGetInterfacePoints_Private(pointsf, &pointsf_is);

  /* compare pointsf_is with interface_is */
  ISEqual(interface_is, pointsf_is, &flg);
  MPI_Allreduce(MPI_IN_PLACE, &flg, 1, MPIU_BOOL, MPI_LAND, comm);
  if (!flg) {
    IS          pointsf_extra_is, pointsf_missing_is;
    PetscViewer errv = PETSC_VIEWER_STDERR_(comm);
    ISDifference(interface_is, pointsf_is, &pointsf_missing_is);
    ISDifference(pointsf_is, interface_is, &pointsf_extra_is);
    PetscViewerASCIIPrintf(errv, "Points missing in PointSF:\n");
    ViewPointsWithType(ipdm, pointsf_missing_is, errv);
    PetscViewerASCIIPrintf(errv, "Extra points in PointSF:\n");
    ViewPointsWithType(ipdm, pointsf_extra_is, errv);
    ISDestroy(&pointsf_extra_is);
    ISDestroy(&pointsf_missing_is);
    SETERRQ(comm, PETSC_ERR_PLIB, "PointSF is wrong! See details above.");
  }
  ISDestroy(&pointsf_is);
  return 0;
}
/* remove faces & edges from label, leave just vertices */
static PetscErrorCode DMPlexISFilterVertices_Private(DM dm, IS points)
{
  PetscInt vStart, vEnd;
  MPI_Comm comm;

  PetscObjectGetComm((PetscObject)dm, &comm);
  DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);
  ISGeneralFilter(points, vStart, vEnd);
  return 0;
}
/*
  DMPlexCheckPointSFHeavy - Thoroughly test that the PointSF after parallel DMPlexInterpolate() includes exactly all interface points.

  Collective

  Input Parameters:
+ dm  - The DMPlex object
- bnd - The PortableBoundary describing the whole-domain boundary (see DMPlexGetExpandedBoundary_Private())

  Notes:
  The input DMPlex must be serial (one partition has all points, the other partitions have no points).

  This is a heavy test which involves DMPlexInterpolate() if the input DM is not interpolated yet, and depends on having a representation of the whole-domain boundary (PortableBoundary), which can be obtained only by DMPlexGetExpandedBoundary_Private() (which involves DMPlexInterpolate() of a sequential DM).

  This is mainly intended for debugging/testing purposes.

  Algorithm:
  1. boundary faces of the serial version of the whole mesh are found using DMPlexMarkBoundaryFaces()
  2. boundary faces are translated into vertices using DMPlexGetConeRecursive() and these are translated into coordinates - this description (aka PortableBoundary) is completely independent of partitioning and point numbering
  3. the mesh is distributed or loaded in parallel
  4. boundary faces of the distributed mesh are reconstructed from PortableBoundary using DMPlexFindVertices()
  5. partition boundary faces of the parallel mesh are found using DMPlexMarkBoundaryFaces()
  6. partition interfaces are computed as the set difference of partition boundary faces minus the reconstructed boundary
  7. check that the interface covered by the PointSF (union of inward and outward points) is equal to the partition interface for each rank, otherwise print the difference and throw an error

  Level: developer

.seealso: DMGetPointSF(), DMPlexCheckSymmetry(), DMPlexCheckSkeleton(), DMPlexCheckFaces()
*/
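/* A minimal usage sketch, mirroring CreateMesh() above:

     PortableBoundary bnd;
     DMPlexGetExpandedBoundary_Private(serialdm, &bnd);  // serial DM holding the whole mesh
     DMPlexCheckPointSFHeavy(dm, bnd);                   // dm: the distributed (and possibly interpolated) mesh
     PortableBoundaryDestroy(&bnd);
*/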
static PetscErrorCode DMPlexCheckPointSFHeavy(DM dm, PortableBoundary bnd)
{
  DM                     ipdm = NULL;
  IS                     boundary_faces_is, interface_faces_is, interface_is;
  DMPlexInterpolatedFlag intp;
  MPI_Comm               comm;

  PetscObjectGetComm((PetscObject)dm, &comm);

  DMPlexIsInterpolatedCollective(dm, &intp);
  if (intp == DMPLEX_INTERPOLATED_FULL) {
    ipdm = dm;
  } else {
    /* create temporary interpolated DM if input DM is not interpolated */
    DMPlexSetOrientInterface_Private(dm, PETSC_FALSE);
    DMPlexInterpolate(dm, &ipdm); /* with DMPlexOrientInterface_Internal() call skipped so that PointSF issues are left to DMPlexComparePointSFWithInterface_Private() below */
    DMPlexSetOrientInterface_Private(dm, PETSC_TRUE);
  }
  DMViewFromOptions(ipdm, NULL, "-ipdm_view");

  /* recover ipdm whole-domain boundary faces from the expanded vertices coordinates */
  DMPlexExpandedVerticesCoordinatesToFaces_Private(ipdm, bnd, &boundary_faces_is);
  /* get inter-partition interface faces (interface_faces_is) */
  DMPlexGetInterfaceFaces_Private(ipdm, boundary_faces_is, &interface_faces_is);
  /* compute inter-partition interface including edges and vertices (interface_is) */
  DMPlexComputeCompleteInterface_Private(ipdm, interface_faces_is, &interface_is);
  /* destroy intermediate ISs */
  ISDestroy(&boundary_faces_is);
  ISDestroy(&interface_faces_is);

  /* for the uninterpolated case, keep just vertices in the interface */
  if (!intp) {
    DMPlexISFilterVertices_Private(ipdm, interface_is);
    DMDestroy(&ipdm);
  }

  /* compare PointSF with the boundary reconstructed from coordinates */
  DMPlexComparePointSFWithInterface_Private(dm, interface_is);
  PetscPrintf(comm, "DMPlexCheckPointSFHeavy PASSED\n");
  ISDestroy(&interface_is);
  return 0;
}
int main(int argc, char **argv)
{
  DM     dm;
  AppCtx user;

  PetscInitialize(&argc, &argv, NULL, help);
  PetscLogStageRegister("create", &stage[0]);
  PetscLogStageRegister("distribute", &stage[1]);
  PetscLogStageRegister("interpolate", &stage[2]);
  ProcessOptions(PETSC_COMM_WORLD, &user);
  CreateMesh(PETSC_COMM_WORLD, &user, &dm);
  if (user.nPointsToExpand) TestExpandPoints(dm, &user);
  if (user.ncoords) {
    Vec coords;

    VecCreateSeqWithArray(PETSC_COMM_SELF, user.ncoords, user.ncoords, user.coords, &coords);
    ViewVerticesFromCoords(dm, coords, user.coordsTol, PETSC_VIEWER_STDOUT_WORLD);
    VecDestroy(&coords);
  }
  DMDestroy(&dm);
  PetscFinalize();
  return 0;
}

/*TEST

  testset:
    nsize: 2
    args: -dm_view ascii::ascii_info_detail
    args: -dm_plex_check_all
    test:
      suffix: 1_tri_dist0
      args: -distribute 0 -interpolate {{none create}separate output}
    test:
      suffix: 1_tri_dist1
      args: -distribute 1 -interpolate {{none create after_distribute}separate output}
    test:
      suffix: 1_quad_dist0
      args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
    test:
      suffix: 1_quad_dist1
      args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
    test:
      suffix: 1_1d_dist1
      args: -dim 1 -distribute 1

  testset:
    nsize: 3
    args: -testnum 1 -interpolate create
    args: -dm_plex_check_all
    test:
      suffix: 2
      args: -dm_view ascii::ascii_info_detail
    test:
      suffix: 2a
      args: -dm_plex_check_cones_conform_on_interfaces_verbose
    test:
      suffix: 2b
      args: -test_expand_points 0,1,2,5,6
    test:
      suffix: 2c
      args: -test_expand_points 0,1,2,5,6 -test_expand_points_empty

  testset:
    # the same as the "1" testset above, but for 3D
1546: nsize: 2
1547: args: -dim 3 -dm_view ascii::ascii_info_detail
1548: args: -dm_plex_check_all
1549: test:
1550: suffix: 4_tet_dist0
1551: args: -distribute 0 -interpolate {{none create}separate output}
1552: test:
1553: suffix: 4_tet_dist1
1554: args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1555: test:
1556: suffix: 4_hex_dist0
1557: args: -cell_simplex 0 -distribute 0 -interpolate {{none create}separate output}
1558: test:
1559: suffix: 4_hex_dist1
1560: args: -cell_simplex 0 -distribute 1 -interpolate {{none create after_distribute}separate output}
1562: test:
1563: # the same as 4_tet_dist0 but test different initial orientations
1564: suffix: 4_tet_test_orient
1565: nsize: 2
1566: args: -dim 3 -distribute 0
1567: args: -dm_plex_check_all
1568: args: -rotate_interface_0 {{0 1 2 11 12 13}}
1569: args: -rotate_interface_1 {{0 1 2 11 12 13}}
1571: testset:
1572: requires: exodusii
1573: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo
1574: args: -dm_view ascii::ascii_info_detail
1575: args: -dm_plex_check_all
1576: args: -custom_view
1577: test:
1578: suffix: 5_seq
1579: nsize: 1
1580: args: -distribute 0 -interpolate {{none create}separate output}
1581: test:
1582: # Detail viewing in a non-distributed mesh is broken because the DMLabelView() is collective, but the label is not shared
1583: suffix: 5_dist0
1584: nsize: 2
1585: args: -distribute 0 -interpolate {{none create}separate output} -dm_view
1586: test:
1587: suffix: 5_dist1
1588: nsize: 2
1589: args: -distribute 1 -interpolate {{none create after_distribute}separate output}
1591: testset:
1592: nsize: {{1 2 4}}
1593: args: -use_generator
1594: args: -dm_plex_check_all
1595: args: -distribute -interpolate none
1596: test:
1597: suffix: 6_tri
1598: requires: triangle
1599: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1600: test:
1601: suffix: 6_quad
1602: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1603: test:
1604: suffix: 6_tet
1605: requires: ctetgen
1606: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1607: test:
1608: suffix: 6_hex
1609: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1610: testset:
1611: nsize: {{1 2 4}}
1612: args: -use_generator
1613: args: -dm_plex_check_all
1614: args: -distribute -interpolate create
1615: test:
1616: suffix: 6_int_tri
1617: requires: triangle
1618: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1619: test:
1620: suffix: 6_int_quad
1621: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1622: test:
1623: suffix: 6_int_tet
1624: requires: ctetgen
1625: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1626: test:
1627: suffix: 6_int_hex
1628: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
1629: testset:
1630: nsize: {{2 4}}
1631: args: -use_generator
1632: args: -dm_plex_check_all
1633: args: -distribute -interpolate after_distribute
1634: test:
1635: suffix: 6_parint_tri
1636: requires: triangle
1637: args: -faces {{2,2 1,3 7,4}} -cell_simplex 1 -dm_generator triangle
1638: test:
1639: suffix: 6_parint_quad
1640: args: -faces {{2,2 1,3 7,4}} -cell_simplex 0
1641: test:
1642: suffix: 6_parint_tet
1643: requires: ctetgen
1644: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 1 -dm_generator ctetgen
1645: test:
1646: suffix: 6_parint_hex
1647: args: -faces {{2,2,2 1,3,5 3,4,7}} -cell_simplex 0
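    # The three "6" testsets above run identical generated meshes and differ only in when
    # interpolation happens: never (6_*), serially at creation (6_int_*), or in parallel
    # after distribution (6_parint_*); the last needs at least 2 ranks, hence nsize {{2 4}}.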
1649: testset: # 7 EXODUS
1650: requires: exodusii
1651: args: -dm_plex_check_all
1652: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
1653: args: -distribute
1654: test: # seq load, simple partitioner
1655: suffix: 7_exo
1656: nsize: {{1 2 4 5}}
1657: args: -interpolate none
1658: test: # seq load, seq interpolation, simple partitioner
1659: suffix: 7_exo_int_simple
1660: nsize: {{1 2 4 5}}
1661: args: -interpolate create
1662: test: # seq load, seq interpolation, metis partitioner
1663: suffix: 7_exo_int_metis
1664: requires: parmetis
1665: nsize: {{2 4 5}}
1666: args: -interpolate create
1667: args: -petscpartitioner_type parmetis
1668: test: # seq load, simple partitioner, par interpolation
1669: suffix: 7_exo_simple_int
1670: nsize: {{2 4 5}}
1671: args: -interpolate after_distribute
1672: test: # seq load, metis partitioner, par interpolation
1673: suffix: 7_exo_metis_int
1674: requires: parmetis
1675: nsize: {{2 4 5}}
1676: args: -interpolate after_distribute
1677: args: -petscpartitioner_type parmetis
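    # For reference, 7_exo_metis_int at nsize 4 expands to (assuming a ParMETIS-enabled build):
    #   mpiexec -n 4 ./ex18 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -distribute -interpolate after_distribute -petscpartitioner_type parmetis -dm_plex_check_all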
1679: testset: # 7 HDF5 SEQUENTIAL LOAD
1680: requires: hdf5 !complex
1681: args: -dm_plex_check_all
1682: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1683: args: -dm_plex_hdf5_force_sequential
1684: args: -distribute
1685: test: # seq load, simple partitioner
1686: suffix: 7_seq_hdf5_simple
1687: nsize: {{1 2 4 5}}
1688: args: -interpolate none
1689: test: # seq load, seq interpolation, simple partitioner
1690: suffix: 7_seq_hdf5_int_simple
1691: nsize: {{1 2 4 5}}
1692: args: -interpolate after_create
1693: test: # seq load, seq interpolation, metis partitioner
1694: nsize: {{2 4 5}}
1695: suffix: 7_seq_hdf5_int_metis
1696: requires: parmetis
1697: args: -interpolate after_create
1698: args: -petscpartitioner_type parmetis
1699: test: # seq load, simple partitioner, par interpolation
1700: suffix: 7_seq_hdf5_simple_int
1701: nsize: {{2 4 5}}
1702: args: -interpolate after_distribute
1703: test: # seq load, metis partitioner, par interpolation
1704: nsize: {{2 4 5}}
1705: suffix: 7_seq_hdf5_metis_int
1706: requires: parmetis
1707: args: -interpolate after_distribute
1708: args: -petscpartitioner_type parmetis
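    # For reference, 7_seq_hdf5_int_simple at nsize 2 expands to:
    #   mpiexec -n 2 ./ex18 -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_force_sequential -distribute -interpolate after_create -dm_plex_check_all
    # The next testset omits -dm_plex_hdf5_force_sequential and thus exercises the parallel
    # HDF5 load path instead.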
1710: testset: # 7 HDF5 PARALLEL LOAD
1711: requires: hdf5 !complex
1712: nsize: {{2 4 5}}
1713: args: -dm_plex_check_all
1714: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1715: test: # par load
1716: suffix: 7_par_hdf5
1717: args: -interpolate none
1718: test: # par load, par interpolation
1719: suffix: 7_par_hdf5_int
1720: args: -interpolate after_create
1721: test: # par load, parmetis repartitioner
1722: TODO: Parallel partitioning of uninterpolated meshes not supported
1723: suffix: 7_par_hdf5_parmetis
1724: requires: parmetis
1725: args: -distribute -petscpartitioner_type parmetis
1726: args: -interpolate none
1727: test: # par load, par interpolation, parmetis repartitioner
1728: suffix: 7_par_hdf5_int_parmetis
1729: requires: parmetis
1730: args: -distribute -petscpartitioner_type parmetis
1731: args: -interpolate after_create
1732: test: # par load, parmetis partitioner, par interpolation
1733: TODO: Parallel partitioning of uninterpolated meshes not supported
1734: suffix: 7_par_hdf5_parmetis_int
1735: requires: parmetis
1736: args: -distribute -petscpartitioner_type parmetis
1737: args: -interpolate after_distribute
1739: test:
1740: suffix: 7_hdf5_hierarch
1741: requires: hdf5 ptscotch !complex
1742: nsize: {{2 3 4}separate output}
1743: args: -distribute
1744: args: -interpolate after_create
1745: args: -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info
1746: args: -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2
1747: args: -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch
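    # For reference, at nsize 2 this hierarchical-partitioning test expands to:
    #   mpiexec -n 2 ./ex18 -distribute -interpolate after_create -petscpartitioner_type matpartitioning -petscpartitioner_view ::ascii_info -mat_partitioning_type hierarch -mat_partitioning_hierarchical_nfineparts 2 -mat_partitioning_hierarchical_coarseparttype ptscotch -mat_partitioning_hierarchical_fineparttype ptscotch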
1749: test:
1750: suffix: 8
1751: requires: hdf5 !complex
1752: nsize: 4
1753: args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
1754: args: -distribute 0 -interpolate after_create
1755: args: -view_vertices_from_coords 0.,1.,0.,-0.5,1.,0.,0.583,-0.644,0.,-2.,-2.,-2. -view_vertices_from_coords_tol 1e-3
1756: args: -dm_plex_check_all
1757: args: -custom_view
1759: testset: # 9 HDF5 SEQUENTIAL LOAD
1760: requires: hdf5 !complex datafilespath
1761: args: -dm_plex_check_all
1762: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1763: args: -dm_plex_hdf5_force_sequential
1764: args: -distribute
1765: test: # seq load, simple partitioner
1766: suffix: 9_seq_hdf5_simple
1767: nsize: {{1 2 4 5}}
1768: args: -interpolate none
1769: test: # seq load, seq interpolation, simple partitioner
1770: suffix: 9_seq_hdf5_int_simple
1771: nsize: {{1 2 4 5}}
1772: args: -interpolate after_create
1773: test: # seq load, seq interpolation, metis partitioner
1774: nsize: {{2 4 5}}
1775: suffix: 9_seq_hdf5_int_metis
1776: requires: parmetis
1777: args: -interpolate after_create
1778: args: -petscpartitioner_type parmetis
1779: test: # seq load, simple partitioner, par interpolation
1780: suffix: 9_seq_hdf5_simple_int
1781: nsize: {{2 4 5}}
1782: args: -interpolate after_distribute
1783: test: # seq load, simple partitioner, par interpolation
1784: # This is like 9_seq_hdf5_simple_int but testing error output of DMPlexCheckPointSFHeavy().
1785: # Once 9_seq_hdf5_simple_int gets fixed, this one gets broken.
1786: # We can then provide an intentionally broken mesh instead.
1787: TODO: This test is broken because PointSF is fixed.
1788: suffix: 9_seq_hdf5_simple_int_err
1789: nsize: 4
1790: args: -interpolate after_distribute
1791: filter: sed -e "/PETSC ERROR/,$$d"
1792: test: # seq load, metis partitioner, par interpolation
1793: nsize: {{2 4 5}}
1794: suffix: 9_seq_hdf5_metis_int
1795: requires: parmetis
1796: args: -interpolate after_distribute
1797: args: -petscpartitioner_type parmetis
1799: testset: # 9 HDF5 PARALLEL LOAD
1800: requires: hdf5 !complex datafilespath
1801: nsize: {{2 4 5}}
1802: args: -dm_plex_check_all
1803: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
1804: test: # par load
1805: suffix: 9_par_hdf5
1806: args: -interpolate none
1807: test: # par load, par interpolation
1808: suffix: 9_par_hdf5_int
1809: args: -interpolate after_create
1810: test: # par load, parmetis repartitioner
1811: TODO: Parallel partitioning of uninterpolated meshes not supported
1812: suffix: 9_par_hdf5_parmetis
1813: requires: parmetis
1814: args: -distribute -petscpartitioner_type parmetis
1815: args: -interpolate none
1816: test: # par load, par interpolation, parmetis repartitioner
1817: suffix: 9_par_hdf5_int_parmetis
1818: requires: parmetis
1819: args: -distribute -petscpartitioner_type parmetis
1820: args: -interpolate after_create
1821: test: # par load, parmetis partitioner, par interpolation
1822: TODO: Parallel partitioning of uninterpolated meshes not supported
1823: suffix: 9_par_hdf5_parmetis_int
1824: requires: parmetis
1825: args: -distribute -petscpartitioner_type parmetis
1826: args: -interpolate after_distribute
1828: testset: # 10 HDF5 PARALLEL LOAD
1829: requires: hdf5 !complex datafilespath
1830: nsize: {{2 4 7}}
1831: args: -dm_plex_check_all
1832: args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined2.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /topo -dm_plex_hdf5_geometry_path /geom
1833: test: # par load, par interpolation
1834: suffix: 10_par_hdf5_int
1835: args: -interpolate after_create
1836: TEST*/