ex24.c
static char help[] = "Test that MatPartitioning and PetscPartitioner interfaces are equivalent when using PETSCPARTITIONERMATPARTITIONING\n\n";
static char FILENAME[] = "ex24.c";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>
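
/* ptscotch.h is a plain C header; PETSc's EXTERN_C_BEGIN/END guards preserve C linkage
   when this file is compiled by a C++ compiler */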
#if defined(PETSC_HAVE_PTSCOTCH)
EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END
#endif

typedef struct {
  PetscBool compare_is;         /* Compare ISs and PetscSections */
  PetscBool compare_dm;         /* Compare DM */
  PetscBool tpw;                /* Use target partition weights */
  char      partitioning[64];   /* Partitioning type for the first partitioning */
  char      repartitioning[64]; /* Partitioning type for the repartitioning; empty if -repartition 0 */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool repartition = PETSC_TRUE;

  options->compare_is = PETSC_FALSE;
  options->compare_dm = PETSC_FALSE;
  options->tpw        = PETSC_FALSE; /* initialize before it is read as the default below */

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscOptionsBool("-compare_is", "Compare ISs and PetscSections?", FILENAME, options->compare_is, &options->compare_is, NULL);
  PetscOptionsBool("-compare_dm", "Compare DMs?", FILENAME, options->compare_dm, &options->compare_dm, NULL);
  PetscStrncpy(options->partitioning, MATPARTITIONINGPARMETIS, sizeof(options->partitioning));
  PetscOptionsString("-partitioning", "The mat partitioning type to test", "None", options->partitioning, options->partitioning, sizeof(options->partitioning), NULL);
  PetscOptionsBool("-repartition", "Partition again after the first partition?", FILENAME, repartition, &repartition, NULL);
  if (repartition) {
    PetscStrncpy(options->repartitioning, MATPARTITIONINGPARMETIS, sizeof(options->repartitioning));
    PetscOptionsString("-repartitioning", "The mat partitioning type to test (second partitioning)", "None", options->repartitioning, options->repartitioning, sizeof(options->repartitioning), NULL);
  } else {
    options->repartitioning[0] = '\0';
  }
  PetscOptionsBool("-tpweight", "Use target partition weights", FILENAME, options->tpw, &options->tpw, NULL);
  PetscOptionsEnd();
  return 0;
}
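
/* PT-Scotch partitioning is randomized: resetting its pseudo-random generator to the same
   state before each partitioning call makes the calls reproducible, so the two interfaces
   can be compared exactly */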
static PetscErrorCode ScotchResetRandomSeed(void)
{
#if defined(PETSC_HAVE_PTSCOTCH)
  SCOTCH_randomReset();
#endif
  return 0;
}

static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  DMCreate(comm, dm);
  DMSetType(*dm, DMPLEX);
  DMPlexDistributeSetDefault(*dm, PETSC_FALSE);
  DMSetFromOptions(*dm);
  DMViewFromOptions(*dm, NULL, "-dm_view");
  return 0;
}
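
/* Example invocation (assuming a PETSc build with ParMETIS and ctetgen; see the TEST block
   at the end of this file for the combinations actually exercised):

     mpiexec -n 4 ./ex24 -dm_plex_dim 3 -dm_plex_box_faces 2,3,2 \
       -partitioning parmetis -repartitioning parmetis -compare_is -compare_dm
*/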
int main(int argc, char **argv)
{
  MPI_Comm               comm;
  DM                     dm1, dm2, dmdist1, dmdist2;
  DMPlexInterpolatedFlag interp;
  MatPartitioning        mp;
  PetscPartitioner       part1, part2;
  AppCtx                 user;
  IS                     is1 = NULL, is2 = NULL;
  IS                     is1g, is2g;
  PetscSection           s1 = NULL, s2 = NULL, tpws = NULL;
  PetscInt               i;
  PetscBool              flg;
  PetscMPIInt            size;

  PetscInitialize(&argc, &argv, NULL, help);
  comm = PETSC_COMM_WORLD;
  MPI_Comm_size(comm, &size);
  ProcessOptions(comm, &user);
  CreateMesh(comm, &user, &dm1);
  CreateMesh(comm, &user, &dm2);
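
  /* build a PetscSection of target partition weights, one dof per rank;
     e.g. size 4 gives targets {2,1,4,5}, with rank 0 then zeroed to test an empty target */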
  if (user.tpw) {
    PetscSectionCreate(comm, &tpws);
    PetscSectionSetChart(tpws, 0, size);
    for (i = 0; i < size; i++) {
      PetscInt tdof = i % 2 ? 2 * i - 1 : i + 2;
      PetscSectionSetDof(tpws, i, tdof);
    }
    if (size > 1) { /* test zero tpw entry */
      PetscSectionSetDof(tpws, 0, 0);
    }
    PetscSectionSetUp(tpws);
  }

  /* partition dm1 through the PetscPartitioner interface directly (PETSCPARTITIONERPARMETIS by default) */
  ScotchResetRandomSeed();
  DMPlexGetPartitioner(dm1, &part1);
  PetscObjectSetOptionsPrefix((PetscObject)part1, "p1_");
  PetscPartitionerSetType(part1, user.partitioning);
  PetscPartitionerSetFromOptions(part1);
  PetscSectionCreate(comm, &s1);
  PetscPartitionerDMPlexPartition(part1, dm1, tpws, s1, &is1);
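  /* s1 now gives the number of mesh points assigned to each rank, and is1 lists
     those points concatenated by rank */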

  /* partition dm2 through PETSCPARTITIONERMATPARTITIONING wrapping the same MatPartitioning type (MATPARTITIONINGPARMETIS by default) */
  ScotchResetRandomSeed();
  DMPlexGetPartitioner(dm2, &part2);
  PetscObjectSetOptionsPrefix((PetscObject)part2, "p2_");
  PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING);
  PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp);
  MatPartitioningSetType(mp, user.partitioning);
  PetscPartitionerSetFromOptions(part2);
  PetscSectionCreate(comm, &s2);
  PetscPartitionerDMPlexPartition(part2, dm2, tpws, s2, &is2);
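  /* both partitioners delegate to the same underlying package with identical random seeds,
     so s2/is2 are expected to match s1/is1 exactly; that equivalence is what this test checks */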

  ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g);
  ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g);
  ISViewFromOptions(is1g, NULL, "-seq_is1_view");
  ISViewFromOptions(is2g, NULL, "-seq_is2_view");
  /* compare the two ISs */
  if (user.compare_is) {
    ISEqualUnsorted(is1g, is2g, &flg);
    if (!flg) PetscPrintf(comm, "ISs are not equal with type %s and size %d.\n", user.partitioning, size);
  }
  ISDestroy(&is1g);
  ISDestroy(&is2g);

  /* compare the two PetscSections */
  PetscSectionViewFromOptions(s1, NULL, "-seq_s1_view");
  PetscSectionViewFromOptions(s2, NULL, "-seq_s2_view");
  if (user.compare_is) {
    PetscSectionCompare(s1, s2, &flg);
    if (!flg) PetscPrintf(comm, "PetscSections are not equal with type %s and size %d.\n", user.partitioning, size);
  }

  /* distribute both DMs, using the partitioners configured above */
  ScotchResetRandomSeed();
  DMPlexDistribute(dm1, 0, NULL, &dmdist1);
  ScotchResetRandomSeed();
  DMPlexDistribute(dm2, 0, NULL, &dmdist2);

  /* cleanup */
  PetscSectionDestroy(&tpws);
  PetscSectionDestroy(&s1);
  PetscSectionDestroy(&s2);
  ISDestroy(&is1);
  ISDestroy(&is2);
  DMDestroy(&dm1);
  DMDestroy(&dm2);

  /* if distributed DMs are NULL (sequential case), then quit */
  if (!dmdist1 && !dmdist2) {
    PetscFinalize();
    return 0;
  }

  DMViewFromOptions(dmdist1, NULL, "-dm_dist1_view");
  DMViewFromOptions(dmdist2, NULL, "-dm_dist2_view");

  /* compare the two distributed DMs */
  if (user.compare_dm) {
    DMPlexEqual(dmdist1, dmdist2, &flg);
    if (!flg) PetscPrintf(comm, "Distributed DMs are not equal with type %s and size %d.\n", user.partitioning, size);
  }

  /* if repartitioning is disabled, then quit */
  if (user.repartitioning[0] == '\0') {
    DMDestroy(&dmdist1);
    DMDestroy(&dmdist2);
    PetscFinalize();
    return 0;
  }
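
  /* fresh target weights for the repartitioning step; e.g. size 4 gives targets {4,2,2,4} */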
  if (user.tpw) {
    PetscSectionCreate(comm, &tpws);
    PetscSectionSetChart(tpws, 0, size);
    for (i = 0; i < size; i++) {
      PetscInt tdof = i % 2 ? i + 1 : size - i;
      PetscSectionSetDof(tpws, i, tdof);
    }
    PetscSectionSetUp(tpws);
  }

  /* repartition distributed DM dmdist1 */
  ScotchResetRandomSeed();
  DMPlexGetPartitioner(dmdist1, &part1);
  PetscObjectSetOptionsPrefix((PetscObject)part1, "dp1_");
  PetscPartitionerSetType(part1, user.repartitioning);
  PetscPartitionerSetFromOptions(part1);
  PetscSectionCreate(comm, &s1);
  PetscPartitionerDMPlexPartition(part1, dmdist1, tpws, s1, &is1);

  /* repartition distributed DM dmdist2 */
  ScotchResetRandomSeed();
  DMPlexGetPartitioner(dmdist2, &part2);
  PetscObjectSetOptionsPrefix((PetscObject)part2, "dp2_");
  PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING);
  PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp);
  MatPartitioningSetType(mp, user.repartitioning);
  PetscPartitionerSetFromOptions(part2);
  PetscSectionCreate(comm, &s2);
  PetscPartitionerDMPlexPartition(part2, dmdist2, tpws, s2, &is2);

  /* compare the two ISs */
  ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g);
  ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g);
  ISViewFromOptions(is1g, NULL, "-dist_is1_view");
  ISViewFromOptions(is2g, NULL, "-dist_is2_view");
  if (user.compare_is) {
    ISEqualUnsorted(is1g, is2g, &flg);
    if (!flg) PetscPrintf(comm, "Distributed ISs are not equal with type %s and size %d.\n", user.repartitioning, size);
  }
  ISDestroy(&is1g);
  ISDestroy(&is2g);

  /* compare the two PetscSections */
  PetscSectionViewFromOptions(s1, NULL, "-dist_s1_view");
  PetscSectionViewFromOptions(s2, NULL, "-dist_s2_view");
  if (user.compare_is) {
    PetscSectionCompare(s1, s2, &flg);
    if (!flg) PetscPrintf(comm, "Distributed PetscSections are not equal with type %s and size %d.\n", user.repartitioning, size);
  }

  /* redistribute both distributed DMs */
  ScotchResetRandomSeed();
  DMPlexDistribute(dmdist1, 0, NULL, &dm1);
  ScotchResetRandomSeed();
  DMPlexDistribute(dmdist2, 0, NULL, &dm2);

  /* compare the two redistributed DMs */
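  /* the comparison is restricted to non-interpolated meshes, presumably because
     interpolated meshes may number their faces and edges differently after
     redistribution even when the cell partitions agree */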
  DMPlexIsInterpolated(dm1, &interp);
  if (interp == DMPLEX_INTERPOLATED_NONE) {
    DMPlexEqual(dm1, dm2, &flg);
    if (!flg) PetscPrintf(comm, "Redistributed DMs are not equal with type %s and size %d.\n", user.repartitioning, size);
  }

  /* cleanup */
  PetscSectionDestroy(&tpws);
  PetscSectionDestroy(&s1);
  PetscSectionDestroy(&s2);
  ISDestroy(&is1);
  ISDestroy(&is2);
  DMDestroy(&dm1);
  DMDestroy(&dm2);
  DMDestroy(&dmdist1);
  DMDestroy(&dmdist2);
  PetscFinalize();
  return 0;
}

/*TEST

  test:
    # partition sequential mesh loaded from Exodus file
    suffix: 0
    nsize: {{1 2 3 4 8}}
    requires: chaco parmetis ptscotch exodusii
    args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}
  test:
    # repartition mesh already partitioned naively by MED loader
    suffix: 1
    nsize: {{1 2 3 4 8}}
    TODO: MED
    requires: parmetis ptscotch med
    args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med
    args: -repartition 0 -partitioning {{parmetis ptscotch}}
  test:
    # partition mesh generated by ctetgen using scotch, then repartition with scotch, diff view
    suffix: 3
    nsize: 4
    requires: ptscotch ctetgen
    args: -dm_plex_dim 3 -dm_plex_box_faces 2,3,2 -partitioning ptscotch -repartitioning ptscotch
    args: -p1_petscpartitioner_view -p2_petscpartitioner_view -dp1_petscpartitioner_view -dp2_petscpartitioner_view -tpweight {{0 1}}
  test:
    # partition mesh generated by ctetgen using partitioners supported both by MatPartitioning and PetscPartitioner
    suffix: 4
    nsize: {{1 2 3 4 8}}
    requires: chaco parmetis ptscotch ctetgen
    args: -dm_plex_dim 3 -dm_plex_box_faces {{2,3,4 5,4,3 7,11,5}} -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}

TEST*/