Actual source code: ex12.c

static char help[] = "Partition a mesh in parallel, perhaps with overlap\n\n";

#include <petscdmplex.h>
#include <petscsf.h>

/* Sample usage:

Load a file in serial and distribute it on 24 processes:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view" NP=24

Load a file in serial and distribute it on 24 processes using a custom partitioner:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/cylinder.med -petscpartitioner_type simple -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it, and then redistribute it on 24 processes using two different partitioners:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type simple -load_balance -lb_petscpartitioner_type parmetis -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it randomly, refine it in parallel, redistribute it on 24 processes using two different partitioners, and view the result in VTK:

  make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type shell -petscpartitioner_shell_random -dm_refine 1 -load_balance -lb_petscpartitioner_type parmetis -prelb_dm_view vtk:$PWD/prelb.vtk -dm_view vtk:$PWD/balance.vtk -dm_partition_view" NP=24

*/
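
/* The make invocations above go through the PETSc test harness. A rough direct
   equivalent of the first example (an illustration only, assuming the ex12 binary
   has already been built in this directory) would be:

     mpiexec -n 24 ./ex12 -filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view
*/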

enum {
  STAGE_LOAD,
  STAGE_DISTRIBUTE,
  STAGE_REFINE,
  STAGE_REDISTRIBUTE
};

typedef struct {
  /* Domain and mesh definition */
  PetscInt      overlap;          /* The cell overlap to use during partitioning */
  PetscBool     testPartition;    /* Use a fixed partitioning for testing */
  PetscBool     testRedundant;    /* Use a redundant partitioning for testing */
  PetscBool     loadBalance;      /* Load balance via a second distribute step */
  PetscBool     partitionBalance; /* Balance shared point partition */
  PetscLogStage stages[4];
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  options->overlap          = 0;
  options->testPartition    = PETSC_FALSE;
  options->testRedundant    = PETSC_FALSE;
  options->loadBalance      = PETSC_FALSE;
  options->partitionBalance = PETSC_FALSE;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex12.c", options->overlap, &options->overlap, NULL, 0);
  PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex12.c", options->testPartition, &options->testPartition, NULL);
  PetscOptionsBool("-test_redundant", "Use a redundant partition for testing", "ex12.c", options->testRedundant, &options->testRedundant, NULL);
  PetscOptionsBool("-load_balance", "Perform parallel load balancing in a second distribution step", "ex12.c", options->loadBalance, &options->loadBalance, NULL);
  PetscOptionsBool("-partition_balance", "Balance the ownership of shared points", "ex12.c", options->partitionBalance, &options->partitionBalance, NULL);
  PetscOptionsEnd();

  PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]);
  PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]);
  PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]);
  PetscLogStageRegister("MeshRedistribute", &options->stages[STAGE_REDISTRIBUTE]);
  return 0;
}

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  DM          pdm             = NULL;
  PetscInt    triSizes_n2[2]  = {4, 4};
  PetscInt    triPoints_n2[8] = {0, 1, 4, 6, 2, 3, 5, 7};
  PetscInt    triSizes_n3[3]  = {3, 2, 3};
  PetscInt    triPoints_n3[8] = {3, 5, 6, 1, 7, 0, 2, 4};
  PetscInt    triSizes_n4[4]  = {2, 2, 2, 2};
  PetscInt    triPoints_n4[8] = {0, 7, 1, 5, 2, 3, 4, 6};
  PetscInt    triSizes_n8[8]  = {1, 1, 1, 1, 1, 1, 1, 1};
  PetscInt    triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  PetscInt    quadSizes[2]    = {2, 2};
  PetscInt    quadPoints[4]   = {2, 3, 0, 1};
  PetscInt    overlap         = user->overlap >= 0 ? user->overlap : 0;
  PetscInt    dim;
  PetscBool   simplex;
  PetscMPIInt rank, size;

  MPI_Comm_rank(comm, &rank);
  MPI_Comm_size(comm, &size);
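  /* Build or load the initial mesh from the command line options; automatic distribution is
     switched off so that the partitioning below is performed explicitly */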
  PetscLogStagePush(user->stages[STAGE_LOAD]);
  DMCreate(comm, dm);
  DMSetType(*dm, DMPLEX);
  DMPlexDistributeSetDefault(*dm, PETSC_FALSE);
  DMSetFromOptions(*dm);
  DMViewFromOptions(*dm, NULL, "-orig_dm_view");
  PetscLogStagePop();
  DMGetDimension(*dm, &dim);
  DMPlexIsSimplex(*dm, &simplex);
  PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]);
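  /* Distribute the mesh: either partition it across the processes (possibly with a fixed
     partition for testing) or, with -test_redundant, give every process a complete copy */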
  if (!user->testRedundant) {
    PetscPartitioner part;

    DMPlexGetPartitioner(*dm, &part);
    PetscPartitionerSetFromOptions(part);
    DMPlexSetPartitionBalance(*dm, user->partitionBalance);
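    /* A shell partitioner lets the test pin the partition exactly: on rank 0, sizes[p] is the
       number of cells assigned to process p and points lists those cells contiguously by process */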
    if (user->testPartition) {
      const PetscInt *sizes  = NULL;
      const PetscInt *points = NULL;

      if (rank == 0) {
        if (dim == 2 && simplex && size == 2) {
          sizes  = triSizes_n2;
          points = triPoints_n2;
        } else if (dim == 2 && simplex && size == 3) {
          sizes  = triSizes_n3;
          points = triPoints_n3;
        } else if (dim == 2 && simplex && size == 4) {
          sizes  = triSizes_n4;
          points = triPoints_n4;
        } else if (dim == 2 && simplex && size == 8) {
          sizes  = triSizes_n8;
          points = triPoints_n8;
        } else if (dim == 2 && !simplex && size == 2) {
          sizes  = quadSizes;
          points = quadPoints;
        }
      }
      PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
      PetscPartitionerShellSetPartition(part, size, sizes, points);
    }
    DMPlexDistribute(*dm, overlap, NULL, &pdm);
  } else {
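    /* Redundant distribution: every process gets a full copy of the mesh; the returned SF
       describes that migration and is reused here to exercise DMPlexMigrate() directly */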
    PetscSF sf;

    DMPlexGetRedundantDM(*dm, &sf, &pdm);
    if (sf) {
      DM test;

      DMPlexCreate(comm, &test);
      PetscObjectSetName((PetscObject)test, "Test SF-migrated Redundant Mesh");
      DMPlexMigrate(*dm, sf, test);
      DMViewFromOptions(test, NULL, "-redundant_migrated_dm_view");
      DMDestroy(&test);
    }
    PetscSFDestroy(&sf);
  }
  if (pdm) {
    DMDestroy(dm);
    *dm = pdm;
  }
  PetscLogStagePop();
  DMSetFromOptions(*dm);
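  /* Optionally repartition the (possibly refined) mesh in a second step; the "lb_" prefix lets
     this partitioner be configured independently, e.g. with -lb_petscpartitioner_type */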
  if (user->loadBalance) {
    PetscPartitioner part;

    DMViewFromOptions(*dm, NULL, "-prelb_dm_view");
    DMPlexSetOptionsPrefix(*dm, "lb_");
    PetscLogStagePush(user->stages[STAGE_REDISTRIBUTE]);
    DMPlexGetPartitioner(*dm, &part);
    PetscObjectSetOptionsPrefix((PetscObject)part, "lb_");
    PetscPartitionerSetFromOptions(part);
    if (user->testPartition) {
      PetscInt reSizes_n2[2]  = {2, 2};
      PetscInt rePoints_n2[4] = {2, 3, 0, 1};
      if (rank) {
        rePoints_n2[0] = 1;
        rePoints_n2[1] = 2;
        rePoints_n2[2] = 0;
        rePoints_n2[3] = 3;
      }

      PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
      PetscPartitionerShellSetPartition(part, size, reSizes_n2, rePoints_n2);
    }
    DMPlexSetPartitionBalance(*dm, user->partitionBalance);
    DMPlexDistribute(*dm, overlap, NULL, &pdm);
    if (pdm) {
      DMDestroy(dm);
      *dm = pdm;
    }
    PetscLogStagePop();
  }
  PetscLogStagePush(user->stages[STAGE_REFINE]);
  DMViewFromOptions(*dm, NULL, "-dm_view");
  PetscLogStagePop();
  return 0;
}

int main(int argc, char **argv)
{
  DM     dm;
  AppCtx user; /* user-defined work context */

  PetscInitialize(&argc, &argv, NULL, help);
  ProcessOptions(PETSC_COMM_WORLD, &user);
  CreateMesh(PETSC_COMM_WORLD, &user, &dm);
  DMDestroy(&dm);
  PetscFinalize();
  return 0;
}
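
/* Each entry below is run by the PETSc test harness: nsize gives the number of MPI processes,
   requires names the external packages a test needs, and args are the command line options */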

/*TEST
  # Parallel, no overlap tests 0-2
  test:
    suffix: 0
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex
  test:
    suffix: 1
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 2
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  # Parallel, level-1 overlap tests 3-4
  test:
    suffix: 3
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  # Parallel, level-2 overlap test 5
  test:
    suffix: 5
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail
  # Parallel load balancing, tests 6-7
  test:
    suffix: 6
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail
  # Parallel redundant copying, test 8
  test:
    suffix: 8
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -redundant_migrated_dm_view ascii::ascii_info_detail -dm_view ascii::ascii_info_detail
  test:
    suffix: lb_0
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -prelb_dm_view ::load_balance -dm_view ::load_balance

  # Same tests as above, but with balancing of the shared point partition
  test:
    suffix: 9
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex -partition_balance
  test:
    suffix: 10
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 11
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-1 overlap tests 12-13
  test:
    suffix: 12
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 13
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-2 overlap test 14
  test:
    suffix: 14
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel load balancing, tests 15-16
  test:
    suffix: 15
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 16
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel redundant copying, test 17
  test:
    suffix: 17
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: lb_1
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -partition_balance -prelb_dm_view ::load_balance -dm_view ::load_balance
TEST*/