Actual source code: partptscotch.c

#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PTSCOTCH)
EXTERN_C_BEGIN
  #include <ptscotch.h>
EXTERN_C_END
#endif

PetscBool  PTScotchPartitionerCite       = PETSC_FALSE;
const char PTScotchPartitionerCitation[] = "@article{PTSCOTCH,\n"
                                           "  author  = {C. Chevalier and F. Pellegrini},\n"
                                           "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
                                           "  journal = {Parallel Computing},\n"
                                           "  volume  = {34},\n"
                                           "  number  = {6},\n"
                                           "  pages   = {318--331},\n"
                                           "  year    = {2008},\n"
                                           "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
                                           "}\n";

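/* Private context: a duplicate of the partitioner's communicator, the index of the selected
   strategy in PTScotchStrategyList below, and the allowed load imbalance ratio */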
typedef struct {
  MPI_Comm  pcomm;
  PetscInt  strategy;
  PetscReal imbalance;
} PetscPartitioner_PTScotch;

#if defined(PETSC_HAVE_PTSCOTCH)

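  /* Check the integer return code of a PT-Scotch call and raise a PETSc error on failure */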
  #define PetscCallPTSCOTCH(...) \
    do { \
      PetscCheck(!(__VA_ARGS__), PETSC_COMM_SELF, PETSC_ERR_LIB, "Error calling PT-Scotch library"); \
    } while (0)

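/* Translate the strategy index set with -petscpartitioner_ptscotch_strategy (see
   PTScotchStrategyList below) into the corresponding SCOTCH_STRAT* flag */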
static int PTScotch_Strategy(PetscInt strategy)
{
  switch (strategy) {
  case 0:
    return SCOTCH_STRATDEFAULT;
  case 1:
    return SCOTCH_STRATQUALITY;
  case 2:
    return SCOTCH_STRATSPEED;
  case 3:
    return SCOTCH_STRATBALANCE;
  case 4:
    return SCOTCH_STRATSAFETY;
  case 5:
    return SCOTCH_STRATSCALABILITY;
  case 6:
    return SCOTCH_STRATRECURSIVE;
  case 7:
    return SCOTCH_STRATREMAP;
  default:
    return SCOTCH_STRATDEFAULT;
  }
}

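/* Partition a graph that lives entirely on one process with sequential Scotch: build a
   SCOTCH_Graph from the CSR arrays, a mapping strategy, and a complete-graph target
   architecture (weighted if target partition weights are given), then compute the mapping */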
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Arch  archdat;
  SCOTCH_Graph grafdat;
  SCOTCH_Strat stradat;
  SCOTCH_Num   vertnbr = n;
  SCOTCH_Num   edgenbr = xadj[n];
  SCOTCH_Num  *velotab = vtxwgt;
  SCOTCH_Num  *edlotab = adjwgt;
  SCOTCH_Num   flagval = strategy;
  double       kbalval = imbalance;

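  /* Honor the deprecated (misspelled) option that turns vertex weights off */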
  {
    PetscBool flg = PETSC_TRUE;
    PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights");
    PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);
    if (!flg) velotab = NULL;
  }
  SCOTCH_graphInit(&grafdat);
  SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);
  SCOTCH_stratInit(&stradat);
  SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);
  SCOTCH_archInit(&archdat);
  if (tpart) {
    SCOTCH_archCmpltw(&archdat, nparts, tpart);
  } else {
    SCOTCH_archCmplt(&archdat, nparts);
  }
  SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  return 0;
}

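/* Partition a distributed graph with PT-Scotch: the same call sequence as the sequential path,
   but on a SCOTCH_Dgraph built over comm, with an explicit SCOTCH_Dmapping for the result */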
static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
{
  PetscMPIInt     procglbnbr;
  PetscMPIInt     proclocnum;
  SCOTCH_Arch     archdat;
  SCOTCH_Dgraph   grafdat;
  SCOTCH_Dmapping mappdat;
  SCOTCH_Strat    stradat;
  SCOTCH_Num      vertlocnbr;
  SCOTCH_Num      edgelocnbr;
  SCOTCH_Num     *veloloctab = vtxwgt;
  SCOTCH_Num     *edloloctab = adjwgt;
  SCOTCH_Num      flagval    = strategy;
  double          kbalval    = imbalance;

  {
    PetscBool flg = PETSC_TRUE;
    PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights");
    PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);
    if (!flg) veloloctab = NULL;
  }
  MPI_Comm_size(comm, &procglbnbr);
  MPI_Comm_rank(comm, &proclocnum);
  vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
  edgelocnbr = xadj[vertlocnbr];

  SCOTCH_dgraphInit(&grafdat, comm);
  SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);
  SCOTCH_stratInit(&stradat);
  SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);
  SCOTCH_archInit(&archdat);
  if (tpart) { /* target partition weights */
    SCOTCH_archCmpltw(&archdat, nparts, tpart);
  } else {
    SCOTCH_archCmplt(&archdat, nparts);
  }
  SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);
  SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);
  SCOTCH_dgraphMapExit(&grafdat, &mappdat);
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_dgraphExit(&grafdat);
  return 0;
}

#endif /* PETSC_HAVE_PTSCOTCH */

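/* Option names for -petscpartitioner_ptscotch_strategy; the order must match the cases in PTScotch_Strategy() */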
static const char *const PTScotchStrategyList[] = {"DEFAULT", "QUALITY", "SPEED", "BALANCE", "SAFETY", "SCALABILITY", "RECURSIVE", "REMAP"};

static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;

  MPI_Comm_free(&p->pcomm);
  PetscFree(part->data);
  return 0;
}

static PetscErrorCode PetscPartitionerView_PTScotch_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;

  PetscViewerASCIIPushTab(viewer);
  PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n", PTScotchStrategyList[p->strategy]);
  PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n", (double)p->imbalance);
  PetscViewerASCIIPopTab(viewer);
  return 0;
}

static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) PetscPartitionerView_PTScotch_ASCII(part, viewer);
  return 0;
}

static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
{
  PetscPartitioner_PTScotch *p     = (PetscPartitioner_PTScotch *)part->data;
  const char *const         *slist = PTScotchStrategyList;
  PetscInt                   nlist = PETSC_STATIC_ARRAY_LENGTH(PTScotchStrategyList);
  PetscBool                  flag;

  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner PTScotch Options");
  PetscOptionsEList("-petscpartitioner_ptscotch_strategy", "Partitioning strategy", "", slist, nlist, slist[p->strategy], &p->strategy, &flag);
  PetscOptionsReal("-petscpartitioner_ptscotch_imbalance", "Load imbalance ratio", "", p->imbalance, &p->imbalance, &flag);
  PetscOptionsHeadEnd();
  return 0;
}

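/* Driver: gather the vertex distribution, collect vertex and target partition weights, call
   the sequential or the parallel PT-Scotch path, then convert the per-vertex assignment into
   a PetscSection of partition sizes plus an IS of vertex numbers grouped by target rank */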
static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PTSCOTCH)
  MPI_Comm    comm;
  PetscInt    nvtxs = numVertices; /* The number of vertices in full graph */
  PetscInt   *vtxdist;             /* Distribution of vertices across processes */
  PetscInt   *xadj   = start;      /* Start of edge list for each vertex */
  PetscInt   *adjncy = adjacency;  /* Edge lists for all vertices */
  PetscInt   *vwgt   = NULL;       /* Vertex weights */
  PetscInt   *adjwgt = NULL;       /* Edge weights */
  PetscInt    v, i, *assignment, *points;
  PetscMPIInt size, rank, p;
  PetscBool   hasempty = PETSC_FALSE;
  PetscInt   *tpwgts   = NULL;

  PetscObjectGetComm((PetscObject)part, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  PetscMalloc2(size + 1, &vtxdist, PetscMax(nvtxs, 1), &assignment);
  /* Calculate vertex distribution */
  vtxdist[0] = 0;
  MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p - 1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    PetscFree2(vtxdist, assignment);
    ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
    return 0;
  }

  /* Calculate vertex weights */
  if (vertSection) {
    PetscMalloc1(nvtxs, &vwgt);
    for (v = 0; v < nvtxs; ++v) PetscSectionGetDof(vertSection, v, &vwgt[v]);
  }

  /* Calculate partition weights */
  if (targetSection) {
    PetscInt sumw;

    PetscCalloc1(nparts, &tpwgts);
    for (p = 0, sumw = 0; p < nparts; ++p) {
      PetscSectionGetDof(targetSection, p, &tpwgts[p]);
      sumw += tpwgts[p];
    }
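    /* If every target weight is zero, drop the array so PT-Scotch balances parts uniformly */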
    if (!sumw) PetscFree(tpwgts);
  }

  {
    PetscPartitioner_PTScotch *pts   = (PetscPartitioner_PTScotch *)part->data;
    int                        strat = PTScotch_Strategy(pts->strategy);
    double                     imbal = (double)pts->imbalance;

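    /* Find the first rank that owns vertices; if it owns them all, partition sequentially on that rank */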
    for (p = 0; !vtxdist[p + 1] && p < size; ++p)
      ;
    if (vtxdist[p + 1] == vtxdist[size]) {
      if (rank == p) PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);
    } else {
      MPI_Comm pcomm = pts->pcomm;

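      /* PT-Scotch is called only on ranks that own vertices: split off a subcommunicator and
         compact vtxdist so that it describes the nonempty ranks alone */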
      if (hasempty) {
        PetscInt cnt;

        MPI_Comm_split(pts->pcomm, !!nvtxs, rank, &pcomm);
        for (p = 0, cnt = 0; p < size; p++) {
          if (vtxdist[p + 1] != vtxdist[p]) {
            vtxdist[cnt + 1] = vtxdist[p + 1];
            cnt++;
          }
        }
      }
      if (nvtxs) PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);
      if (hasempty) MPI_Comm_free(&pcomm);
    }
  }
  PetscFree(vwgt);
  PetscFree(tpwgts);

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) PetscSectionAddDof(partSection, assignment[v], 1);
  PetscMalloc1(nvtxs, &points);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
  ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);

  PetscFree2(vtxdist, assignment);
  return 0;
#else
  SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
#endif
}

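/* Fill the PetscPartitioner function table; noGraph stays PETSC_FALSE since this partitioner consumes the adjacency graph */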
static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
{
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_PTScotch;
  part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
  part->ops->partition      = PetscPartitionerPartition_PTScotch;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
  return 0;
}

/*MC
  PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

  Level: intermediate

  Options Database Keys:
+  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
-  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio

  Note:
  When the graph is on a single process, this partitioner actually uses the sequential Scotch library and not PT-Scotch.

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/

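/* A minimal usage sketch (not part of this file): selecting and tuning this partitioner from
   application code before partitioning; error checking omitted for brevity.

     PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);
     PetscOptionsSetValue(NULL, "-petscpartitioner_ptscotch_strategy", "quality");
     PetscOptionsSetValue(NULL, "-petscpartitioner_ptscotch_imbalance", "0.05");
     PetscPartitionerSetFromOptions(part);
*/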
PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p;

  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscNew(&p);
  part->data = p;

  MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm);
  p->strategy  = 0;
  p->imbalance = 0.01;

  PetscPartitionerInitialize_PTScotch(part);
  PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionerCite);
  return 0;
}