Actual source code: partparmetis.c

  1: #include <petsc/private/partitionerimpl.h>

  3: #if defined(PETSC_HAVE_PARMETIS)
  4:   #include <parmetis.h>
  5: #endif

  7: PetscBool  ParMetisPartitionerCite       = PETSC_FALSE;
  8: const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
  9:                                            "  author  = {George Karypis and Vipin Kumar},\n"
 10:                                            "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
 11:                                            "  journal = {Journal of Parallel and Distributed Computing},\n"
 12:                                            "  volume  = {48},\n"
 13:                                            "  pages   = {71--85},\n"
 14:                                            "  year    = {1998},\n"
 15:                                            "  doi     = {https://doi.org/10.1006/jpdc.1997.1403}\n"
 16:                                            "}\n";

 18: typedef struct {
 19:   MPI_Comm  pcomm;
 20:   PetscInt  ptype;
 21:   PetscReal imbalanceRatio;
 22:   PetscInt  debugFlag;
 23:   PetscInt  randomSeed;
 24: } PetscPartitioner_ParMetis;

 26: static const char *ptypes[] = {"kway", "rb"};

 28: static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
 29: {
 30:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

 32:   MPI_Comm_free(&p->pcomm);
 33:   PetscFree(part->data);
 34:   return 0;
 35: }

 37: static PetscErrorCode PetscPartitionerView_ParMetis_ASCII(PetscPartitioner part, PetscViewer viewer)
 38: {
 39:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

 41:   PetscViewerASCIIPushTab(viewer);
 42:   PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);
 43:   PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double)p->imbalanceRatio);
 44:   PetscViewerASCIIPrintf(viewer, "debug flag %" PetscInt_FMT "\n", p->debugFlag);
 45:   PetscViewerASCIIPrintf(viewer, "random seed %" PetscInt_FMT "\n", p->randomSeed);
 46:   PetscViewerASCIIPopTab(viewer);
 47:   return 0;
 48: }

 50: static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
 51: {
 52:   PetscBool iascii;

 56:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
 57:   if (iascii) PetscPartitionerView_ParMetis_ASCII(part, viewer);
 58:   return 0;
 59: }

 61: static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
 62: {
 63:   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

 65:   PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner ParMetis Options");
 66:   PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);
 67:   PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);
 68:   PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);
 69:   PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);
 70:   PetscOptionsHeadEnd();
 71:   return 0;
 72: }

 74: static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
 75: {
 76: #if defined(PETSC_HAVE_PARMETIS)
 77:   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *)part->data;
 78:   MPI_Comm                   comm;
 79:   PetscInt                   nvtxs = numVertices;     /* The number of vertices in full graph */
 80:   PetscInt                  *vtxdist;                 /* Distribution of vertices across processes */
 81:   PetscInt                  *xadj        = start;     /* Start of edge list for each vertex */
 82:   PetscInt                  *adjncy      = adjacency; /* Edge lists for all vertices */
 83:   PetscInt                  *vwgt        = NULL;      /* Vertex weights */
 84:   PetscInt                  *adjwgt      = NULL;      /* Edge weights */
 85:   PetscInt                   wgtflag     = 0;         /* Indicates which weights are present */
 86:   PetscInt                   numflag     = 0;         /* Indicates initial offset (0 or 1) */
 87:   PetscInt                   ncon        = 1;         /* The number of weights per vertex */
 88:   PetscInt                   metis_ptype = pm->ptype; /* kway or recursive bisection */
 89:   real_t                    *tpwgts;                  /* The fraction of vertex weights assigned to each partition */
 90:   real_t                    *ubvec;                   /* The imbalance tolerance for each vertex weight */
 91:   PetscInt                   options[64];             /* Options */
 92:   PetscInt                   v, i, *assignment, *points;
 93:   PetscMPIInt                p, size, rank;
 94:   PetscBool                  hasempty = PETSC_FALSE;

 96:   PetscObjectGetComm((PetscObject)part, &comm);
 97:   MPI_Comm_size(comm, &size);
 98:   MPI_Comm_rank(comm, &rank);
 99:   /* Calculate vertex distribution */
100:   PetscMalloc4(size + 1, &vtxdist, nparts * ncon, &tpwgts, ncon, &ubvec, nvtxs, &assignment);
101:   vtxdist[0] = 0;
102:   MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
103:   for (p = 2; p <= size; ++p) {
104:     hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]);
105:     vtxdist[p] += vtxdist[p - 1];
106:   }
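       /* For example: with 3 ranks owning {4, 0, 5} vertices, MPI_Allgather fills
          vtxdist[1..3] = {4, 0, 5} and the prefix sum above turns it into vtxdist = {0, 4, 4, 9};
          the repeated entry marks rank 1 as empty, so hasempty becomes PETSC_TRUE. */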
107:   /* null graph */
108:   if (vtxdist[size] == 0) {
109:     PetscFree4(vtxdist, tpwgts, ubvec, assignment);
110:     ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
111:     return 0;
112:   }
113:   /* Calculate partition weights */
114:   if (targetSection) {
115:     PetscInt p;
116:     real_t   sumt = 0.0;

118:     for (p = 0; p < nparts; ++p) {
119:       PetscInt tpd;

121:       PetscSectionGetDof(targetSection, p, &tpd);
122:       sumt += tpd;
123:       tpwgts[p] = tpd;
124:     }
125:     if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
126:       for (p = 0, sumt = 0.0; p < nparts; ++p) {
127:         tpwgts[p] = PetscMax(tpwgts[p], PETSC_SMALL);
128:         sumt += tpwgts[p];
129:       }
130:       for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
131:       for (p = 0, sumt = 0.0; p < nparts - 1; ++p) sumt += tpwgts[p];
132:       tpwgts[nparts - 1] = 1. - sumt;
133:     }
134:   } else {
135:     for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0 / nparts;
136:   }
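       /* For example: target dofs {2, 0, 3} give sumt = 5; the zero entry is bumped to PETSC_SMALL,
          the weights are normalized to roughly {0.4, ~0, 0.6}, and the last entry is recomputed as
          1 minus the others so that the fractions sum to exactly 1, as ParMETIS expects. */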
137:   ubvec[0] = pm->imbalanceRatio;

139:   /* Weight cells */
140:   if (vertSection) {
141:     PetscMalloc1(nvtxs, &vwgt);
142:     for (v = 0; v < nvtxs; ++v) PetscSectionGetDof(vertSection, v, &vwgt[v]);
143:     wgtflag |= 2; /* have weights on graph vertices */
144:   }
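       /* Per the ParMETIS manual, wgtflag encodes which weights are supplied:
          0 = none, 1 = edge weights (adjwgt) only, 2 = vertex weights (vwgt) only, 3 = both.
          adjwgt is always NULL here, so wgtflag ends up as either 0 or 2. */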

146:   for (p = 0; !vtxdist[p + 1] && p < size; ++p)
147:     ;
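       /* p is now the first rank that owns any vertices; if that rank also owns the last global
          vertex, the whole graph lives on a single process and serial METIS is called below
          instead of ParMETIS. */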
148:   if (vtxdist[p + 1] == vtxdist[size]) {
149:     if (rank == p) {
150:       int err;
151:       err                          = METIS_SetDefaultOptions(options); /* initialize all defaults */
152:       options[METIS_OPTION_DBGLVL] = pm->debugFlag;
153:       options[METIS_OPTION_SEED]   = pm->randomSeed;
154:       PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
155:       if (metis_ptype == 1) {
156:         PetscStackPushExternal("METIS_PartGraphRecursive");
157:         err = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
158:         PetscStackPop;
159:         PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
160:       } else {
161:         /*
162:          It would be nice to activate the two options below, but they would need some actual testing.
163:          - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
164:          - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in such a case.
165:         */
166:         /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
167:         /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
168:         PetscStackPushExternal("METIS_PartGraphKway");
169:         err = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
170:         PetscStackPop;
171:         PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
172:       }
173:     }
174:   } else {
175:     MPI_Comm pcomm = pm->pcomm;

177:     options[0] = 1; /* use options */
178:     options[1] = pm->debugFlag;
179:     options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

181:     if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
182:       PetscInt cnt;

184:       MPI_Comm_split(pm->pcomm, !!nvtxs, rank, &pcomm);
185:       for (p = 0, cnt = 0; p < size; p++) {
186:         if (vtxdist[p + 1] != vtxdist[p]) {
187:           vtxdist[cnt + 1] = vtxdist[p + 1];
188:           cnt++;
189:         }
190:       }
191:     }
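         /* For example: with vtxdist = {0, 4, 4, 9} (rank 1 empty), MPI_Comm_split above gives the
            non-empty ranks their own communicator and the loop compacts the distribution to
            vtxdist = {0, 4, 9}, which is what ParMETIS sees on that sub-communicator. */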
192:     if (nvtxs) {
193:       int err;
194:       PetscStackPushExternal("ParMETIS_V3_PartKway");
195:       err = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
196:       PetscStackPop;
197:       PetscCheck(err == METIS_OK, pcomm, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", err);
198:     }
199:     if (hasempty) MPI_Comm_free(&pcomm);
200:   }

202:   /* Convert to PetscSection+IS */
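       /* For example: with nvtxs = 4 and assignment = {1, 0, 1, 0}, partSection receives 2 dofs for
          each of parts 0 and 1, and the points array built below becomes {1, 3, 0, 2}: local vertex
          numbers grouped by the partition they were assigned to. */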
203:   for (v = 0; v < nvtxs; ++v) PetscSectionAddDof(partSection, assignment[v], 1);
204:   PetscMalloc1(nvtxs, &points);
205:   for (p = 0, i = 0; p < nparts; ++p) {
206:     for (v = 0; v < nvtxs; ++v) {
207:       if (assignment[v] == p) points[i++] = v;
208:     }
209:   }
210:   PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
211:   ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);
212:   PetscFree4(vtxdist, tpwgts, ubvec, assignment);
213:   PetscFree(vwgt);
214:   return 0;
215: #else
216:   SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
217: #endif
218: }

220: static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
221: {
222:   part->noGraph             = PETSC_FALSE;
223:   part->ops->view           = PetscPartitionerView_ParMetis;
224:   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
225:   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
226:   part->ops->partition      = PetscPartitionerPartition_ParMetis;
227:   return 0;
228: }

230: /*MC
231:   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library

233:   Level: intermediate

235:   Options Database Keys:
236: +  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
237: .  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
238: .  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
239: -  -petscpartitioner_parmetis_seed <int> - Random seed

241:   Note: When the graph resides on a single process, this partitioner calls METIS rather than ParMETIS.

243: .seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
244: M*/
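     /*
        A minimal usage sketch of this partitioner through the public PetscPartitioner API; the
        communicator and viewer choices below are only examples:

          PetscPartitioner part;

          PetscCall(PetscPartitionerCreate(PETSC_COMM_WORLD, &part));
          PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS));
          PetscCall(PetscPartitionerSetFromOptions(part)); // picks up the -petscpartitioner_parmetis_* options listed above
          PetscCall(PetscPartitionerView(part, PETSC_VIEWER_STDOUT_WORLD));
          PetscCall(PetscPartitionerDestroy(&part));

        When partitioning a DMPlex mesh, the partitioner is usually obtained with
        DMPlexGetPartitioner() and selected at run time with -petscpartitioner_type parmetis.
     */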

246: PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
247: {
248:   PetscPartitioner_ParMetis *p;

251:   PetscNew(&p);
252:   part->data = p;

254:   MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm);
255:   p->ptype          = 0;
256:   p->imbalanceRatio = 1.05;
257:   p->debugFlag      = 0;
258:   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */

260:   PetscPartitionerInitialize_ParMetis(part);
261:   PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionerCite);
262:   return 0;
263: }