Actual source code: partsimple.c


#include <petscvec.h>
#include <petsc/private/partitionerimpl.h>

typedef struct {
  PetscBool useGrid;        /* Flag to use a grid layout */
  PetscInt  gridDim;        /* The grid dimension */
  PetscInt  nodeGrid[3];    /* Dimension of node grid */
  PetscInt  processGrid[3]; /* Dimension of local process grid on each node */
} PetscPartitioner_Simple;
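
/* The simple partitioner works in one of two modes: by default it assigns nearly equal contiguous
   blocks of vertices to the parts (PetscPartitionerPartition_Simple() below); when a node and/or
   process grid is selected through the options, it instead assigns the cells of a lexicographically
   numbered grid to a grid of nodes and processes (PetscPartitionerPartition_Simple_Grid()).
   gridDim < 0 means the grid dimension has not been set yet; it is taken from the length of the
   first grid option that is given. */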

static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscFree(part->data);
  return 0;
}

static PetscErrorCode PetscPartitionerView_Simple_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  return 0;
}

static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) PetscPartitionerView_Simple_ASCII(part, viewer);
  return 0;
}

static PetscErrorCode PetscPartitionerSetFromOptions_Simple(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  PetscInt                 num, i;
  PetscBool                flg;

  for (i = 0; i < 3; ++i) p->processGrid[i] = p->nodeGrid[i] = 1;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner Simple Options");
  num = 3;
  PetscOptionsIntArray("-petscpartitioner_simple_node_grid", "Number of nodes in each dimension", "", p->nodeGrid, &num, &flg);
  if (flg) {
    p->useGrid = PETSC_TRUE;
    p->gridDim = num;
  }
  num = 3;
  PetscOptionsIntArray("-petscpartitioner_simple_process_grid", "Number of local processes in each dimension for a given node", "", p->processGrid, &num, &flg);
  if (flg) {
    p->useGrid = PETSC_TRUE;
    if (p->gridDim < 0) p->gridDim = num;
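    /* Added sanity check (an assumption, not part of the original options handling): if both grid
       options are given, their dimensions must agree */
    PetscCheck(p->gridDim == num, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_INCOMP, "Process grid dimension %" PetscInt_FMT " != node grid dimension %" PetscInt_FMT, num, p->gridDim);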
  }
  PetscOptionsHeadEnd();
  return 0;
}
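
/* Example (illustrative values, not taken from this file): to partition into 12 parts laid out as a
   2 x 3 grid of nodes with 2 local processes per node in the first dimension, one could pass

     -petscpartitioner_simple_node_grid 2,3 -petscpartitioner_simple_process_grid 2,1

   so that the grid accounts for (2*2) * (3*1) = 12 parts. */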

static PetscErrorCode PetscPartitionerPartition_Simple_Grid(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p     = (PetscPartitioner_Simple *)part->data;
  const PetscInt          *nodes = p->nodeGrid;
  const PetscInt          *procs = p->processGrid;
  PetscInt                *cellproc, *offsets, cells[3] = {1, 1, 1}, pcells[3] = {1, 1, 1};
  PetscInt                 Np = 1, Nr, np, nk, nj, ni, pk, pj, pi, ck, cj, ci, i;
  MPI_Comm                 comm;
  PetscMPIInt              size;

  if (vertSection) PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights when using grid partition\n");
  if (targetSection) PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores partition weights when using grid partition\n");
  PetscObjectGetComm((PetscObject)part, &comm);
  MPI_Comm_size(comm, &size);
  /* Check grid */
  for (i = 0; i < 3; ++i) Np *= nodes[i] * procs[i];
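  /* Sanity check (an added assumption): the loops below use each grid position as a partition id to
     index offsets[], so the node/process grid must account for exactly nparts parts */
  PetscCheck(nparts == Np, comm, PETSC_ERR_ARG_SIZ, "Number of parts %" PetscInt_FMT " != grid size %" PetscInt_FMT, nparts, Np);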
  for (i = 0; i < p->gridDim; ++i) cells[i] = nodes[i] * procs[i];
  Nr = numVertices / nparts;
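  /* Starting from one cell per process in each grid dimension, double every dimension until the cell
     grid accounts for the numVertices / nparts cells owned by each part (this assumes that ratio is a
     power of 2^gridDim) */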
  while (Nr > 1) {
    for (i = 0; i < p->gridDim; ++i) {
      cells[i] *= 2;
      Nr /= 2;
    }
  }
  for (i = 0; i < p->gridDim; ++i) {
    pcells[i] = cells[i] / (nodes[i] * procs[i]);
  }
  /* Compute sizes */
  for (np = 0; np < nparts; ++np) PetscSectionSetDof(partSection, np, numVertices / nparts);
  PetscSectionSetUp(partSection);
  PetscCalloc1(nparts, &offsets);
  for (np = 0; np < nparts; ++np) PetscSectionGetOffset(partSection, np, &offsets[np]);
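  /* offsets[np] is the next free slot for part np in the cellproc array filled below */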
  if (!numVertices) pcells[0] = pcells[1] = pcells[2] = 0;
  /* Compute partition */
  PetscMalloc1(numVertices, &cellproc);
  for (nk = 0; nk < nodes[2]; ++nk) {
    for (nj = 0; nj < nodes[1]; ++nj) {
      for (ni = 0; ni < nodes[0]; ++ni) {
        const PetscInt nid = (nk * nodes[1] + nj) * nodes[0] + ni;

        for (pk = 0; pk < procs[2]; ++pk) {
          for (pj = 0; pj < procs[1]; ++pj) {
            for (pi = 0; pi < procs[0]; ++pi) {
              const PetscInt pid = ((nid * procs[2] + pk) * procs[1] + pj) * procs[0] + pi;

              /* Assume that cells are originally numbered lexicographically */
              for (ck = 0; ck < pcells[2]; ++ck) {
                for (cj = 0; cj < pcells[1]; ++cj) {
                  for (ci = 0; ci < pcells[0]; ++ci) {
                    const PetscInt cid = (((nk * procs[2] + pk) * pcells[2] + ck) * cells[1] + ((nj * procs[1] + pj) * pcells[1] + cj)) * cells[0] + (ni * procs[0] + pi) * pcells[0] + ci;

                    cellproc[offsets[pid]++] = cid;
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  PetscFree(offsets);
  ISCreateGeneral(PETSC_COMM_SELF, numVertices, cellproc, PETSC_OWN_POINTER, partition);
  return 0;
}

static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  MPI_Comm                 comm;
  PetscInt                 np, *tpwgts = NULL, sumw = 0, numVerticesGlobal = 0;
  PetscMPIInt              size;

  if (p->useGrid) {
    PetscPartitionerPartition_Simple_Grid(part, nparts, numVertices, start, adjacency, vertSection, targetSection, partSection, partition);
    return 0;
  }
  if (vertSection) PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights\n");
  PetscObjectGetComm((PetscObject)part, &comm);
  MPI_Comm_size(comm, &size);
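  /* When target partition weights are given, scale them so they sum to the global number of vertices;
     any rounding deficit from the integer division is added to the largest part */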
  if (targetSection) {
    MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm);
    PetscCalloc1(nparts, &tpwgts);
    for (np = 0; np < nparts; ++np) {
      PetscSectionGetDof(targetSection, np, &tpwgts[np]);
      sumw += tpwgts[np];
    }
    if (sumw) {
      PetscInt m, mp;
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np] * numVerticesGlobal) / sumw;
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) {
          m  = tpwgts[np];
          mp = np;
        }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
    if (!sumw) PetscFree(tpwgts);
  }

  /* The partition IS lists the local vertices in their original order; partSection records how many
     consecutive vertices go to each part */
  ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) PetscSectionSetDof(partSection, np, tpwgts[np]);
    } else {
      for (np = 0; np < nparts; ++np) PetscSectionSetDof(partSection, np, numVertices / nparts + ((numVertices % nparts) > np));
    }
  } else {
    if (tpwgts) {
      Vec          v;
      PetscScalar *array;
      PetscInt     st, j;
      PetscMPIInt  rank;

      VecCreate(comm, &v);
      VecSetSizes(v, numVertices, numVerticesGlobal);
      VecSetType(v, VECSTANDARD);
      MPI_Comm_rank(comm, &rank);
      for (np = 0, st = 0; np < nparts; ++np) {
        if (rank == np || (rank == size - 1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) VecSetValue(v, st + j, np, INSERT_VALUES);
        }
        st += tpwgts[np];
      }
      VecAssemblyBegin(v);
      VecAssemblyEnd(v);
      VecGetArray(v, &array);
      for (j = 0; j < numVertices; ++j) PetscSectionAddDof(partSection, (PetscInt)PetscRealPart(array[j]), 1); /* array[j] holds the target part number of local vertex j */
      VecRestoreArray(v, &array);
      VecDestroy(&v);
    } else {
      PetscMPIInt rank;
      PetscInt    nvGlobal, *offsets, myFirst, myLast;

      /* Each rank owns the contiguous global range [myFirst, myLast] of vertices; below we find which of
         the nparts nearly equal chunks (the first nvGlobal % nparts chunks get one extra vertex) overlap
         that range and record the overlap sizes in partSection */
      PetscMalloc1(size + 1, &offsets);
      offsets[0] = 0;
      MPI_Allgather(&numVertices, 1, MPIU_INT, &offsets[1], 1, MPIU_INT, comm);
      for (np = 2; np <= size; np++) offsets[np] += offsets[np - 1];
      nvGlobal = offsets[size];
      MPI_Comm_rank(comm, &rank);
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      PetscFree(offsets);
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem    = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal / nparts;
        PetscInt pBig   = nvGlobal / nparts + 1;

        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal / nparts);
          lastPart  = myLast / (nvGlobal / nparts);
        }

        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart = np * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np);
          PetscInt PartEnd   = (np + 1) * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np + 1);

          PartStart = PetscMax(PartStart, myFirst);
          PartEnd   = PetscMin(PartEnd, myLast + 1);
          PetscSectionSetDof(partSection, np, PartEnd - PartStart);
        }
      }
    }
  }
  PetscFree(tpwgts);
  return 0;
}

static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  part->noGraph             = PETSC_TRUE;
  part->ops->view           = PetscPartitionerView_Simple;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Simple;
  part->ops->destroy        = PetscPartitionerDestroy_Simple;
  part->ops->partition      = PetscPartitionerPartition_Simple;
  return 0;
}

/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
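
  Options Database Keys:
+ -petscpartitioner_simple_node_grid <n0,n1,n2>    - Number of nodes in each dimension
- -petscpartitioner_simple_process_grid <n0,n1,n2> - Number of local processes in each dimension for a given node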

  Level: intermediate

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;

  PetscNew(&p);
  p->gridDim = -1;
  part->data = p;

  PetscPartitionerInitialize_Simple(part);
  return 0;
}
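
/*
  Minimal usage sketch (not part of this file; the calls below are the generic PetscPartitioner API):

    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    PetscPartitionerSetFromOptions(part);   // picks up -petscpartitioner_simple_node_grid etc.
    // ... hand the partitioner to a mesh, e.g. with DMPlexSetPartitioner(dm, part) ...
    PetscPartitionerDestroy(&part);
*/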