Actual source code: plexcheckinterface.c

#include <petsc/private/dmpleximpl.h>

/* TODO PetscArrayExchangeBegin/End */
/* TODO blocksize */
/* TODO move to API ? */
static PetscErrorCode ExchangeArrayByRank_Private(PetscObject obj, MPI_Datatype dt, PetscInt nsranks, const PetscMPIInt sranks[], PetscInt ssize[], const void *sarr[], PetscInt nrranks, const PetscMPIInt rranks[], PetscInt *rsize_out[], void **rarr_out[])
{
  PetscInt     r;
  PetscInt    *rsize;
  void       **rarr;
  MPI_Request *sreq, *rreq;
  PetscMPIInt  tag, unitsize;
  MPI_Comm     comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Type_size(dt, &unitsize));
  PetscCall(PetscObjectGetComm(obj, &comm));
  PetscCall(PetscMalloc2(nrranks, &rsize, nrranks, &rarr));
  PetscCall(PetscMalloc2(nrranks, &rreq, nsranks, &sreq));
  /* exchange array sizes first so that receive buffers can be allocated */
  PetscCall(PetscObjectGetNewTag(obj, &tag));
  for (r = 0; r < nrranks; r++) PetscCallMPI(MPI_Irecv(&rsize[r], 1, MPIU_INT, rranks[r], tag, comm, &rreq[r]));
  for (r = 0; r < nsranks; r++) PetscCallMPI(MPI_Isend(&ssize[r], 1, MPIU_INT, sranks[r], tag, comm, &sreq[r]));
  PetscCallMPI(MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE));
  PetscCallMPI(MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE));
  /* then exchange the arrays themselves */
  PetscCall(PetscObjectGetNewTag(obj, &tag));
  for (r = 0; r < nrranks; r++) {
    PetscCall(PetscMalloc(rsize[r] * unitsize, &rarr[r]));
    PetscCallMPI(MPI_Irecv(rarr[r], rsize[r], dt, rranks[r], tag, comm, &rreq[r]));
  }
  for (r = 0; r < nsranks; r++) PetscCallMPI(MPI_Isend(sarr[r], ssize[r], dt, sranks[r], tag, comm, &sreq[r]));
  PetscCallMPI(MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE));
  PetscCallMPI(MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(rreq, sreq));
  *rsize_out = rsize;
  *rarr_out  = rarr;
  PetscFunctionReturn(PETSC_SUCCESS);
}
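
/*
  A minimal usage sketch (hypothetical neighbor lists and buffers, not part of the build):
  sizes are exchanged first, so the receiving side need not know the incoming lengths
  up front.

    PetscMPIInt  sranks[2] = {1, 3}, rranks[1] = {0};
    PetscInt     ssize[2]  = {4, 2};                lengths of buf0, buf1
    const void  *sarr[2]   = {buf0, buf1};          arrays of PetscInt to send
    PetscInt    *rsize;
    void       **rarr;

    PetscCall(ExchangeArrayByRank_Private((PetscObject)sf, MPIU_INT, 2, sranks, ssize, sarr, 1, rranks, &rsize, &rarr));

  Afterwards rarr[0] holds the rsize[0] values sent by rank 0; the caller frees each
  rarr[r] and then rsize/rarr with PetscFree2().
*/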

/* TODO VecExchangeBegin/End */
/* TODO move to API ? */
static PetscErrorCode ExchangeVecByRank_Private(PetscObject obj, PetscInt nsranks, const PetscMPIInt sranks[], Vec svecs[], PetscInt nrranks, const PetscMPIInt rranks[], Vec *rvecs[])
{
  PetscInt            r;
  PetscInt           *ssize, *rsize;
  PetscScalar       **rarr;
  const PetscScalar **sarr;
  Vec                *rvecs_;
  MPI_Request        *sreq, *rreq;

  PetscFunctionBegin;
  PetscCall(PetscMalloc4(nsranks, &ssize, nsranks, &sarr, nrranks, &rreq, nsranks, &sreq));
  for (r = 0; r < nsranks; r++) {
    PetscCall(VecGetLocalSize(svecs[r], &ssize[r]));
    PetscCall(VecGetArrayRead(svecs[r], &sarr[r]));
  }
  PetscCall(ExchangeArrayByRank_Private(obj, MPIU_SCALAR, nsranks, sranks, ssize, (const void **)sarr, nrranks, rranks, &rsize, (void ***)&rarr));
  PetscCall(PetscMalloc1(nrranks, &rvecs_));
  for (r = 0; r < nrranks; r++) {
    /* set the array in two steps: create with a NULL array, then VecReplaceArray() so the Vec takes ownership, mimicking PETSC_OWN_POINTER */
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, rsize[r], NULL, &rvecs_[r]));
    PetscCall(VecReplaceArray(rvecs_[r], rarr[r]));
  }
  for (r = 0; r < nsranks; r++) PetscCall(VecRestoreArrayRead(svecs[r], &sarr[r]));
  PetscCall(PetscFree2(rsize, rarr));
  PetscCall(PetscFree4(ssize, sarr, rreq, sreq));
  *rvecs = rvecs_;
  PetscFunctionReturn(PETSC_SUCCESS);
}
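
/*
  Usage sketch (hypothetical data): each sequential Vec in svecs[] travels to the
  matching rank in sranks[], and the arrays received from rranks[] come back wrapped
  in new sequential Vecs without an extra copy.

    Vec svecs[1], *rvecs;

    PetscCall(VecCreateSeq(PETSC_COMM_SELF, 6, &svecs[0]));
    ... fill svecs[0] ...
    PetscCall(ExchangeVecByRank_Private((PetscObject)sf, 1, sranks, svecs, nrranks, rranks, &rvecs));

  The caller destroys each rvecs[r] and frees rvecs with PetscFree().
*/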

static PetscErrorCode SortByRemote_Private(PetscSF sf, PetscInt *rmine1[], PetscInt *rremote1[])
{
  PetscInt           nleaves;
  PetscInt           nranks;
  const PetscMPIInt *ranks;
  const PetscInt    *roffset, *rmine, *rremote;
  PetscInt           n, o, r;

  PetscFunctionBegin;
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote));
  nleaves = roffset[nranks];
  PetscCall(PetscMalloc2(nleaves, rmine1, nleaves, rremote1));
  for (r = 0; r < nranks; r++) {
    /* simultaneously sort rank-wise portions of rmine & rremote by values in rremote
       - to unify order with the other side */
    o = roffset[r];
    n = roffset[r + 1] - o;
    PetscCall(PetscArraycpy(&(*rmine1)[o], &rmine[o], n));
    PetscCall(PetscArraycpy(&(*rremote1)[o], &rremote[o], n));
    PetscCall(PetscSortIntWithArray(n, &(*rremote1)[o], &(*rmine1)[o]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
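
/*
  Illustration with made-up data for a single root rank: if the rank-local slices are
    rmine   = {4, 2, 7}
    rremote = {9, 3, 6}
  then sorting by rremote yields
    rmine1   = {2, 7, 4}
    rremote1 = {3, 6, 9}
  so both sides of the SF enumerate the shared points in the same order.
*/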

static PetscErrorCode GetRecursiveConeCoordinatesPerRank_Private(DM dm, PetscSF sf, PetscInt rmine[], Vec *coordinatesPerRank[])
{
  IS                 pointsPerRank, conesPerRank;
  PetscInt           nranks;
  const PetscMPIInt *ranks;
  const PetscInt    *roffset;
  PetscInt           n, o, r;

  PetscFunctionBegin;
  PetscCall(DMGetCoordinatesLocalSetUp(dm));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, NULL, NULL));
  PetscCall(PetscMalloc1(nranks, coordinatesPerRank));
  for (r = 0; r < nranks; r++) {
    o = roffset[r];
    n = roffset[r + 1] - o;
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, &rmine[o], PETSC_USE_POINTER, &pointsPerRank));
    PetscCall(DMPlexGetConeRecursiveVertices(dm, pointsPerRank, &conesPerRank));
    PetscCall(DMGetCoordinatesLocalTuple(dm, conesPerRank, NULL, &(*coordinatesPerRank)[r]));
    PetscCall(ISDestroy(&pointsPerRank));
    PetscCall(ISDestroy(&conesPerRank));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
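
/*
  For instance (hypothetical mesh), if the points shared with rank r are two edges,
  DMPlexGetConeRecursiveVertices() expands them to their end vertices, and
  (*coordinatesPerRank)[r] concatenates those vertices' coordinates in the order fixed
  by rmine[]. Comparing such Vecs makes the interface check coordinate-based rather
  than local-index-based.
*/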

static PetscErrorCode PetscSFComputeMultiRootOriginalNumberingByRank_Private(PetscSF sf, PetscSF imsf, PetscInt *irmine1[])
{
  PetscInt       *mRootsOrigNumbering;
  PetscInt        nileaves, niranks;
  const PetscInt *iroffset, *irmine, *degree;
  PetscInt        i, n, o, r;

  PetscFunctionBegin;
  PetscCall(PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL));
  PetscCall(PetscSFGetRootRanks(imsf, &niranks, NULL, &iroffset, &irmine, NULL));
  PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
  PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
  PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, degree, NULL, &mRootsOrigNumbering));
  PetscCall(PetscMalloc1(nileaves, irmine1));
  for (r = 0; r < niranks; r++) {
    o = iroffset[r];
    n = iroffset[r + 1] - o;
    for (i = 0; i < n; i++) (*irmine1)[o + i] = mRootsOrigNumbering[irmine[o + i]];
  }
  PetscCall(PetscFree(mRootsOrigNumbering));
  PetscFunctionReturn(PETSC_SUCCESS);
}
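
/*
  Illustration with made-up degrees: if root point 5 has degree 2 and root point 8 has
  degree 1, they occupy three consecutive multi-root slots, and
  PetscSFComputeMultiRootOriginalNumbering() maps those slots back as {5, 5, 8}. The
  loop above then translates the inverse-SF leaf indices irmine[] into this original
  point numbering, rank by rank, ready to be passed to
  GetRecursiveConeCoordinatesPerRank_Private().
*/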

/*@
  DMPlexCheckInterfaceCones - Check that points on inter-partition interfaces have conforming order of cone points.

  Input Parameter:
. dm - The `DMPLEX` object

  Level: developer

  Notes:
  For example, if there is an edge (rank,index)=(0,2) connecting points cone(0,2)=[(0,0),(0,1)] in this order, and the point SF contains connections 0 <- (1,0), 1 <- (1,1) and 2 <- (1,2),
  then this check passes if the edge (1,2) has cone(1,2)=[(1,0),(1,1)]. By contrast, if cone(1,2)=[(1,1),(1,0)], the check fails.

  This is mainly intended for debugging/testing purposes. It does not check cone orientation; for that purpose use `DMPlexCheckFaces()`.

  For the complete list of DMPlexCheck* functions, see `DMSetFromOptions()`.
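
  A typical use is a one-off sanity check after mesh distribution. A minimal sketch (the distribution step is illustrative; any parallel `DMPLEX` with a point SF works, and the same check can also be activated from the options database via `DMSetFromOptions()`):
.vb
  DM dmDist;

  PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));
  PetscCall(DMPlexCheckInterfaceCones(dmDist));
.ve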

  Developer Note:
  Interface cones are expanded into vertices and then their coordinates are compared.

.seealso: [](chapter_unstructured), `DM`, `DMPLEX`, `DMPlexGetCone()`, `DMPlexGetConeSize()`, `DMGetPointSF()`, `DMGetCoordinates()`, `DMSetFromOptions()`
@*/
PetscErrorCode DMPlexCheckInterfaceCones(DM dm)
{
  PetscSF            sf;
  PetscInt           nleaves, nranks, nroots;
  const PetscInt    *mine, *roffset, *rmine, *rremote;
  const PetscSFNode *remote;
  const PetscMPIInt *ranks;
  PetscSF            msf, imsf;
  PetscInt           nileaves, niranks;
  const PetscMPIInt *iranks;
  const PetscInt    *iroffset, *irmine, *irremote;
  PetscInt          *rmine1, *rremote1; /* rmine and rremote copies simultaneously sorted by rank and rremote */
  PetscInt          *mine_orig_numbering;
  Vec               *sntCoordinatesPerRank;
  Vec               *refCoordinatesPerRank;
  Vec               *recCoordinatesPerRank = NULL;
  PetscInt           r;
  PetscMPIInt        commsize, myrank;
  PetscBool          same;
  PetscBool          verbose = PETSC_FALSE;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &myrank));
  PetscCallMPI(MPI_Comm_size(comm, &commsize));
  if (commsize < 2) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMGetPointSF(dm, &sf));
  if (!sf) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, &mine, &remote));
  if (nroots < 0) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote));

  /* Expand sent cones per rank */
  PetscCall(SortByRemote_Private(sf, &rmine1, &rremote1));
  PetscCall(GetRecursiveConeCoordinatesPerRank_Private(dm, sf, rmine1, &sntCoordinatesPerRank));

  /* Create inverse SF */
  PetscCall(PetscSFGetMultiSF(sf, &msf));
  PetscCall(PetscSFCreateInverseSF(msf, &imsf));
  PetscCall(PetscSFSetUp(imsf));
  PetscCall(PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL));
  PetscCall(PetscSFGetRootRanks(imsf, &niranks, &iranks, &iroffset, &irmine, &irremote));

  /* Compute original numbering of multi-roots (referenced points) */
  PetscCall(PetscSFComputeMultiRootOriginalNumberingByRank_Private(sf, imsf, &mine_orig_numbering));

  /* Expand coordinates of the referred cones per rank */
  PetscCall(GetRecursiveConeCoordinatesPerRank_Private(dm, imsf, mine_orig_numbering, &refCoordinatesPerRank));

  /* Send the coordinates */
  PetscCall(ExchangeVecByRank_Private((PetscObject)sf, nranks, ranks, sntCoordinatesPerRank, niranks, iranks, &recCoordinatesPerRank));

  /* verbose output */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dm_plex_check_cones_conform_on_interfaces_verbose", &verbose, NULL));
  if (verbose) {
    PetscViewer sv, v = PETSC_VIEWER_STDOUT_WORLD;

    PetscCall(PetscViewerASCIIPrintf(v, "============\nDMPlexCheckInterfaceCones output\n============\n"));
    PetscCall(PetscViewerASCIIPushSynchronized(v));
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", myrank));
    for (r = 0; r < nranks; r++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " ranks[r]=%d sntCoordinatesPerRank[r]:\n", r, ranks[r]));
      PetscCall(PetscViewerASCIIPushTab(v));
      PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(VecView(sntCoordinatesPerRank[r], sv));
      PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(PetscViewerASCIIPopTab(v));
    }
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  ----------\n"));
    for (r = 0; r < niranks; r++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " iranks[r]=%d refCoordinatesPerRank[r]:\n", r, iranks[r]));
      PetscCall(PetscViewerASCIIPushTab(v));
      PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(VecView(refCoordinatesPerRank[r], sv));
      PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(PetscViewerASCIIPopTab(v));
    }
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  ----------\n"));
    for (r = 0; r < niranks; r++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " iranks[r]=%d recCoordinatesPerRank[r]:\n", r, iranks[r]));
      PetscCall(PetscViewerASCIIPushTab(v));
      PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(VecView(recCoordinatesPerRank[r], sv));
      PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(PetscViewerASCIIPopTab(v));
    }
    PetscCall(PetscViewerFlush(v));
    PetscCall(PetscViewerASCIIPopSynchronized(v));
  }

  /* Compare recCoordinatesPerRank with refCoordinatesPerRank; raise an error on the first mismatch */
  for (r = 0; r < niranks; r++) {
    PetscCall(VecEqual(refCoordinatesPerRank[r], recCoordinatesPerRank[r], &same));
    PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_PLIB, "interface cones do not conform for remote rank %d", iranks[r]);
  }

  /* destroy sent stuff */
  for (r = 0; r < nranks; r++) PetscCall(VecDestroy(&sntCoordinatesPerRank[r]));
  PetscCall(PetscFree(sntCoordinatesPerRank));
  PetscCall(PetscFree2(rmine1, rremote1));
  PetscCall(PetscSFDestroy(&imsf));

  /* destroy referenced stuff */
  for (r = 0; r < niranks; r++) PetscCall(VecDestroy(&refCoordinatesPerRank[r]));
  PetscCall(PetscFree(refCoordinatesPerRank));
  PetscCall(PetscFree(mine_orig_numbering));

  /* destroy received stuff */
  for (r = 0; r < niranks; r++) PetscCall(VecDestroy(&recCoordinatesPerRank[r]));
  PetscCall(PetscFree(recCoordinatesPerRank));
  PetscFunctionReturn(PETSC_SUCCESS);
}