Actual source code: da1.c


/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell   6/30/95
*/

#include <petsc/private/dmdaimpl.h>

#include <petscdraw.h>
static PetscErrorCode DMView_DA_1d(DM da, PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscBool   iascii, isdraw, isglvis, isbinary;
  DM_DA      *dd = (DM_DA *)da->data;
#if defined(PETSC_HAVE_MATLAB)
  PetscBool ismatlab;
#endif

  MPI_Comm_rank(PetscObjectComm((PetscObject)da), &rank);

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw);
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis);
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary);
#if defined(PETSC_HAVE_MATLAB)
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERMATLAB, &ismatlab);
#endif
  if (iascii) {
    PetscViewerFormat format;

    PetscViewerGetFormat(viewer, &format);
    if (format == PETSC_VIEWER_LOAD_BALANCE) {
      PetscInt      i, nmax = 0, nmin = PETSC_MAX_INT, navg = 0, *nz, nzlocal;
      DMDALocalInfo info;
      PetscMPIInt   size;
      MPI_Comm_size(PetscObjectComm((PetscObject)da), &size);
      DMDAGetLocalInfo(da, &info);
      nzlocal = info.xm;
      PetscMalloc1(size, &nz);
      MPI_Allgather(&nzlocal, 1, MPIU_INT, nz, 1, MPIU_INT, PetscObjectComm((PetscObject)da));
      for (i = 0; i < (PetscInt)size; i++) {
        nmax = PetscMax(nmax, nz[i]);
        nmin = PetscMin(nmin, nz[i]);
        navg += nz[i];
      }
      PetscFree(nz);
      navg = navg / size;
      PetscViewerASCIIPrintf(viewer, "  Load Balance - Grid Points: Min %" PetscInt_FMT "  avg %" PetscInt_FMT "  max %" PetscInt_FMT "\n", nmin, navg, nmax);
      return 0;
    }
    if (format != PETSC_VIEWER_ASCII_VTK_DEPRECATED && format != PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED && format != PETSC_VIEWER_ASCII_GLVIS) {
      DMDALocalInfo info;
      DMDAGetLocalInfo(da, &info);
      PetscViewerASCIIPushSynchronized(viewer);
      PetscViewerASCIISynchronizedPrintf(viewer, "Processor [%d] M %" PetscInt_FMT " m %" PetscInt_FMT " w %" PetscInt_FMT " s %" PetscInt_FMT "\n", rank, dd->M, dd->m, dd->w, dd->s);
      PetscViewerASCIISynchronizedPrintf(viewer, "X range of indices: %" PetscInt_FMT " %" PetscInt_FMT "\n", info.xs, info.xs + info.xm);
      PetscViewerFlush(viewer);
      PetscViewerASCIIPopSynchronized(viewer);
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) DMView_DA_GLVis(da, viewer);
    else DMView_DA_VTK(da, viewer);
  } else if (isdraw) {
    PetscDraw draw;
    double    ymin = -1, ymax = 1, xmin = -1, xmax = dd->M, x;
    PetscInt  base;
    char      node[10];
    PetscBool isnull;

    PetscViewerDrawGetDraw(viewer, 0, &draw);
    PetscDrawIsNull(draw, &isnull);
    if (isnull) return 0;

    PetscDrawCheckResizedWindow(draw);
    PetscDrawClear(draw);
    PetscDrawSetCoordinates(draw, xmin, ymin, xmax, ymax);

    PetscDrawCollectiveBegin(draw);
    /* first processor draws all node lines */
    if (rank == 0) {
      PetscInt xmin_tmp;
      ymin = 0.0;
      ymax = 0.3;
      for (xmin_tmp = 0; xmin_tmp < dd->M; xmin_tmp++) PetscDrawLine(draw, (double)xmin_tmp, ymin, (double)xmin_tmp, ymax, PETSC_DRAW_BLACK);
      xmin = 0.0;
      xmax = dd->M - 1;
      PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_BLACK);
      PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_BLACK);
    }
    PetscDrawCollectiveEnd(draw);
    PetscDrawFlush(draw);
    PetscDrawPause(draw);

    PetscDrawCollectiveBegin(draw);
    /* draw my box */
    ymin = 0;
    ymax = 0.3;
    xmin = dd->xs / dd->w;
    xmax = (dd->xe / dd->w) - 1;
    PetscDrawLine(draw, xmin, ymin, xmax, ymin, PETSC_DRAW_RED);
    PetscDrawLine(draw, xmin, ymin, xmin, ymax, PETSC_DRAW_RED);
    PetscDrawLine(draw, xmin, ymax, xmax, ymax, PETSC_DRAW_RED);
    PetscDrawLine(draw, xmax, ymin, xmax, ymax, PETSC_DRAW_RED);
    /* Put in index numbers */
    base = dd->base / dd->w;
    for (x = xmin; x <= xmax; x++) {
      PetscSNPrintf(node, sizeof(node), "%d", (int)base++);
      PetscDrawString(draw, x, ymin, PETSC_DRAW_RED, node);
    }
    PetscDrawCollectiveEnd(draw);
    PetscDrawFlush(draw);
    PetscDrawPause(draw);
    PetscDrawSave(draw);
  } else if (isglvis) {
    DMView_DA_GLVis(da, viewer);
  } else if (isbinary) {
    DMView_DA_Binary(da, viewer);
#if defined(PETSC_HAVE_MATLAB)
  } else if (ismatlab) {
    DMView_DA_Matlab(da, viewer);
#endif
  }
  return 0;
}
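
/*
   This viewer is reached through DMView(da, viewer). For example (an
   illustrative note, not part of the original source), the option
   -dm_view ::load_balance selects the PETSC_VIEWER_LOAD_BALANCE ASCII format
   handled above, while -dm_view draw selects the PETSCVIEWERDRAW path.
*/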

PetscErrorCode DMSetUp_DA_1D(DM da)
{
  DM_DA          *dd    = (DM_DA *)da->data;
  const PetscInt  M     = dd->M;
  const PetscInt  dof   = dd->w;
  const PetscInt  s     = dd->s;
  const PetscInt  sDist = s; /* stencil distance in points */
  const PetscInt *lx    = dd->lx;
  DMBoundaryType  bx    = dd->bx;
  MPI_Comm        comm;
  Vec             local, global;
  VecScatter      gtol;
  IS              to, from;
  PetscBool       flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt     rank, size;
  PetscInt        i, *idx, nn, left, xs, xe, x, Xs, Xe, start, m, IXs, IXe;

  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);

  dd->p = 1;
  dd->n = 1;
  dd->m = size;
  m     = dd->m;

  if (s > 0) {
    /* if not communicating data then should be ok to have nothing on some processes */
    PetscCheck(M >= m, comm, PETSC_ERR_ARG_OUTOFRANGE, "More processes than data points! %" PetscInt_FMT " %" PetscInt_FMT, m, M);
    PetscCheck((M - 1) >= s, comm, PETSC_ERR_ARG_OUTOFRANGE, "Array is too small for stencil! %" PetscInt_FMT " %" PetscInt_FMT, M - 1, s);
  }

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    PetscMalloc1(m, &dd->lx);
    PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_blockcomm", &flg1, NULL);
    PetscOptionsGetBool(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_partition_nodes_at_end", &flg2, NULL);
    if (flg1) { /* Block Comm type distribution */
      xs = rank * M / m;
      x  = (rank + 1) * M / m - xs;
    } else if (flg2) { /* the leftover nodes are evenly distributed across the last processes */
      x = (M + rank) / m;
      if (M / m == x) xs = rank * x;
      else xs = rank * (x - 1) + (M + rank) % (x * m);
    } else { /* the leftover nodes are evenly distributed across the first M % m processes */
      /* Regular PETSc Distribution */
      x = M / m + ((M % m) > rank);
      if (rank >= (M % m)) xs = (rank * (PetscInt)(M / m) + M % m);
      else xs = rank * (PetscInt)(M / m) + rank;
    }
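
    /*
       Worked example of the default distribution (an illustrative note):
       M = 10 points on m = 4 processes gives local sizes x = 3, 3, 2, 2,
       since M/m = 2 and the M%m = 2 leftover points go to the first two
       ranks; the corresponding first local nodes are xs = 0, 3, 6, 8.
    */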
    MPI_Allgather(&xs, 1, MPIU_INT, dd->lx, 1, MPIU_INT, comm);
    for (i = 0; i < m - 1; i++) dd->lx[i] = dd->lx[i + 1] - dd->lx[i];
    dd->lx[m - 1] = M - dd->lx[m - 1];
  } else {
    x  = lx[rank];
    xs = 0;
    for (i = 0; i < rank; i++) xs += lx[i];
    /* verify that data user provided is consistent */
    left = xs;
    for (i = rank; i < size; i++) left += lx[i];
    PetscCheck(left == M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Sum of lx across processors not equal to M %" PetscInt_FMT " %" PetscInt_FMT, left, M);
  }

  /*
   check if the scatter requires more than one process neighbor or wraps around
   the domain more than once
  */
  PetscCheck((x >= s) || ((M <= 1) && (bx != DM_BOUNDARY_PERIODIC)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local x-width of domain x %" PetscInt_FMT " is smaller than stencil width s %" PetscInt_FMT, x, s);

  xe = xs + x;

  /* determine ghost region (Xs) and region scattered into (IXs) */
  if (xs - sDist > 0) {
    Xs  = xs - sDist;
    IXs = xs - sDist;
  } else {
    if (bx) Xs = xs - sDist;
    else Xs = 0;
    IXs = 0;
  }
  if (xe + sDist <= M) {
    Xe  = xe + sDist;
    IXe = xe + sDist;
  } else {
    if (bx) Xe = xe + sDist;
    else Xe = M;
    IXe = M;
  }

  if (bx == DM_BOUNDARY_PERIODIC || bx == DM_BOUNDARY_MIRROR) {
    Xs  = xs - sDist;
    Xe  = xe + sDist;
    IXs = xs - sDist;
    IXe = xe + sDist;
  }
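
  /*
     Worked example (an illustrative note): with sDist = 1 and a process owning
     [xs,xe) = [3,6) in the interior of the domain, the ghosted region is
     [Xs,Xe) = [2,7) and equals the scattered-into region [IXs,IXe); a process
     touching the physical boundary with bx == DM_BOUNDARY_NONE has its ghost
     region clipped to [0,M).
  */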

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = dof * x;
  VecCreateMPIWithArray(comm, dof, dd->Nlocal, PETSC_DECIDE, NULL, &global);
  dd->nlocal = dof * (Xe - Xs);
  VecCreateSeqWithArray(PETSC_COMM_SELF, dof, dd->nlocal, NULL, &local);

  VecGetOwnershipRange(global, &start, NULL);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ISCreateStride(comm, dof * (IXe - IXs), dof * (IXs - Xs), 1, &to);

  PetscMalloc1(x + 2 * sDist, &idx);

  for (i = 0; i < IXs - Xs; i++) idx[i] = -1; /* prepend with -1s if needed for ghosted case */

  nn = IXs - Xs;
  if (bx == DM_BOUNDARY_PERIODIC) { /* Handle all cases with periodic first */
    for (i = 0; i < sDist; i++) {   /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = M + (xs - sDist + i);
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < sDist; i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = (xe + i) - M;
    }
  } else if (bx == DM_BOUNDARY_MIRROR) { /* Handle the mirror case */
    for (i = 0; i < (sDist); i++) {      /* Left ghost points */
      if ((xs - sDist + i) >= 0) idx[nn++] = xs - sDist + i;
      else idx[nn++] = sDist - i;
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i; /* Non-ghost points */

    for (i = 0; i < (sDist); i++) { /* Right ghost points */
      if ((xe + i) < M) idx[nn++] = xe + i;
      else idx[nn++] = M - (i + 2);
    }
  } else { /* Now do all cases with no periodicity */
    if (0 <= xs - sDist) {
      for (i = 0; i < sDist; i++) idx[nn++] = xs - sDist + i;
    } else {
      for (i = 0; i < xs; i++) idx[nn++] = i;
    }

    for (i = 0; i < x; i++) idx[nn++] = xs + i;

    if ((xe + sDist) <= M) {
      for (i = 0; i < sDist; i++) idx[nn++] = xe + i;
    } else {
      for (i = xe; i < M; i++) idx[nn++] = i;
    }
  }
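
  /*
     Worked example (an illustrative note): for a periodic domain with M = 10,
     sDist = 2, and rank 0 owning [0,5), the left ghost points wrap to global
     indices 8,9 and the right ghost points are 5,6, giving
     idx = {8,9, 0,1,2,3,4, 5,6}.
  */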

  ISCreateBlock(comm, dof, nn - IXs + Xs, &idx[IXs - Xs], PETSC_USE_POINTER, &from);
  VecScatterCreate(global, from, local, to, &gtol);
  ISDestroy(&to);
  ISDestroy(&from);
  VecDestroy(&local);
  VecDestroy(&global);

  dd->xs = dof * xs;
  dd->xe = dof * xe;
  dd->ys = 0;
  dd->ye = 1;
  dd->zs = 0;
  dd->ze = 1;
  dd->Xs = dof * Xs;
  dd->Xe = dof * Xe;
  dd->Ys = 0;
  dd->Ye = 1;
  dd->Zs = 0;
  dd->Ze = 1;

  dd->gtol      = gtol;
  dd->base      = dof * xs;
  da->ops->view = DMView_DA_1d;

  /*
     Set the local to global ordering in the global vector; this allows use
     of VecSetValuesLocal().
  */
  for (i = 0; i < Xe - IXe; i++) idx[nn++] = -1; /* pad with -1s if needed for ghosted case */

  ISLocalToGlobalMappingCreate(comm, dof, nn, idx, PETSC_OWN_POINTER, &da->ltogmap);
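
  /*
     With this mapping installed, vectors obtained from DMCreateGlobalVector()
     on this DM accept insertions by local (ghosted) index, e.g. (an
     illustrative note) VecSetValuesLocal(vec, 1, &iloc, &val, INSERT_VALUES).
  */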

  return 0;
}

/*@C
   DMDACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across one or more MPI processes.

   Collective

   Input Parameters:
+  comm - MPI communicator
.  bx - type of ghost cells at the boundary the array should have, if any. Use
          `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, or `DM_BOUNDARY_PERIODIC`.
.  M - global dimension of the array (that is, the number of grid points); can be set
            from the command line with -da_grid_x <M>
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or NULL. If non-null, it must have the same length as the number of processes
        in the MPI communicator, and the sum of its entries must equal M.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -dm_view - calls `DMView()` at the conclusion of `DMDACreate1d()`
.  -da_grid_x <nx> - number of grid points in x direction
.  -da_refine_x <rx> - refinement factor
-  -da_refine <n> - refine the `DMDA` n times before creating it

   Level: beginner

   Notes:
   The array data itself is NOT stored in the `DMDA`; it is stored in `Vec` objects.
   The appropriate vector objects can be obtained with calls to `DMCreateGlobalVector()`
   and `DMCreateLocalVector()`, and calls to `VecDuplicate()` if more are needed.

   You must call `DMSetUp()` after this call and before using this `DM`.

   If you wish to use the options database to change values in the `DMDA`, call `DMSetFromOptions()`
   after this call but before `DMSetUp()`.
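
   Example of typical usage (a minimal sketch; error checking omitted to match this listing):
.vb
   DM  da;
   Vec g, l;
   DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, 128, 2, 1, NULL, &da);
   DMSetFromOptions(da);
   DMSetUp(da);
   DMCreateGlobalVector(da, &g);
   DMCreateLocalVector(da, &l);
   DMGlobalToLocalBegin(da, g, INSERT_VALUES, l);
   DMGlobalToLocalEnd(da, g, INSERT_VALUES, l);
.ve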

.seealso: `DMDA`, `DM`, `DMDestroy()`, `DMView()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMGlobalToLocalBegin()`, `DMDASetRefinementFactor()`,
          `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToLocalBegin()`, `DMLocalToLocalEnd()`, `DMDAGetRefinementFactor()`,
          `DMDAGetInfo()`, `DMCreateGlobalVector()`, `DMCreateLocalVector()`, `DMDACreateNaturalVector()`, `DMLoad()`, `DMDAGetOwnershipRanges()`,
          `DMStagCreate1d()`
@*/
PetscErrorCode DMDACreate1d(MPI_Comm comm, DMBoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
{
  PetscMPIInt size;

  DMDACreate(comm, da);
  DMSetDimension(*da, 1);
  DMDASetSizes(*da, M, 1, 1);
  MPI_Comm_size(comm, &size);
  DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);
  DMDASetBoundaryType(*da, bx, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE);
  DMDASetDof(*da, dof);
  DMDASetStencilWidth(*da, s);
  DMDASetOwnershipRanges(*da, lx, NULL, NULL);
  return 0;
}