Actual source code: pipes.c

  1: static char help[] = "This example demonstrates DMNetwork. It is used for testing parallel generation of dmnetwork, then redistributing it.\n\n";
  2: /*
  3:   Example: mpiexec -n <np> ./pipes -ts_max_steps 10
  4: */

  6: #include "wash.h"

  8: /*
  9:   WashNetworkDistribute - proc[0] distributes the sequential wash object
 10:    Input Parameters:
 11: .  comm - MPI communicator
 12: .  wash - wash context with all network data in proc[0]

 14:    Output Parameter:
 15: .  wash - wash context with nedge, nvertex and edgelist distributed

 17:    Note: This routine is used for testing parallel generation of a DMNetwork, then redistributing it.
 18: */
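/*
   Illustrative example (not part of the routine): for pipesCase 0 with npipes = 3,
   proc[0] enters with edgelist = {0,1, 1,2, 2,3} (two vertex ids per edge).
   On 2 ranks, rank 0 keeps edges 0-1 (edgelist {0,1, 1,2}) and rank 1 receives
   edge 2 (edgelist {2,3}); nedge/Nedge and nvertex/Nvertex are set accordingly.
*/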
 19: PetscErrorCode WashNetworkDistribute(MPI_Comm comm, Wash wash)
 20: {
 21:   PetscMPIInt rank, size, tag = 0;
 22:   PetscInt    i, e, v, numEdges, numVertices, nedges, *eowners = NULL, estart, eend, *vtype = NULL, nvertices;
 23:   PetscInt   *edgelist = wash->edgelist, *nvtx = NULL, *vtxDone = NULL;

 25:   MPI_Comm_size(comm, &size);
 26:   if (size == 1) return 0;

 28:   MPI_Comm_rank(comm, &rank);
 29:   numEdges    = wash->nedge;
 30:   numVertices = wash->nvertex;

 32:   /* (1) all processes get global and local number of edges */
 33:   MPI_Bcast(&numEdges, 1, MPIU_INT, 0, comm);
 34:   nedges = numEdges / size; /* local nedges */
 35:   if (rank == 0) nedges += numEdges - size * (numEdges / size);
 36:   wash->Nedge = numEdges;
 37:   wash->nedge = nedges;
 38:   /* PetscPrintf(PETSC_COMM_SELF,"[%d] nedges %d, numEdges %d\n",rank,nedges,numEdges); */

 40:   PetscCalloc3(size + 1, &eowners, size, &nvtx, numVertices, &vtxDone);
 41:   MPI_Allgather(&nedges, 1, MPIU_INT, eowners + 1, 1, MPIU_INT, comm);
 42:   eowners[0] = 0;
 43:   for (i = 2; i <= size; i++) eowners[i] += eowners[i - 1];

 45:   estart = eowners[rank];
 46:   eend   = eowners[rank + 1];
 47:   /* PetscPrintf(PETSC_COMM_SELF,"[%d] own lists row %d - %d\n",rank,estart,eend); */
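  /* Worked example of the split above: numEdges = 10 on size = 4 ranks gives
     nedges = 2 on ranks 1-3 and 2 + (10 - 4*2) = 4 on rank 0, so after the
     Allgather and prefix sum eowners = {0, 4, 6, 8, 10}; rank 1 then owns
     edges 4 and 5 (estart = 4, eend = 6). */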

 49:   /* (2) distribute row block edgelist to all processors */
 50:   if (rank == 0) {
 51:     vtype = wash->vtype;
 52:     for (i = 1; i < size; i++) {
 53:       /* proc[0] sends edgelist to proc[i] */
 54:       MPI_Send(edgelist + 2 * eowners[i], 2 * (eowners[i + 1] - eowners[i]), MPIU_INT, i, tag, comm);

 56:       /* proc[0] sends vtype to proc[i] */
 57:       MPI_Send(vtype + 2 * eowners[i], 2 * (eowners[i + 1] - eowners[i]), MPIU_INT, i, tag, comm);
 58:     }
 59:   } else {
 60:     MPI_Status status;
 61:     PetscMalloc1(2 * (eend - estart), &vtype);
 62:     PetscMalloc1(2 * (eend - estart), &edgelist);

 64:     MPI_Recv(edgelist, 2 * (eend - estart), MPIU_INT, 0, tag, comm, &status);
 65:     MPI_Recv(vtype, 2 * (eend - estart), MPIU_INT, 0, tag, comm, &status);
 66:   }

 68:   wash->edgelist = edgelist;

 70:   /* (3) all processes get global and local number of vertices, without ghost vertices */
 71:   if (rank == 0) {
 72:     for (i = 0; i < size; i++) {
 73:       for (e = eowners[i]; e < eowners[i + 1]; e++) {
 74:         v = edgelist[2 * e];
 75:         if (!vtxDone[v]) {
 76:           nvtx[i]++;
 77:           vtxDone[v] = 1;
 78:         }
 79:         v = edgelist[2 * e + 1];
 80:         if (!vtxDone[v]) {
 81:           nvtx[i]++;
 82:           vtxDone[v] = 1;
 83:         }
 84:       }
 85:     }
 86:   }
 87:   MPI_Bcast(&numVertices, 1, MPIU_INT, 0, comm);
 88:   MPI_Scatter(nvtx, 1, MPIU_INT, &nvertices, 1, MPIU_INT, 0, comm);
 89:   PetscFree3(eowners, nvtx, vtxDone);

 91:   wash->Nvertex = numVertices;
 92:   wash->nvertex = nvertices;
 93:   wash->vtype   = vtype;
 94:   return 0;
 95: }

 97: PetscErrorCode WASHIFunction(TS ts, PetscReal t, Vec X, Vec Xdot, Vec F, void *ctx)
 98: {
 99:   Wash               wash = (Wash)ctx;
100:   DM                 networkdm;
101:   Vec                localX, localXdot, localF, localXold;
102:   const PetscInt    *cone;
103:   PetscInt           vfrom, vto, offsetfrom, offsetto, varoffset;
104:   PetscInt           v, vStart, vEnd, e, eStart, eEnd;
105:   PetscInt           nend, type;
106:   PetscBool          ghost;
107:   PetscScalar       *farr, *juncf, *pipef;
108:   PetscReal          dt;
109:   Pipe               pipe;
110:   PipeField         *pipex, *pipexdot, *juncx;
111:   Junction           junction;
112:   DMDALocalInfo      info;
113:   const PetscScalar *xarr, *xdotarr, *xoldarr;

115:   localX    = wash->localX;
116:   localXdot = wash->localXdot;

118:   TSGetSolution(ts, &localXold);
119:   TSGetDM(ts, &networkdm);
120:   TSGetTimeStep(ts, &dt);
121:   DMGetLocalVector(networkdm, &localF);

123:   /* Set F and localF as zero */
124:   VecSet(F, 0.0);
125:   VecSet(localF, 0.0);

127:   /* Update ghost values of localX and localXdot */
128:   DMGlobalToLocalBegin(networkdm, X, INSERT_VALUES, localX);
129:   DMGlobalToLocalEnd(networkdm, X, INSERT_VALUES, localX);

131:   DMGlobalToLocalBegin(networkdm, Xdot, INSERT_VALUES, localXdot);
132:   DMGlobalToLocalEnd(networkdm, Xdot, INSERT_VALUES, localXdot);

134:   VecGetArrayRead(localX, &xarr);
135:   VecGetArrayRead(localXdot, &xdotarr);
136:   VecGetArrayRead(localXold, &xoldarr);
137:   VecGetArray(localF, &farr);

139:   /* junction->type == JUNCTION:
140:            juncf[0] = -qJ + sum(qin); juncf[1] = qJ - sum(qout)
141:        junction->type == RESERVOIR (upstream):
142:            juncf[0] = -hJ + H0; juncf[1] = qJ - sum(qout)
143:        junction->type == VALVE (downstream):
144:            juncf[0] =  -qJ + sum(qin); juncf[1] = qJ
145:   */
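  /* These residuals are assembled in two passes: the vertex loop below sets the
     local part (-qJ, +qJ, or hJ - H0 for a reservoir), and the edge loop that
     follows adds the coupling terms, juncf[1] -= pipex[0].q at the upstream
     vertex and juncf[0] += pipex[nend].q at the downstream vertex. Contributions
     from different ranks are combined by DMLocalToGlobal with ADD_VALUES. */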
146:   /* Vertex/junction initialization */
147:   DMNetworkGetVertexRange(networkdm, &vStart, &vEnd);
148:   for (v = vStart; v < vEnd; v++) {
149:     DMNetworkIsGhostVertex(networkdm, v, &ghost);
150:     if (ghost) continue;

152:     DMNetworkGetComponent(networkdm, v, 0, &type, (void **)&junction, NULL);
153:     DMNetworkGetLocalVecOffset(networkdm, v, ALL_COMPONENTS, &varoffset);
154:     juncx = (PipeField *)(xarr + varoffset);
155:     juncf = (PetscScalar *)(farr + varoffset);

157:     juncf[0] = -juncx[0].q;
158:     juncf[1] = juncx[0].q;

160:     if (junction->type == RESERVOIR) { /* upstream reservoir */
161:       juncf[0] = juncx[0].h - wash->H0;
162:     }
163:   }

165:   /* Edge/pipe */
166:   DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd);
167:   for (e = eStart; e < eEnd; e++) {
168:     DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL);
169:     DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &varoffset);
170:     pipex    = (PipeField *)(xarr + varoffset);
171:     pipexdot = (PipeField *)(xdotarr + varoffset);
172:     pipef    = (PetscScalar *)(farr + varoffset);

174:     /* Get some data into the pipe structure: note, some of these assignments
175:      * may be redundant across calls; revisit if profiling shows they are costly. */
176:     pipe->dt   = dt;
177:     pipe->xold = (PipeField *)(xoldarr + varoffset);

179:     /* Evaluate F over this edge/pipe: pipef[1], ...,pipef[2*nend] */
180:     DMDAGetLocalInfo(pipe->da, &info);
181:     PipeIFunctionLocal_Lax(&info, t, pipex, pipexdot, pipef, pipe);

183:     /* Get boundary values from connected vertices */
184:     DMNetworkGetConnectedVertices(networkdm, e, &cone);
185:     vfrom = cone[0]; /* local ordering */
186:     vto   = cone[1];
187:     DMNetworkGetLocalVecOffset(networkdm, vfrom, ALL_COMPONENTS, &offsetfrom);
188:     DMNetworkGetLocalVecOffset(networkdm, vto, ALL_COMPONENTS, &offsetto);

190:     /* Evaluate upstream boundary */
191:     DMNetworkGetComponent(networkdm, vfrom, 0, &type, (void **)&junction, NULL);
193:     juncx = (PipeField *)(xarr + offsetfrom);
194:     juncf = (PetscScalar *)(farr + offsetfrom);

196:     pipef[0] = pipex[0].h - juncx[0].h;
197:     juncf[1] -= pipex[0].q;

199:     /* Evaluate downstream boundary */
200:     DMNetworkGetComponent(networkdm, vto, 0, &type, (void **)&junction, NULL);
202:     juncx = (PipeField *)(xarr + offsetto);
203:     juncf = (PetscScalar *)(farr + offsetto);
204:     nend  = pipe->nnodes - 1;

206:     pipef[2 * nend + 1] = pipex[nend].h - juncx[0].h;
207:     juncf[0] += pipex[nend].q;
208:   }

210:   VecRestoreArrayRead(localX, &xarr);
211:   VecRestoreArrayRead(localXdot, &xdotarr);
212:   VecRestoreArrayRead(localXold, &xoldarr);
213:   VecRestoreArray(localF, &farr);

214:   DMLocalToGlobalBegin(networkdm, localF, ADD_VALUES, F);
215:   DMLocalToGlobalEnd(networkdm, localF, ADD_VALUES, F);
216:   DMRestoreLocalVector(networkdm, &localF);
217:   /*
218:    PetscPrintf(PETSC_COMM_WORLD, "F:\n");
219:    VecView(F,PETSC_VIEWER_STDOUT_WORLD);
220:    */
221:   return 0;
222: }

224: PetscErrorCode WASHSetInitialSolution(DM networkdm, Vec X, Wash wash)
225: {
226:   PetscInt           k, nx, vkey, vfrom, vto, offsetfrom, offsetto;
227:   PetscInt           type, varoffset;
228:   PetscInt           e, eStart, eEnd;
229:   Vec                localX;
230:   PetscScalar       *xarr;
231:   Pipe               pipe;
232:   Junction           junction;
233:   const PetscInt    *cone;
234:   const PetscScalar *xarray;

236:   VecSet(X, 0.0);
237:   DMGetLocalVector(networkdm, &localX);
238:   VecGetArray(localX, &xarr);

240:   /* Edge */
241:   DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd);
242:   for (e = eStart; e < eEnd; e++) {
243:     DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &varoffset);
244:     DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL);

246:     /* set initial values for this pipe */
247:     PipeComputeSteadyState(pipe, wash->Q0, wash->H0);
248:     VecGetSize(pipe->x, &nx);

250:     VecGetArrayRead(pipe->x, &xarray);
251:     /* copy pipe->x (via xarray) into localX at this edge's offset */
252:     for (k = 0; k < nx; k++) (xarr + varoffset)[k] = xarray[k];

254:     /* set boundary values into vfrom and vto */
255:     DMNetworkGetConnectedVertices(networkdm, e, &cone);
256:     vfrom = cone[0]; /* local ordering */
257:     vto   = cone[1];
258:     DMNetworkGetLocalVecOffset(networkdm, vfrom, ALL_COMPONENTS, &offsetfrom);
259:     DMNetworkGetLocalVecOffset(networkdm, vto, ALL_COMPONENTS, &offsetto);

261:     /* if vfrom is a head vertex: */
262:     DMNetworkGetComponent(networkdm, vfrom, 0, &vkey, (void **)&junction, NULL);
263:     if (junction->type == RESERVOIR) { (xarr + offsetfrom)[1] = wash->H0; /* 1st H */ }

265:     /* if vto is an end vertex: */
266:     DMNetworkGetComponent(networkdm, vto, 0, &vkey, (void **)&junction, NULL);
267:     if (junction->type == VALVE) { (xarr + offsetto)[0] = wash->QL; /* last Q */ }
268:     VecRestoreArrayRead(pipe->x, &xarray);
269:   }

271:   VecRestoreArray(localX, &xarr);
272:   DMLocalToGlobalBegin(networkdm, localX, ADD_VALUES, X);
273:   DMLocalToGlobalEnd(networkdm, localX, ADD_VALUES, X);
274:   DMRestoreLocalVector(networkdm, &localX);

276: #if 0
277:   PetscInt N;
278:   VecGetSize(X,&N);
279:   PetscPrintf(PETSC_COMM_WORLD,"initial solution %d:\n",N);
280:   VecView(X,PETSC_VIEWER_STDOUT_WORLD);
281: #endif
282:   return 0;
283: }

285: PetscErrorCode TSDMNetworkMonitor(TS ts, PetscInt step, PetscReal t, Vec x, void *context)
286: {
287:   DMNetworkMonitor monitor;

289:   monitor = (DMNetworkMonitor)context;
290:   DMNetworkMonitorView(monitor, x);
291:   return 0;
292: }

294: PetscErrorCode PipesView(DM networkdm, PetscInt KeyPipe, Vec X)
295: {
296:   PetscInt   i, numkeys = 1, *blocksize, *numselectedvariable, **selectedvariables, n;
297:   IS         isfrom_q, isfrom_h, isfrom;
298:   Vec        Xto;
299:   VecScatter ctx;
300:   MPI_Comm   comm;

302:   PetscObjectGetComm((PetscObject)networkdm, &comm);

304:   /* 1. Create isfrom_q for q-variable of pipes */
305:   PetscMalloc3(numkeys, &blocksize, numkeys, &numselectedvariable, numkeys, &selectedvariables);
306:   for (i = 0; i < numkeys; i++) {
307:     blocksize[i]           = 2;
308:     numselectedvariable[i] = 1;
309:     PetscMalloc1(numselectedvariable[i], &selectedvariables[i]);
310:     selectedvariables[i][0] = 0; /* q-variable */
311:   }
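  /* Selection semantics used here: each pipe component carries blocks of
     blocksize = 2 dofs, a (q, h) pair per pipe node; selectedvariables[i][0] = 0
     picks the q entry of every block, so isfrom_q indexes the q dof of every node
     of every pipe in the global vector (step 5 switches this to 1 for h). */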
312:   DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, numselectedvariable, selectedvariables, &isfrom_q);

314:   /* 2. Create Xto and isto */
315:   ISGetLocalSize(isfrom_q, &n);
316:   VecCreate(comm, &Xto);
317:   VecSetSizes(Xto, n, PETSC_DECIDE);
318:   VecSetFromOptions(Xto);
319:   VecSet(Xto, 0.0);

321:   /* 3. Create scatter */
322:   VecScatterCreate(X, isfrom_q, Xto, NULL, &ctx);

324:   /* 4. Scatter to Xq */
325:   VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD);
326:   VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD);
327:   VecScatterDestroy(&ctx);
328:   ISDestroy(&isfrom_q);

330:   PetscPrintf(PETSC_COMM_WORLD, "Xq:\n");
331:   VecView(Xto, PETSC_VIEWER_STDOUT_WORLD);

333:   /* 5. Create isfrom_h for h-variable of pipes; Create scatter; Scatter to Xh */
334:   for (i = 0; i < numkeys; i++) { selectedvariables[i][0] = 1; /* h-variable */ }
335:   DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, numselectedvariable, selectedvariables, &isfrom_h);

337:   VecScatterCreate(X, isfrom_h, Xto, NULL, &ctx);
338:   VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD);
339:   VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD);
340:   VecScatterDestroy(&ctx);
341:   ISDestroy(&isfrom_h);

343:   PetscPrintf(PETSC_COMM_WORLD, "Xh:\n");
344:   VecView(Xto, PETSC_VIEWER_STDOUT_WORLD);
345:   VecDestroy(&Xto);

347:   /* 6. Create isfrom for all pipe variables; Create scatter; Scatter to Xpipes */
348:   for (i = 0; i < numkeys; i++) { blocksize[i] = -1; /* select all the variables of the i-th component */ }
349:   DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, blocksize, NULL, NULL, &isfrom);
350:   ISDestroy(&isfrom); /* discard; the NULL form below selects all variables as well and is the one used */
351:   DMNetworkCreateIS(networkdm, numkeys, &KeyPipe, NULL, NULL, NULL, &isfrom);

353:   ISGetLocalSize(isfrom, &n);
354:   VecCreate(comm, &Xto);
355:   VecSetSizes(Xto, n, PETSC_DECIDE);
356:   VecSetFromOptions(Xto);
357:   VecSet(Xto, 0.0);

359:   VecScatterCreate(X, isfrom, Xto, NULL, &ctx);
360:   VecScatterBegin(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD);
361:   VecScatterEnd(ctx, X, Xto, INSERT_VALUES, SCATTER_FORWARD);
362:   VecScatterDestroy(&ctx);
363:   ISDestroy(&isfrom);

365:   PetscPrintf(PETSC_COMM_WORLD, "Xpipes:\n");
366:   VecView(Xto, PETSC_VIEWER_STDOUT_WORLD);

368:   /* 7. Free spaces */
369:   for (i = 0; i < numkeys; i++) PetscFree(selectedvariables[i]);
370:   PetscFree3(blocksize, numselectedvariable, selectedvariables);
371:   VecDestroy(&Xto);
372:   return 0;
373: }

375: PetscErrorCode ISJunctionsView(DM networkdm, PetscInt KeyJunc)
376: {
377:   PetscInt    numkeys = 1;
378:   IS          isfrom;
379:   MPI_Comm    comm;
380:   PetscMPIInt rank;

382:   PetscObjectGetComm((PetscObject)networkdm, &comm);
383:   MPI_Comm_rank(comm, &rank);

385:   /* Create a global isfrom for all junction variables */
386:   DMNetworkCreateIS(networkdm, numkeys, &KeyJunc, NULL, NULL, NULL, &isfrom);
387:   PetscPrintf(PETSC_COMM_WORLD, "ISJunctions:\n");
388:   ISView(isfrom, PETSC_VIEWER_STDOUT_WORLD);
389:   ISDestroy(&isfrom);

391:   /* Create a local isfrom for all junction variables */
392:   DMNetworkCreateLocalIS(networkdm, numkeys, &KeyJunc, NULL, NULL, NULL, &isfrom);
393:   if (rank == 0) {
394:     PetscPrintf(PETSC_COMM_SELF, "[%d] ISLocalJunctions:\n", rank);
395:     ISView(isfrom, PETSC_VIEWER_STDOUT_SELF);
396:   }
397:   ISDestroy(&isfrom);
398:   return 0;
399: }

401: PetscErrorCode WashNetworkCleanUp(Wash wash)
402: {
403:   PetscMPIInt rank;

405:   MPI_Comm_rank(wash->comm, &rank);
406:   PetscFree(wash->edgelist);
407:   PetscFree(wash->vtype);
408:   if (rank == 0) PetscFree2(wash->junction, wash->pipe);
409:   return 0;
410: }

412: PetscErrorCode WashNetworkCreate(MPI_Comm comm, PetscInt pipesCase, Wash *wash_ptr)
413: {
414:   PetscInt    npipes;
415:   PetscMPIInt rank;
416:   Wash        wash = NULL;
417:   PetscInt    i, numVertices, numEdges, *vtype;
418:   PetscInt   *edgelist;
419:   Junction    junctions = NULL;
420:   Pipe        pipes     = NULL;
421:   PetscBool   washdist  = PETSC_TRUE;

423:   MPI_Comm_rank(comm, &rank);

425:   PetscCalloc1(1, &wash);
426:   wash->comm       = comm;
427:   *wash_ptr        = wash;
428:   wash->Q0         = 0.477432; /* RESERVOIR */
429:   wash->H0         = 150.0;
430:   wash->HL         = 143.488; /* VALVE */
431:   wash->QL         = 0.0;
432:   wash->nnodes_loc = 0;

434:   numVertices = 0;
435:   numEdges    = 0;
436:   edgelist    = NULL;

438:   /* proc[0] creates a sequential wash and edgelist */
439:   PetscPrintf(PETSC_COMM_WORLD, "Setup pipesCase %" PetscInt_FMT "\n", pipesCase);

441:   /* Set global number of pipes, edges, and junctions */
442:   /*-------------------------------------------------*/
443:   switch (pipesCase) {
444:   case 0:
445:     /* pipeCase 0: */
446:     /* =================================================
447:     (RESERVOIR) v0 --E0--> v1--E1--> v2 --E2-->v3 (VALVE)
448:     ====================================================  */
449:     npipes = 3;
450:     PetscOptionsGetInt(NULL, NULL, "-npipes", &npipes, NULL);
451:     wash->nedge   = npipes;
452:     wash->nvertex = npipes + 1;

454:     /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
455:     numVertices = 0;
456:     numEdges    = 0;
457:     edgelist    = NULL;
458:     if (rank == 0) {
459:       numVertices = wash->nvertex;
460:       numEdges    = wash->nedge;

462:       PetscCalloc1(2 * numEdges, &edgelist);
463:       for (i = 0; i < numEdges; i++) {
464:         edgelist[2 * i]     = i;
465:         edgelist[2 * i + 1] = i + 1;
466:       }
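      /* For the default npipes = 3 this produces edgelist = {0,1, 1,2, 2,3},
         i.e. the chain v0 -E0-> v1 -E1-> v2 -E2-> v3 drawn above. */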

468:       /* Add network components */
469:       /*------------------------*/
470:       PetscCalloc2(numVertices, &junctions, numEdges, &pipes);

472:       /* vertex */
473:       for (i = 0; i < numVertices; i++) {
474:         junctions[i].id   = i;
475:         junctions[i].type = JUNCTION;
476:       }

478:       junctions[0].type               = RESERVOIR;
479:       junctions[numVertices - 1].type = VALVE;
480:     }
481:     break;
482:   case 1:
483:     /* pipeCase 1: */
484:     /* ==========================
485:                 v2 (VALVE)
486:                 ^
487:                 |
488:                E2
489:                 |
490:     v0 --E0--> v3--E1--> v1
491:   (RESERVOIR)            (VALVE)
492:     =============================  */
493:     npipes        = 3;
494:     wash->nedge   = npipes;
495:     wash->nvertex = npipes + 1;

497:     /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
498:     if (rank == 0) {
499:       numVertices = wash->nvertex;
500:       numEdges    = wash->nedge;

502:       PetscCalloc1(2 * numEdges, &edgelist);
503:       edgelist[0] = 0;
504:       edgelist[1] = 3; /* edge[0] */
505:       edgelist[2] = 3;
506:       edgelist[3] = 1; /* edge[1] */
507:       edgelist[4] = 3;
508:       edgelist[5] = 2; /* edge[2] */

510:       /* Add network components */
511:       /*------------------------*/
512:       PetscCalloc2(numVertices, &junctions, numEdges, &pipes);
513:       /* vertex */
514:       for (i = 0; i < numVertices; i++) {
515:         junctions[i].id   = i;
516:         junctions[i].type = JUNCTION;
517:       }

519:       junctions[0].type = RESERVOIR;
520:       junctions[1].type = VALVE;
521:       junctions[2].type = VALVE;
522:     }
523:     break;
524:   case 2:
525:     /* pipeCase 2: */
526:     /* ==========================
527:     (RESERVOIR)  v2--> E2
528:                        |
529:             v0 --E0--> v3--E1--> v1
530:     (RESERVOIR)               (VALVE)
531:     =============================  */

533:     /* Set application parameters -- to be used in function evaluations */
534:     npipes        = 3;
535:     wash->nedge   = npipes;
536:     wash->nvertex = npipes + 1;

538:     /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
539:     if (rank == 0) {
540:       numVertices = wash->nvertex;
541:       numEdges    = wash->nedge;

543:       PetscCalloc1(2 * numEdges, &edgelist);
544:       edgelist[0] = 0;
545:       edgelist[1] = 3; /* edge[0] */
546:       edgelist[2] = 3;
547:       edgelist[3] = 1; /* edge[1] */
548:       edgelist[4] = 2;
549:       edgelist[5] = 3; /* edge[2] */

551:       /* Add network components */
552:       /*------------------------*/
553:       PetscCalloc2(numVertices, &junctions, numEdges, &pipes);
554:       /* vertex */
555:       for (i = 0; i < numVertices; i++) {
556:         junctions[i].id   = i;
557:         junctions[i].type = JUNCTION;
558:       }

560:       junctions[0].type = RESERVOIR;
561:       junctions[1].type = VALVE;
562:       junctions[2].type = RESERVOIR;
563:     }
564:     break;
565:   default:
566:     SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "not done yet");
567:   }

569:   /* set edge global id */
570:   for (i = 0; i < numEdges; i++) pipes[i].id = i;

572:   if (rank == 0) { /* set vtype for proc[0] */
573:     PetscInt v;
574:     PetscMalloc1(2 * numEdges, &vtype);
575:     for (i = 0; i < 2 * numEdges; i++) {
576:       v        = edgelist[i];
577:       vtype[i] = junctions[v].type;
578:     }
579:     wash->vtype = vtype;
580:   }
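  /* vtype[] mirrors edgelist[]: entry i is the junction type of vertex edgelist[i].
     For pipesCase 0 with npipes = 3 this gives
     vtype = {RESERVOIR, JUNCTION, JUNCTION, JUNCTION, JUNCTION, VALVE}. */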

582:   *wash_ptr      = wash;
583:   wash->nedge    = numEdges;
584:   wash->nvertex  = numVertices;
585:   wash->edgelist = edgelist;
586:   wash->junction = junctions;
587:   wash->pipe     = pipes;

589:   /* Distribute edgelist to other processors */
590:   PetscOptionsGetBool(NULL, NULL, "-wash_distribute", &washdist, NULL);
591:   if (washdist) {
592:     /*
593:      PetscPrintf(PETSC_COMM_WORLD," Distribute sequential wash ...\n");
594:      */
595:     WashNetworkDistribute(comm, wash);
596:   }
597:   return 0;
598: }

600: /* ------------------------------------------------------- */
601: int main(int argc, char **argv)
602: {
603:   Wash              wash;
604:   Junction          junctions, junction;
605:   Pipe              pipe, pipes;
606:   PetscInt          KeyPipe, KeyJunction, *edgelist = NULL, *vtype = NULL;
607:   PetscInt          i, e, v, eStart, eEnd, vStart, vEnd, key, vkey, type;
608:   PetscInt          steps = 1, nedges, nnodes = 6;
609:   const PetscInt   *cone;
610:   DM                networkdm;
611:   PetscMPIInt       size, rank;
612:   PetscReal         ftime;
613:   Vec               X;
614:   TS                ts;
615:   TSConvergedReason reason;
616:   PetscBool         viewpipes, viewjuncs, monipipes = PETSC_FALSE, userJac = PETSC_TRUE, viewdm = PETSC_FALSE, viewX = PETSC_FALSE;
617:   PetscInt          pipesCase = 0;
618:   DMNetworkMonitor  monitor;
619:   MPI_Comm          comm;

622:   PetscInitialize(&argc, &argv, "pOption", help);

624:   /* Read runtime options */
625:   PetscOptionsGetInt(NULL, NULL, "-case", &pipesCase, NULL);
626:   PetscOptionsGetBool(NULL, NULL, "-user_Jac", &userJac, NULL);
627:   PetscOptionsGetBool(NULL, NULL, "-pipe_monitor", &monipipes, NULL);
628:   PetscOptionsGetBool(NULL, NULL, "-viewdm", &viewdm, NULL);
629:   PetscOptionsGetBool(NULL, NULL, "-viewX", &viewX, NULL);
630:   PetscOptionsGetInt(NULL, NULL, "-npipenodes", &nnodes, NULL);

632:   /* Create networkdm */
633:   /*------------------*/
634:   DMNetworkCreate(PETSC_COMM_WORLD, &networkdm);
635:   PetscObjectGetComm((PetscObject)networkdm, &comm);
636:   MPI_Comm_rank(comm, &rank);
637:   MPI_Comm_size(comm, &size);

639:   if (size == 1 && monipipes) DMNetworkMonitorCreate(networkdm, &monitor);

641:   /* Register the components in the network */
642:   DMNetworkRegisterComponent(networkdm, "junctionstruct", sizeof(struct _p_Junction), &KeyJunction);
643:   DMNetworkRegisterComponent(networkdm, "pipestruct", sizeof(struct _p_Pipe), &KeyPipe);

645:   /* Create a distributed wash network (user-specific) */
646:   WashNetworkCreate(comm, pipesCase, &wash);
647:   nedges    = wash->nedge;
648:   edgelist  = wash->edgelist;
649:   vtype     = wash->vtype;
650:   junctions = wash->junction;
651:   pipes     = wash->pipe;

653:   /* Set up the network layout */
654:   DMNetworkSetNumSubNetworks(networkdm, PETSC_DECIDE, 1);
655:   DMNetworkAddSubnetwork(networkdm, NULL, nedges, edgelist, NULL);

657:   DMNetworkLayoutSetUp(networkdm);

659:   DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd);
660:   DMNetworkGetVertexRange(networkdm, &vStart, &vEnd);
661:   /* PetscPrintf(PETSC_COMM_SELF,"[%d] eStart/End: %d - %d; vStart/End: %d - %d\n",rank,eStart,eEnd,vStart,vEnd); */

663:   if (rank) { /* junctions[] and pipes[] for proc[0] are allocated in WashNetworkCreate() */
664:     /* vEnd - vStart = nvertices + number of ghost vertices! */
665:     PetscCalloc2(vEnd - vStart, &junctions, nedges, &pipes);
666:   }

668:   /* Add Pipe component and number of variables to all local edges */
669:   for (e = eStart; e < eEnd; e++) {
670:     pipes[e - eStart].nnodes = nnodes;
671:     DMNetworkAddComponent(networkdm, e, KeyPipe, &pipes[e - eStart], 2 * pipes[e - eStart].nnodes);

673:     if (size == 1 && monipipes) { /* Add monitor -- show Q_{pipes[e-eStart].id}? */
674:       pipes[e - eStart].length = 600.0;
675:       DMNetworkMonitorAdd(monitor, "Pipe Q", e, pipes[e - eStart].nnodes, 0, 2, 0.0, pipes[e - eStart].length, -0.8, 0.8, PETSC_TRUE);
676:       DMNetworkMonitorAdd(monitor, "Pipe H", e, pipes[e - eStart].nnodes, 1, 2, 0.0, pipes[e - eStart].length, -400.0, 800.0, PETSC_TRUE);
677:     }
678:   }

680:   /* Add Junction component and number of variables to all local vertices */
681:   for (v = vStart; v < vEnd; v++) DMNetworkAddComponent(networkdm, v, KeyJunction, &junctions[v - vStart], 2);
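  /* Resulting dof layout: each edge carries 2*nnodes unknowns, a (q, h) pair per
     pipe node (see the PipeField casts in WASHIFunction), and each vertex carries
     2 unknowns (qJ, hJ). */
#if 0
  { /* illustrative only: print the local dof counts implied by the components added above */
    PetscInt ndof_edge = 2 * nnodes * (eEnd - eStart), ndof_vtx = 2 * (vEnd - vStart);
    PetscPrintf(PETSC_COMM_SELF, "[%d] local edge dofs %" PetscInt_FMT ", vertex dofs %" PetscInt_FMT "\n", rank, ndof_edge, ndof_vtx);
  }
#endif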

683:   if (size > 1) { /* must be called before DMSetUp(); other partitioners are not supported here yet and can crash proc[0] */
684:     DM               plexdm;
685:     PetscPartitioner part;
686:     DMNetworkGetPlex(networkdm, &plexdm);
687:     DMPlexGetPartitioner(plexdm, &part);
688:     PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
689:     PetscOptionsSetValue(NULL, "-dm_plex_csr_alg", "mat"); /* for parmetis */
690:   }
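  /* Note: the tests below also select the partitioner at runtime via
     -petscpartitioner_type simple (or parmetis); the programmatic
     PetscPartitionerSetType above only provides a working default for parallel runs. */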

692:   /* Set up DM for use */
693:   DMSetUp(networkdm);
694:   if (viewdm) {
695:     PetscPrintf(PETSC_COMM_WORLD, "\nOriginal networkdm, DMView:\n");
696:     DMView(networkdm, PETSC_VIEWER_STDOUT_WORLD);
697:   }

699:   /* Set user physical parameters to the components */
700:   for (e = eStart; e < eEnd; e++) {
701:     DMNetworkGetConnectedVertices(networkdm, e, &cone);
702:     /* vfrom */
703:     DMNetworkGetComponent(networkdm, cone[0], 0, &vkey, (void **)&junction, NULL);
704:     junction->type = (VertexType)vtype[2 * e];

706:     /* vto */
707:     DMNetworkGetComponent(networkdm, cone[1], 0, &vkey, (void **)&junction, NULL);
708:     junction->type = (VertexType)vtype[2 * e + 1];
709:   }

711:   WashNetworkCleanUp(wash);

713:   /* Network partitioning and distribution of data */
714:   DMNetworkDistribute(&networkdm, 0);
715:   if (viewdm) {
716:     PetscPrintf(PETSC_COMM_WORLD, "\nAfter DMNetworkDistribute, DMView:\n");
717:     DMView(networkdm, PETSC_VIEWER_STDOUT_WORLD);
718:   }

720:   /* create vectors */
721:   DMCreateGlobalVector(networkdm, &X);
722:   DMCreateLocalVector(networkdm, &wash->localX);
723:   DMCreateLocalVector(networkdm, &wash->localXdot);

725:   /* PipeSetUp -- each process only sets its own pipes */
726:   /*---------------------------------------------------*/
727:   DMNetworkGetVertexRange(networkdm, &vStart, &vEnd);

729:   userJac = PETSC_TRUE; /* force the user-provided Jacobian path below, overriding -user_Jac */
730:   DMNetworkHasJacobian(networkdm, userJac, userJac);
731:   DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd);
732:   for (e = eStart; e < eEnd; e++) { /* each edge has only one component, pipe */
733:     DMNetworkGetComponent(networkdm, e, 0, &type, (void **)&pipe, NULL);

735:     wash->nnodes_loc += pipe->nnodes;        /* local total number of nodes, will be used by PipesView() */
736:     PetscCall(PipeSetParameters(pipe, 600.0, /* length   */
737:                                 0.5,         /* diameter */
738:                                 1200.0,      /* a        */
739:                                 0.018));     /* friction */
740:     PipeSetUp(pipe);

742:     if (userJac) {
743:       /* Create Jacobian matrix structures for a Pipe */
744:       Mat *J;
745:       PipeCreateJacobian(pipe, NULL, &J);
746:       DMNetworkEdgeSetMatrix(networkdm, e, J);
747:     }
748:   }

750:   if (userJac) {
751:     DMNetworkGetVertexRange(networkdm, &vStart, &vEnd);
752:     for (v = vStart; v < vEnd; v++) {
753:       Mat *J;
754:       JunctionCreateJacobian(networkdm, v, NULL, &J);
755:       DMNetworkVertexSetMatrix(networkdm, v, J);

757:       DMNetworkGetComponent(networkdm, v, 0, &vkey, (void **)&junction, NULL);
758:       junction->jacobian = J;
759:     }
760:   }

762:   /* Setup solver                                           */
763:   /*--------------------------------------------------------*/
764:   TSCreate(PETSC_COMM_WORLD, &ts);

766:   TSSetDM(ts, (DM)networkdm);
767:   TSSetIFunction(ts, NULL, WASHIFunction, wash);

769:   TSSetMaxSteps(ts, steps);
770:   TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER);
771:   TSSetTimeStep(ts, 0.1);
772:   TSSetType(ts, TSBEULER);
773:   if (size == 1 && monipipes) TSMonitorSet(ts, TSDMNetworkMonitor, monitor, NULL);
774:   TSSetFromOptions(ts);

776:   WASHSetInitialSolution(networkdm, X, wash);

778:   TSSolve(ts, X);

780:   TSGetSolveTime(ts, &ftime);
781:   TSGetStepNumber(ts, &steps);
782:   TSGetConvergedReason(ts, &reason);
783:   PetscPrintf(PETSC_COMM_WORLD, "%s at time %g after %" PetscInt_FMT " steps\n", TSConvergedReasons[reason], (double)ftime, steps);
784:   if (viewX) VecView(X, PETSC_VIEWER_STDOUT_WORLD);

786:   viewpipes = PETSC_FALSE;
787:   PetscOptionsGetBool(NULL, NULL, "-Jac_view", &viewpipes, NULL);
788:   if (viewpipes) {
789:     SNES snes;
790:     Mat  Jac;
791:     TSGetSNES(ts, &snes);
792:     SNESGetJacobian(snes, &Jac, NULL, NULL, NULL);
793:     MatView(Jac, PETSC_VIEWER_DRAW_WORLD);
794:   }

796:   /* View solutions */
797:   /* -------------- */
798:   viewpipes = PETSC_FALSE;
799:   PetscOptionsGetBool(NULL, NULL, "-pipe_view", &viewpipes, NULL);
800:   if (viewpipes) PipesView(networkdm, KeyPipe, X);

802:   /* Test IS */
803:   viewjuncs = PETSC_FALSE;
804:   PetscOptionsGetBool(NULL, NULL, "-isjunc_view", &viewjuncs, NULL);
805:   if (viewjuncs) ISJunctionsView(networkdm, KeyJunction);

807:   /* Free spaces */
808:   /* ----------- */
809:   TSDestroy(&ts);
810:   VecDestroy(&X);
811:   VecDestroy(&wash->localX);
812:   VecDestroy(&wash->localXdot);

814:   /* Destroy objects from each pipe that are created in PipeSetUp() */
815:   DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd);
816:   for (i = eStart; i < eEnd; i++) {
817:     DMNetworkGetComponent(networkdm, i, 0, &key, (void **)&pipe, NULL);
818:     PipeDestroy(&pipe);
819:   }
820:   if (userJac) {
821:     for (v = vStart; v < vEnd; v++) {
822:       DMNetworkGetComponent(networkdm, v, 0, &vkey, (void **)&junction, NULL);
823:       JunctionDestroyJacobian(networkdm, v, junction);
824:     }
825:   }

827:   if (size == 1 && monipipes) DMNetworkMonitorDestroy(&monitor);
828:   DMDestroy(&networkdm);
829:   PetscFree(wash);

831:   if (rank) PetscFree2(junctions, pipes);
832:   PetscFinalize();
833:   return 0;
834: }

836: /*TEST

838:    build:
839:      depends: pipeInterface.c pipeImpls.c
840:      requires: mumps

842:    test:
843:       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
844:       localrunfiles: pOption
845:       output_file: output/pipes_1.out

847:    test:
848:       suffix: 2
849:       nsize: 2
850:       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
851:       localrunfiles: pOption
852:       output_file: output/pipes_2.out

854:    test:
855:       suffix: 3
856:       nsize: 2
857:       args: -ts_monitor -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
858:       localrunfiles: pOption
859:       output_file: output/pipes_3.out

861:    test:
862:       suffix: 4
863:       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
864:       localrunfiles: pOption
865:       output_file: output/pipes_4.out

867:    test:
868:       suffix: 5
869:       nsize: 3
870:       args: -ts_monitor -case 2 -ts_max_steps 10 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
871:       localrunfiles: pOption
872:       output_file: output/pipes_5.out

874:    test:
875:       suffix: 6
876:       nsize: 2
877:       args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
878:       localrunfiles: pOption
879:       output_file: output/pipes_6.out

881:    test:
882:       suffix: 7
883:       nsize: 2
884:       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
885:       localrunfiles: pOption
886:       output_file: output/pipes_7.out

888:    test:
889:       suffix: 8
890:       nsize: 2
891:       requires: parmetis
892:       args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type parmetis -options_left no -wash_distribute 1
893:       localrunfiles: pOption
894:       output_file: output/pipes_8.out

896:    test:
897:       suffix: 9
898:       nsize: 2
899:       args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -pipe_view
900:       localrunfiles: pOption
901:       output_file: output/pipes_9.out

903:    test:
904:       suffix: 10
905:       nsize: 2
906:       args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -isjunc_view
907:       localrunfiles: pOption
908:       output_file: output/pipes_10.out

910: TEST*/