Actual source code: characteristic.c
#include <petsc/private/characteristicimpl.h>
#include <petscdmda.h>
#include <petscviewer.h>

PetscClassId  CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;

/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar[]);

PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer);

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  if (!iascii) PetscTryTypeMethod(c, view, viewer);
  return 0;
}
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  if (!*c) return 0;

  if (--((PetscObject)(*c))->refct > 0) return 0;

  if ((*c)->ops->destroy) (*(*c)->ops->destroy)((*c));
  MPI_Type_free(&(*c)->itemType);
  PetscFree((*c)->queue);
  PetscFree((*c)->queueLocal);
  PetscFree((*c)->queueRemote);
  PetscFree((*c)->neighbors);
  PetscFree((*c)->needCount);
  PetscFree((*c)->localOffsets);
  PetscFree((*c)->fillCount);
  PetscFree((*c)->remoteOffsets);
  PetscFree((*c)->request);
  PetscFree((*c)->status);
  PetscHeaderDestroy(c);
  return 0;
}
PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  *c = NULL;
  CharacteristicInitializePackage();

  PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView);
  *c = newC;

  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  return 0;
}
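
/*
  A minimal lifecycle sketch (illustrative only; dt, solution, and the
  interpolation setup are application-side and sketched further below):

    Characteristic c;
    CharacteristicCreate(PETSC_COMM_WORLD, &c);
    CharacteristicSetType(c, CHARACTERISTICDA);
    ... set velocity and field interpolation callbacks ...
    CharacteristicSolve(c, dt, solution);
    CharacteristicDestroy(&c);
*/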
/*@C
  CharacteristicSetType - Builds Characteristic for a particular solver.

  Logically Collective

  Input Parameters:
+ c    - the method of characteristics context
- type - a known method

  Options Database Key:
. -characteristic_type <method> - Sets the method; use -help for a list
  of available methods

  Level: intermediate

  Notes:
  See "include/petsccharacteristic.h" for available methods

  Normally, it is best to use the CharacteristicSetFromOptions() command and
  then set the Characteristic type from the options database rather than by using
  this routine. Using the options database provides the user with
  maximum flexibility in evaluating the many different available methods.
  The CharacteristicSetType() routine is provided for those situations where it
  is necessary to set the solver independently of the command
  line or options database. This might be the case, for example, when
  the choice of solver changes during the execution of the
  program, and the user's application is taking responsibility for
  choosing the appropriate method. In other words, this routine is
  not for beginners.

.seealso: [](chapter_ts), `CharacteristicType`
@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscBool match;
  PetscErrorCode (*r)(Characteristic);

  PetscObjectTypeCompare((PetscObject)c, type, &match);
  if (match) return 0;

  if (c->data) {
    /* destroy the old private Characteristic context */
    PetscUseTypeMethod(c, destroy);
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  PetscFunctionListFind(CharacteristicList, type, &r);
  c->setupcalled = 0;
  (*r)(c);
  PetscObjectChangeTypeName((PetscObject)c, type);
  return 0;
}
/*@
  CharacteristicSetUp - Sets up the internal data structures for the
  later use of the solver.

  Collective

  Input Parameter:
. c - the method of characteristics context obtained from CharacteristicCreate()

  Level: developer

.seealso: [](chapter_ts), `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  if (!((PetscObject)c)->type_name) CharacteristicSetType(c, CHARACTERISTICDA);

  if (c->setupcalled == 2) return 0;

  PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL);
  if (!c->setupcalled) PetscUseTypeMethod(c, setup);
  PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL);
  c->setupcalled = 2;
  return 0;
}
/*@C
  CharacteristicRegister - Adds a solver to the method of characteristics package.

  Not Collective

  Input Parameters:
+ name_solver - name of a new user-defined solver
- routine_create - routine to create method context

  Level: advanced

  Sample usage:
.vb
  CharacteristicRegister("my_char", MyCharCreate);
.ve

  Then, your Characteristic type can be chosen with the procedural interface via
.vb
  CharacteristicCreate(MPI_Comm comm, Characteristic *c);
  CharacteristicSetType(c, "my_char");
.ve
  or at runtime via the option
.vb
  -characteristic_type my_char
.ve

  Notes:
  CharacteristicRegister() may be called multiple times to add several user-defined solvers.

.seealso: [](chapter_ts), `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
@*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  CharacteristicInitializePackage();
  PetscFunctionListAdd(&CharacteristicList, sname, function);
  return 0;
}
PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  return 0;
}
PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  return 0;
}
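
/*
  A sketch of a matching local interpolation callback (illustrative, not part of
  this file): bilinear interpolation from the ghosted array obtained with
  DMDAVecGetArray(), assuming the DMDA dof equals numComp and that (ij[0], ij[1])
  lies within the ghosted region:

    PetscErrorCode InterpVelocity2D(void *f, PetscReal ij[], PetscInt numComp, PetscInt components[], PetscScalar values[], void *ctx)
    {
      PetscScalar **v = (PetscScalar **)f;
      PetscInt      i = (PetscInt)ij[0], j = (PetscInt)ij[1], comp;
      PetscReal     a = ij[0] - i, b = ij[1] - j;

      for (comp = 0; comp < numComp; comp++) {
        values[comp] = (1. - a) * (1. - b) * v[j][i * numComp + components[comp]]
                     + a * (1. - b) * v[j][(i + 1) * numComp + components[comp]]
                     + (1. - a) * b * v[j + 1][i * numComp + components[comp]]
                     + a * b * v[j + 1][(i + 1) * numComp + components[comp]];
      }
      return 0;
    }
*/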
PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
#if 0
#endif
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  return 0;
}
PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
#if 0
#endif
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  return 0;
}
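
/*
  A field interpolation callback has the same shape as the velocity sketch above,
  with the ghosted field array as its first argument and the numComponents
  requested components written to values[].
*/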
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar           **solArray;
  void                   *velocityArray;
  void                   *velocityArrayOld;
  void                   *fieldArray;
  PetscScalar            *interpIndices;
  PetscScalar            *velocityValues, *velocityValuesOld;
  PetscScalar            *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  c->queueSize = 0;
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  DMDAGetNeighborsRank(da, neighbors);
  CharacteristicSetNeighbors(c, 9, neighbors);
  CharacteristicSetUp(c);
  /* global and local grid info */
  DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
  DMDAGetLocalInfo(da, &info);
  is = info.xs;
  ie = info.xs + info.xm;
  js = info.ys;
  je = info.ys + info.ym;
  /* Allocation */
  PetscMalloc1(dim, &interpIndices);
  PetscMalloc1(c->numVelocityComp, &velocityValues);
  PetscMalloc1(c->numVelocityComp, &velocityValuesOld);
  PetscMalloc1(c->numFieldComp, &fieldValues);
  PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL);
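
  /*
     A sketch of the semi-Lagrangian update implemented below (x_i is a grid point):
       1) half-step departure point:  X_{1/2} = x_i - (dt/2) v^n(x_i)
       2) midpoint velocity:          v_{1/2} = [v^n(X_{1/2}) + v^{n-1}(X_{1/2})] / 2
       3) full departure point:       X = x_i - dt v_{1/2}
       4) transport:                  phi^{new}(x_i) = phi^n(X)
     Departure points that fall on a neighboring process are shipped to the owning
     rank, evaluated there, and the values returned in the Exchange phases.
  */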
  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL);
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    DMGetLocalVector(c->velocityDA, &velocityLocal);
    DMGetLocalVector(c->velocityDA, &velocityLocalOld);
    DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
  }
  PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n");
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      else c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
      Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for Periodic boundaries and move all periodic points back onto the domain */
      DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));
      CharacteristicAddPoint(c, &Qi);
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL);
  /* Calculate velocity at t_{n - 1/2} (local values) */
  PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      } else {
        c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      }
      Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);

  /* Calculate velocity at t_{n - 1/2} (fill remote requests) */
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi               = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    } else {
      c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    }
    Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
    Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->velocityInterpLocal) {
    DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
    DMRestoreLocalVector(c->velocityDA, &velocityLocal);
    DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_n (local values) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating position at t_{n}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    Qi.x = Qi.i - Qi.x * dt;
    Qi.y = Qi.j - Qi.y * dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for Periodic boundaries and move all periodic points back onto the domain */
    DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));

    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);
  if (c->fieldInterpLocal) {
    DMGetLocalVector(c->fieldDA, &fieldLocal);
    DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
  }
  PetscInfo(NULL, "Calculating local field at t_{n}\n");
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
      else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0];
      PetscScalar jm = interpIndices[1];
    }

    if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
    else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->fieldInterpLocal) {
    DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
    DMRestoreLocalVector(c->fieldDA, &fieldLocal);
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);

  /* Return field of characteristics at t_n-1 */
  PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL);
  DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL);
  DMDAVecGetArray(c->fieldDA, solution, &solArray);
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
  }
  DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
  PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL);
  PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL);

  /* Cleanup */
  PetscFree(interpIndices);
  PetscFree(velocityValues);
  PetscFree(velocityValuesOld);
  PetscFree(fieldValues);
  return 0;
}
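
/*
  A typical advection step built on this routine (a sketch; the DMs, Vecs,
  component arrays, callbacks, and user context are hypothetical application
  variables):

    CharacteristicSetVelocityInterpolationLocal(c, velDA, vNew, vOld, 2, velComp, InterpVelocity2D, &user);
    CharacteristicSetFieldInterpolationLocal(c, fieldDA, phi, 1, fieldComp, InterpField2D, &user);
    CharacteristicSolve(c, dt, phiNew);

  after which phiNew holds the field transported along the characteristics over dt.
*/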
PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  c->numNeighbors = numNeighbors;
  PetscFree(c->neighbors);
  PetscMalloc1(numNeighbors, &c->neighbors);
  PetscArraycpy(c->neighbors, neighbors, numNeighbors);
  return 0;
}
PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  c->queue[c->queueSize++] = *point;
  return 0;
}
PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  CharacteristicHeapSort(c, c->queue, c->queueSize);
  PetscArrayzero(c->needCount, c->numNeighbors);
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  for (n = 1; n < c->numNeighbors; n++) MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0] = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax += c->fillCount[n];
    c->localOffsets[n] = c->queueLocalMax;
    c->queueLocalMax += c->needCount[n];
  }
  /* HACK BEGIN */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) {
    PetscMalloc1(c->queueRemoteMax, &c->queueRemote);
  } else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and receive requests for values, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
    MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values to proc %d\n", c->needCount[n], c->neighbors[n]);
    MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return 0;
}
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
#if 0
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  for (n = 0; n < c->queueRemoteSize; n++) {
  }
#endif
  return 0;
}
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  /* Send and receive the filled requests (interpolated values) back to the requesting processes */
  for (n = 1; n < c->numNeighbors; n++) MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  for (n = 1; n < c->numNeighbors; n++) MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  return 0;
}
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
  /* Free queue of requests from other procs */
  PetscFree(c->queueRemote);
  return 0;
}
/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
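/*
  The queue is sorted by owning neighbor index so that each neighbor's points
  occupy one contiguous run; e.g. proc values [3, 0, 2, 0, 3] sort to
  [0, 0, 2, 3, 3], which is what the offset arithmetic in
  CharacteristicSendCoordinatesBegin() relies on.
*/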
PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
/*---------------------------------------------------------------------*/
{
  CharacteristicPointDA2D temp;
  PetscInt                n;

  if (0) { /* Check the order of the queue before sorting */
    PetscInfo(NULL, "Before Heap sort\n");
    for (n = 0; n < size; n++) PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc);
  }

  /* SORTING PHASE */
  for (n = (size / 2) - 1; n >= 0; n--) { CharacteristicSiftDown(c, queue, n, size - 1); /* Rich had size-1 here, Matt had size */ }
  for (n = size - 1; n >= 1; n--) {
    temp     = queue[0];
    queue[0] = queue[n];
    queue[n] = temp;
    CharacteristicSiftDown(c, queue, 0, n - 1);
  }
  if (0) { /* Check the order of the queue after sorting */
    PetscInfo(NULL, "After Heap sort\n");
    for (n = 0; n < size; n++) PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc);
  }
  return 0;
}
/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  while ((root * 2 <= bottom) && (!done)) {
    if (root * 2 == bottom) maxChild = root * 2;
    else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE;
  }
  return 0;
}
/* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
{
  DMBoundaryType bx, by;
  PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt     **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;

  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL);

  if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
  if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

  neighbors[0] = rank;
  rank         = 0;
  PetscMalloc1(PJ, &procs);
  for (pj = 0; pj < PJ; pj++) {
    PetscMalloc1(PI, &(procs[pj]));
    for (pi = 0; pi < PI; pi++) {
      procs[pj][pi] = rank;
      rank++;
    }
  }

  pi  = neighbors[0] % PI;
  pj  = neighbors[0] / PI;
  pim = pi - 1;
  if (pim < 0) pim = PI - 1;
  pip = (pi + 1) % PI;
  pjm = pj - 1;
  if (pjm < 0) pjm = PJ - 1;
  pjp = (pj + 1) % PJ;

  neighbors[1] = procs[pj][pim];
  neighbors[2] = procs[pjp][pim];
  neighbors[3] = procs[pjp][pi];
  neighbors[4] = procs[pjp][pip];
  neighbors[5] = procs[pj][pip];
  neighbors[6] = procs[pjm][pip];
  neighbors[7] = procs[pjm][pi];
  neighbors[8] = procs[pjm][pim];

  if (!IPeriodic) {
    if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
    if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
  }

  if (!JPeriodic) {
    if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
    if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
  }

  for (pj = 0; pj < PJ; pj++) PetscFree(procs[pj]);
  PetscFree(procs);
  return 0;
}
/*
  SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
    2 | 3 | 4
    __|___|__
    1 | 0 | 5
    __|___|__
    8 | 7 | 6
      |   |
*/
PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
{
  DMDALocalInfo info;
  PetscReal     is, ie, js, je;

  DMDAGetLocalInfo(da, &info);
  is = (PetscReal)info.xs - 0.5;
  ie = (PetscReal)info.xs + info.xm - 0.5;
  js = (PetscReal)info.ys - 0.5;
  je = (PetscReal)info.ys + info.ym - 0.5;

  if (ir >= is && ir <= ie) { /* center column */
    if (jr >= js && jr <= je) return 0;
    else if (jr < js) return 7;
    else return 3;
  } else if (ir < is) { /* left column */
    if (jr >= js && jr <= je) return 1;
    else if (jr < js) return 8;
    else return 2;
  } else { /* right column */
    if (jr >= js && jr <= je) return 5;
    else if (jr < js) return 6;
    else return 4;
  }
}