Actual source code: ex5adj_mf.c
static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
  See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
  It computes the sensitivity of a component in the final solution, which is located at the center of the 2D domain, w.r.t. the initial conditions.
  The user does not need to provide any additional functions. The required functions in the original simulation are reused in the adjoint run.

  Runtime options:
    -forwardonly  - run the forward simulation without adjoint
    -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
    -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
*/
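/*
  Illustrative invocations (the executable name ex5adj_mf is assumed; adapt to your build):
    ./ex5adj_mf -forwardonly                                      forward simulation only, implicit form (default)
    mpiexec -n 4 ./ex5adj_mf -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor
                                                                  matrix-free forward and adjoint run on 4 MPI ranks
*/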
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

/* Matrix free support */
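/*
  Context attached to the shell (matrix-free) Jacobian. The multiply routines below need the
  current forward solution U (and, for the implicit form, the shift passed to IJacobian) in
  order to apply the Jacobian without assembling it.
*/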
typedef struct {
  PetscReal time;
  Vec       U;
  Vec       Udot;
  PetscReal shift;
  AppCtx   *appctx;
  TS        ts;
} MCtx;

/*
   User-defined routines
*/
PetscErrorCode InitialConditions(DM, Vec);
PetscErrorCode RHSJacobianShell(TS, PetscReal, Vec, Mat, Mat, void *);
PetscErrorCode IJacobianShell(TS, PetscReal, Vec, Vec, PetscReal, Mat, Mat, void *);
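/*
  Seed the adjoint variable at the final time. The cost functional is the value of the solution
  (both the u and v components) at the grid point closest to (x,y), given as fractions of the
  domain, so the corresponding entries of lambda are set to 1 on the process that owns that point.
*/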
PetscErrorCode InitializeLambda(DM da, Vec lambda, PetscReal x, PetscReal y)
{
  PetscInt i, j, Mx, My, xs, ys, xm, ym;
  Field  **l;

  DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE);
  /* locate the global i index for x and j index for y */
  i = (PetscInt)(x * (Mx - 1));
  j = (PetscInt)(y * (My - 1));
  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);

  if (xs <= i && i < xs + xm && ys <= j && j < ys + ym) {
    /* the i,j vertex is on this process */
    DMDAVecGetArray(da, lambda, &l);
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    DMDAVecRestoreArray(da, lambda, &l);
  }
  return 0;
}
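/*
  Matrix-free product with the transpose of the RHS Jacobian: Y = J(U)^T X, where J is the
  Jacobian of the reaction-diffusion right-hand side and U is the forward solution stored in
  the shell context. For the explicit form this is the only operation registered on the shell
  matrix, since the adjoint run only needs transposed-Jacobian products.
*/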
static PetscErrorCode MyRHSMatMultTranspose(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  MatShellGetContext(A_shell, &mctx);
  appctx = mctx->appctx;
  TSGetDM(mctx->ts, &da);
  DMGetLocalVector(da, &localX);
  DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE);
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX);
  DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX);

  /* Get pointers to vector data */
  DMDAVecGetArrayRead(da, localX, &x);
  DMDAVecGetArrayRead(da, mctx->U, &u);
  DMDAVecGetArray(da, Y, &y);

  /* Get local grid boundaries */
  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - ucb * (vc * vc + appctx->gamma) + vc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) - 2.0 * uc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
    }
  }
  DMDAVecRestoreArrayRead(da, localX, &x);
  DMDAVecRestoreArrayRead(da, mctx->U, &u);
  DMDAVecRestoreArray(da, Y, &y);
  DMRestoreLocalVector(da, &localX);
  return 0;
}
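/*
  Matrix-free product with the transpose of the IJacobian: Y = (shift*I - J(U))^T X, matching
  the implicit form F(t,U,Udot) = Udot - f(U), whose Jacobian is shift*I - J. The adjoint run
  of the implicit integrator applies (and solves with) this transposed operator.
*/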
static PetscErrorCode MyIMatMultTranspose(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  MatShellGetContext(A_shell, &mctx);
  appctx = mctx->appctx;
  TSGetDM(mctx->ts, &da);
  DMGetLocalVector(da, &localX);
  DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE);
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX);
  DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX);

  /* Get pointers to vector data */
  DMDAVecGetArrayRead(da, localX, &x);
  DMDAVecGetArrayRead(da, mctx->U, &u);
  DMDAVecGetArray(da, Y, &y);

  /* Get local grid boundaries */
  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - ucb * (vc * vc + appctx->gamma) + vc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) - 2.0 * uc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
      y[j][i].u = mctx->shift * ucb - y[j][i].u;
      y[j][i].v = mctx->shift * vcb - y[j][i].v;
    }
  }
  DMDAVecRestoreArrayRead(da, localX, &x);
  DMDAVecRestoreArrayRead(da, mctx->U, &u);
  DMDAVecRestoreArray(da, Y, &y);
  DMRestoreLocalVector(da, &localX);
  return 0;
}
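/*
  Matrix-free product with the (untransposed) IJacobian: Y = (shift*I - J(U)) X. The forward
  implicit integrator needs this action for the linear solves inside its Newton iterations.
*/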
static PetscErrorCode MyIMatMult(Mat A_shell, Vec X, Vec Y)
{
  MCtx       *mctx;
  AppCtx     *appctx;
  DM          da;
  PetscInt    i, j, Mx, My, xs, ys, xm, ym;
  PetscReal   hx, hy, sx, sy;
  PetscScalar uc, uxx, uyy, vc, vxx, vyy, ucb, vcb;
  Field     **u, **x, **y;
  Vec         localX;

  MatShellGetContext(A_shell, &mctx);
  appctx = mctx->appctx;
  TSGetDM(mctx->ts, &da);
  DMGetLocalVector(da, &localX);
  DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE);
  hx = 2.50 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.50 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  DMGlobalToLocalBegin(da, X, INSERT_VALUES, localX);
  DMGlobalToLocalEnd(da, X, INSERT_VALUES, localX);

  /* Get pointers to vector data */
  DMDAVecGetArrayRead(da, localX, &x);
  DMDAVecGetArrayRead(da, mctx->U, &u);
  DMDAVecGetArray(da, Y, &y);

  /* Get local grid boundaries */
  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);

  /* Compute function over the locally owned part of the grid */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0 * ucb + x[j][i - 1].u + x[j][i + 1].u) * sx;
      uyy = (-2.0 * ucb + x[j - 1][i].u + x[j + 1][i].u) * sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0 * vcb + x[j][i - 1].v + x[j][i + 1].v) * sx;
      vyy = (-2.0 * vcb + x[j - 1][i].v + x[j + 1][i].v) * sy;
      y[j][i].u = appctx->D1 * (uxx + uyy) - (vc * vc + appctx->gamma) * ucb - 2.0 * uc * vc * vcb;
      y[j][i].v = appctx->D2 * (vxx + vyy) + vc * vc * ucb + (2.0 * uc * vc - appctx->gamma - appctx->kappa) * vcb;
      y[j][i].u = mctx->shift * ucb - y[j][i].u;
      y[j][i].v = mctx->shift * vcb - y[j][i].v;
    }
  }
  DMDAVecRestoreArrayRead(da, localX, &x);
  DMDAVecRestoreArrayRead(da, mctx->U, &u);
  DMDAVecRestoreArray(da, Y, &y);
  DMRestoreLocalVector(da, &localX);
  return 0;
}
int main(int argc, char **argv)
{
  TS             ts; /* ODE integrator */
  Vec            x;  /* solution */
  DM             da;
  AppCtx         appctx;
  MCtx           mctx;
  Vec            lambda[1];
  PetscBool      forwardonly = PETSC_FALSE, implicitform = PETSC_TRUE, mf = PETSC_FALSE;
  PetscLogDouble v1, v2;
#if defined(PETSC_USE_LOG)
  PetscLogStage stage;
#endif

  PetscInitialize(&argc, &argv, (char *)0, help);
  PetscOptionsGetBool(NULL, NULL, "-forwardonly", &forwardonly, NULL);
  PetscOptionsGetBool(NULL, NULL, "-implicitform", &implicitform, NULL);
  appctx.aijpc = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-aijpc", &appctx.aijpc, NULL);
  PetscOptionsGetBool(NULL, NULL, "-mf", &mf, NULL);

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  PetscLogStageRegister("MyAdjoint", &stage);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 64, 64, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetFieldName(da, 0, "u");
  DMDASetFieldName(da, 1, "v");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da, &x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD, &ts);
  TSSetDM(ts, da);
  TSSetProblemType(ts, TS_NONLINEAR);
  TSSetEquationType(ts, TS_EQ_ODE_EXPLICIT); /* fewer Jacobian evaluations when the adjoint of backward Euler is used; otherwise no effect */
  if (!implicitform) {
    TSSetType(ts, TSRK);
    TSSetRHSFunction(ts, NULL, RHSFunction, &appctx);
    TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx);
  } else {
    TSSetType(ts, TSCN);
    TSSetIFunction(ts, NULL, IFunction, &appctx);
    if (appctx.aijpc) {
      Mat A, B;

      DMSetMatType(da, MATSELL);
      DMCreateMatrix(da, &A);
      MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &B);
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      TSSetIJacobian(ts, A, B, IJacobian, &appctx);
      MatDestroy(&A);
      MatDestroy(&B);
    } else {
      TSSetIJacobian(ts, NULL, NULL, IJacobian, &appctx);
    }
  }
  if (mf) {
    PetscInt xm, ym, Mx, My, dof;
    mctx.ts     = ts;
    mctx.appctx = &appctx;
    VecDuplicate(x, &mctx.U);
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Create matrix free context
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, &dof, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE);
    DMDAGetCorners(da, NULL, NULL, NULL, &xm, &ym, NULL);
    MatCreateShell(PETSC_COMM_WORLD, dof * xm * ym, PETSC_DETERMINE, dof * Mx * My, dof * Mx * My, &mctx, &appctx.A);
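    /*
       The shell matrix reuses the row layout of the DMDA global vector. Only the operations
       the solvers actually call have to be provided: the explicit form registers just
       MatMultTranspose for the adjoint, while the implicit form below also registers MatMult
       for the forward linear solves and overrides the transpose with the IJacobian version.
    */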
    MatShellSetOperation(appctx.A, MATOP_MULT_TRANSPOSE, (void (*)(void))MyRHSMatMultTranspose);
    if (!implicitform) { /* for explicit methods only */
      TSSetRHSJacobian(ts, appctx.A, appctx.A, RHSJacobianShell, &appctx);
    } else {
      /* VecDuplicate(appctx.U,&mctx.Udot); */
      MatShellSetOperation(appctx.A, MATOP_MULT, (void (*)(void))MyIMatMult);
      MatShellSetOperation(appctx.A, MATOP_MULT_TRANSPOSE, (void (*)(void))MyIMatMultTranspose);
      TSSetIJacobian(ts, appctx.A, appctx.A, IJacobianShell, &appctx);
    }
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  InitialConditions(da, x);
  TSSetSolution(ts, x);

  /*
     Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) TSSetSaveTrajectory(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSetMaxTime(ts, 200.0);
  TSSetTimeStep(ts, 0.5);
  TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
  TSSetFromOptions(ts);

  PetscTime(&v1);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSolve(ts, x);
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the Adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    VecDuplicate(x, &lambda[0]);
    /* Reset initial conditions for the adjoint integration */
    InitializeLambda(da, lambda[0], 0.5, 0.5);
    TSSetCostGradients(ts, 1, lambda, NULL);
    PetscLogStagePush(stage);
    TSAdjointSolve(ts);
    PetscLogStagePop();
    VecDestroy(&lambda[0]);
  }
  PetscTime(&v2);
  PetscPrintf(PETSC_COMM_WORLD, " %.3lf ", v2 - v1);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecDestroy(&x);
  TSDestroy(&ts);
  DMDestroy(&da);
  if (mf) {
    VecDestroy(&mctx.U);
    /* VecDestroy(&mctx.Udot); */
    MatDestroy(&appctx.A);
  }
  PetscFinalize();
  return 0;
}

/* ------------------------------------------------------------------- */
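/*
  The shell "Jacobian" callbacks assemble nothing: they only record the current solution (and,
  for the implicit form, the shift) in the shell context so that the matrix-free multiply
  routines above can use them.
*/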
PetscErrorCode RHSJacobianShell(TS ts, PetscReal t, Vec U, Mat A, Mat BB, void *ctx)
{
  MCtx *mctx;

  MatShellGetContext(A, &mctx);
  VecCopy(U, mctx->U);
  return 0;
}

PetscErrorCode IJacobianShell(TS ts, PetscReal t, Vec U, Vec Udot, PetscReal a, Mat A, Mat BB, void *ctx)
{
  MCtx *mctx;

  MatShellGetContext(A, &mctx);
  VecCopy(U, mctx->U);
  /* VecCopy(Udot,mctx->Udot); */
  mctx->shift = a;
  return 0;
}
/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE);

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  /*
     Get pointers to vector data
  */
  DMDAVecGetArray(da, U, &u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25 * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  DMDAVecRestoreArray(da, U, &u);
  return 0;
}
/*TEST

   build:
     depends: reaction_diffusion.c
     requires: !complex !single

   test:
     requires: cams
     args: -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -ts_trajectory_max_units_ram 6 -ts_trajectory_memory_type cams
TEST*/