Actual source code: ex5.c
static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";

/*F
     This example is taken from the book Numerical Solution of Time-Dependent Advection-Diffusion-Reaction Equations by
     W. Hundsdorfer and J.G. Verwer, page 21, Pattern Formation with Reaction-Diffusion Equations
\begin{eqnarray*}
   u_t &=& D_1 (u_{xx} + u_{yy}) - u v^2 + \gamma(1 - u) \\
   v_t &=& D_2 (v_{xx} + v_{yy}) + u v^2 - (\gamma + \kappa) v
\end{eqnarray*}
     Unlike in the book, this example uses periodic boundary conditions instead of Neumann
     (since they are easier for finite differences).
F*/

/*
      Helpful runtime monitor options:
           -ts_monitor_draw_solution
           -draw_save -draw_save_movie

      Helpful runtime linear solver options:
           -pc_type mg -pc_mg_galerkin pmat -da_refine 1 -snes_monitor -ksp_monitor -ts_view
           (note that these Jacobians are so well-conditioned that multigrid may not be the best solver)

      Point your browser to localhost:8080 to monitor the simulation:
           ./ex5 -ts_view_pre saws -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .
*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use the TS (ODE) integrators. Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h  - vectors
     petscmat.h    - matrices                petscis.h   - index sets
     petscksp.h    - Krylov subspace methods petscpc.h   - preconditioners
     petscviewer.h - viewers                 petscsnes.h - nonlinear solvers
*/
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>
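
/*
   The right-hand side function and Jacobian for this system are implemented in
   reaction_diffusion.c and declared in reaction_diffusion.h. As a rough guide to
   what RHSFunction() computes, the sketch below is a minimal, illustrative version
   (the name RHSFunctionSketch is hypothetical; it is not called anywhere in this
   example). It assumes the Field struct has members u and v and that AppCtx carries
   D1, D2, gamma, and kappa, as used elsewhere in this file, and it discretizes the
   Laplacian with the standard second-order 5-point stencil on the doubly periodic grid.
*/
PetscErrorCode RHSFunctionSketch(TS ts, PetscReal t, Vec U, Vec F, void *ptr)
{
  AppCtx   *appctx = (AppCtx *)ptr;
  DM        da;
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  PetscReal hx, hy, sx, sy, uc, vc, uxx, uyy, vxx, vyy;
  Field   **u, **f;
  Vec       localU;

  PetscFunctionBeginUser;
  (void)t; /* the right-hand side is autonomous */
  PetscCall(TSGetDM(ts, &da));
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));
  hx = 2.5 / (PetscReal)Mx;
  sx = 1.0 / (hx * hx);
  hy = 2.5 / (PetscReal)My;
  sy = 1.0 / (hy * hy);

  /* Scatter ghost values so the stencil can reach across process boundaries
     (and across the periodic wrap-around) */
  PetscCall(DMGetLocalVector(da, &localU));
  PetscCall(DMGlobalToLocalBegin(da, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(da, U, INSERT_VALUES, localU));

  PetscCall(DMDAVecGetArrayRead(da, localU, &u));
  PetscCall(DMDAVecGetArray(da, F, &f));
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      uc  = u[j][i].u;
      vc  = u[j][i].v;
      uxx = (-2.0 * uc + u[j][i - 1].u + u[j][i + 1].u) * sx;
      uyy = (-2.0 * uc + u[j - 1][i].u + u[j + 1][i].u) * sy;
      vxx = (-2.0 * vc + u[j][i - 1].v + u[j][i + 1].v) * sx;
      vyy = (-2.0 * vc + u[j - 1][i].v + u[j + 1][i].v) * sy;
      /* the reaction-diffusion equations from the header comment */
      f[j][i].u = appctx->D1 * (uxx + uyy) - uc * vc * vc + appctx->gamma * (1.0 - uc);
      f[j][i].v = appctx->D2 * (vxx + vyy) + uc * vc * vc - (appctx->gamma + appctx->kappa) * vc;
    }
  }

  PetscCall(DMDAVecRestoreArrayRead(da, localU, &u));
  PetscCall(DMDAVecRestoreArray(da, F, &f));
  PetscCall(DMRestoreLocalVector(da, &localU));
  PetscFunctionReturn(PETSC_SUCCESS);
}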

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da, Vec U)
{
  PetscInt  i, j, xs, ys, xm, ym, Mx, My;
  Field   **u;
  PetscReal hx, hy, x, y;

  PetscFunctionBeginUser;
  PetscCall(DMDAGetInfo(da, PETSC_IGNORE, &Mx, &My, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE));

  hx = 2.5 / (PetscReal)Mx;
  hy = 2.5 / (PetscReal)My;

  /*
     Get pointers to actual vector data
  */
  PetscCall(DMDAVecGetArray(da, U, &u));

  /*
     Get local grid boundaries
  */
  PetscCall(DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j = ys; j < ys + ym; j++) {
    y = j * hy;
    for (i = xs; i < xs + xm; i++) {
      x = i * hx;
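      /* v starts as a sin^2 bump confined to the square [1, 1.5] x [1, 1.5];
         elsewhere the system sits at the trivial steady state u = 1, v = 0 */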
      if (PetscApproximateGTE(x, 1.0) && PetscApproximateLTE(x, 1.5) && PetscApproximateGTE(y, 1.0) && PetscApproximateLTE(y, 1.5))
        u[j][i].v = PetscPowReal(PetscSinReal(4.0 * PETSC_PI * x), 2.0) * PetscPowReal(PetscSinReal(4.0 * PETSC_PI * y), 2.0) / 4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0 * u[j][i].v;
    }
  }

  /*
     Restore access to vector
  */
  PetscCall(DMDAVecRestoreArray(da, U, &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  TS     ts; /* ODE integrator */
  Vec    x;  /* solution */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
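  /* 65 x 65 doubly periodic grid with 2 degrees of freedom per node (u and v)
     and stencil width 1 */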
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DMDA_STENCIL_STAR, 65, 65, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u"));
  PetscCall(DMDASetFieldName(da, 1, "v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create global vector from DMDA; this will be used to store the solution
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMCreateGlobalVector(da, &x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
  PetscCall(TSSetType(ts, TSARKIMEX));
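  /* Treat the entire right-hand side implicitly rather than with ARKIMEX's
     default implicit-explicit splitting */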
  PetscCall(TSARKIMEXSetFullyImplicit(ts, PETSC_TRUE));
  PetscCall(TSSetDM(ts, da));
  PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
  PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
  PetscCall(TSSetRHSJacobian(ts, NULL, NULL, RHSJacobian, &appctx));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(InitialConditions(da, x));
  PetscCall(TSSetSolution(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSetMaxTime(ts, 2000.0));
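  /* This sets only the initial step size; TSARKIMEX's adaptive controller,
     which is enabled by default, adjusts it as the integration proceeds */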
  PetscCall(TSSetTimeStep(ts, .0001));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
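  /* Allow everything set above to be overridden from the command line,
     e.g. with the runtime options listed in the comment at the top of this file */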
  PetscCall(TSSetFromOptions(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(TSSolve(ts, x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(VecDestroy(&x));
  PetscCall(TSDestroy(&ts));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
     depends: reaction_diffusion.c

   test:
     args: -ts_view -ts_monitor -ts_max_time 500
     requires: double
     timeoutfactor: 3

   test:
     suffix: 2
     args: -ts_view -ts_monitor -ts_max_time 500 -ts_monitor_draw_solution
     requires: x double
     output_file: output/ex5_1.out
     timeoutfactor: 3

TEST*/