Actual source code: ex12.c
static char help[] = "Tests PetscObjectSetOptions() for TS object\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

       u_t = u * u_xx / (2*(t+1)^2)

   on the domain 0 <= x <= 1, with boundary conditions
       u(t,0) = t + 1,  u(t,1) = 2*t + 2,
   and initial condition
       u(0,x) = 1 + x*x.

   The exact solution is:
       u(t,x) = (1 + x*x) * (1 + t)

   Note that since the solution is linear in time and quadratic in x,
   the finite difference scheme actually computes the "exact" solution.

   We use by default the backward Euler method.

   ------------------------------------------------------------------------- */
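/*
   Quick check of the stated exact solution: with u(t,x) = (1 + x*x) * (1 + t) we have
   u_t = 1 + x*x and u_xx = 2*(1 + t), so

       u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t) * 2*(1 + t) / (2*(t+1)^2) = 1 + x*x = u_t,

   and the boundary values u(t,0) = 1 + t and u(t,1) = 2*(1 + t) match the conditions above.
*/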
/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include the "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx       appctx;                 /* user-defined application context */
  TS           ts;                     /* timestepping context */
  Mat          A;                      /* Jacobian matrix data structure */
  Vec          u;                      /* approximate solution vector */
  PetscInt     time_steps_max = 100;   /* default max timesteps */
  PetscReal    dt;
  PetscReal    time_total_max = 100.0; /* default max total time */
  PetscOptions options, optionscopy;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc, &argv, (char *)0, help);

  PetscOptionsCreate(&options);
  PetscOptionsSetValue(options, "-ts_monitor", "ascii");
  PetscOptionsSetValue(options, "-snes_monitor", "ascii");
  PetscOptionsSetValue(options, "-ksp_monitor", "ascii");
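  /*
     These entries live in a private options database created with PetscOptionsCreate();
     it is separate from the global database and is not populated from the command line.
     PETSc objects see these entries only if the database is attached to them with
     PetscObjectSetOptions() (or passed explicitly to query routines such as
     PetscOptionsGetInt()), which is what this example exercises.
  */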
  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscOptionsGetInt(options, NULL, "-M", &appctx.m, NULL);

  appctx.h = 1.0 / (appctx.m - 1.0);
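  /*
     The grid points are x_i = i*h for i = 0, ..., m-1, so x_0 = 0 and x_{m-1} = 1.
     Note that -M is read from the private database above, not from the command line.
  */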
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da);
  PetscObjectSetOptions((PetscObject)appctx.da, options);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da, &u);
  DMCreateLocalVector(appctx.da, &appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local, &appctx.localwork);
  VecDuplicate(u, &appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD, &ts);
  PetscObjectSetOptions((PetscObject)ts, options);
  TSSetProblemType(ts, TS_NONLINEAR);
  TSSetRHSFunction(ts, NULL, RHSFunction, &appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD, &A);
  PetscObjectSetOptions((PetscObject)A, options);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  TSSetTimeStep(ts, dt);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts, TSBEULER);
  TSSetMaxSteps(ts, time_steps_max);
  TSSetMaxTime(ts, time_total_max);
  TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);
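  /*
     Because the private options database was attached to the TS with PetscObjectSetOptions()
     above, TSSetFromOptions() queries that database rather than the global one, so the
     -ts_monitor entry set in this program takes effect; the -snes_monitor and -ksp_monitor
     entries are likewise expected to reach the inner SNES and KSP created by the TS, which
     is the behavior this test checks.
  */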
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u, &appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts, u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscObjectGetOptions((PetscObject)ts, &optionscopy);
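  /*
     Sanity check (an assumed, illustrative addition): the database returned by
     PetscObjectGetOptions() should be the one attached with PetscObjectSetOptions() above.
  */
  PetscCheck(options == optionscopy, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "PetscObjectGetOptions() did not return the options database set with PetscObjectSetOptions()");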
  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);
  PetscOptionsDestroy(&options);

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u, &mybase, &myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array. Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version. See the users manual for details.
  */
  VecGetArray(u, &u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations. Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
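  /*
     VecGetArray() on a global vector exposes only the locally owned entries, which is why
     the global index i is shifted by the ownership start mybase when indexing the array.
  */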
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }

  /*
     Restore vector
  */
  VecRestoreArray(u, &u_localptr);

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution, &mybase, &myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution, &s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution, &s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE. This routine is set in the main program by
   calling TSSetRHSFunction(). We compute:
         global_out = F(global_in)

   Input Parameters:
   ts - timestepping context
   t - current time
   global_in - vector containing the current iterate
   ctx - (optional) user-provided context for function evaluation.
         In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in);
  DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in);

  /*
     Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in, &localptr);

  /*
     Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork, &copyptr);

  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
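  /*
     sc combines the 1/h^2 factor of the centered second difference for u_xx with the
     1/(2*(t+1)^2) factor of the PDE, so the interior update below computes
         F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (h^2 * 2*(t+1)^2).
  */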
  /*
     Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in, &localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm, &rank);
  MPI_Comm_size(appctx->comm, &size);
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);

  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in, &localptr);
  VecRestoreArray(localwork, &copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out);
  DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out);

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix

   Notes:
   RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global row and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in);
  DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in, &localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(BB, &mstarts, &mends);
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
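  /*
     On the boundary rows the right-hand side is constant in u (u_t = 1 at x = 0 and
     u_t = 2 at x = 1), so its derivative with respect to u vanishes and the diagonal
     entry is set to zero.
  */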
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data. We construct the
     matrix one row at a time.
  */
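  /*
     For an interior node the right-hand side is F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),
     so the Jacobian entries in row i are
        dF_i/du_{i-1} = sc * u_i,
        dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i),
        dF_i/du_{i+1} = sc * u_i,
     where the u values are read from the ghosted local array below.
  */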
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES);
  }

  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in, &localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
        MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY);
  if (BB != AA) {
    MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY);
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE);

  return 0;
}

/*TEST

    test:
      requires: !single

TEST*/