Actual source code: ex21.c
2: static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
3: timestepping. Runtime options include:\n\
4: -M <xg>, where <xg> = number of grid points\n\
5: -debug : Activate debugging printouts\n\
6: -nox : Deactivate x-window graphics\n\
7: -ul : lower bound\n\
8: -uh : upper bound\n\n";
10: /* ------------------------------------------------------------------------
12: This is a variation of ex2.c to solve the PDE
14: u_t = (u * u_xx) / (2*(t+1)^2)
18: with box constraints on the interior grid points
19: ul <= u(t,x) <= uh with x != 0,1
20: on the domain 0 <= x <= 1, with boundary conditions
21: u(t,0) = t + 1, u(t,1) = 2*t + 2,
22: and initial condition
23: u(0,x) = 1 + x*x.
25: The exact solution is:
26: u(t,x) = (1 + x*x) * (1 + t)
28: We use the backward Euler method by default.
30: ------------------------------------------------------------------------- */
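/*
   Sanity check: with u(t,x) = (1 + x*x)*(1 + t) we have
       u_t = 1 + x*x   and   u_xx = 2*(1 + t),
   so
       u*u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2) = 1 + x*x = u_t,
   and u also satisfies u(t,0) = t + 1, u(t,1) = 2*t + 2, and u(0,x) = 1 + x*x.
*/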
32: /*
33: Include "petscts.h" to use the PETSc timestepping routines. Note that
34: this file automatically includes "petscsys.h" and other lower-level
35: PETSc include files.
37: Include the "petscdmda.h" to allow us to use the distributed array data
38: structures to manage the parallel grid.
39: */
40: #include <petscts.h>
41: #include <petscdm.h>
42: #include <petscdmda.h>
43: #include <petscdraw.h>
45: /*
46: User-defined application context - contains data needed by the
47: application-provided callback routines.
48: */
49: typedef struct {
50: MPI_Comm comm; /* communicator */
51: DM da; /* distributed array data structure */
52: Vec localwork; /* local ghosted work vector */
53: Vec u_local; /* local ghosted approximate solution vector */
54: Vec solution; /* global exact solution vector */
55: PetscInt m; /* total number of grid points */
56: PetscReal h; /* mesh width: h = 1/(m-1) */
57: PetscBool debug; /* flag (1 indicates activation of debugging printouts) */
58: } AppCtx;
60: /*
61: User-defined routines, provided below.
62: */
63: extern PetscErrorCode InitialConditions(Vec, AppCtx *);
64: extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
65: extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
66: extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
67: extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
68: extern PetscErrorCode SetBounds(Vec, Vec, PetscScalar, PetscScalar, AppCtx *);
70: int main(int argc, char **argv)
71: {
72: AppCtx appctx; /* user-defined application context */
73: TS ts; /* timestepping context */
74: Mat A; /* Jacobian matrix data structure */
75: Vec u; /* approximate solution vector */
76: Vec r; /* residual vector */
77: PetscInt time_steps_max = 1000; /* default max timesteps */
78: PetscReal dt;
79: PetscReal time_total_max = 100.0; /* default max total time */
80: Vec xl, xu; /* Lower and upper bounds on variables */
81: PetscScalar ul = 0.0, uh = 3.0;
82: PetscBool mymonitor;
83: PetscReal bounds[] = {1.0, 3.3};
85: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
86: Initialize program and set problem parameters
87: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
90: PetscInitialize(&argc, &argv, (char *)0, help);
91: PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds);
93: appctx.comm = PETSC_COMM_WORLD;
94: appctx.m = 60;
95: PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL);
96: PetscOptionsGetScalar(NULL, NULL, "-ul", &ul, NULL);
97: PetscOptionsGetScalar(NULL, NULL, "-uh", &uh, NULL);
98: PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug);
99: PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor);
100: appctx.h = 1.0 / (appctx.m - 1.0);
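  /*
     With m grid points the nodes are x_i = i*h for i = 0,...,m-1, so x_0 = 0 and
     x_{m-1} = 1 are the boundary points; the box constraints ul <= u <= uh are
     imposed only on the interior nodes (see SetBounds() below).
  */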
102: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
103: Create vector data structures
104: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
106: /*
107: Create distributed array (DMDA) to manage parallel grid and vectors
108: and to set up the ghost point communication pattern. There are M
109: total grid values spread equally among all the processors.
110: */
111: DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da);
112: DMSetFromOptions(appctx.da);
113: DMSetUp(appctx.da);
115: /*
116: Extract global and local vectors from DMDA; we use these to store the
117: approximate solution. Then duplicate these for remaining vectors that
118: have the same types.
119: */
120: DMCreateGlobalVector(appctx.da, &u);
121: DMCreateLocalVector(appctx.da, &appctx.u_local);
123: /*
124: Create local work vector for use in evaluating right-hand-side function;
125: create global work vector for storing exact solution.
126: */
127: VecDuplicate(appctx.u_local, &appctx.localwork);
128: VecDuplicate(u, &appctx.solution);
130: /* Create residual vector */
131: VecDuplicate(u, &r);
132: /* Create lower and upper bound vectors */
133: VecDuplicate(u, &xl);
134: VecDuplicate(u, &xu);
135: SetBounds(xl, xu, ul, uh, &appctx);
137: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
138: Create timestepping solver context; set callback routine for
139: right-hand-side function evaluation.
140: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
142: TSCreate(PETSC_COMM_WORLD, &ts);
143: TSSetProblemType(ts, TS_NONLINEAR);
144: TSSetRHSFunction(ts, r, RHSFunction, &appctx);
146: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
147: Set optional user-defined monitoring routine
148: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
150: if (mymonitor) TSMonitorSet(ts, Monitor, &appctx, NULL);
152: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
153: For nonlinear problems, the user can provide a Jacobian evaluation
154: routine (or use a finite differencing approximation).
156: Create matrix data structure; set Jacobian evaluation routine.
157: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
159: MatCreate(PETSC_COMM_WORLD, &A);
160: MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m);
161: MatSetFromOptions(A);
162: MatSetUp(A);
163: TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx);
165: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
166: Set solution vector and initial timestep
167: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
169: dt = appctx.h / 2.0;
170: TSSetTimeStep(ts, dt);
172: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
173: Customize timestepping solver:
174: - Set the solution method to be the Backward Euler method.
175: - Set timestepping duration info
176: Then set runtime options, which can override these defaults.
177: For example,
178: -ts_max_steps <maxsteps> -ts_max_time <maxtime>
179: to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
180: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
182: TSSetType(ts, TSBEULER);
183: TSSetMaxSteps(ts, time_steps_max);
184: TSSetMaxTime(ts, time_total_max);
185: TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER);
186: /* Set lower and upper bound on the solution vector for each time step */
187: TSVISetVariableBounds(ts, xl, xu);
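  /*
     These bounds are enforced at every time step by a variational-inequality
     nonlinear solver; the test below selects -snes_type vinewtonrsls for this purpose.
  */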
188: TSSetFromOptions(ts);
190: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
191: Solve the problem
192: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
194: /*
195: Evaluate initial conditions
196: */
197: InitialConditions(u, &appctx);
199: /*
200: Run the timestepping solver
201: */
202: TSSolve(ts, u);
204: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
205: Free work space. All PETSc objects should be destroyed when they
206: are no longer needed.
207: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
209: VecDestroy(&r);
210: VecDestroy(&xl);
211: VecDestroy(&xu);
212: TSDestroy(&ts);
213: VecDestroy(&u);
214: MatDestroy(&A);
215: DMDestroy(&appctx.da);
216: VecDestroy(&appctx.localwork);
217: VecDestroy(&appctx.solution);
218: VecDestroy(&appctx.u_local);
220: /*
221: Always call PetscFinalize() before exiting a program. This routine
222: - finalizes the PETSc libraries as well as MPI
223: - provides summary and diagnostic information if certain runtime
224: options are chosen (e.g., -log_view).
225: */
226: PetscFinalize();
227: return 0;
228: }
229: /* --------------------------------------------------------------------- */
230: /*
231: InitialConditions - Computes the solution at the initial time.
233: Input Parameters:
234: u - uninitialized solution vector (global)
235: appctx - user-defined application context
237: Output Parameter:
238: u - vector with solution at initial time (global)
239: */
240: PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
241: {
242: PetscScalar *u_localptr, h = appctx->h, x;
243: PetscInt i, mybase, myend;
245: /*
246: Determine starting point of each processor's range of
247: grid values.
248: */
249: VecGetOwnershipRange(u, &mybase, &myend);
251: /*
252: Get a pointer to vector data.
253: - For default PETSc vectors, VecGetArray() returns a pointer to
254: the data array. Otherwise, the routine is implementation dependent.
255: - You MUST call VecRestoreArray() when you no longer need access to
256: the array.
257: - Note that the Fortran interface to VecGetArray() differs from the
258: C version. See the users manual for details.
259: */
260: VecGetArray(u, &u_localptr);
262: /*
263: We initialize the solution array by simply writing the solution
264: directly into the array locations. Alternatively, we could use
265: VecSetValues() or VecSetValuesLocal().
266: */
267: for (i = mybase; i < myend; i++) {
268: x = h * (PetscReal)i; /* current location in global grid */
269: u_localptr[i - mybase] = 1.0 + x * x;
270: }
272: /*
273: Restore vector
274: */
275: VecRestoreArray(u, &u_localptr);
277: /*
278: Print debugging information if desired
279: */
280: if (appctx->debug) {
281: PetscPrintf(appctx->comm, "initial guess vector\n");
282: VecView(u, PETSC_VIEWER_STDOUT_WORLD);
283: }
285: return 0;
286: }
288: /* --------------------------------------------------------------------- */
289: /*
290: SetBounds - Sets the lower and upper bounds on the interior points
292: Input parameters:
293: xl - vector of lower bounds
294: xu - vector of upper bounds
295: ul - constant lower bound for all variables
296: uh - constant upper bound for all variables
297: appctx - Application context
298: */
299: PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh, AppCtx *appctx)
300: {
301: PetscScalar *l, *u;
302: PetscMPIInt rank, size;
303: PetscInt localsize;
306: VecSet(xl, ul);
307: VecSet(xu, uh);
308: VecGetLocalSize(xl, &localsize);
309: VecGetArray(xl, &l);
310: VecGetArray(xu, &u);
312: MPI_Comm_rank(appctx->comm, &rank);
313: MPI_Comm_size(appctx->comm, &size);
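  /*
     The two physical boundary points are determined by the Dirichlet data, so they
     are left unconstrained by setting their bounds to -infinity and +infinity.
  */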
314: if (rank == 0) {
315: l[0] = -PETSC_INFINITY;
316: u[0] = PETSC_INFINITY;
317: }
318: if (rank == size - 1) {
319: l[localsize - 1] = -PETSC_INFINITY;
320: u[localsize - 1] = PETSC_INFINITY;
321: }
322: VecRestoreArray(xl, &l);
323: VecRestoreArray(xu, &u);
324: return 0;
325: }
327: /* --------------------------------------------------------------------- */
328: /*
329: ExactSolution - Computes the exact solution at a given time.
331: Input Parameters:
332: t - current time
333: solution - vector in which exact solution will be computed
334: appctx - user-defined application context
336: Output Parameter:
337: solution - vector with the newly computed exact solution
338: */
339: PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
340: {
341: PetscScalar *s_localptr, h = appctx->h, x;
342: PetscInt i, mybase, myend;
344: /*
345: Determine starting and ending points of each processor's
346: range of grid values
347: */
348: VecGetOwnershipRange(solution, &mybase, &myend);
350: /*
351: Get a pointer to vector data.
352: */
353: VecGetArray(solution, &s_localptr);
355: /*
356: Simply write the solution directly into the array locations.
357: Alternatively, we could use VecSetValues() or VecSetValuesLocal().
358: */
359: for (i = mybase; i < myend; i++) {
360: x = h * (PetscReal)i;
361: s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
362: }
364: /*
365: Restore vector
366: */
367: VecRestoreArray(solution, &s_localptr);
368: return 0;
369: }
370: /* --------------------------------------------------------------------- */
371: /*
372: Monitor - User-provided routine to monitor the solution computed at
373: each timestep. This example plots the solution and computes the
374: error in two different norms.
376: Input Parameters:
377: ts - the timestep context
378: step - the count of the current step (with 0 meaning the
379: initial condition)
380: time - the current time
381: u - the solution at this timestep
382: ctx - the user-provided context for this monitoring routine.
383: In this case we use the application context which contains
384: information about the problem size, workspace and the exact
385: solution.
386: */
387: PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
388: {
389: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
390: PetscReal en2, en2s, enmax;
391: PetscDraw draw;
393: /*
394: We use the default X windows viewer
395: PETSC_VIEWER_DRAW_(appctx->comm)
396: that is associated with the current communicator. This saves
397: the effort of calling PetscViewerDrawOpen() to create the window.
398: Note that if we wished to plot several items in separate windows we
399: would create each viewer with PetscViewerDrawOpen() and store them in
400: the application context, appctx.
402: Double buffering makes the graphics look better.
403: */
404: PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw);
405: PetscDrawSetDoubleBuffer(draw);
406: VecView(u, PETSC_VIEWER_DRAW_(appctx->comm));
408: /*
409: Compute the exact solution at this timestep
410: */
411: ExactSolution(time, appctx->solution, appctx);
413: /*
414: Print debugging information if desired
415: */
416: if (appctx->debug) {
417: PetscPrintf(appctx->comm, "Computed solution vector\n");
418: VecView(u, PETSC_VIEWER_STDOUT_WORLD);
419: PetscPrintf(appctx->comm, "Exact solution vector\n");
420: VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD);
421: }
423: /*
424: Compute the 2-norm and max-norm of the error
425: */
426: VecAXPY(appctx->solution, -1.0, u);
427: VecNorm(appctx->solution, NORM_2, &en2);
428: en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
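  /*
     This scaling approximates the continuous L2 norm of the error, since
     sum_i e_i^2 * h ~= integral e(x)^2 dx implies sqrt(h)*||e||_2 ~= ||e||_L2.
  */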
429: VecNorm(appctx->solution, NORM_MAX, &enmax);
431: /*
432: PetscPrintf() causes only the first processor in this
433: communicator to print the timestep information.
434: */
435: PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g, 2-norm error = %g, max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax);
437: /*
438: Print debugging information if desired
439: */
440: /* if (appctx->debug) {
441: PetscPrintf(appctx->comm,"Error vector\n");
442: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
443: } */
444: return 0;
445: }
446: /* --------------------------------------------------------------------- */
447: /*
448: RHSFunction - User-provided routine that evaluates the right-hand-side
449: function of the ODE. This routine is set in the main program by
450: calling TSSetRHSFunction(). We compute:
451: global_out = F(global_in)
453: Input Parameters:
454: ts - timestepping context
455: t - current time
456: global_in - vector containing the current iterate
457: ctx - (optional) user-provided context for function evaluation.
458: In this case we use the appctx defined above.
460: Output Parameter:
461: global_out - vector containing the newly evaluated function
462: */
463: PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
464: {
465: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
466: DM da = appctx->da; /* distributed array */
467: Vec local_in = appctx->u_local; /* local ghosted input vector */
468: Vec localwork = appctx->localwork; /* local ghosted work vector */
469: PetscInt i, localsize;
470: PetscMPIInt rank, size;
471: PetscScalar *copyptr, sc;
472: const PetscScalar *localptr;
474: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
475: Get ready for local function computations
476: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
477: /*
478: Scatter ghost points to local vector, using the 2-step process
479: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
480: By placing code between these two statements, computations can be
481: done while messages are in transition.
482: */
483: DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in);
484: DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in);
486: /*
487: Access directly the values in our local INPUT work array
488: */
489: VecGetArrayRead(local_in, &localptr);
491: /*
492: Access directly the values in our local OUTPUT work array
493: */
494: VecGetArray(localwork, &copyptr);
496: sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
498: /*
499: Evaluate our function on the nodes owned by this processor
500: */
501: VecGetLocalSize(local_in, &localsize);
503: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
504: Compute entries for the locally owned part
505: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
507: /*
508: Handle boundary conditions: This is done by using the boundary condition
509: u(t,boundary) = g(t,boundary)
510: for some function g. Now take the derivative with respect to t to obtain
511: u_{t}(t,boundary) = g_{t}(t,boundary)
513: In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
514: and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
515: */
516: MPI_Comm_rank(appctx->comm, &rank);
517: MPI_Comm_size(appctx->comm, &size);
518: if (rank == 0) copyptr[0] = 1.0;
519: if (rank == size - 1) copyptr[localsize - 1] = (t < .5) ? 2.0 : 0.0;
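  /*
     Note that for t >= 0.5 the right-boundary forcing is switched from 2 to 0, so the
     computed solution stops tracking the exact solution there; presumably this is what
     pushes the solution against the upper bound uh.
  */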
521: /*
522: Handle the interior nodes where the PDE is replaced by finite
523: difference operators.
524: */
525: for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
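  /*
     With sc = 1/(2*h^2*(1+t)^2), each interior entry is the second-order central difference
         F_i = u_i * (u_{i+1} - 2*u_i + u_{i-1}) / (2*h^2*(1+t)^2),
     which discretizes the right-hand side u*u_xx / (2*(t+1)^2).
  */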
527: /*
528: Restore vectors
529: */
530: VecRestoreArrayRead(local_in, &localptr);
531: VecRestoreArray(localwork, &copyptr);
533: /*
534: Insert values from the local OUTPUT vector into the global
535: output vector
536: */
537: DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out);
538: DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out);
540: /* Print debugging information if desired */
541: /* if (appctx->debug) {
542: PetscPrintf(appctx->comm,"RHS function vector\n");
543: VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
544: } */
546: return 0;
547: }
548: /* --------------------------------------------------------------------- */
549: /*
550: RHSJacobian - User-provided routine to compute the Jacobian of
551: the nonlinear right-hand-side function of the ODE.
553: Input Parameters:
554: ts - the TS context
555: t - current time
556: global_in - global input vector
557: ctx - optional user-defined context, as set by TSSetRHSJacobian()
559: Output Parameters:
560: AA - Jacobian matrix
561: B - optionally different matrix from which the preconditioner is constructed
564: Notes:
565: RHSJacobian computes entries for the locally owned part of the Jacobian.
566: - Currently, all PETSc parallel matrix formats are partitioned by
567: contiguous chunks of rows across the processors.
568: - Each processor needs to insert only elements that it owns
569: locally (but any non-local elements will be sent to the
570: appropriate processor during matrix assembly).
571: - Always specify global row and columns of matrix entries when
572: using MatSetValues().
573: - Here, we set all entries for a particular row at once.
574: - Note that MatSetValues() uses 0-based row and column numbers
575: in Fortran as well as in C.
576: */
577: PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat B, void *ctx)
578: {
579: AppCtx *appctx = (AppCtx *)ctx; /* user-defined application context */
580: Vec local_in = appctx->u_local; /* local ghosted input vector */
581: DM da = appctx->da; /* distributed array */
582: PetscScalar v[3], sc;
583: const PetscScalar *localptr;
584: PetscInt i, mstart, mend, mstarts, mends, idx[3], is;
586: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
587: Get ready for local Jacobian computations
588: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
589: /*
590: Scatter ghost points to local vector, using the 2-step process
591: DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
592: By placing code between these two statements, computations can be
593: done while messages are in transition.
594: */
595: DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in);
596: DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in);
598: /*
599: Get pointer to vector data
600: */
601: VecGetArrayRead(local_in, &localptr);
603: /*
604: Get starting and ending locally owned rows of the matrix
605: */
606: MatGetOwnershipRange(B, &mstarts, &mends);
607: mstart = mstarts;
608: mend = mends;
610: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
611: Compute entries for the locally owned part of the Jacobian.
612: - Currently, all PETSc parallel matrix formats are partitioned by
613: contiguous chunks of rows across the processors.
614: - Each processor needs to insert only elements that it owns
615: locally (but any non-local elements will be sent to the
616: appropriate processor during matrix assembly).
617: - Here, we set all entries for a particular row at once.
618: - We can set matrix entries using either
619: MatSetValuesLocal() or MatSetValues().
620: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
622: /*
623: Set matrix rows corresponding to boundary data
624: */
625: if (mstart == 0) {
626: v[0] = 0.0;
627: MatSetValues(B, 1, &mstart, 1, &mstart, v, INSERT_VALUES);
628: mstart++;
629: }
630: if (mend == appctx->m) {
631: mend--;
632: v[0] = 0.0;
633: MatSetValues(B, 1, &mend, 1, &mend, v, INSERT_VALUES);
634: }
636: /*
637: Set matrix rows corresponding to interior data. We construct the
638: matrix one row at a time.
639: */
640: sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
641: for (i = mstart; i < mend; i++) {
642: idx[0] = i - 1;
643: idx[1] = i;
644: idx[2] = i + 1;
645: is = i - mstart + 1;
646: v[0] = sc * localptr[is];
647: v[1] = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
648: v[2] = sc * localptr[is];
649: MatSetValues(B, 1, &i, 3, idx, v, INSERT_VALUES);
650: }
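  /*
     These three entries per row are the derivatives of F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i):
         dF_i/du_{i-1} = sc*u_i,
         dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i),
         dF_i/du_{i+1} = sc*u_i,
     where is = i - mstart + 1 locates global node i in the ghosted local array localptr.
  */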
652: /*
653: Restore vector
654: */
655: VecRestoreArrayRead(local_in, &localptr);
657: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
658: Complete the matrix assembly process and set some options
659: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
660: /*
661: Assemble matrix, using the 2-step process:
662: MatAssemblyBegin(), MatAssemblyEnd()
663: Computations can be done while messages are in transition
664: by placing code between these two statements.
665: */
666: MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY);
667: MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY);
669: /*
670: Set an option to indicate that we will never add a new nonzero location
671: to the matrix. If we do, it will generate an error.
672: */
673: MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE);
675: return 0;
676: }
678: /*TEST
680: test:
681: args: -snes_type vinewtonrsls -ts_type glee -mymonitor -ts_max_steps 10 -nox
682: requires: !single
684: TEST*/
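/*
   Example usage (a sketch, assuming a standard PETSc installation with PETSC_DIR and
   PETSC_ARCH set and the usual tutorial makefile):

       make ex21
       mpiexec -n 2 ./ex21 -M 60 -ul 0 -uh 3 -snes_type vinewtonrsls -mymonitor -nox
*/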