Actual source code: ex2.c


static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\n";

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.
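
    A quick check that this is indeed the exact solution: differentiating
    u(t,x) = (1 + x*x)*(1 + t) gives u_t = 1 + x*x and u_xx = 2*(1 + t), so

              u * u_xx      (1 + x*x)*(1 + t) * 2*(1 + t)
              ---------  =  -----------------------------  =  1 + x*x  =  u_t.
              2*(t+1)^2               2*(t+1)^2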

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;      /* communicator */
  DM        da;        /* distributed array data structure */
  Vec       localwork; /* local ghosted work vector */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector */
  PetscInt  m;         /* total number of grid points */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* flag (PETSC_TRUE activates debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);

int main(int argc, char **argv)
{
  AppCtx    appctx;               /* user-defined application context */
  TS        ts;                   /* timestepping context */
  Mat       A;                    /* Jacobian matrix data structure */
  Vec       u;                    /* approximate solution vector */
  PetscInt  time_steps_max = 100; /* default max timesteps */
  PetscReal dt;
  PetscReal time_total_max = 100.0; /* default max total time */
  PetscBool mymonitor      = PETSC_FALSE;
  PetscReal bounds[]       = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc, &argv, (char *)0, help);
  PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds);

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;

  PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL);
  PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug);
  PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor);

  appctx.h = 1.0 / (appctx.m - 1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);
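
  /*
     In the DMDACreate1d() call above, the fourth argument (1) requests one
     degree of freedom per grid point, and the fifth argument (1) requests a
     stencil width of one, i.e. a single ghost point on each side of the
     local portion, which is exactly what the three-point stencils used in
     RHSFunction() and RHSJacobian() below require.
  */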

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da, &u);
  DMCreateLocalVector(appctx.da, &appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local, &appctx.localwork);
  VecDuplicate(u, &appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD, &ts);
  TSSetProblemType(ts, TS_NONLINEAR);
  TSSetRHSFunction(ts, NULL, RHSFunction, &appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) TSMonitorSet(ts, Monitor, &appctx, NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h / 2.0;
  TSSetTimeStep(ts, dt);
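
  /*
     Backward Euler (selected below) is an implicit, A-stable method, so
     this timestep is not forced on us by a stability restriction; dt = h/2
     is presumably just a reasonable accuracy default, and it can be
     overridden at runtime with -ts_dt.
  */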

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts, TSBEULER);
  TSSetMaxSteps(ts, time_steps_max);
  TSSetMaxTime(ts, time_total_max);
  TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);
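
  /*
     For example, assuming the executable is built as "ex2", a run that
     overrides the defaults set above might look like

         mpiexec -n 2 ./ex2 -ts_type beuler -ts_max_steps 10 -ts_monitor

     since TSSetFromOptions() parses all of these options.
  */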

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u, &appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts, u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
{
  PetscScalar *u_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u, &mybase, &myend);

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  VecGetArray(u, &u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i; /* current location in global grid */
    u_localptr[i - mybase] = 1.0 + x * x;
  }

  /*
     Restore vector
  */
  VecRestoreArray(u, &u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm, "initial guess vector\n");
    VecView(u, PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
{
  PetscScalar *s_localptr, h = appctx->h, x;
  PetscInt     i, mybase, myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution, &mybase, &myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution, &s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i = mybase; i < myend; i++) {
    x                      = h * (PetscReal)i;
    s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution, &s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal en2, en2s, enmax;
  PetscDraw draw;

  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw);
  PetscDrawSetDoubleBuffer(draw);
  VecView(u, PETSC_VIEWER_DRAW_(appctx->comm));

  /*
     Compute the exact solution at this timestep
  */
  ExactSolution(time, appctx->solution, appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm, "Computed solution vector\n");
    VecView(u, PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm, "Exact solution vector\n");
    VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution, -1.0, u);
  VecNorm(appctx->solution, NORM_2, &en2);
  en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the square root of the grid spacing */
  VecNorm(appctx->solution, NORM_MAX, &enmax);
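
  /*
     The PetscSqrtReal(appctx->h) factor makes en2s approximate the
     continuous L2 norm of the error:
         || e ||_L2 = sqrt( integral e(x)^2 dx )
                    ~ sqrt( h * sum_i e_i^2 )  =  sqrt(h) * || e ||_2.
  */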

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g  max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm, "Error vector\n");
    VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD);
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
{
  AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
  DM                 da        = appctx->da;        /* distributed array */
  Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec                localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt           i, localsize;
  PetscMPIInt        rank, size;
  PetscScalar       *copyptr, sc;
  const PetscScalar *localptr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in);
  DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in);

  /*
      Access directly the values in our local INPUT work array
  */
  VecGetArrayRead(local_in, &localptr);

  /*
      Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork, &copyptr);

  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in, &localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm, &rank);
  MPI_Comm_size(appctx->comm, &size);
  if (rank == 0) copyptr[0] = 1.0;
  if (rank == size - 1) copyptr[localsize - 1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
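
  /*
     This is the standard second-order centered difference for u_xx:
         u_xx ~ (u_{i+1} - 2*u_i + u_{i-1}) / h^2,
     so, with sc = 1/(2*h^2*(1+t)^2) as computed above, the right-hand
     side u*u_xx/(2*(t+1)^2) at node i becomes
         u_i * sc * (u_{i+1} + u_{i-1} - 2*u_i).
  */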

  /*
     Restore vectors
  */
  VecRestoreArrayRead(local_in, &localptr);
  VecRestoreArray(localwork, &copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out);
  DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out);

  /* Print debugging information if desired */
  if (appctx->debug) {
    PetscPrintf(appctx->comm, "RHS function vector\n");
    VecView(global_out, PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix used to construct the preconditioner

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;
  const PetscScalar *localptr;
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in);
  DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in);

  /*
     Get pointer to vector data
  */
  VecGetArrayRead(local_in, &localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(BB, &mstarts, &mends);
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES);
  }
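
  /*
     At the boundary nodes the right-hand side is constant in u
     (RHSFunction() sets u_t = 1 and u_t = 2 there), so the corresponding
     Jacobian rows are identically zero.  An explicit 0.0 is still
     inserted, presumably so that the diagonal entries are present in the
     nonzero pattern of the assembled matrix.
  */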

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES);
  }
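
  /*
     These stencil values follow from differentiating
         F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i):
         dF_i/du_{i-1} = sc * u_i                            (v[0])
         dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 4*u_i)    (v[1])
         dF_i/du_{i+1} = sc * u_i                            (v[2])
     The shift is = i - mstart + 1 converts the global row index i into an
     index into the ghosted local array localptr (the +1 accounts for the
     ghost point at the start of the local array or, on process 0, for the
     skipped boundary row).
  */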

  /*
     Restore vector
  */
  VecRestoreArrayRead(local_in, &localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY);
  if (BB != AA) {
    MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY);
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE);

  return 0;
}

/*TEST

    test:
      args: -nox -ts_dt 10 -mymonitor
      nsize: 2
      requires: !single

    test:
      suffix: tut_1
      nsize: 1
      args: -ts_max_steps 10 -ts_monitor

    test:
      suffix: tut_2
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor

    test:
      suffix: tut_3
      nsize: 4
      args: -ts_max_steps 10 -ts_monitor -M 128

TEST*/