Actual source code: ex50.c
static char help[] = "Solves the one-dimensional Burgers' equation and compares with the exact solution\n\n";
/*
   Not yet tested in parallel
*/
/* ------------------------------------------------------------------------

   This program solves the one-dimensional Burgers' equation
       u_t = mu*u_xx - u u_x,
   on the domain 0 <= x <= L (L = 4 by default), with periodic boundary conditions.

   The operators are discretized with the spectral element method.

   See the paper "PDE-constrained optimization with spectral elements using PETSc and TAO"
   by Oana Marin, Emil Constantinescu, and Barry Smith for details on the exact solution
   used.

   See also src/tao/unconstrained/tutorials/burgers_spectral.c

  ------------------------------------------------------------------------- */
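/*
   The exact (manufactured) solution used for the initial condition and for error
   monitoring is implemented in TrueSolution() below:

       u(x,t) = 2 mu pi sin(pi x) exp(-mu pi^2 t) / (2 + cos(pi x) exp(-mu pi^2 t))

   See the paper referenced above for its derivation.
*/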
#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

typedef struct {
  PetscInt   n;       /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;
typedef struct {
  PetscInt  N;               /* grid points per element */
  PetscInt  E;               /* number of elements */
  PetscReal tol_L2, tol_max; /* error norms */
  PetscInt  steps;           /* number of time steps */
  PetscReal Tend;            /* end time */
  PetscReal mu;              /* viscosity */
  PetscReal L;               /* total length of domain */
  PetscReal Le;              /* length of a single element */
  PetscReal Tadj;
} PetscParam;
typedef struct {
  Vec grid;     /* total grid */
  Vec curr_sol; /* current solution */
} PetscData;
typedef struct {
  Vec      grid;      /* total grid */
  Vec      mass;      /* mass matrix for total integration */
  Mat      stiff;     /* stiffness matrix */
  Mat      keptstiff; /* unmodified copy of the stiffness matrix */
  Mat      grad;      /* gradient (advection) matrix */
  PetscGLL gll;
} PetscSEMOperators;
typedef struct {
  DM                da; /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;
  PetscReal         initial_dt;
} AppCtx;
/*
   User-defined routines
*/
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS, PetscReal, Vec, Mat, Mat, void *);
extern PetscErrorCode TrueSolution(TS, PetscReal, Vec, AppCtx *);
extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
int main(int argc, char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal    x, *wrk_ptr1, *wrk_ptr2;
  MatNullSpace nsp;
  PetscMPIInt  size;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc, &argv, (char *)0, help);
  /* initialize parameters */
  appctx.param.N     = 10;   /* number of GLL nodes per element */
  appctx.param.E     = 10;   /* number of elements */
  appctx.param.L     = 4.0;  /* length of the domain */
  appctx.param.mu    = 0.01; /* diffusion coefficient */
  appctx.initial_dt  = 5e-3;
  appctx.param.steps = PETSC_MAX_INT;
  appctx.param.Tend  = 4;
  PetscOptionsGetInt(NULL, NULL, "-N", &appctx.param.N, NULL);
  PetscOptionsGetInt(NULL, NULL, "-E", &appctx.param.E, NULL);
  PetscOptionsGetReal(NULL, NULL, "-Tend", &appctx.param.Tend, NULL);
  PetscOptionsGetReal(NULL, NULL, "-mu", &appctx.param.mu, NULL);
  appctx.param.Le = appctx.param.L / appctx.param.E;

  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create GLL data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscMalloc2(appctx.param.N, &appctx.SEMop.gll.nodes, appctx.param.N, &appctx.SEMop.gll.weights);
  PetscDTGaussLobattoLegendreQuadrature(appctx.param.N, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights);
  appctx.SEMop.gll.n = appctx.param.N;
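  /*
     Each element contributes N-1 unique grid points: element endpoints are shared with
     the neighboring element, and with periodic boundary conditions the last point of the
     final element coincides with the first point of the first element.
  */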
  lenglob = appctx.param.E * (appctx.param.N - 1);
  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern. There are E*(N-1)
     total grid values spread as equally as possible among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_PERIODIC, lenglob, 1, 1, NULL, &appctx.da);
  DMSetFromOptions(appctx.da);
  DMSetUp(appctx.da);
  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution. Then duplicate these for remaining vectors that
     have the same types.
  */

  DMCreateGlobalVector(appctx.da, &appctx.dat.curr_sol);
  VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.grid);
  VecDuplicate(appctx.dat.curr_sol, &appctx.SEMop.mass);

  DMDAGetCorners(appctx.da, &xs, NULL, NULL, &xm, NULL, NULL);
  DMDAVecGetArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1);
  DMDAVecGetArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2);
  /* Compute the grid coordinates and mass over the locally owned part of the grid */

  /* convert the node offset/count returned by DMDAGetCorners() into an element offset/count */
  xs = xs / (appctx.param.N - 1);
  xm = xm / (appctx.param.N - 1);

  /*
     Build total grid and mass over entire mesh (multi-elemental)
  */
  for (i = xs; i < xs + xm; i++) {
    for (j = 0; j < appctx.param.N - 1; j++) {
      x             = (appctx.param.Le / 2.0) * (appctx.SEMop.gll.nodes[j] + 1.0) + appctx.param.Le * i;
      ind           = i * (appctx.param.N - 1) + j;
      wrk_ptr1[ind] = x;
      wrk_ptr2[ind] = .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
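      /*
         The first node of an element coincides with the last node of the previous element
         (wrapping around periodically), so it also receives that element's endpoint weight;
         the GLL weights are symmetric, so weights[0] equals the endpoint weight weights[N-1].
      */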
      if (j == 0) wrk_ptr2[ind] += .5 * appctx.param.Le * appctx.SEMop.gll.weights[j];
    }
  }
  DMDAVecRestoreArray(appctx.da, appctx.SEMop.grid, &wrk_ptr1);
  DMDAVecRestoreArray(appctx.da, appctx.SEMop.mass, &wrk_ptr2);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE);
  DMCreateMatrix(appctx.da, &appctx.SEMop.stiff);
  DMCreateMatrix(appctx.da, &appctx.SEMop.grad);
  /*
     Assemble the constant spectral element operators: the (viscous) Laplacian stiffness
     matrix and the gradient (advection) matrix. They do not depend on time or on the
     solution, so they are built once here; note that the TS argument is not used by
     these routines.
  */
  RHSMatrixLaplaciangllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.stiff, appctx.SEMop.stiff, &appctx);
  RHSMatrixAdvectiongllDM(appctx.ts, 0.0, appctx.dat.curr_sol, appctx.SEMop.grad, appctx.SEMop.grad, &appctx);
  /*
     Keep an unmodified copy of the stiffness matrix; appctx.SEMop.stiff is handed to
     TSSetRHSJacobian() below and is overwritten each time the Jacobian is evaluated,
     while keptstiff is used to apply the diffusion term in RHSFunction().
  */
  MatDuplicate(appctx.SEMop.stiff, MAT_COPY_VALUES, &appctx.SEMop.keptstiff);
  /* attach the null space to the matrix, this probably is not needed but does no harm */
  MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp);
  MatSetNullSpace(appctx.SEMop.stiff, nsp);
  MatSetNullSpace(appctx.SEMop.keptstiff, nsp);
  MatNullSpaceTest(nsp, appctx.SEMop.stiff, NULL);
  MatNullSpaceDestroy(&nsp);
  /* attach the null space to the matrix, this probably is not needed but does no harm */
  MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, NULL, &nsp);
  MatSetNullSpace(appctx.SEMop.grad, nsp);
  MatNullSpaceTest(nsp, appctx.SEMop.grad, NULL);
  MatNullSpaceDestroy(&nsp);
  /* Create the TS solver that integrates the ODE in time; set its options */
  TSCreate(PETSC_COMM_WORLD, &appctx.ts);
  TSSetProblemType(appctx.ts, TS_NONLINEAR);
  TSSetType(appctx.ts, TSRK);
  TSSetDM(appctx.ts, appctx.da);
  TSSetTime(appctx.ts, 0.0);
  TSSetTimeStep(appctx.ts, appctx.initial_dt);
  TSSetMaxSteps(appctx.ts, appctx.param.steps);
  TSSetMaxTime(appctx.ts, appctx.param.Tend);
  TSSetExactFinalTime(appctx.ts, TS_EXACTFINALTIME_MATCHSTEP);
  TSSetTolerances(appctx.ts, 1e-7, NULL, 1e-7, NULL);
  TSSetSaveTrajectory(appctx.ts);
  TSSetFromOptions(appctx.ts);
  TSSetRHSFunction(appctx.ts, NULL, RHSFunction, &appctx);
  TSSetRHSJacobian(appctx.ts, appctx.SEMop.stiff, appctx.SEMop.stiff, RHSJacobian, &appctx);
  /* Set initial conditions for the problem */
  TrueSolution(appctx.ts, 0, appctx.dat.curr_sol, &appctx);
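  /*
     Provide the exact solution to TS so that it can compute and report the error in the
     numerical solution, e.g. with the -ts_monitor_error option used in the tests below.
  */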
  TSSetSolutionFunction(appctx.ts, (PetscErrorCode(*)(TS, PetscReal, Vec, void *))TrueSolution, &appctx);
  TSSetTime(appctx.ts, 0.0);
  TSSetStepNumber(appctx.ts, 0);
  TSSolve(appctx.ts, appctx.dat.curr_sol);

  MatDestroy(&appctx.SEMop.stiff);
  MatDestroy(&appctx.SEMop.keptstiff);
  MatDestroy(&appctx.SEMop.grad);
  VecDestroy(&appctx.SEMop.grid);
  VecDestroy(&appctx.SEMop.mass);
  VecDestroy(&appctx.dat.curr_sol);
  PetscFree2(appctx.SEMop.gll.nodes, appctx.SEMop.gll.weights);
  DMDestroy(&appctx.da);
  TSDestroy(&appctx.ts);
  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/*
   TrueSolution() computes the exact solution of the PDE at time t

   Input Parameters:
   ts - the TS context (unused)
   t - time at which to evaluate the solution
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with the exact solution at time t (global)
*/
PetscErrorCode TrueSolution(TS ts, PetscReal t, Vec u, AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt           i, xs, xn;

  DMDAVecGetArray(appctx->da, u, &s);
  DMDAVecGetArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg);
  DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL);
  for (i = xs; i < xs + xn; i++) {
    s[i] = 2.0 * appctx->param.mu * PETSC_PI * PetscSinScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t) / (2.0 + PetscCosScalar(PETSC_PI * xg[i]) * PetscExpReal(-appctx->param.mu * PETSC_PI * PETSC_PI * t));
  }
  DMDAVecRestoreArray(appctx->da, u, &s);
  DMDAVecRestoreArrayRead(appctx->da, appctx->SEMop.grid, (void *)&xg);
  return 0;
}
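/*
   RHSFunction - Evaluates the right-hand side of the ODE system,

       f(u) = K u - u .* (G u),

   where K is the (inverse-mass-scaled, viscosity-scaled) Laplacian stored in
   appctx->SEMop.keptstiff and G is the gradient operator appctx->SEMop.grad.
*/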
PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec globalin, Vec globalout, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;

  MatMult(appctx->SEMop.grad, globalin, globalout); /* grad u */
  VecPointwiseMult(globalout, globalin, globalout); /* u grad u */
  VecScale(globalout, -1.0);
  MatMultAdd(appctx->SEMop.keptstiff, globalin, globalout, globalout);
  return 0;
}
/*
   K is the discretization of the Laplacian
   G is the discretization of the gradient

   Computes the Jacobian of K u - diag(u) G u, which is given by
       K - diag(u) G - diag(G u)
*/
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec globalin, Mat A, Mat B, void *ctx)
{
  AppCtx *appctx = (AppCtx *)ctx;
  Vec     Gglobalin;

  /* A = diag(u) G */
  MatCopy(appctx->SEMop.grad, A, SAME_NONZERO_PATTERN);
  MatDiagonalScale(A, globalin, NULL);

  /* A = A + diag(Gu) */
  VecDuplicate(globalin, &Gglobalin);
  MatMult(appctx->SEMop.grad, globalin, Gglobalin);
  MatDiagonalSet(A, Gglobalin, ADD_VALUES);
  VecDestroy(&Gglobalin);

  /* A = K - A */
  MatScale(A, -1.0);
  MatAXPY(A, 1.0, appctx->SEMop.keptstiff, SAME_NONZERO_PATTERN);
  return 0;
}
/* --------------------------------------------------------------------- */

#include "petscblaslapack.h"
/*
   Matrix-free application of the 1D Laplacian and gradient operators for GLL spectral elements
*/
PetscErrorCode MatMult_Laplacian(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp, vv;
  PetscInt           i, j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One  = 1, n;
  PetscScalar        _DOne = 1;

  MatShellGetContext(A, &appctx);
  DMGetLocalVector(appctx->da, &xlocal);
  DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal);
  DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal);
  DMGetLocalVector(appctx->da, &ylocal);
  VecSet(ylocal, 0.0);
  PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp);
  for (i = 0; i < appctx->param.N; i++) {
    vv = -appctx->param.mu * 2.0 / appctx->param.Le;
    for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
  }
  DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl);
  DMDAVecGetArray(appctx->da, ylocal, &yl);
  DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL);
  PetscBLASIntCast(appctx->param.N, &n);
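  /*
     Apply the dense N x N element Laplacian to each locally owned element with a BLAS
     matrix-vector product; consecutive elements overlap in one node and accumulate into
     the same local entries (beta = 1 in gemv), and the ADD_VALUES local-to-global scatter
     below sums the contributions that live on different processes.
  */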
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl);
  DMDAVecRestoreArray(appctx->da, ylocal, &yl);
  PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp);
  VecSet(y, 0.0);
  DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y);
  DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y);
  DMRestoreLocalVector(appctx->da, &xlocal);
  DMRestoreLocalVector(appctx->da, &ylocal);
  VecPointwiseDivide(y, y, appctx->SEMop.mass);
  return 0;
}
PetscErrorCode MatMult_Advection(Mat A, Vec x, Vec y)
{
  AppCtx            *appctx;
  PetscReal        **temp;
  PetscInt           j, xs, xn;
  Vec                xlocal, ylocal;
  const PetscScalar *xl;
  PetscScalar       *yl;
  PetscBLASInt       _One  = 1, n;
  PetscScalar        _DOne = 1;

  MatShellGetContext(A, &appctx);
  DMGetLocalVector(appctx->da, &xlocal);
  DMGlobalToLocalBegin(appctx->da, x, INSERT_VALUES, xlocal);
  DMGlobalToLocalEnd(appctx->da, x, INSERT_VALUES, xlocal);
  DMGetLocalVector(appctx->da, &ylocal);
  VecSet(ylocal, 0.0);
  PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp);
  DMDAVecGetArrayRead(appctx->da, xlocal, (void *)&xl);
  DMDAVecGetArray(appctx->da, ylocal, &yl);
  DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL);
  PetscBLASIntCast(appctx->param.N, &n);
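  /* element-by-element application of the dense advection matrix, as in MatMult_Laplacian() above */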
  for (j = xs; j < xs + xn; j += appctx->param.N - 1) PetscCallBLAS("BLASgemv", BLASgemv_("N", &n, &n, &_DOne, &temp[0][0], &n, &xl[j], &_One, &_DOne, &yl[j], &_One));
  DMDAVecRestoreArrayRead(appctx->da, xlocal, (void *)&xl);
  DMDAVecRestoreArray(appctx->da, ylocal, &yl);
  PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp);
  VecSet(y, 0.0);
  DMLocalToGlobalBegin(appctx->da, ylocal, ADD_VALUES, y);
  DMLocalToGlobalEnd(appctx->da, ylocal, ADD_VALUES, y);
  DMRestoreLocalVector(appctx->da, &xlocal);
  DMRestoreLocalVector(appctx->da, &ylocal);
  VecPointwiseDivide(y, y, appctx->SEMop.mass);
  VecScale(y, -1.0);
  return 0;
}
/*
   RHSMatrixLaplaciangllDM - User-provided routine to compute the right-hand-side
   matrix for the Laplacian operator

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  PetscReal   vv;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    i, xs, xn, l, j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL);

  if (!flg) {
    /*
       Creates the element stiffness matrix for the given gll
    */
    PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp);
    /* workaround for clang analyzer warning: Division by zero */
    PetscCheck(appctx->param.N > 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Spectral element order should be > 1");
    /* scale by the size of the element */
    for (i = 0; i < appctx->param.N; i++) {
      vv = -appctx->param.mu * 2.0 / appctx->param.Le;
      for (j = 0; j < appctx->param.N; j++) temp[i][j] = temp[i][j] * vv;
    }
    MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
    DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL);

    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscMalloc1(appctx->param.N, &rowsDM);
    /*
       loop over local elements
    */
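    /*
       Note: the row/column indices below are in the DMDA *local* numbering used by
       MatSetValuesLocal(); with stencil width 1 (and periodic boundaries) local index 0
       is the ghost node to the left of the first locally owned node, hence the offset of 1.
    */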
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES);
    }
    PetscFree(rowsDM);
    MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
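    /*
       Scale the assembled operator by the inverse of the diagonal (lumped) mass matrix,
       i.e. A <- M^{-1} A, by temporarily inverting the stored mass vector.
    */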
    VecReciprocal(appctx->SEMop.mass);
    MatDiagonalScale(A, appctx->SEMop.mass, NULL);
    VecReciprocal(appctx->SEMop.mass);

    PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp);
  } else {
    MatSetType(A, MATSHELL);
    MatSetUp(A);
    MatShellSetContext(A, appctx);
    MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Laplacian);
  }
  return 0;
}
/*
   RHSMatrixAdvectiongllDM - User-provided routine to compute the right-hand-side
   matrix for the advection (gradient) operator.

   Input Parameters:
   ts - the TS context
   t - current time (ignored)
   X - current solution (ignored)
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   A - Jacobian matrix
   BB - optionally different preconditioning matrix
*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts, PetscReal t, Vec X, Mat A, Mat BB, void *ctx)
{
  PetscReal **temp;
  AppCtx     *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscInt    xs, xn, l, j;
  PetscInt   *rowsDM;
  PetscBool   flg = PETSC_FALSE;

  PetscOptionsGetBool(NULL, NULL, "-gll_mf", &flg, NULL);

  if (!flg) {
    /*
       Creates the advection matrix for the given gll
    */
    PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp);
    MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE);
    DMDAGetCorners(appctx->da, &xs, NULL, NULL, &xn, NULL, NULL);
    xs = xs / (appctx->param.N - 1);
    xn = xn / (appctx->param.N - 1);

    PetscMalloc1(appctx->param.N, &rowsDM);
    for (j = xs; j < xs + xn; j++) {
      for (l = 0; l < appctx->param.N; l++) rowsDM[l] = 1 + (j - xs) * (appctx->param.N - 1) + l;
      MatSetValuesLocal(A, appctx->param.N, rowsDM, appctx->param.N, rowsDM, &temp[0][0], ADD_VALUES);
    }
    PetscFree(rowsDM);
    MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
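    /* as above: scale by the inverse of the diagonal (lumped) mass matrix, A <- M^{-1} A */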
    VecReciprocal(appctx->SEMop.mass);
    MatDiagonalScale(A, appctx->SEMop.mass, NULL);
    VecReciprocal(appctx->SEMop.mass);

    PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n, appctx->SEMop.gll.nodes, appctx->SEMop.gll.weights, &temp);
  } else {
    MatSetType(A, MATSHELL);
    MatSetUp(A);
    MatShellSetContext(A, appctx);
    MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MatMult_Advection);
  }
  return 0;
}
/*TEST

    build:
      requires: !complex

    test:
      suffix: 1
      requires: !single

    test:
      suffix: 2
      nsize: 5
      requires: !single

    test:
      suffix: 3
      requires: !single
      args: -ts_view -ts_type beuler -gll_mf -pc_type none -ts_max_steps 5 -ts_monitor_error

    test:
      suffix: 4
      requires: !single
      args: -ts_view -ts_type beuler -pc_type none -ts_max_steps 5 -ts_monitor_error

TEST*/