Actual source code: ex69.c
static char help[] = "Tests recovery from domain errors in MatMult() and PCApply()\n\n";

/*
     See src/snes/tutorials/ex19.c from which this was copied
*/

#include <petscsnes.h>
#include <petscdm.h>
#include <petscdmda.h>

/*
   User-defined routines and data structures
*/
typedef struct {
  PetscScalar u, v, omega, temp;
} Field;

PetscErrorCode FormFunctionLocal(DMDALocalInfo *, Field **, Field **, void *);

typedef struct {
  PetscReal lidvelocity, prandtl, grashof; /* physical parameters */
  PetscBool draw_contours;                 /* flag - 1 indicates drawing contours */
  PetscBool errorindomain;
  PetscBool errorindomainmf;
  SNES      snes;
} AppCtx;

typedef struct {
  Mat Jmf;
} MatShellCtx;

extern PetscErrorCode FormInitialGuess(AppCtx *, DM, Vec);
extern PetscErrorCode MatMult_MyShell(Mat, Vec, Vec);
extern PetscErrorCode MatAssemblyEnd_MyShell(Mat, MatAssemblyType);
extern PetscErrorCode PCApply_MyShell(PC, Vec, Vec);
extern PetscErrorCode SNESComputeJacobian_MyShell(SNES, Vec, Mat, Mat, void *);

int main(int argc, char **argv)
{
  AppCtx      user; /* user-defined work context */
  PetscInt    mx, my;
  MPI_Comm    comm;
  DM          da;
  Vec         x;
  Mat         J = NULL, Jmf = NULL;
  MatShellCtx matshellctx;
  PetscInt    mlocal, nlocal;
  PC          pc;
  KSP         ksp;
  PetscBool   errorinmatmult = PETSC_FALSE, errorinpcapply = PETSC_FALSE, errorinpcsetup = PETSC_FALSE;

  PetscInitialize(&argc, &argv, (char *)0, help);
  PetscOptionsGetBool(NULL, NULL, "-error_in_matmult", &errorinmatmult, NULL);
  PetscOptionsGetBool(NULL, NULL, "-error_in_pcapply", &errorinpcapply, NULL);
  PetscOptionsGetBool(NULL, NULL, "-error_in_pcsetup", &errorinpcsetup, NULL);
  user.errorindomain = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-error_in_domain", &user.errorindomain, NULL);
  user.errorindomainmf = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-error_in_domainmf", &user.errorindomainmf, NULL);

  comm = PETSC_COMM_WORLD;
  SNESCreate(comm, &user.snes);

  /*
      Create distributed array object to manage parallel grid and vectors
      for principal unknowns (x) and governing residuals (f)
  */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 4, 1, 0, 0, &da);
  DMSetFromOptions(da);
  DMSetUp(da);
  SNESSetDM(user.snes, da);

  DMDAGetInfo(da, 0, &mx, &my, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE, PETSC_IGNORE);
  /*
     Problem parameters (velocity of lid, prandtl, and grashof numbers)
  */
  user.lidvelocity = 1.0 / (mx * my);
  user.prandtl     = 1.0;
  user.grashof     = 1.0;

  PetscOptionsGetReal(NULL, NULL, "-lidvelocity", &user.lidvelocity, NULL);
  PetscOptionsGetReal(NULL, NULL, "-prandtl", &user.prandtl, NULL);
  PetscOptionsGetReal(NULL, NULL, "-grashof", &user.grashof, NULL);
  PetscOptionsHasName(NULL, NULL, "-contours", &user.draw_contours);

  DMDASetFieldName(da, 0, "x_velocity");
  DMDASetFieldName(da, 1, "y_velocity");
  DMDASetFieldName(da, 2, "Omega");
  DMDASetFieldName(da, 3, "temperature");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create user context, set problem data, create vector data structures.
     Also, compute the initial guess.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create nonlinear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMSetApplicationContext(da, &user);
  DMDASNESSetFunctionLocal(da, INSERT_VALUES, (PetscErrorCode(*)(DMDALocalInfo *, void *, void *, void *))FormFunctionLocal, &user);
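
  /*
     With -error_in_matmult the matrix-free Jacobian Jmf is wrapped in a MATSHELL whose
     multiply routine (MatMult_MyShell below) eventually injects an infinity into the
     result, so the solver's recovery from a failing MatMult() can be exercised.
  */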
  if (errorinmatmult) {
    MatCreateSNESMF(user.snes, &Jmf);
    MatSetFromOptions(Jmf);
    MatGetLocalSize(Jmf, &mlocal, &nlocal);
    matshellctx.Jmf = Jmf;
    MatCreateShell(PetscObjectComm((PetscObject)Jmf), mlocal, nlocal, PETSC_DECIDE, PETSC_DECIDE, &matshellctx, &J);
    MatShellSetOperation(J, MATOP_MULT, (void (*)(void))MatMult_MyShell);
    MatShellSetOperation(J, MATOP_ASSEMBLY_END, (void (*)(void))MatAssemblyEnd_MyShell);
    SNESSetJacobian(user.snes, J, J, MatMFFDComputeJacobian, NULL);
  }

  SNESSetFromOptions(user.snes);
  PetscPrintf(comm, "lid velocity = %g, prandtl # = %g, grashof # = %g\n", (double)user.lidvelocity, (double)user.prandtl, (double)user.grashof);
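
  /*
     With -error_in_pcapply the preconditioner is replaced by a PCSHELL whose apply routine
     (PCApply_MyShell below) copies the input vector and, after a few applications, sets the
     result to infinity to simulate a failing PCApply().
  */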
  if (errorinpcapply) {
    SNESGetKSP(user.snes, &ksp);
    KSPGetPC(ksp, &pc);
    PCSetType(pc, PCSHELL);
    PCShellSetApply(pc, PCApply_MyShell);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da, &x);
  FormInitialGuess(&user, da, x);
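
  /*
     With -error_in_pcsetup the Jacobian callback is replaced (after SNESSetUp()) by
     SNESComputeJacobian_MyShell below, which zeros the assembled matrix after its first
     call so that the subsequent preconditioner setup fails.
  */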
  if (errorinpcsetup) {
    SNESSetUp(user.snes);
    SNESSetJacobian(user.snes, NULL, NULL, SNESComputeJacobian_MyShell, NULL);
  }
  SNESSolve(user.snes, NULL, x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  MatDestroy(&J);
  MatDestroy(&Jmf);
  VecDestroy(&x);
  DMDestroy(&da);
  SNESDestroy(&user.snes);
  PetscFinalize();
  return 0;
}

/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
   user - user-defined application context
   da - distributed array (DMDA) defining the grid

   Output Parameter:
   X - vector holding the initial guess
*/
PetscErrorCode FormInitialGuess(AppCtx *user, DM da, Vec X)
{
  PetscInt  i, j, mx, xs, ys, xm, ym;
  PetscReal grashof, dx;
  Field   **x;

  grashof = user->grashof;

  DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
  dx = 1.0 / (mx - 1);

  /*
     Get local grid boundaries (for 2-dimensional DMDA):
       xs, ys - starting grid indices (no ghost points)
       xm, ym - widths of local grid (no ghost points)
  */
  DMDAGetCorners(da, &xs, &ys, NULL, &xm, &ym, NULL);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array. Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  DMDAVecGetArray(da, X, &x);

  /*
     Compute initial guess over the locally owned part of the grid
     Initial condition is motionless fluid and equilibrium temperature
  */
  for (j = ys; j < ys + ym; j++) {
    for (i = xs; i < xs + xm; i++) {
      x[j][i].u     = 0.0;
      x[j][i].v     = 0.0;
      x[j][i].omega = 0.0;
      x[j][i].temp  = (grashof > 0) * i * dx;
    }
  }

  /*
     Restore vector
  */
  DMDAVecRestoreArray(da, X, &x);
  return 0;
}

PetscErrorCode FormFunctionLocal(DMDALocalInfo *info, Field **x, Field **f, void *ptr)
{
  AppCtx         *user = (AppCtx *)ptr;
  PetscInt        xints, xinte, yints, yinte, i, j;
  PetscReal       hx, hy, dhx, dhy, hxdhy, hydhx;
  PetscReal       grashof, prandtl, lid;
  PetscScalar     u, uxx, uyy, vx, vy, avx, avy, vxp, vxm, vyp, vym;
  static PetscInt fail = 0;
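
  /*
     Simulate a function-domain error: after a number of residual evaluations
     (sooner in the matrix-free case), rank 0 calls SNESSetFunctionDomainError()
     to tell SNES that the current iterate has left the domain of the function.
  */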
  if ((fail++ > 7 && user->errorindomainmf) || (fail++ > 36 && user->errorindomain)) {
    PetscMPIInt rank;
    MPI_Comm_rank(PetscObjectComm((PetscObject)user->snes), &rank);
    if (rank == 0) SNESSetFunctionDomainError(user->snes);
  }
  grashof = user->grashof;
  prandtl = user->prandtl;
  lid     = user->lidvelocity;

  /*
     Define mesh intervals ratios for uniform grid.

     Note: FD formulae below are normalized by multiplying through by
     local volume element (i.e. hx*hy) to obtain coefficients O(1) in two dimensions.
  */
  dhx   = (PetscReal)(info->mx - 1);
  dhy   = (PetscReal)(info->my - 1);
  hx    = 1.0 / dhx;
  hy    = 1.0 / dhy;
  hxdhy = hx * dhy;
  hydhx = hy * dhx;

  xints = info->xs;
  xinte = info->xs + info->xm;
  yints = info->ys;
  yinte = info->ys + info->ym;

  /* Test whether we are on the bottom edge of the global array */
  if (yints == 0) {
    j     = 0;
    yints = yints + 1;
    /* bottom edge */
    for (i = info->xs; i < info->xs + info->xm; i++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega + (x[j + 1][i].u - x[j][i].u) * dhy;
      f[j][i].temp  = x[j][i].temp - x[j + 1][i].temp;
    }
  }

  /* Test whether we are on the top edge of the global array */
  if (yinte == info->my) {
    j     = info->my - 1;
    yinte = yinte - 1;
    /* top edge */
    for (i = info->xs; i < info->xs + info->xm; i++) {
      f[j][i].u     = x[j][i].u - lid;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega + (x[j][i].u - x[j - 1][i].u) * dhy;
      f[j][i].temp  = x[j][i].temp - x[j - 1][i].temp;
    }
  }

  /* Test whether we are on the left edge of the global array */
  if (xints == 0) {
    i     = 0;
    xints = xints + 1;
    /* left edge */
    for (j = info->ys; j < info->ys + info->ym; j++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega - (x[j][i + 1].v - x[j][i].v) * dhx;
      f[j][i].temp  = x[j][i].temp;
    }
  }

  /* Test whether we are on the right edge of the global array */
  if (xinte == info->mx) {
    i     = info->mx - 1;
    xinte = xinte - 1;
    /* right edge */
    for (j = info->ys; j < info->ys + info->ym; j++) {
      f[j][i].u     = x[j][i].u;
      f[j][i].v     = x[j][i].v;
      f[j][i].omega = x[j][i].omega - (x[j][i].v - x[j][i - 1].v) * dhx;
      f[j][i].temp  = x[j][i].temp - (PetscReal)(grashof > 0);
    }
  }

  /* Compute over the interior points */
  for (j = yints; j < yinte; j++) {
    for (i = xints; i < xinte; i++) {
      /*
        convective coefficients for upwinding
      */
      vx  = x[j][i].u;
      avx = PetscAbsScalar(vx);
      vxp = .5 * (vx + avx);
      vxm = .5 * (vx - avx);
      vy  = x[j][i].v;
      avy = PetscAbsScalar(vy);
      vyp = .5 * (vy + avy);
      vym = .5 * (vy - avy);

      /* U velocity */
      u         = x[j][i].u;
      uxx       = (2.0 * u - x[j][i - 1].u - x[j][i + 1].u) * hydhx;
      uyy       = (2.0 * u - x[j - 1][i].u - x[j + 1][i].u) * hxdhy;
      f[j][i].u = uxx + uyy - .5 * (x[j + 1][i].omega - x[j - 1][i].omega) * hx;

      /* V velocity */
      u         = x[j][i].v;
      uxx       = (2.0 * u - x[j][i - 1].v - x[j][i + 1].v) * hydhx;
      uyy       = (2.0 * u - x[j - 1][i].v - x[j + 1][i].v) * hxdhy;
      f[j][i].v = uxx + uyy + .5 * (x[j][i + 1].omega - x[j][i - 1].omega) * hy;

      /* Omega */
      u             = x[j][i].omega;
      uxx           = (2.0 * u - x[j][i - 1].omega - x[j][i + 1].omega) * hydhx;
      uyy           = (2.0 * u - x[j - 1][i].omega - x[j + 1][i].omega) * hxdhy;
      f[j][i].omega = uxx + uyy + (vxp * (u - x[j][i - 1].omega) + vxm * (x[j][i + 1].omega - u)) * hy + (vyp * (u - x[j - 1][i].omega) + vym * (x[j + 1][i].omega - u)) * hx - .5 * grashof * (x[j][i + 1].temp - x[j][i - 1].temp) * hy;

      /* Temperature */
      u            = x[j][i].temp;
      uxx          = (2.0 * u - x[j][i - 1].temp - x[j][i + 1].temp) * hydhx;
      uyy          = (2.0 * u - x[j - 1][i].temp - x[j + 1][i].temp) * hxdhy;
      f[j][i].temp = uxx + uyy + prandtl * ((vxp * (u - x[j][i - 1].temp) + vxm * (x[j][i + 1].temp - u)) * hy + (vyp * (u - x[j - 1][i].temp) + vym * (x[j + 1][i].temp - u)) * hx);
    }
  }

  /*
     Flop count (multiply-adds are counted as 2 operations)
  */
  PetscLogFlops(84.0 * info->ym * info->xm);
  return 0;
}
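
/*
   MatMult_MyShell - Applies the wrapped matrix-free Jacobian and, once it has been called
   a few times, overwrites the result with infinity on rank 0 to simulate a MatMult() that
   produces invalid values.
*/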
PetscErrorCode MatMult_MyShell(Mat A, Vec x, Vec y)
{
  MatShellCtx    *matshellctx;
  static PetscInt fail = 0;

  MatShellGetContext(A, &matshellctx);
  MatMult(matshellctx->Jmf, x, y);
  if (fail++ > 5) {
    PetscMPIInt rank;
    MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank);
    if (rank == 0) VecSetInf(y);
  }
  return 0;
}
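
/*
   MatAssemblyEnd_MyShell - Forwards assembly to the inner matrix-free Jacobian so that its
   state stays consistent with the shell matrix wrapping it.
*/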
PetscErrorCode MatAssemblyEnd_MyShell(Mat A, MatAssemblyType tp)
{
  MatShellCtx *matshellctx;

  MatShellGetContext(A, &matshellctx);
  MatAssemblyEnd(matshellctx->Jmf, tp);
  return 0;
}
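
/*
   PCApply_MyShell - An identity preconditioner (copies x into y) that, after a few
   applications, sets the result to infinity on rank 0 to simulate a failing PCApply().
*/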
PetscErrorCode PCApply_MyShell(PC pc, Vec x, Vec y)
{
  static PetscInt fail = 0;

  VecCopy(x, y);
  if (fail++ > 3) {
    PetscMPIInt rank;
    MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);
    if (rank == 0) VecSetInf(y);
  }
  return 0;
}

PETSC_EXTERN PetscErrorCode SNESComputeJacobian_DMDA(SNES, Vec, Mat, Mat, void *);
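
/*
   SNESComputeJacobian_MyShell - Delegates to the standard DMDA Jacobian routine and, after
   its first call, zeros the matrix so that the subsequent preconditioner setup (for example
   a factorization) fails.
*/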
PetscErrorCode SNESComputeJacobian_MyShell(SNES snes, Vec X, Mat A, Mat B, void *ctx)
{
  static PetscInt fail = 0;

  SNESComputeJacobian_DMDA(snes, X, A, B, ctx);
  if (fail++ > 0) MatZeroEntries(A);
  return 0;
}
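
/*
   The test-harness entries below exercise each of the injected failure modes, for example
   (assuming the executable is named ex69):
       ./ex69 -error_in_matmult -snes_converged_reason -ksp_converged_reason
*/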

/*TEST

   test:
      args: -snes_converged_reason -ksp_converged_reason

   test:
      suffix: 2
      args: -snes_converged_reason -ksp_converged_reason -error_in_matmult

   test:
      suffix: 3
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcapply

   test:
      suffix: 4
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup

   test:
      suffix: 5
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -pc_type bjacobi

   test:
      suffix: 5_fieldsplit
      args: -snes_converged_reason -ksp_converged_reason -error_in_pcsetup -pc_type fieldsplit
      output_file: output/ex69_5.out

   test:
      suffix: 6
      args: -snes_converged_reason -ksp_converged_reason -error_in_domainmf -snes_mf -pc_type none

   test:
      suffix: 7
      args: -snes_converged_reason -ksp_converged_reason -error_in_domain

   test:
      suffix: 8
      args: -snes_converged_reason -ksp_converged_reason -error_in_domain -snes_mf -pc_type none

TEST*/