Actual source code: ex35.cxx
/*
Inhomogeneous Laplacian in 2D. Modeled by the partial differential equation

-div \rho grad u = f,  0 < x,y < 1,

Problem 1: (Default)

Use the following exact solution with Dirichlet boundary condition

u = sin(pi*x)*sin(pi*y)

and generate an appropriate forcing function to measure convergence.

Usage:

Measure the convergence rate under uniform refinement using the options: "-problem 1 -error".

mpiexec -n $NP ./ex35 -problem 1 -error -n 16 -levels 5 -pc_type gamg
mpiexec -n $NP ./ex35 -problem 1 -error -n 32 -levels 4 -pc_type gamg
mpiexec -n $NP ./ex35 -problem 1 -error -n 64 -levels 3 -pc_type mg
mpiexec -n $NP ./ex35 -problem 1 -error -n 128 -levels 2 -pc_type mg
mpiexec -n $NP ./ex35 -problem 1 -error -n 256 -levels 1 -mg
mpiexec -n $NP ./ex35 -problem 1 -error -n 512 -levels 0 -mg

Or with an external mesh file representing [0, 1]^2,

mpiexec -n $NP ./ex35 -problem 1 -file ./external_mesh.h5m -levels 2 -error -pc_type mg
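
Since the discretization uses linear (bilinear on quadrangles) elements, the
discrete L_2 error reported by -error should shrink by roughly a factor of
four per uniform refinement, i.e. second-order convergence.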

Problem 2:

Use the forcing function

f = e^{-((x-xr)^2+(y-yr)^2)/\nu}

with Dirichlet boundary conditions

u = f(x,y) for x = 0, x = 1, y = 0, y = 1

or pure Neumann boundary conditions.

Usage:

Run with different values of \rho and \nu to control the diffusion and the spread of the Gaussian source. This uses the internal mesh generator implemented in DMMoab.

mpiexec -n $NP ./ex35 -problem 2 -n 20 -nu 0.02 -rho 0.01
mpiexec -n $NP ./ex35 -problem 2 -n 40 -nu 0.01 -rho 0.005 -io -ksp_monitor
mpiexec -n $NP ./ex35 -problem 2 -n 80 -nu 0.01 -rho 0.005 -io -ksp_monitor -pc_type hypre
mpiexec -n $NP ./ex35 -problem 2 -n 160 -bc neumann -nu 0.005 -rho 0.01 -io
mpiexec -n $NP ./ex35 -problem 2 -n 320 -bc neumann -nu 0.001 -rho 1 -io

Or with an external mesh file representing [0, 1]^2,

mpiexec -n $NP ./ex35 -problem 2 -file ./external_mesh.h5m -levels 1 -pc_type gamg

Problem 3:

Use the forcing function

f = \nu*sin(\pi*x/LX)*sin(\pi*y/LY)

with Dirichlet boundary conditions

u = 0.0, for x = 0, x = 1, y = 0, y = 1 (outer boundary) &&
u = 1.0, for (x-0.5)^2 + (y-0.5)^2 = 0.01

Usage:

Alternatively, load an external MOAB mesh that discretizes the unit square and use it to run the solver.

mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -mg
mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -mg -levels 2 -io -ksp_monitor
mpiexec -n $NP ./ex35 -problem 3 -file input/square_with_hole.h5m -io -ksp_monitor -pc_type hypre
*/
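
/*
  Build note (a sketch, assuming a standard PETSc installation with PETSC_DIR
  and PETSC_ARCH set and MOAB enabled):

    make ex35

  then launch the executable with any of the mpiexec command lines above.
*/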

static char help[] = "\
Solves the 2D inhomogeneous Laplacian equation.\n \
Usage: ./ex35 -problem 1 -error -n 4 -levels 3 -mg\n \
Usage: ./ex35 -problem 2 -n 80 -nu 0.01 -rho 0.005 -io -ksp_monitor -pc_type gamg\n \
Usage: ./ex35 -problem 3 -file input/square_with_hole.h5m -mg\n";

/* PETSc includes */
#include <petscksp.h>
#include <petscdmmoab.h>

#define LOCAL_ASSEMBLY

typedef enum {
  DIRICHLET,
  NEUMANN
} BCType;

typedef struct {
  PetscInt  dim, n, problem, nlevels; /* spatial dimension, elements per direction, problem id, MG levels */
  PetscReal rho;                      /* diffusion coefficient */
  PetscReal bounds[6];                /* domain bounds: [xmin, xmax, ymin, ymax, zmin, zmax] */
  PetscReal xref, yref;               /* center of the Gaussian source (problem 2) */
  PetscReal nu;                       /* width of the Gaussian source */
  PetscInt  VPERE;                    /* vertices per element: 3 (tri) or 4 (quad) */
  BCType    bcType;
  PetscBool use_extfile, io, error, usetri, usemg;
  char      filename[PETSC_MAX_PATH_LEN];
} UserContext;

static PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
static PetscErrorCode ComputeRHS(KSP, Vec, void *);
static PetscErrorCode ComputeDiscreteL2Error(KSP, Vec, UserContext *);
static PetscErrorCode InitializeOptions(UserContext *);

int main(int argc, char **argv)
{
  KSP ksp;
  PC  pc;
  Mat R;
  DM  dm, dmref, *dmhierarchy;

  UserContext user;
  const char *fields[1] = {"T-Variable"};
  PetscInt    k;
  Vec         b, x, errv;

  PetscInitialize(&argc, &argv, (char *)0, help);

  InitializeOptions(&user);

  /* Create the DM object from either a mesh file or from an in-memory structured grid */
  if (user.use_extfile) {
    DMMoabLoadFromFile(PETSC_COMM_WORLD, user.dim, 1, user.filename, "", &dm);
  } else {
    DMMoabCreateBoxMesh(PETSC_COMM_WORLD, user.dim, user.usetri, user.bounds, user.n, 1, &dm);
  }
  DMSetFromOptions(dm);
  DMMoabSetFieldNames(dm, 1, fields);

  /* SetUp the data structures for DMMOAB */
  DMSetUp(dm);

  DMSetApplicationContext(dm, &user);

  KSPCreate(PETSC_COMM_WORLD, &ksp);
  KSPSetComputeRHS(ksp, ComputeRHS, &user);
  KSPSetComputeOperators(ksp, ComputeMatrix, &user);

  if (user.nlevels) {
    KSPGetPC(ksp, &pc);
    PetscMalloc(sizeof(DM) * (user.nlevels + 1), &dmhierarchy);
    for (k = 0; k <= user.nlevels; k++) dmhierarchy[k] = NULL;

    PetscPrintf(PETSC_COMM_WORLD, "Number of mesh hierarchy levels: %" PetscInt_FMT "\n", user.nlevels);
    DMMoabGenerateHierarchy(dm, user.nlevels, NULL);

    /* coarsest grid = 0, finest grid = nlevels */
    dmhierarchy[0] = dm;
    PetscBool usehierarchy = PETSC_FALSE;
    if (usehierarchy) {
      DMRefineHierarchy(dm, user.nlevels, &dmhierarchy[1]);
    } else {
      for (k = 1; k <= user.nlevels; k++) DMRefine(dmhierarchy[k - 1], MPI_COMM_NULL, &dmhierarchy[k]);
    }
    dmref = dmhierarchy[user.nlevels];
    PetscObjectReference((PetscObject)dmref);

    if (user.usemg) {
      PCSetType(pc, PCMG);
      PCMGSetLevels(pc, user.nlevels + 1, NULL);
      PCMGSetType(pc, PC_MG_MULTIPLICATIVE);
      PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH);
      PCMGSetCycleType(pc, PC_MG_CYCLE_V);
      PCMGSetNumberSmooth(pc, 2);

      for (k = 1; k <= user.nlevels; k++) {
        DMCreateInterpolation(dmhierarchy[k - 1], dmhierarchy[k], &R, NULL);
        PCMGSetInterpolation(pc, k, R);
        MatDestroy(&R);
      }
    }
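
    /* Because PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH) is used above, only the
       interpolation operators have to be supplied; PCMG constructs each coarse
       operator algebraically as the triple product R^T A R, so no operator
       callback is needed on the coarse levels. A roughly equivalent setup can
       be requested purely from the command line (a sketch, assuming the
       standard PETSc options database keys):
         -pc_type mg -pc_mg_cycle_type v -pc_mg_galerkin both */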

    for (k = 1; k <= user.nlevels; k++) DMDestroy(&dmhierarchy[k]);
    PetscFree(dmhierarchy);
  } else {
    dmref = dm;
    PetscObjectReference((PetscObject)dm);
  }

  KSPSetDM(ksp, dmref);
  KSPSetFromOptions(ksp);

  /* Perform the actual solve */
  KSPSolve(ksp, NULL, NULL);
  KSPGetSolution(ksp, &x);
  KSPGetRhs(ksp, &b);

  if (user.error) {
    VecDuplicate(b, &errv);
    ComputeDiscreteL2Error(ksp, errv, &user);
    VecDestroy(&errv);
  }

  if (user.io) {
    /* Write out the solution along with the mesh */
    DMMoabSetGlobalFieldVector(dmref, x);
#ifdef MOAB_HAVE_HDF5
    DMMoabOutput(dmref, "ex35.h5m", NULL);
#else
    /* MOAB does not have truly parallel writers for formats other than HDF5,
       so VTK output written in parallel may appear jumbled, depending on the
       order in which the processes write their parts of the mesh and the
       solution tags. */
    DMMoabOutput(dmref, "ex35.vtk", NULL);
#endif
  }

  /* Cleanup objects */
  KSPDestroy(&ksp);
  DMDestroy(&dmref);
  DMDestroy(&dm);
  PetscFinalize();
  return 0;
}
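
/* ComputeDiffusionCoefficient: evaluate the piecewise-constant diffusion
   coefficient \rho at a physical point. For -problem 2 the coefficient is
   \rho inside the middle third of the domain in both x and y, and 1.0
   elsewhere; for the other problems it is the constant \rho everywhere. */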
PetscScalar ComputeDiffusionCoefficient(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 2:
    if ((coords[0] > user->bounds[1] / 3.0) && (coords[0] < 2.0 * user->bounds[1] / 3.0) && (coords[1] > user->bounds[3] / 3.0) && (coords[1] < 2.0 * user->bounds[3] / 3.0)) {
      return user->rho;
    } else {
      return 1.0;
    }
  case 1:
  case 3:
  default:
    return user->rho;
  }
}
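
/* ExactSolution: the manufactured solution u = sin(pi*x/LX)*sin(pi*y/LY) used
   by -problem 1 for convergence studies; problems 2 and 3 have no closed-form
   solution, so requesting one raises an error. */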
PetscScalar ExactSolution(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 1:
    return sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  case 3:
  case 2:
  default:
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Exact solution for -problem = [%" PetscInt_FMT "] is not available.", user->problem);
  }
}
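
/* ComputeForcingFunction: evaluate f at a physical point. For -problem 1 the
   forcing is manufactured from the exact solution: substituting
   u = sin(pi*x/LX)*sin(pi*y/LY) into -div(\rho grad u) = f with constant \rho
   gives f = \rho*pi^2*(1/LX^2 + 1/LY^2)*sin(pi*x/LX)*sin(pi*y/LY), which is
   exactly the expression assembled below. */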
PetscScalar ComputeForcingFunction(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 3:
    return user->nu * sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  case 2:
    return PetscExpScalar(-((coords[0] - user->xref) * (coords[0] - user->xref) + (coords[1] - user->yref) * (coords[1] - user->yref)) / user->nu);
  case 1:
  default:
    return PETSC_PI * PETSC_PI * ComputeDiffusionCoefficient(coords, user) * (1.0 / user->bounds[1] / user->bounds[1] + 1.0 / user->bounds[3] / user->bounds[3]) * sin(PETSC_PI * coords[0] / user->bounds[1]) * sin(PETSC_PI * coords[1] / user->bounds[3]);
  }
}

#define BCHECKEPS 1e-10
#define BCHECK(coordxyz, truetrace) ((coordxyz < truetrace + BCHECKEPS && coordxyz > truetrace - BCHECKEPS))
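
/* EvaluateStrongDirichletCondition: value imposed at a Dirichlet boundary
   vertex. Problem 3 uses 0 on the outer square and 1 on the interior hole;
   problem 2 reuses the forcing function as boundary data; problem 1 uses the
   exact solution. */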
PetscScalar EvaluateStrongDirichletCondition(PetscReal coords[3], UserContext *user)
{
  switch (user->problem) {
  case 3:
    if (BCHECK(coords[0], user->bounds[0]) || BCHECK(coords[0], user->bounds[1]) || BCHECK(coords[1], user->bounds[2]) || BCHECK(coords[1], user->bounds[3])) return 0.0;
    else // ( coords[0]*coords[0] + coords[1]*coords[1] < 0.04 + BCHECKEPS)
      return 1.0;
  case 2:
    return ComputeForcingFunction(coords, user);
  case 1:
  default:
    return ExactSolution(coords, user);
  }
}
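
/* ComputeRHS: assemble the load vector b. For every locally owned element,
   integrate the forcing function against the basis functions with the element
   quadrature rule, override the entries at Dirichlet boundary vertices with
   the strongly imposed boundary values, and, for the pure Neumann case,
   remove the constant null-space component so that the singular system stays
   consistent. */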
PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ptr)
{
  UserContext              *user = (UserContext *)ptr;
  DM                        dm;
  PetscInt                  dof_indices[4];
  PetscBool                 dbdry[4];
  PetscReal                 vpos[4 * 3];
  PetscScalar               ff;
  PetscInt                  i, q, nconn, nc, npoints;
  const moab::EntityHandle *connect;
  const moab::Range        *elocal;
  moab::Interface          *mbImpl;
  PetscScalar               localv[4];
  PetscReal                *phi, *phypts, *jxw;
  PetscBool                 elem_on_boundary;
  PetscQuadrature           quadratureObj;

  KSPGetDM(ksp, &dm);

  /* reset the RHS */
  VecSet(b, 0.0);

  DMMoabFEMCreateQuadratureDefault(2, user->VPERE, &quadratureObj);
  PetscQuadratureGetData(quadratureObj, NULL, &nc, &npoints, NULL, NULL);
  PetscMalloc3(user->VPERE * npoints, &phi, npoints * 3, &phypts, npoints, &jxw);

  /* get the essential MOAB mesh related quantities needed for FEM assembly */
  DMMoabGetInterface(dm, &mbImpl);
  DMMoabGetLocalElements(dm, &elocal);

  /* loop over local elements */
  for (moab::Range::iterator iter = elocal->begin(); iter != elocal->end(); iter++) {
    const moab::EntityHandle ehandle = *iter;

    /* Get connectivity information: */
    DMMoabGetElementConnectivity(dm, ehandle, &nconn, &connect);

    PetscArrayzero(localv, nconn);

    /* get the coordinates of the element vertices */
    DMMoabGetVertexCoordinates(dm, nconn, connect, vpos);

    /* get the local DoF numbers to appropriately set the element contribution in the operator */
#ifdef LOCAL_ASSEMBLY
    DMMoabGetFieldDofsLocal(dm, nconn, connect, 0, dof_indices);
#else
    DMMoabGetFieldDofs(dm, nconn, connect, 0, dof_indices);
#endif

    /* 1) compute the basis functions and the derivatives wrt x and y directions
       2) compute the quadrature points transformed to the physical space */
    DMMoabFEMComputeBasis(2, nconn, vpos, quadratureObj, phypts, jxw, phi, NULL);

    /* Compute function over the locally owned part of the grid */
    for (q = 0; q < npoints; ++q) {
      ff = ComputeForcingFunction(&phypts[3 * q], user);

      for (i = 0; i < nconn; ++i) localv[i] += jxw[q] * phi[q * nconn + i] * ff;
    }

    /* check if element is on the boundary */
    DMMoabIsEntityOnBoundary(dm, ehandle, &elem_on_boundary);

    /* apply Dirichlet boundary conditions */
    if (elem_on_boundary && user->bcType == DIRICHLET) {
      /* get the list of nodes on the boundary so that we can enforce Dirichlet conditions strongly */
      DMMoabCheckBoundaryVertices(dm, nconn, connect, dbdry);

      for (i = 0; i < nconn; ++i) {
        if (dbdry[i]) { /* dirichlet node */
          /* strongly impose the Dirichlet value at the boundary vertex */
          localv[i] = EvaluateStrongDirichletCondition(&vpos[3 * i], user);
        }
      }
    }

#ifdef LOCAL_ASSEMBLY
    /* set the values directly into appropriate locations. Can alternately use VecSetValues */
    VecSetValuesLocal(b, nconn, dof_indices, localv, ADD_VALUES);
#else
    VecSetValues(b, nconn, dof_indices, localv, ADD_VALUES);
#endif
  }

  /* Force the right-hand side to be consistent with the singular operator.
     Note that this is really a hack; normally the model would provide a
     consistent right-hand side. */
  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;
    MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace);
    MatNullSpaceRemove(nullspace, b);
    MatNullSpaceDestroy(&nullspace);
  }

  /* Restore vectors */
  VecAssemblyBegin(b);
  VecAssemblyEnd(b);
  PetscFree3(phi, phypts, jxw);
  PetscQuadratureDestroy(&quadratureObj);
  return 0;
}
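
/* ComputeMatrix: assemble the stiffness matrix for the bilinear form
   a(u,v) = integral_Omega \rho grad(u) . grad(v) dx. Each locally owned
   element contributes jxw[q]*rho*(dphi_i . dphi_j) summed over the quadrature
   points; rows belonging to Dirichlet boundary vertices are replaced by
   identity rows so that the strongly imposed values set in ComputeRHS are
   recovered exactly. */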
PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  UserContext              *user = (UserContext *)ctx;
  DM                        dm;
  PetscInt                  i, j, q, nconn, nglobale, nglobalv, nc, npoints, hlevel;
  PetscInt                  dof_indices[4];
  PetscReal                 vpos[4 * 3], rho;
  PetscBool                 dbdry[4];
  const moab::EntityHandle *connect;
  const moab::Range        *elocal;
  moab::Interface          *mbImpl;
  PetscBool                 elem_on_boundary;
  PetscScalar               array[4 * 4];
  PetscReal                *phi, *dphi[2], *phypts, *jxw;
  PetscQuadrature           quadratureObj;

  KSPGetDM(ksp, &dm);

  /* get the essential MOAB mesh related quantities needed for FEM assembly */
  DMMoabGetInterface(dm, &mbImpl);
  DMMoabGetLocalElements(dm, &elocal);
  DMMoabGetSize(dm, &nglobale, &nglobalv);
  DMMoabGetHierarchyLevel(dm, &hlevel);
  PetscPrintf(PETSC_COMM_WORLD, "ComputeMatrix: Level = %" PetscInt_FMT ", N(elements) = %" PetscInt_FMT ", N(vertices) = %" PetscInt_FMT " \n", hlevel, nglobale, nglobalv);

  DMMoabFEMCreateQuadratureDefault(2, user->VPERE, &quadratureObj);
  PetscQuadratureGetData(quadratureObj, NULL, &nc, &npoints, NULL, NULL);
  PetscMalloc5(user->VPERE * npoints, &phi, user->VPERE * npoints, &dphi[0], user->VPERE * npoints, &dphi[1], npoints * 3, &phypts, npoints, &jxw);

  /* loop over local elements */
  for (moab::Range::iterator iter = elocal->begin(); iter != elocal->end(); iter++) {
    const moab::EntityHandle ehandle = *iter;

    // Get connectivity information:
    DMMoabGetElementConnectivity(dm, ehandle, &nconn, &connect);

    /* get the coordinates of the element vertices */
    DMMoabGetVertexCoordinates(dm, nconn, connect, vpos);

    /* get the local DoF numbers to appropriately set the element contribution in the operator */
#ifdef LOCAL_ASSEMBLY
    DMMoabGetFieldDofsLocal(dm, nconn, connect, 0, dof_indices);
#else
    DMMoabGetFieldDofs(dm, nconn, connect, 0, dof_indices);
#endif

    /* 1) compute the basis functions and the derivatives wrt x and y directions
       2) compute the quadrature points transformed to the physical space */
    DMMoabFEMComputeBasis(2, nconn, vpos, quadratureObj, phypts, jxw, phi, dphi);

    PetscArrayzero(array, nconn * nconn);

    /* Compute function over the locally owned part of the grid */
    for (q = 0; q < npoints; ++q) {
      /* compute the inhomogeneous (piecewise constant) diffusion coefficient at the quadrature point
         -- for large spatial variations (within an element), embed this property evaluation inside the quadrature loop */
      rho = ComputeDiffusionCoefficient(&phypts[q * 3], user);

      for (i = 0; i < nconn; ++i) {
        for (j = 0; j < nconn; ++j) array[i * nconn + j] += jxw[q] * rho * (dphi[0][q * nconn + i] * dphi[0][q * nconn + j] + dphi[1][q * nconn + i] * dphi[1][q * nconn + j]);
      }
    }

    /* check if element is on the boundary */
    DMMoabIsEntityOnBoundary(dm, ehandle, &elem_on_boundary);

    /* apply Dirichlet boundary conditions */
    if (elem_on_boundary && user->bcType == DIRICHLET) {
      /* get the list of nodes on the boundary so that we can enforce Dirichlet conditions strongly */
      DMMoabCheckBoundaryVertices(dm, nconn, connect, dbdry);

      for (i = 0; i < nconn; ++i) {
        if (dbdry[i]) { /* dirichlet node */
          /* zero the row and place a unit diagonal; the matching boundary value is set in ComputeRHS */
          for (j = 0; j < nconn; ++j) {
            /* TODO: symmetrize the system - need the RHS */
            array[i * nconn + j] = 0.0;
          }
          array[i * nconn + i] = 1.0;
        }
      }
    }
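
    /* A minimal sketch of how the TODO above could be addressed, assuming the
       global indices of the Dirichlet rows were collected in an index set
       dirichletIS (hypothetical) during assembly: after MatAssemblyEnd one
       could call
         MatZeroRowsColumnsIS(jac, dirichletIS, 1.0, x, b);
       which zeroes both the rows and the columns, moves the known boundary
       values into b, and keeps the operator symmetric for CG-type solvers. */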

    /* set the values directly into appropriate locations. */
#ifdef LOCAL_ASSEMBLY
    MatSetValuesLocal(jac, nconn, dof_indices, nconn, dof_indices, array, ADD_VALUES);
#else
    MatSetValues(jac, nconn, dof_indices, nconn, dof_indices, array, ADD_VALUES);
#endif
  }

  MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY);

  if (user->bcType == NEUMANN) {
    MatNullSpace nullspace;

    MatNullSpaceCreate(PETSC_COMM_WORLD, PETSC_TRUE, 0, 0, &nullspace);
    MatSetNullSpace(J, nullspace);
    MatNullSpaceDestroy(&nullspace);
  }
  PetscFree5(phi, dphi[0], dphi[1], phypts, jxw);
  PetscQuadratureDestroy(&quadratureObj);
  return 0;
}
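
/* ComputeDiscreteL2Error: compare the computed solution against the exact
   solution at every locally owned vertex, accumulate the squared error and
   the max-norm error, reduce across all ranks, and optionally store the
   pointwise error in the vector err. Only meaningful for -problem 1, where an
   exact solution exists. */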
PetscErrorCode ComputeDiscreteL2Error(KSP ksp, Vec err, UserContext *user)
{
  DM                 dm;
  Vec                sol;
  PetscReal          vpos[3];
  const PetscScalar *x;
  PetscScalar       *e;
  PetscReal          l2err = 0.0, linferr = 0.0, global_l2, global_linf;
  PetscInt           dof_index, N;
  const moab::Range *ownedvtx;

  KSPGetDM(ksp, &dm);

  /* get the solution vector */
  KSPGetSolution(ksp, &sol);

  /* Get the internal reference to the vector arrays */
  VecGetArrayRead(sol, &x);
  VecGetSize(sol, &N);
  if (err) {
    /* reset the error vector */
    VecSet(err, 0.0);
    /* get array reference */
    VecGetArray(err, &e);
  }

  DMMoabGetLocalVertices(dm, &ownedvtx, NULL);

  /* Compute function over the locally owned part of the grid */
  for (moab::Range::iterator iter = ownedvtx->begin(); iter != ownedvtx->end(); iter++) {
    const moab::EntityHandle vhandle = *iter;

    /* get the local DoF number for the vertex */
#ifdef LOCAL_ASSEMBLY
    DMMoabGetFieldDofsLocal(dm, 1, &vhandle, 0, &dof_index);
#else
    DMMoabGetFieldDofs(dm, 1, &vhandle, 0, &dof_index);
#endif

    /* get the coordinates of the vertex */
    DMMoabGetVertexCoordinates(dm, 1, &vhandle, vpos);

    /* compute the discrete L2 error against the exact solution */
    const PetscScalar lerr = (ExactSolution(vpos, user) - x[dof_index]);
    l2err += lerr * lerr;
    if (linferr < fabs(lerr)) linferr = fabs(lerr);

    if (err) { /* set the discrete L2 error against the exact solution */
      e[dof_index] = lerr;
    }
  }

  MPI_Allreduce(&l2err, &global_l2, 1, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD);
  MPI_Allreduce(&linferr, &global_linf, 1, MPI_DOUBLE, MPI_MAX, PETSC_COMM_WORLD);
  PetscPrintf(PETSC_COMM_WORLD, "Computed Errors: L_2 = %f, L_inf = %f\n", sqrt(global_l2 / N), global_linf);

  /* Restore vectors */
  VecRestoreArrayRead(sol, &x);
  if (err) VecRestoreArray(err, &e);
  return 0;
}
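
/* InitializeOptions: set the default parameters and then query the PETSc
   options database so that every default can be overridden from the command
   line; derived quantities (boundary-condition type, vertices per element,
   and the shifted domain bounds for -problem 3) are filled in afterwards. */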
PetscErrorCode InitializeOptions(UserContext *user)
{
  const char *bcTypes[2] = {"dirichlet", "neumann"};
  PetscInt    bc;

  /* set default parameters */
  user->dim       = 2;
  user->problem   = 1;
  user->n         = 2;
  user->nlevels   = 2;
  user->rho       = 0.1;
  user->bounds[0] = user->bounds[2] = user->bounds[4] = 0.0;
  user->bounds[1] = user->bounds[3] = user->bounds[5] = 1.0;
  user->xref      = user->bounds[1] / 2;
  user->yref      = user->bounds[3] / 2;
  user->nu        = 0.05;
  user->usemg     = PETSC_FALSE;
  user->io        = PETSC_FALSE;
  user->usetri    = PETSC_FALSE;
  user->error     = PETSC_FALSE;
  bc              = (PetscInt)DIRICHLET;

  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Options for the inhomogeneous Poisson equation", "ex35.cxx");
  PetscOptionsInt("-problem", "The type of problem being solved (controls forcing function)", "ex35.cxx", user->problem, &user->problem, NULL);
  PetscOptionsInt("-n", "The number of elements in each direction", "ex35.cxx", user->n, &user->n, NULL);
  PetscOptionsInt("-levels", "Number of levels in the multigrid hierarchy", "ex35.cxx", user->nlevels, &user->nlevels, NULL);
  PetscOptionsReal("-rho", "The conductivity", "ex35.cxx", user->rho, &user->rho, NULL);
  PetscOptionsReal("-x", "The domain size in x-direction", "ex35.cxx", user->bounds[1], &user->bounds[1], NULL);
  PetscOptionsReal("-y", "The domain size in y-direction", "ex35.cxx", user->bounds[3], &user->bounds[3], NULL);
  PetscOptionsReal("-xref", "The x-coordinate of the Gaussian center (for -problem 2)", "ex35.cxx", user->xref, &user->xref, NULL);
  PetscOptionsReal("-yref", "The y-coordinate of the Gaussian center (for -problem 2)", "ex35.cxx", user->yref, &user->yref, NULL);
  PetscOptionsReal("-nu", "The width of the Gaussian source (for -problem 2)", "ex35.cxx", user->nu, &user->nu, NULL);
  PetscOptionsBool("-mg", "Use multigrid preconditioner", "ex35.cxx", user->usemg, &user->usemg, NULL);
  PetscOptionsBool("-io", "Write out the solution and mesh data", "ex35.cxx", user->io, &user->io, NULL);
  PetscOptionsBool("-tri", "Use triangles to discretize the domain", "ex35.cxx", user->usetri, &user->usetri, NULL);
  PetscOptionsBool("-error", "Compute the discrete L_2 and L_inf errors of the solution", "ex35.cxx", user->error, &user->error, NULL);
  PetscOptionsEList("-bc", "Type of boundary condition", "ex35.cxx", bcTypes, 2, bcTypes[0], &bc, NULL);
  PetscOptionsString("-file", "The mesh file for the problem", "ex35.cxx", "", user->filename, sizeof(user->filename), &user->use_extfile);
  PetscOptionsEnd();

  if (user->problem < 1 || user->problem > 3) user->problem = 1;
  user->bcType = (BCType)bc;
  user->VPERE  = (user->usetri ? 3 : 4);
  if (user->problem == 3) {
    user->bounds[0] = user->bounds[2] = -0.5;
    user->bounds[1] = user->bounds[3] = 0.5;
    user->bounds[4] = user->bounds[5] = 0.5;
  }
  return 0;
}

/*TEST

   build:
     requires: moab

   test:
     args: -levels 0 -nu .01 -n 10 -ksp_type cg -pc_type sor -ksp_converged_reason

   test:
     suffix: 2
     nsize: 2
     requires: hdf5
     args: -levels 3 -nu .01 -n 2 -mg -ksp_converged_reason

   test:
     suffix: 3
     nsize: 2
     requires: hdf5
     args: -problem 3 -file data/ex35_mesh.h5m -mg -levels 1 -ksp_converged_reason
     localrunfiles: data

TEST*/