Actual source code: ex65.c
/*
   Solves the partial differential equation

        d  du
        -- -- = 1,    0 < x < 1,
        dx dx

   with boundary conditions

        u = 0 for x = 0, x = 1.

   This uses multigrid to solve the linear system.

   Demonstrates how to build a DMSHELL for managing multigrid. The DMSHELL simply wraps a
   1D DMDA, which is used to construct all the needed PETSc objects.
*/
static char help[] = "Solves 1D constant coefficient Laplacian using DMSHELL and multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>
static PetscErrorCode ComputeMatrix(KSP, Mat, Mat, void *);
static PetscErrorCode ComputeRHS(KSP, Vec, void *);
static PetscErrorCode CreateMatrix(DM, Mat *);
static PetscErrorCode CreateGlobalVector(DM, Vec *);
static PetscErrorCode CreateLocalVector(DM, Vec *);
static PetscErrorCode Refine(DM, MPI_Comm, DM *);
static PetscErrorCode Coarsen(DM, MPI_Comm, DM *);
static PetscErrorCode CreateInterpolation(DM, DM, Mat *, Vec *);
static PetscErrorCode CreateRestriction(DM, DM, Mat *);
static PetscErrorCode Destroy(void *);
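
/*
   MyDMShellCreate - Creates a DMSHELL that stores the given DMDA as its context and registers
   the callbacks PCMG needs: matrix/vector creation, refine/coarsen, interpolation/restriction,
   and context destruction.
*/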
static PetscErrorCode MyDMShellCreate(MPI_Comm comm, DM da, DM *shell)
{
  DMShellCreate(comm, shell);
  DMShellSetContext(*shell, da);
  DMShellSetCreateMatrix(*shell, CreateMatrix);
  DMShellSetCreateGlobalVector(*shell, CreateGlobalVector);
  DMShellSetCreateLocalVector(*shell, CreateLocalVector);
  DMShellSetRefine(*shell, Refine);
  DMShellSetCoarsen(*shell, Coarsen);
  DMShellSetCreateInterpolation(*shell, CreateInterpolation);
  DMShellSetCreateRestriction(*shell, CreateRestriction);
  DMShellSetDestroyContext(*shell, Destroy);
  return 0;
}
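
/*
   main - Builds a 129-point 1D DMDA, wraps it in a DMSHELL, and hands the shell to KSP so that
   -pc_type mg can build the multigrid hierarchy through the shell's callbacks.
*/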
int main(int argc, char **argv)
{
  KSP      ksp;
  DM       da, shell;
  PetscInt levels;

  PetscInitialize(&argc, &argv, (char *)0, help);
  KSPCreate(PETSC_COMM_WORLD, &ksp);
  DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 129, 1, 1, 0, &da);
  DMSetFromOptions(da);
  DMSetUp(da);
  MyDMShellCreate(PETSC_COMM_WORLD, da, &shell);
  /* these two lines are not needed but allow PCMG to automatically know how many multigrid levels the user wants */
  DMGetRefineLevel(da, &levels);
  DMSetRefineLevel(shell, levels);

  KSPSetDM(ksp, shell);
  KSPSetComputeRHS(ksp, ComputeRHS, NULL);
  KSPSetComputeOperators(ksp, ComputeMatrix, NULL);
  KSPSetFromOptions(ksp);
  KSPSolve(ksp, NULL, NULL);

  KSPDestroy(&ksp);
  DMDestroy(&shell);
  PetscFinalize();
  return 0;
}
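
/*
   Destroy - Destroys the DMDA stored as the shell's context; called automatically when the
   DMSHELL itself is destroyed.
*/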
static PetscErrorCode Destroy(void *ctx)
{
  DMDestroy((DM *)&ctx);
  return 0;
}
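
/* CreateMatrix - Delegates matrix creation to the DMDA stored in the shell's context */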
static PetscErrorCode CreateMatrix(DM shell, Mat *A)
{
  DM da;

  DMShellGetContext(shell, &da);
  DMCreateMatrix(da, A);
  return 0;
}
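
/* CreateInterpolation - Builds the coarse-to-fine interpolation from the two underlying DMDAs */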
static PetscErrorCode CreateInterpolation(DM dm1, DM dm2, Mat *mat, Vec *vec)
{
  DM da1, da2;

  DMShellGetContext(dm1, &da1);
  DMShellGetContext(dm2, &da2);
  DMCreateInterpolation(da1, da2, mat, vec);
  return 0;
}
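
/* CreateRestriction - Uses the transpose of the DMDA interpolation as the fine-to-coarse restriction */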
static PetscErrorCode CreateRestriction(DM dm1, DM dm2, Mat *mat)
{
  DM  da1, da2;
  Mat tmat;

  DMShellGetContext(dm1, &da1);
  DMShellGetContext(dm2, &da2);
  DMCreateInterpolation(da1, da2, &tmat, NULL);
  MatTranspose(tmat, MAT_INITIAL_MATRIX, mat);
  MatDestroy(&tmat);
  return 0;
}
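
/* CreateGlobalVector - Creates a global vector with the DMDA, then associates it with the shell */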
static PetscErrorCode CreateGlobalVector(DM shell, Vec *x)
{
  DM da;

  DMShellGetContext(shell, &da);
  DMCreateGlobalVector(da, x);
  VecSetDM(*x, shell);
  return 0;
}
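
/* CreateLocalVector - Creates a local (ghosted) vector with the DMDA, then associates it with the shell */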
static PetscErrorCode CreateLocalVector(DM shell, Vec *x)
{
  DM da;

  DMShellGetContext(shell, &da);
  DMCreateLocalVector(da, x);
  VecSetDM(*x, shell);
  return 0;
}
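
/* Refine - Refines the underlying DMDA and wraps the refined DMDA in a new DMSHELL */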
static PetscErrorCode Refine(DM shell, MPI_Comm comm, DM *dmnew)
{
  DM da, dafine;

  DMShellGetContext(shell, &da);
  DMRefine(da, comm, &dafine);
  MyDMShellCreate(PetscObjectComm((PetscObject)shell), dafine, dmnew);
  return 0;
}
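
/* Coarsen - Coarsens the underlying DMDA and wraps the coarsened DMDA in a new DMSHELL */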
static PetscErrorCode Coarsen(DM shell, MPI_Comm comm, DM *dmnew)
{
  DM da, dacoarse;

  DMShellGetContext(shell, &da);
  DMCoarsen(da, comm, &dacoarse);
  MyDMShellCreate(PetscObjectComm((PetscObject)shell), dacoarse, dmnew);
  return 0;
}
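
/*
   ComputeRHS - Sets the right-hand side: h = 1/(mx-1) on the interior nodes and 0 on the two
   boundary nodes where u = 0 is imposed; the h scaling matches the 1/h scaling used in
   ComputeMatrix.
*/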
static PetscErrorCode ComputeRHS(KSP ksp, Vec b, void *ctx)
{
  PetscInt    mx, idx[2];
  PetscScalar h, v[2];
  DM          da, shell;

  KSPGetDM(ksp, &shell);
  DMShellGetContext(shell, &da);
  DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
  h = 1.0 / (mx - 1);
  VecSet(b, h);
  idx[0] = 0;
  idx[1] = mx - 1;
  v[0] = v[1] = 0.0;
  VecSetValues(b, 2, idx, v, INSERT_VALUES);
  VecAssemblyBegin(b);
  VecAssemblyEnd(b);
  return 0;
}
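
/*
   ComputeMatrix - Assembles the standard 1D Laplacian stencil (-1, 2, -1)/h on interior rows;
   boundary rows get only the diagonal entry 2/h, enforcing the Dirichlet conditions.
*/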
static PetscErrorCode ComputeMatrix(KSP ksp, Mat J, Mat jac, void *ctx)
{
  PetscInt    i, mx, xm, xs;
  PetscScalar v[3], h;
  MatStencil  row, col[3];
  DM          da, shell;

  KSPGetDM(ksp, &shell);
  DMShellGetContext(shell, &da);
  DMDAGetInfo(da, 0, &mx, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
  DMDAGetCorners(da, &xs, 0, 0, &xm, 0, 0);
  h = 1.0 / (mx - 1);

  for (i = xs; i < xs + xm; i++) {
    row.i = i;
    if (i == 0 || i == mx - 1) {
      v[0] = 2.0 / h;
      MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES);
    } else {
      v[0]     = -1.0 / h;
      col[0].i = i - 1;
      v[1]     = 2.0 / h;
      col[1].i = row.i;
      v[2]     = -1.0 / h;
      col[2].i = i + 1;
      MatSetValuesStencil(jac, 1, &row, 3, col, v, INSERT_VALUES);
    }
  }
  MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY);
  return 0;
}
/*TEST

   test:
     nsize: 4
     args: -ksp_monitor -pc_type mg -da_refine 3

TEST*/