Actual source code: ex19.c
static char help[] = "Solves the Laplacian with multigrid, the bad way.\n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -ratio <r>, where <r> = ratio of fine to coarse grid lines\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";
/*
    This problem is modeled by the partial differential equation

            -Laplacian u = g,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a linear
    system of equations.
*/
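/*
    For reference, with grid spacings hx = 1/(mx-1) and hy = 1/(my-1), the
    rows assembled in FormJacobian_Grid() below are the 5-point stencil
    scaled by hx*hy: at an interior node (i,j),

       2*(hy/hx + hx/hy)*u(i,j) - (hy/hx)*(u(i-1,j) + u(i+1,j))
                                - (hx/hy)*(u(i,j-1) + u(i,j+1))

    (this example simply uses b = 1 as the discrete right-hand side).
*/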
#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>
/* User-defined application contexts */

typedef struct {
  PetscInt mx, my;         /* number of grid points in the x and y directions */
  Vec      localX, localF; /* local vectors with ghost region */
  DM       da;
  Vec      x, b, r;        /* global vectors */
  Mat      J;              /* Jacobian on grid */
} GridCtx;

typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  KSP      ksp_coarse;
  PetscInt ratio;
  Mat      Ii; /* interpolation from coarse to fine */
} AppCtx;
#define COARSE_LEVEL 0
#define FINE_LEVEL   1

extern PetscErrorCode FormJacobian_Grid(AppCtx *, GridCtx *, Mat *);
/*
    -ratio : ratio of grid lines between the fine and coarse grids.
*/
int main(int argc, char **argv)
{
  AppCtx      user;
  PetscInt    its, N, n, Nx = PETSC_DECIDE, Ny = PETSC_DECIDE, nlocal, Nlocal;
  PetscMPIInt size;
  KSP         ksp, ksp_fine;
  PC          pc;
  PetscScalar one = 1.0;
  PetscInitialize(&argc, &argv, NULL, help);
  user.ratio     = 2;
  user.coarse.mx = 5;
  user.coarse.my = 5;

  PetscOptionsGetInt(NULL, NULL, "-Mx", &user.coarse.mx, NULL);
  PetscOptionsGetInt(NULL, NULL, "-My", &user.coarse.my, NULL);
  PetscOptionsGetInt(NULL, NULL, "-ratio", &user.ratio, NULL);

  user.fine.mx = user.ratio * (user.coarse.mx - 1) + 1;
  user.fine.my = user.ratio * (user.coarse.my - 1) + 1;
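  /* This choice makes every coarse grid line also a fine grid line, so the
     two grids nest and DMCreateInterpolation() below can interpolate
     between them */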
  PetscPrintf(PETSC_COMM_WORLD, "Coarse grid size %" PetscInt_FMT " by %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my);
  PetscPrintf(PETSC_COMM_WORLD, "Fine grid size %" PetscInt_FMT " by %" PetscInt_FMT "\n", user.fine.mx, user.fine.my);

  n = user.fine.mx * user.fine.my;
  N = user.coarse.mx * user.coarse.my;

  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  PetscOptionsGetInt(NULL, NULL, "-Nx", &Nx, NULL);
  PetscOptionsGetInt(NULL, NULL, "-Ny", &Ny, NULL);
  /* Set up distributed array for fine grid */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, Nx, Ny, 1, 1, NULL, NULL, &user.fine.da);
  DMSetFromOptions(user.fine.da);
  DMSetUp(user.fine.da);
  DMCreateGlobalVector(user.fine.da, &user.fine.x);
  VecDuplicate(user.fine.x, &user.fine.r);
  VecDuplicate(user.fine.x, &user.fine.b);
  VecGetLocalSize(user.fine.x, &nlocal);
  DMCreateLocalVector(user.fine.da, &user.fine.localX);
  VecDuplicate(user.fine.localX, &user.fine.localF);
  MatCreateAIJ(PETSC_COMM_WORLD, nlocal, nlocal, n, n, 5, NULL, 3, NULL, &user.fine.J);
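  /* Preallocate for the 5-point stencil: at most 5 nonzeros per row in the
     diagonal block and (a bound of) 3 in the off-process block */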
  /* Set up distributed array for coarse grid */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, Nx, Ny, 1, 1, NULL, NULL, &user.coarse.da);
  DMSetFromOptions(user.coarse.da);
  DMSetUp(user.coarse.da);
  DMCreateGlobalVector(user.coarse.da, &user.coarse.x);
  VecDuplicate(user.coarse.x, &user.coarse.b);
  VecGetLocalSize(user.coarse.x, &Nlocal);
  DMCreateLocalVector(user.coarse.da, &user.coarse.localX);
  VecDuplicate(user.coarse.localX, &user.coarse.localF);
  MatCreateAIJ(PETSC_COMM_WORLD, Nlocal, Nlocal, N, N, 5, NULL, 3, NULL, &user.coarse.J);
  /* Create linear solver */
  KSPCreate(PETSC_COMM_WORLD, &ksp);

  /* Set two-level additive Schwarz preconditioner */
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCMG);
  PCMGSetLevels(pc, 2, NULL);
  PCMGSetType(pc, PC_MG_ADDITIVE);
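  /* PC_MG_ADDITIVE applies the coarse-grid correction and the fine-grid
     smoother independently and sums the results, rather than composing
     them multiplicatively as in a standard V-cycle */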
  FormJacobian_Grid(&user, &user.coarse, &user.coarse.J);
  FormJacobian_Grid(&user, &user.fine, &user.fine.J);
  /* Create coarse level */
  PCMGGetCoarseSolve(pc, &user.ksp_coarse);
  KSPSetOptionsPrefix(user.ksp_coarse, "coarse_");
  KSPSetFromOptions(user.ksp_coarse);
  KSPSetOperators(user.ksp_coarse, user.coarse.J, user.coarse.J);
  PCMGSetX(pc, COARSE_LEVEL, user.coarse.x);
  PCMGSetRhs(pc, COARSE_LEVEL, user.coarse.b);
  /* Create fine level */
  PCMGGetSmoother(pc, FINE_LEVEL, &ksp_fine);
  KSPSetOptionsPrefix(ksp_fine, "fine_");
  KSPSetFromOptions(ksp_fine);
  KSPSetOperators(ksp_fine, user.fine.J, user.fine.J);
  PCMGSetR(pc, FINE_LEVEL, user.fine.r);
  /* Create interpolation between the levels */
  DMCreateInterpolation(user.coarse.da, user.fine.da, &user.Ii, NULL);
  PCMGSetInterpolation(pc, FINE_LEVEL, user.Ii);
  PCMGSetRestriction(pc, FINE_LEVEL, user.Ii);
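  /* The same matrix serves for both operations; PETSc applies its transpose
     when restricting residuals from the fine grid to the coarse grid */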
  KSPSetOperators(ksp, user.fine.J, user.fine.J);

  VecSet(user.fine.b, one);

  /* Set options, then solve the linear system */
  KSPSetFromOptions(ksp);

  KSPSolve(ksp, user.fine.b, user.fine.x);
  KSPGetIterationNumber(ksp, &its);
  PetscPrintf(PETSC_COMM_WORLD, "Number of iterations = %" PetscInt_FMT "\n", its);
  /* Free data structures */
  MatDestroy(&user.fine.J);
  VecDestroy(&user.fine.x);
  VecDestroy(&user.fine.r);
  VecDestroy(&user.fine.b);
  DMDestroy(&user.fine.da);
  VecDestroy(&user.fine.localX);
  VecDestroy(&user.fine.localF);

  MatDestroy(&user.coarse.J);
  VecDestroy(&user.coarse.x);
  VecDestroy(&user.coarse.b);
  DMDestroy(&user.coarse.da);
  VecDestroy(&user.coarse.localX);
  VecDestroy(&user.coarse.localF);

  KSPDestroy(&ksp);
  MatDestroy(&user.Ii);
  PetscFinalize();
  return 0;
}
PetscErrorCode FormJacobian_Grid(AppCtx *user, GridCtx *grid, Mat *J)
{
  Mat                    jac = *J;
  PetscInt               i, j, row, mx, my, xs, ys, xm, ym, Xs, Ys, Xm, Ym, col[5];
  PetscInt               grow;
  const PetscInt        *ltog;
  PetscScalar            two = 2.0, one = 1.0, v[5], hx, hy, hxdhy, hydhx, value;
  ISLocalToGlobalMapping ltogm;

  mx    = grid->mx;
  my    = grid->my;
  hx    = one / (PetscReal)(mx - 1);
  hy    = one / (PetscReal)(my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;
  /* Get ghost points */
  DMDAGetCorners(grid->da, &xs, &ys, 0, &xm, &ym, 0);
  DMDAGetGhostCorners(grid->da, &Xs, &Ys, 0, &Xm, &Ym, 0);
  DMGetLocalToGlobalMapping(grid->da, &ltogm);
  ISLocalToGlobalMappingGetIndices(ltogm, &ltog);
  /* Evaluate Jacobian of function */
  for (j = ys; j < ys + ym; j++) {
    /* row indexes the local ghosted (Xm by Ym) array; ltog[] maps it to the global PETSc ordering */
    row = (j - Ys) * Xm + xs - Xs - 1;
    for (i = xs; i < xs + xm; i++) {
      row++;
      grow = ltog[row];
      if (i > 0 && i < mx - 1 && j > 0 && j < my - 1) {
        /* interior point: full 5-point stencil */
        v[0]   = -hxdhy;
        col[0] = ltog[row - Xm];
        v[1]   = -hydhx;
        col[1] = ltog[row - 1];
        v[2]   = two * (hydhx + hxdhy);
        col[2] = grow;
        v[3]   = -hydhx;
        col[3] = ltog[row + 1];
        v[4]   = -hxdhy;
        col[4] = ltog[row + Xm];
        MatSetValues(jac, 1, &grow, 5, col, v, INSERT_VALUES);
      } else if ((i > 0 && i < mx - 1) || (j > 0 && j < my - 1)) {
        /* boundary edge point: scaled diagonal entry only */
        value = .5 * two * (hydhx + hxdhy);
        MatSetValues(jac, 1, &grow, 1, &grow, &value, INSERT_VALUES);
      } else {
        /* boundary corner point: scaled diagonal entry only */
        value = .25 * two * (hydhx + hxdhy);
        MatSetValues(jac, 1, &grow, 1, &grow, &value, INSERT_VALUES);
      }
    }
  }
  ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog);
  MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY);
  return 0;
}
/*TEST

   test:
      args: -ksp_gmres_cgs_refinement_type refine_always -pc_type jacobi -ksp_monitor_short -ksp_type gmres

   test:
      suffix: 2
      nsize: 3
      args: -ksp_monitor_short

TEST*/
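/*
   Example runs (illustrative; options as defined above):

     ./ex19 -Mx 9 -My 9 -ratio 2 -ksp_monitor_short
     mpiexec -n 3 ./ex19 -ksp_monitor_short
*/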