Actual source code: ex16.c
/* Usage: mpiexec ex16 [-help] [all PETSc options] */

static char help[] = "Solves a sequence of linear systems with different right-hand-side vectors.\n\
Input parameters include:\n\
  -ntimes <ntimes> : number of linear systems to solve\n\
  -view_exact_sol  : write exact solution vector to stdout\n\
  -m <mesh_x>      : number of mesh points in x-direction\n\
  -n <mesh_y>      : number of mesh points in y-direction\n\n";
/*
  Include "petscksp.h" so that we can use KSP solvers. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>
int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    ntimes, i, j, k, Ii, J, Istart, Iend;
  PetscInt    m = 8, n = 7, its;
  PetscBool   flg = PETSC_FALSE;
  PetscScalar v, one = 1.0, rhs;

  PetscInitialize(&argc, &args, (char *)0, help);
  PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL);
  PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL);
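
  /*
     As an illustration only (not part of the original example), a typical
     invocation might look like
         mpiexec -n 2 ./ex16 -m 16 -n 16 -ntimes 3 -view_exact_sol
     which solves three systems on a 16 x 16 grid with two MPI ranks; the
     exact launcher and executable path depend on the local installation.
  */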
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix for use in solving a series of
         linear systems of the form, A x_i = b_i, for i=1,2,...
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n);
  MatSetFromOptions(A);
  MatSetUp(A);
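
  /*
     A sketch of the runtime flexibility mentioned above: the matrix type
     can be chosen on the command line, e.g. -mat_type aij or -mat_type
     baij, or hard-coded before MatSetFromOptions() with
         MatSetType(A, MATAIJ);
     Calling MatSetFromOptions() afterwards still lets command-line
     options override such a hard-coded default.
  */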
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors. Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A, &Istart, &Iend);
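
  /*
     Worked example, assuming the default layout: with m = 8, n = 7 the
     matrix has 56 rows, so on two MPI ranks PETSC_DECIDE gives rank 0
     rows [0,28) and rank 1 rows [28,56); Istart/Iend hold these bounds.
  */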
  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
     - Each processor needs to insert only elements that it owns
       locally (but any non-local elements will be sent to the
       appropriate processor during matrix assembly).
     - Always specify global rows and columns of matrix entries.
  */
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
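    /* Global row Ii = i*n + j corresponds to grid point (i,j); e.g. with
       n = 7, row Ii = 10 gives i = 10/7 = 1 and j = 10 - 7 = 3. */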
    if (i > 0) {
      J = Ii - n;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (i < m - 1) {
      J = Ii + n;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (j > 0) {
      J = Ii - 1;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    if (j < n - 1) {
      J = Ii + 1;
      MatSetValues(A, 1, &Ii, 1, &J, &v, INSERT_VALUES);
    }
    v = 4.0;
    MatSetValues(A, 1, &Ii, 1, &Ii, &v, INSERT_VALUES);
  }
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
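
  /*
     Side note: MAT_FINAL_ASSEMBLY is used for the last (or only) assembly;
     MAT_FLUSH_ASSEMBLY exists for flushing values mid-assembly, e.g. when
     switching between INSERT_VALUES and ADD_VALUES.
  */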
  /*
     Create parallel vectors.
     - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
       we specify only the vector's global dimension; the parallel
       partitioning is determined at runtime.
     - When solving a linear system, the vectors and matrices MUST
       be partitioned accordingly. PETSc automatically generates
       appropriately partitioned matrices and vectors when MatCreate()
       and VecCreate() are used with the same communicator.
     - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD, &u);
  VecSetSizes(u, PETSC_DECIDE, m * n);
  VecSetFromOptions(u);
  VecDuplicate(u, &b);
  VecDuplicate(b, &x);
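
  /*
     VecDuplicate() copies only the type and parallel layout of its input,
     not its numerical values, so b and x inherit the distribution of u
     without inheriting its entries.
  */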
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD, &ksp);
  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp, A, A);
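
  /*
     If a separate preconditioning matrix P were available (not the case
     here), the call would instead be KSPSetOperators(ksp, A, P): the
     second argument defines the system, the third is used to build the
     preconditioner.
  */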
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
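
  /*
     For example, -ksp_type cg -pc_type jacobi -ksp_monitor selects the
     conjugate gradient method (valid here, since the 5-point Laplacian is
     symmetric positive definite) with Jacobi preconditioning and prints
     the residual at each iteration. The equivalent hard-coded choices
     (with a variable declared as PC pc) would be
         KSPSetType(ksp, KSPCG);
         KSPGetPC(ksp, &pc);
         PCSetType(pc, PCJACOBI);
     placed before KSPSetFromOptions() so that runtime options still win.
  */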
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Solve several linear systems of the form A x_i = b_i
       I.e., we retain the same matrix (A) for all systems, but
       change the right-hand-side vector (b_i) at each step.

       In this case, we simply call KSPSolve() multiple times. The
       preconditioner setup operations (e.g., factorization for ILU)
       are performed during the first call to KSPSolve() only; such
       operations will NOT be repeated for successive solves.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ntimes = 2;
  PetscOptionsGetInt(NULL, NULL, "-ntimes", &ntimes, NULL);
  for (k = 1; k <= ntimes; k++) {
    /*
       Set exact solution; then compute right-hand-side vector. We use
       an exact solution of a vector with all elements equal to 1.0*k.
    */
    rhs = one * (PetscReal)k;
    VecSet(u, rhs);
    MatMult(A, u, b);
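
    /*
       A worked check: with u = k in every entry, b = A*u has
       b_i = (4 - #neighbors)*k, i.e. 0 at interior grid points
       (4 neighbors), k along the edges (3), and 2k at the corners (2).
    */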
    /*
       View the exact solution vector if desired
    */
    PetscOptionsGetBool(NULL, NULL, "-view_exact_sol", &flg, NULL);
    if (flg) VecView(u, PETSC_VIEWER_STDOUT_WORLD);

    KSPSolve(ksp, b, x);
    /*
       Check the error
    */
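    /*
       Note: overwriting x with the error (x <- x - u) below is harmless
       for the next solve, since KSP uses a zero initial guess by default;
       see KSPSetInitialGuessNonzero() to change that behavior.
    */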
    VecAXPY(x, -1.0, u);
    VecNorm(x, NORM_2, &norm);
    KSPGetIterationNumber(ksp, &its);
    /*
       Print convergence information. PetscPrintf() produces a single
       print statement from all processes that share a communicator.
    */
    PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g System %" PetscInt_FMT ": iterations %" PetscInt_FMT "\n", (double)norm, k, its);
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                               Clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}
/*TEST

   test:
      nsize: 2
      args: -ntimes 4 -ksp_gmres_cgs_refinement_type refine_always

TEST*/
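
/*
   Note on the block above: PETSc's test harness parses TEST sections when
   the example is run as a regression test; nsize is the number of MPI
   ranks and args are appended to the run command.
*/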