Actual source code: ex1.c

static char help[] = "Solves a tridiagonal linear system with KSP.\n\n";

/*
   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscpc.h     - preconditioners
     petscis.h     - index sets
     petscviewer.h - viewers

   Note: The corresponding parallel example is ex23.c
*/

#include <petscksp.h>
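
/*
   A typical build-and-run sequence, assuming a PETSc installation with
   PETSC_DIR and PETSC_ARCH set (the exact commands depend on the install):

       make ex1
       ./ex1 -n 100

   The -n option sets the problem size, as read below via PetscOptionsGetInt().
*/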

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PC          pc;      /* preconditioner context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    i, n = 10, col[3], its;
  PetscMPIInt size;
  PetscScalar value[3];

  PetscInitialize(&argc, &args, (char *)0, help);
  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  PetscCheck(size == 1, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This is a uniprocessor example only!");

  PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL);
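
  /*
     Error checking is elided in this listing for brevity; in current PETSc
     style each call is wrapped in PetscCall() so that failures propagate
     with a stack trace, e.g.

         PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  */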

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors.  Note that we form 1 vector from scratch and
     then duplicate as needed.
  */
  VecCreate(PETSC_COMM_SELF, &x);
  PetscObjectSetName((PetscObject)x, "Solution");
  VecSetSizes(x, PETSC_DECIDE, n);
  VecSetFromOptions(x);
  VecDuplicate(x, &b);
  VecDuplicate(x, &u);

  /*
     Create matrix.  When using MatCreate(), the matrix format can
     be specified at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.
  */
  MatCreate(PETSC_COMM_SELF, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n);
  MatSetFromOptions(A);
  MatSetUp(A);
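
  /*
     A minimal preallocation sketch (not part of this example): with the
     default sequential AIJ format, this tridiagonal matrix has at most
     3 nonzeros per row, so one could preallocate with

         MatSeqAIJSetPreallocation(A, 3, NULL);

     which avoids repeated memory allocations during MatSetValues() and
     also makes the MatSetUp() call above unnecessary.
  */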

  /*
     Assemble matrix
  */
  value[0] = -1.0;
  value[1] = 2.0;
  value[2] = -1.0;
  for (i = 1; i < n - 1; i++) {
    col[0] = i - 1;
    col[1] = i;
    col[2] = i + 1;
    MatSetValues(A, 1, &i, 3, col, value, INSERT_VALUES);
  }
  i      = n - 1;
  col[0] = n - 2;
  col[1] = n - 1;
  MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES);
  i        = 0;
  col[0]   = 0;
  col[1]   = 1;
  value[0] = 2.0;
  value[1] = -1.0;
  MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES);
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
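
  /*
     To inspect the assembled matrix one could add here (illustrative only)

         MatView(A, PETSC_VIEWER_STDOUT_SELF);

     or, equivalently, run with the -mat_view option.
  */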

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(u, 1.0);
  MatMult(A, u, b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  KSPCreate(PETSC_COMM_SELF, &ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the matrix that defines the preconditioner.
  */
  KSPSetOperators(ksp, A, A);
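
  /*
     If a cheaper approximation of A were available for building the
     preconditioner, it could be passed as the third argument instead
     (a sketch; P is a hypothetical Mat not defined in this example):

         KSPSetOperators(ksp, A, P);
  */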

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following three statements are optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions();
  */
  KSPGetPC(ksp, &pc);
  PCSetType(pc, PCJACOBI);
  KSPSetTolerances(ksp, 1.e-5, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
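
  /*
     Since this tridiagonal matrix is symmetric positive definite, one
     could also pick the Krylov method directly here rather than at
     runtime (a sketch, not part of this example):

         KSPSetType(ksp, KSPCG);
  */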

  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
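
  /*
     For example, an invocation such as

         ./ex1 -n 100 -ksp_type gmres -pc_type sor -ksp_monitor

     overrides the Jacobi and tolerance settings made above (all options
     shown are standard PETSc options).
  */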

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  KSPSolve(ksp, b, x);

  /*
     View solver info; we could instead use the option -ksp_view to
     print this info to the screen at the conclusion of KSPSolve().
  */
  KSPView(ksp, PETSC_VIEWER_STDOUT_SELF);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check the solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecAXPY(x, -1.0, u);
  VecNorm(x, NORM_2, &norm);
  KSPGetIterationNumber(ksp, &its);
  PetscPrintf(PETSC_COMM_SELF, "Norm of error %g, Iterations %" PetscInt_FMT "\n", (double)norm, its);
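
  /*
     One could also report why the iteration terminated (a sketch; the
     reason variable is not declared in this example):

         KSPConvergedReason reason;
         KSPGetConvergedReason(ksp, &reason);
         PetscPrintf(PETSC_COMM_SELF, "Converged due to %s\n", KSPConvergedReasons[reason]);

     or simply run with -ksp_converged_reason.
  */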

  /*
     Check that KSP automatically handles the fact that the new nonzero
     values in the matrix are propagated to the KSP solver.
  */
  MatShift(A, 2.0);
  KSPSolve(ksp, b, x);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  VecDestroy(&x);
  VecDestroy(&u);
  VecDestroy(&b);
  MatDestroy(&A);
  KSPDestroy(&ksp);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 2
      args: -pc_type sor -pc_sor_symmetric -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 2_aijcusparse
      requires: cuda
      args: -pc_type sor -pc_sor_symmetric -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda

   test:
      suffix: 3
      args: -pc_type eisenstat -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 3_aijcusparse
      requires: cuda
      args: -pc_type eisenstat -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda

   test:
      suffix: aijcusparse
      requires: cuda
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always -mat_type aijcusparse -vec_type cuda
      output_file: output/ex1_1_aijcusparse.out

   test:
      requires: defined(PETSC_USE_SINGLE_LIBRARY)
      suffix: mpi_linear_solver_server_1
      nsize: 3
      filter: sed 's?ATOL?RTOL?g'
      args: -mpi_linear_solver_server -mpi_linear_solver_server_view -pc_type mpi -ksp_type preonly -mpi_ksp_monitor -mpi_ksp_converged_reason -mat_view -mpi_pc_type none -mpi_ksp_view -mpi_mat_view -pc_mpi_minimum_count_per_rank 5

   test:
      requires: defined(PETSC_USE_SINGLE_LIBRARY)
      suffix: mpi_linear_solver_server_2
      nsize: 3
      filter: sed 's?ATOL?RTOL?g'
      args: -mpi_linear_solver_server -mpi_linear_solver_server_view -pc_type mpi -ksp_type preonly -mpi_ksp_monitor -mpi_ksp_converged_reason -mat_view -mpi_pc_type none -mpi_ksp_view

   test:
      requires: defined(PETSC_USE_SINGLE_LIBRARY)
      suffix: mpi_linear_solver_server_3
      nsize: 3
      filter: sed 's?ATOL?RTOL?g'
      args: -mpi_linear_solver_server -mpi_linear_solver_server_view -pc_type mpi -ksp_type preonly -mpi_ksp_monitor -mpi_ksp_converged_reason -mat_view -mpi_pc_type none -mpi_ksp_view -mpi_mat_view -pc_mpi_always_use_server

TEST*/