Actual source code: ex134.c
static const char help[] = "Test parallel assembly of SBAIJ matrices\n\n";

#include <petscmat.h>
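
/*
  Assemble a 4*bs by 4*bs blocked matrix of type mtype (MPIBAIJ or MPISBAIJ)
  in parallel, view it, and, when MUMPS or MKL CPardiso is available, factor
  it and verify the solutions of a few random systems.
*/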
PetscErrorCode Assemble(MPI_Comm comm, PetscInt bs, MatType mtype)
{
  const PetscInt    rc[]   = {0, 1, 2, 3};
  const PetscScalar vals[] = {100, 2, 3, 4, 5, 600, 7, 8, 9, 100, 11, 1200, 13, 14, 15, 1600, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 2800, 29, 30, 31, 32,
                              33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 49, 60, 61, 62, 63, 64};
  Mat               A;
#if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
  Mat           F;
  MatSolverType stype = MATSOLVERPETSC;
  PetscRandom   rdm;
  Vec           b, x, y;
  PetscInt      i, j;
  PetscReal     norm2, tol = 100 * PETSC_SMALL;
  PetscBool     issbaij;
#endif
  PetscViewer viewer;

  MatCreate(comm, &A);
  MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 4 * bs, 4 * bs);
  MatSetType(A, mtype);
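  /* Preallocate for both formats; only the call matching the actual matrix type takes effect, the other is ignored */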
  MatMPIBAIJSetPreallocation(A, bs, 2, NULL, 2, NULL);
  MatMPISBAIJSetPreallocation(A, bs, 2, NULL, 2, NULL);
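  /* Error out, rather than silently allocating, if the preallocation above turns out to be too small */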
  MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
  /* Every process contributes the full matrix; with ADD_VALUES the duplicated entries are summed during assembly */
  MatSetValuesBlocked(A, 4, rc, 4, rc, vals, ADD_VALUES);
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
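  /* View the matrix twice: first with detailed storage information, then in the default format */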
  PetscPrintf(comm, "Matrix %s(%" PetscInt_FMT ")\n", mtype, bs);
  PetscViewerASCIIGetStdout(comm, &viewer);
  PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);
  MatView(A, viewer);
  PetscViewerPopFormat(viewer);
  MatView(A, viewer);
#if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
  PetscStrcmp(mtype, MATMPISBAIJ, &issbaij);
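  /* For the non-symmetric (BAIJ) case, shift the diagonal; this helps keep the LU factorization away from zero pivots */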
  if (!issbaij) MatShift(A, 10);
  PetscRandomCreate(PETSC_COMM_WORLD, &rdm);
  PetscRandomSetFromOptions(rdm);
  MatCreateVecs(A, &x, &y);
  VecDuplicate(x, &b);
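  /* Try each available external solver package: j == 0 is MUMPS, j == 1 is MKL CPardiso */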
  for (j = 0; j < 2; j++) {
#if defined(PETSC_HAVE_MUMPS)
    if (j == 0) stype = MATSOLVERMUMPS;
#else
    if (j == 0) continue;
#endif
#if defined(PETSC_HAVE_MKL_CPARDISO)
    if (j == 1) stype = MATSOLVERMKL_CPARDISO;
#else
    if (j == 1) continue;
#endif
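    /* SBAIJ stores only the upper triangle of a symmetric matrix, so factor it with Cholesky; use LU otherwise */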
    if (issbaij) {
      MatGetFactor(A, stype, MAT_FACTOR_CHOLESKY, &F);
      MatCholeskyFactorSymbolic(F, A, NULL, NULL);
      MatCholeskyFactorNumeric(F, A, NULL);
    } else {
      MatGetFactor(A, stype, MAT_FACTOR_LU, &F);
      MatLUFactorSymbolic(F, A, NULL, NULL, NULL);
      MatLUFactorNumeric(F, A, NULL);
    }
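    /* Solve several random systems and check the residual b - A*y against the tolerance */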
    for (i = 0; i < 10; i++) {
      VecSetRandom(b, rdm);
      MatSolve(F, b, y);
      /* Check the error */
      MatMult(A, y, x);
      VecAXPY(x, -1.0, b);
      VecNorm(x, NORM_2, &norm2);
      if (norm2 > tol) PetscPrintf(PETSC_COMM_WORLD, "Error: MatSolve(), norm2: %g\n", (double)norm2);
    }
    MatDestroy(&F);
  }
  VecDestroy(&x);
  VecDestroy(&y);
  VecDestroy(&b);
  PetscRandomDestroy(&rdm);
#endif
  MatDestroy(&A);
  return 0;
}

int main(int argc, char *argv[])
{
  MPI_Comm    comm;
  PetscMPIInt size;

  PetscInitialize(&argc, &argv, NULL, help);
  comm = PETSC_COMM_WORLD;
  MPI_Comm_size(comm, &size);

  Assemble(comm, 2, MATMPIBAIJ);
  Assemble(comm, 2, MATMPISBAIJ);
  Assemble(comm, 1, MATMPIBAIJ);
  Assemble(comm, 1, MATMPISBAIJ);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 2
      args: -mat_ignore_lower_triangular
      filter: sed -e "s~mem [0-9]*~mem~g"

TEST*/
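
/*
   For reference, the test entry above corresponds roughly to a manual run such
   as the following (assuming an MPI launcher named mpiexec and an executable
   named ex134; the exact invocation used by the PETSc test harness may differ):

     mpiexec -n 2 ./ex134 -mat_ignore_lower_triangular
*/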