Actual source code: ex111.c
static char help[] = "Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";
/*
  Example of usage: mpiexec -n 3 ./ex111 -Mx 10 -My 10 -Mz 10
*/
#include <petscdm.h>
#include <petscdmda.h>
/* User-defined application contexts */
typedef struct {
  PetscInt mx, my, mz;     /* number of grid points in the x, y, and z directions */
  Vec      localX, localF; /* local vectors with ghost region */
  DM       da;
  Vec      x, b, r; /* global vectors */
  Mat      J;       /* Jacobian on grid */
} GridCtx;

typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;
  Mat      Ii; /* interpolation from coarse to fine */
} AppCtx;
#define COARSE_LEVEL 0
#define FINE_LEVEL   1
/*
  Mm_ratio - ratio of grid lines between the fine and coarse grids.
*/
int main(int argc, char **argv)
{
  AppCtx          user;
  PetscMPIInt     size, rank;
  PetscInt        m, n, M, N, i, nrows;
  PetscScalar     one = 1.0;
  PetscReal       fill = 2.0; /* expected fill ratio nnz(C)/(nnz(A)+nnz(P)), a preallocation hint for the product routines */
  Mat             A, P, R, C, PtAP, D;
  PetscScalar    *array;
  PetscRandom     rdm;
  PetscBool       Test_3D = PETSC_FALSE, flg;
  const PetscInt *ia, *ja;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  /* Get size of fine grids and coarse grids */
  user.ratio     = 2;
  user.coarse.mx = 4;
  user.coarse.my = 4;
  user.coarse.mz = 4;

  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mx", &user.coarse.mx, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-My", &user.coarse.my, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-Mz", &user.coarse.mz, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ratio", &user.ratio, NULL));
  if (user.coarse.mz) Test_3D = PETSC_TRUE; /* run the 3D test unless -Mz 0 is given */
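
  /* Refinement rule: each coarse interval is split into 'ratio' fine intervals,
     so mc coarse grid lines become ratio*(mc-1)+1 fine grid lines per direction */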
  user.fine.mx = user.ratio * (user.coarse.mx - 1) + 1;
  user.fine.my = user.ratio * (user.coarse.my - 1) + 1;
  user.fine.mz = user.ratio * (user.coarse.mz - 1) + 1;

  if (rank == 0) {
    if (!Test_3D) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "coarse grids: %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my, user.fine.mx, user.fine.my));
    } else {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "coarse grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "; fine grids: %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", user.coarse.mx, user.coarse.my, user.coarse.mz, user.fine.mx,
                            user.fine.my, user.fine.mz));
    }
  }

  /* Set up distributed array for fine grid */
  if (!Test_3D) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &user.fine.da));
  } else {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.fine.mx, user.fine.my, user.fine.mz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, NULL, &user.fine.da));
  }
  PetscCall(DMSetFromOptions(user.fine.da));
  PetscCall(DMSetUp(user.fine.da));

  /* Create and set A at fine grids */
  PetscCall(DMSetMatType(user.fine.da, MATAIJ));
  PetscCall(DMCreateMatrix(user.fine.da, &A));
  PetscCall(MatGetLocalSize(A, &m, &n));
  PetscCall(MatGetSize(A, &M, &N));

  /* set val=one to A (replace with random values!) */
  PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rdm));
  PetscCall(PetscRandomSetFromOptions(rdm));
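  /* rdm is created for optional random fills, but the loops below set every stored entry of A to one */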
  if (size == 1) {
    PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(A, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one; /* ia[nrows] = total number of stored nonzeros */
      PetscCall(MatSeqAIJRestoreArray(A, &array));
    }
    PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
  } else {
    Mat AA, AB;
    PetscCall(MatMPIAIJGetSeqAIJ(A, &AA, &AB, NULL));
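    /* An MPIAIJ matrix stores two sequential blocks per process: AA holds the
       "diagonal" part (locally owned columns) and AB the "off-diagonal" part
       (ghost columns), so both must be filled */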
    PetscCall(MatGetRowIJ(AA, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(AA, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one;
      PetscCall(MatSeqAIJRestoreArray(AA, &array));
    }
    PetscCall(MatRestoreRowIJ(AA, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));

    PetscCall(MatGetRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
    if (flg) {
      PetscCall(MatSeqAIJGetArray(AB, &array));
      for (i = 0; i < ia[nrows]; i++) array[i] = one;
      PetscCall(MatSeqAIJRestoreArray(AB, &array));
    }
    PetscCall(MatRestoreRowIJ(AB, 0, PETSC_FALSE, PETSC_FALSE, &nrows, &ia, &ja, &flg));
  }

  /* Set up distributed array for coarse grid */
  if (!Test_3D) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &user.coarse.da));
  } else {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, user.coarse.mx, user.coarse.my, user.coarse.mz, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, NULL, &user.coarse.da));
  }
  PetscCall(DMSetFromOptions(user.coarse.da));
  PetscCall(DMSetUp(user.coarse.da));

  /* Create interpolation between the fine and coarse grids */
  PetscCall(DMCreateInterpolation(user.coarse.da, user.fine.da, &P, NULL));
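  /* P has one row per fine-grid point and one column per coarse-grid point;
     applying P interpolates a coarse-grid vector onto the fine grid */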

  /* Get R = P^T */
  PetscCall(MatTranspose(P, MAT_INITIAL_MATRIX, &R));
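  /* R = P^T is the restriction operator, so C = R*A*P below is the Galerkin coarse-grid operator */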

  /* C = R*A*P */
  /* Developer's API */
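  /* The MatProduct interface separates the phases: MatProductSymbolic() computes the
     sparsity pattern of the product once, and MatProductNumeric() can then be called
     repeatedly to (re)compute the numerical values */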
  PetscCall(MatProductCreate(R, A, P, &D));
  PetscCall(MatProductSetType(D, MATPRODUCT_ABC));
  PetscCall(MatProductSetFromOptions(D));
  PetscCall(MatProductSymbolic(D));
  PetscCall(MatProductNumeric(D));
  PetscCall(MatProductNumeric(D)); /* Test reuse symbolic D */

  /* User's API */
  { /* Test MatMatMatMult_Basic() */
    Mat Adense, Cdense;
    PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense));
    PetscCall(MatMatMatMult(R, Adense, P, MAT_INITIAL_MATRIX, fill, &Cdense));
    PetscCall(MatMatMatMult(R, Adense, P, MAT_REUSE_MATRIX, fill, &Cdense));

    PetscCall(MatMultEqual(D, Cdense, 10, &flg));
    PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "D*v != Cdense*v");

    PetscCall(MatDestroy(&Adense));
    PetscCall(MatDestroy(&Cdense));
  }

  PetscCall(MatMatMatMult(R, A, P, MAT_INITIAL_MATRIX, fill, &C));
  PetscCall(MatMatMatMult(R, A, P, MAT_REUSE_MATRIX, fill, &C)); /* MAT_REUSE_MATRIX recomputes the values of C using the symbolic data from the first call */
  PetscCall(MatProductClear(C));
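  /* MatProductClear() releases the internal product data attached to C; the values of C
     are kept, but it can no longer be updated with MAT_REUSE_MATRIX */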

  /* Test D == C */
  PetscCall(MatEqual(D, C, &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "D != C");

  /* Test C == PtAP */
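  /* MatPtAP() forms P^T*A*P directly from A and P, without explicitly building R,
     so its result must match the triple product C computed above */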
  PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, fill, &PtAP));
  PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, fill, &PtAP));
  PetscCall(MatEqual(C, PtAP, &flg));
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "C != PtAP");
  PetscCall(MatDestroy(&PtAP));

  /* Clean up */
  PetscCall(MatDestroy(&A));
  PetscCall(PetscRandomDestroy(&rdm));
  PetscCall(DMDestroy(&user.fine.da));
  PetscCall(DMDestroy(&user.coarse.da));
  PetscCall(MatDestroy(&P));
  PetscCall(MatDestroy(&R));
  PetscCall(MatDestroy(&C));
  PetscCall(MatDestroy(&D));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:

   test:
      suffix: 2
      nsize: 2
      args: -matmatmatmult_via scalable

   test:
      suffix: 3
      nsize: 2
      args: -matmatmatmult_via nonscalable
      output_file: output/ex111_1.out

TEST*/