Actual source code: ex42.c
static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubmatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0 number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
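/*
   Outline of the test:
     1. Load the same matrix from -f twice: A in parallel (PETSC_COMM_WORLD)
        and B sequentially on every rank (PETSC_COMM_SELF).
     2. On each rank build nd random, block-aligned index sets.
     3. Apply MatIncreaseOverlap() and MatCreateSubMatrices() to both A and B.
     4. Check with MatEqual() that every parallel submatrix equals its
        sequential counterpart.
*/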
#include <petscmat.h>

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, i, j, lsize, m, n, *idx, bs;
  PetscMPIInt rank, size;
  PetscBool   flg;
  Mat         A, B, *submatA, *submatB;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscBool   test_unsorted = PETSC_FALSE;
  PetscScalar rand;
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_unsorted", &test_unsorted, NULL));
  /* Read matrix A (parallel, distributed over PETSC_COMM_WORLD) */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATAIJ));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatLoad(A, fd));
  PetscCall(PetscViewerDestroy(&fd));
  /* Read the same matrix again as a sequential matrix B on every rank */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatSetFromOptions(B));
  PetscCall(MatLoad(B, fd));
  PetscCall(PetscViewerDestroy(&fd));
  PetscCall(MatGetBlockSize(A, &bs));
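  /* All index sets below are built from whole blocks of size bs, so
     ISSetBlockSize() is legal on them and blocked formats (e.g. -mat_type baij
     in the tests) keep their block structure */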
  /* Create the random number generator */
  PetscCall(MatGetSize(A, &m, &n));
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
  PetscCall(PetscRandomSetFromOptions(r));
  /* Create the IS corresponding to subdomains */
  PetscCall(PetscMalloc1(nd, &is1));
  PetscCall(PetscMalloc1(nd, &is2));
  PetscCall(PetscMalloc1(m, &idx));
  /* idx starts out as the identity permutation of all global row indices */
  for (i = 0; i < m; i++) idx[i] = i;
  /* Create the random index sets */
  for (i = 0; i < nd; i++) {
    /* Skip a few values, so that the index sets on different processes differ */
    for (j = 0; j < rank; j++) PetscCall(PetscRandomGetValue(r, &rand));
    PetscCall(PetscRandomGetValue(r, &rand));
    lsize = (PetscInt)(rand * (m / bs)); /* number of blocks in this subdomain; rand is real-valued since the tests require !complex */
    /* Partial Fisher-Yates shuffle at block granularity: afterwards the first
       lsize blocks of idx are a random sample of the m/bs blocks */
    for (j = 0; j < lsize; j++) {
      PetscInt k, swap, l;

      PetscCall(PetscRandomGetValue(r, &rand));
      k = j + (PetscInt)(rand * ((m / bs) - j)); /* random block index in [j, m/bs) */
      for (l = 0; l < bs; l++) {
        /* Swap whole blocks so the index set stays block-aligned */
        swap            = idx[bs * j + l];
        idx[bs * j + l] = idx[bs * k + l];
        idx[bs * k + l] = swap;
      }
    }
    if (!test_unsorted) PetscCall(PetscSortInt(lsize * bs, idx));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize * bs, idx, PETSC_COPY_VALUES, is1 + i));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize * bs, idx, PETSC_COPY_VALUES, is2 + i));
    PetscCall(ISSetBlockSize(is1[i], bs));
    PetscCall(ISSetBlockSize(is2[i], bs));
  }
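  /* is1[] and is2[] start out as identical copies: is1 drives the parallel
     path through A while is2 drives the sequential path through B, so the
     two extractions below can be compared subdomain by subdomain */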
  if (!test_unsorted) {
    PetscCall(MatIncreaseOverlap(A, nd, is1, ov));
    PetscCall(MatIncreaseOverlap(B, nd, is2, ov));

    /* Sort both families of index sets so the parallel and sequential
       extractions traverse rows and columns in the same order */
    for (i = 0; i < nd; ++i) {
      PetscCall(ISSort(is1[i]));
      PetscCall(ISSort(is2[i]));
    }
  }
  PetscCall(MatCreateSubMatrices(A, nd, is1, is1, MAT_INITIAL_MATRIX, &submatA));
  PetscCall(MatCreateSubMatrices(B, nd, is2, is2, MAT_INITIAL_MATRIX, &submatB));
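  /* MAT_INITIAL_MATRIX allocates the submatA/submatB arrays; a repeated
     extraction with the same index sets could pass MAT_REUSE_MATRIX instead */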
  /* Now see if the serial and parallel cases give the same answers */
  for (i = 0; i < nd; ++i) {
    PetscCall(MatEqual(submatA[i], submatB[i], &flg));
    PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "%" PetscInt_FMT "-th parallel submatA != seq submatB", i);
  }
  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(MatDestroySubMatrices(nd, &submatA));
  PetscCall(MatDestroySubMatrices(nd, &submatB));

  PetscCall(PetscRandomDestroy(&r));
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFree(idx));
  PetscCall(PetscFinalize());
  return 0;
}
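/*
   Example invocation, mirroring the first test below (DATAFILESPATH must
   point at the PETSc test data files):

     mpiexec -n 3 ./ex42 -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2
*/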
/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex

   test:
      suffix: unsorted_baij_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_baij_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

   test:
      suffix: unsorted_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

TEST*/