Actual source code: ex40.c
static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0 number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
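
/*
  Example invocation (one possible run, mirroring the tests at the bottom of this file;
  the launcher name and process count are only illustrative and may differ on your system):

    mpiexec -n 3 ./ex40 -f ${PETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -nd 7 -ov 2
*/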

#include <petscmat.h>
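
/*
  ISAllGatherDisjoint - Concatenates the locally owned parts of the index set iis across its
  communicator and returns, on every process, an array of sequential index sets, one per rank,
  each holding that rank's original indices.
*/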
PetscErrorCode ISAllGatherDisjoint(IS iis, IS **ois)
{
  IS              *is2, is;
  const PetscInt  *idxs;
  PetscInt         i, ls, *sizes;
  PetscMPIInt      size;

  MPI_Comm_size(PetscObjectComm((PetscObject)iis), &size);
  PetscMalloc1(size, &is2);
  PetscMalloc1(size, &sizes);
  ISGetLocalSize(iis, &ls);
  /* we don't have a public ISGetLayout */
  MPI_Allgather(&ls, 1, MPIU_INT, sizes, 1, MPIU_INT, PetscObjectComm((PetscObject)iis));
  ISAllGather(iis, &is);
  ISGetIndices(is, &idxs);
  for (i = 0, ls = 0; i < size; i++) {
    ISCreateGeneral(PETSC_COMM_SELF, sizes[i], idxs + ls, PETSC_COPY_VALUES, &is2[i]);
    ls += sizes[i];
  }
  ISRestoreIndices(is, &idxs);
  ISDestroy(&is);
  PetscFree(sizes);
  *ois = is2;
  return 0;
}

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, ndpar, i, start, m, n, end, lsize;
  PetscMPIInt rank;
  PetscBool   flg, useND = PETSC_FALSE;
  Mat         A, B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscScalar rand;

  PetscInitialize(&argc, &args, (char *)0, help);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Must indicate a binary matrix file with the -f option");
  PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL);
  PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL);
  PetscOptionsGetBool(NULL, NULL, "-nested_dissection", &useND, NULL);
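
  /* With -nested_dissection, the subdomains come from a nested dissection partitioning of A
     instead of nd random strided index sets per process */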

  /* Read matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd);
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetType(A, MATMPIAIJ);
  MatLoad(A, fd);
  MatSetFromOptions(A);
  PetscViewerDestroy(&fd);

  /* Read the same matrix again as a sequential matrix on every process */
  PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd);
  MatCreate(PETSC_COMM_SELF, &B);
  MatSetType(B, MATSEQAIJ);
  MatLoad(B, fd);
  MatSetFromOptions(B);
  PetscViewerDestroy(&fd);

  /* Create the IS corresponding to subdomains */
  if (useND) {
    MatPartitioning part;
    IS              ndmap;
    PetscMPIInt     size;

    ndpar = 1;
    MPI_Comm_size(PETSC_COMM_WORLD, &size);
    nd = (PetscInt)size;
    PetscMalloc1(ndpar, &is1);
    MatPartitioningCreate(PETSC_COMM_WORLD, &part);
    MatPartitioningSetAdjacency(part, A);
    MatPartitioningSetFromOptions(part);
    MatPartitioningApplyND(part, &ndmap);
    MatPartitioningDestroy(&part);
    ISBuildTwoSided(ndmap, NULL, &is1[0]);
    ISDestroy(&ndmap);
    ISAllGatherDisjoint(is1[0], &is2);
  } else {
    /* Create the random Index Sets */
    PetscMalloc1(nd, &is1);
    PetscMalloc1(nd, &is2);

    MatGetSize(A, &m, &n);
    PetscRandomCreate(PETSC_COMM_SELF, &r);
    PetscRandomSetFromOptions(r);
    for (i = 0; i < nd; i++) {
      PetscRandomGetValue(r, &rand);
      start = (PetscInt)(rand * m);
      PetscRandomGetValue(r, &rand);
      end   = (PetscInt)(rand * m);
      lsize = end - start;
      if (start > end) {
        start = end;
        lsize = -lsize;
      }
      ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is1 + i);
      ISCreateStride(PETSC_COMM_SELF, lsize, start, 1, is2 + i);
    }
    ndpar = nd;
    PetscRandomDestroy(&r);
  }
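
  /* Grow each subdomain by ov levels of overlap: on the parallel matrix A and,
     as a reference, on the sequential copy B */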
  MatIncreaseOverlap(A, ndpar, is1, ov);
  MatIncreaseOverlap(B, nd, is2, ov);
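  /* In the nested dissection case the parallel result is a single distributed IS;
     split it into one sequential IS per rank so it can be compared with is2 below */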
  if (useND) {
    IS *is;

    ISAllGatherDisjoint(is1[0], &is);
    ISDestroy(&is1[0]);
    PetscFree(is1);
    is1 = is;
  }
  /* Now check that the serial and parallel cases give the same answers */
  for (i = 0; i < nd; ++i) {
    ISEqual(is1[i], is2[i], &flg);
    if (!flg) {
      ISViewFromOptions(is1[i], NULL, "-err_view");
      ISViewFromOptions(is2[i], NULL, "-err_view");
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg =%d", rank, i, (int)flg);
    }
  }

  /* Free allocated memory */
  for (i = 0; i < nd; ++i) {
    ISDestroy(&is1[i]);
    ISDestroy(&is2[i]);
  }
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(&A);
  MatDestroy(&B);
  PetscFinalize();
  return 0;
}

/*TEST

   build:
      requires: !complex

   testset:
      nsize: 5
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -viewer_binary_skip_info -ov 2
      output_file: output/ex40_1.out
      test:
         suffix: 1
         args: -nd 7
      test:
         requires: parmetis
         suffix: 1_nd
         args: -nested_dissection -mat_partitioning_type parmetis

   testset:
      nsize: 3
      requires: double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/ns-real-int32-float64 -mat_increase_overlap_scalable 1 -ov 2
      output_file: output/ex40_1.out
      test:
         suffix: 2
         args: -nd 7
      test:
         requires: parmetis
         suffix: 2_nd
         args: -nested_dissection -mat_partitioning_type parmetis

TEST*/
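
The call pattern exercised by this test can be reduced to the following minimal sketch, which
builds a small tridiagonal matrix, takes each process's locally owned rows as its single
subdomain, and grows that subdomain by one level of overlap. The matrix, the subdomain choice,
and the use of PetscCall() error checking (available in recent PETSc releases) are illustrative
assumptions, not taken from ex40.c; only MatIncreaseOverlap() itself is the routine under test
above.

#include <petscmat.h>

int main(int argc, char **args)
{
  Mat      A;
  IS       is;
  PetscInt i, n = 10, Istart, Iend;

  PetscCall(PetscInitialize(&argc, &args, NULL, NULL));

  /* Assemble a small tridiagonal matrix, distributed across the processes */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n));
  PetscCall(MatSetType(A, MATAIJ));
  PetscCall(MatSetUp(A));
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
  for (i = Istart; i < Iend; i++) {
    if (i > 0) PetscCall(MatSetValue(A, i, i - 1, -1.0, INSERT_VALUES));
    if (i < n - 1) PetscCall(MatSetValue(A, i, i + 1, -1.0, INSERT_VALUES));
    PetscCall(MatSetValue(A, i, i, 2.0, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /* One subdomain per process: the locally owned rows as a sequential IS */
  PetscCall(ISCreateStride(PETSC_COMM_SELF, Iend - Istart, Istart, 1, &is));

  /* Grow the subdomain by one level of overlap; the IS is modified in place */
  PetscCall(MatIncreaseOverlap(A, 1, &is, 1));
  PetscCall(ISView(is, PETSC_VIEWER_STDOUT_SELF));

  PetscCall(ISDestroy(&is));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

Running this sketch on, e.g., two MPI ranks should show each rank's overlapped subdomain
extending one row into its neighbor's ownership range, since the tridiagonal coupling reaches
exactly one row beyond the local block.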