Actual source code: ex41.c
static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load. For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
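
/* Example run, using the arguments from the test block at the bottom of this file:
     mpiexec -n 3 ./ex41 -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1 */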

#include <petscmat.h>

int main(int argc, char **args)
{
  PetscInt    nd = 2, ov = 1, i, j, m, n, *idx, lsize;
  PetscMPIInt rank;
  PetscBool   flg;
  Mat         A, B;
  char        file[PETSC_MAX_PATH_LEN];
  PetscViewer fd;
  IS         *is1, *is2;
  PetscRandom r;
  PetscScalar rand;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-nd", &nd, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-ov", &ov, NULL));

  /* Read the matrix (no right-hand side is needed) */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetType(A, MATMPIAIJ));
  PetscCall(MatLoad(A, fd));
  PetscCall(PetscViewerDestroy(&fd));

  /* Read the same matrix again as a sequential matrix on every process */
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, file, FILE_MODE_READ, &fd));
  PetscCall(MatCreate(PETSC_COMM_SELF, &B));
  PetscCall(MatSetType(B, MATSEQAIJ));
  PetscCall(MatLoad(B, fd));
  PetscCall(PetscViewerDestroy(&fd));
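
  /*
     A (parallel MPIAIJ) and B (sequential SEQAIJ) hold the same matrix; each
     process owns the full copy B, so the result of the parallel
     MatIncreaseOverlap() on A can be checked against the sequential one on B.
  */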

  /* Create the random number generator */
  PetscCall(MatGetSize(A, &m, &n));
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &r));
  PetscCall(PetscRandomSetFromOptions(r));

  /* Allocate arrays for the index sets, one entry per subdomain */
  PetscCall(PetscMalloc1(nd, &is1));
  PetscCall(PetscMalloc1(nd, &is2));
  PetscCall(PetscMalloc1(m, &idx));
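  /* idx is sized to the global row count m, the largest size a random subdomain can take */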

  /* Create the random index sets */
  for (i = 0; i < nd; i++) {
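    /* Advance the generator a rank-dependent number of times so that each
       process ends up with different subdomain sizes and indices */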
    for (j = 0; j < rank; j++) PetscCall(PetscRandomGetValue(r, &rand));
    PetscCall(PetscRandomGetValue(r, &rand));
    lsize = (PetscInt)(rand * m);
    for (j = 0; j < lsize; j++) {
      PetscCall(PetscRandomGetValue(r, &rand));
      idx[j] = (PetscInt)(rand * m);
    }
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize, idx, PETSC_COPY_VALUES, is1 + i));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, lsize, idx, PETSC_COPY_VALUES, is2 + i));
  }
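
  /* MatIncreaseOverlap() replaces each index set with a larger one that is
     extended by ov levels of overlap along the nonzero structure of the
     matrix; the parallel and sequential results should agree */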
  PetscCall(MatIncreaseOverlap(A, nd, is1, ov));
  PetscCall(MatIncreaseOverlap(B, nd, is2, ov));

  /* Now see if the serial and parallel cases have the same answers */
  for (i = 0; i < nd; ++i) {
    PetscInt sz1, sz2;
    PetscCall(ISEqual(is1[i], is2[i], &flg));
    PetscCall(ISGetSize(is1[i], &sz1));
    PetscCall(ISGetSize(is2[i], &sz2));
    PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_PLIB, "proc:[%d], i=%" PetscInt_FMT ", flg=%d, sz1=%" PetscInt_FMT ", sz2=%" PetscInt_FMT, rank, i, (int)flg, sz1, sz2);
  }

  /* Free the allocated memory */
  for (i = 0; i < nd; ++i) {
    PetscCall(ISDestroy(&is1[i]));
    PetscCall(ISDestroy(&is2[i]));
  }
  PetscCall(PetscRandomDestroy(&r));
  PetscCall(PetscFree(is1));
  PetscCall(PetscFree(is2));
  PetscCall(MatDestroy(&A));
  PetscCall(MatDestroy(&B));
  PetscCall(PetscFree(idx));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
     requires: !complex

   test:
     nsize: 3
     requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
     args: -f ${DATAFILESPATH}/matrices/arco1 -nd 3 -ov 1

TEST*/