Actual source code: ex211.c
static char help[] = "Tests MatCreateSubmatrix() in parallel.";

#include <petscmat.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>

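/* Split iscol into the part that lives in the diagonal block (iscol_d, local indices) and the
   part that lives in the off-diagonal block B (iscol_o, local indices into B), build isrow_d
   with local row indices, and return garray mapping the off-diagonal selections to submatrix
   column indices. This appears to mirror the private helper of the same name used inside
   PETSc's MPIAIJ MatCreateSubMatrix() implementation. */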
PetscErrorCode ISGetSeqIS_SameColDist_Private(Mat mat, IS isrow, IS iscol, IS *isrow_d, IS *iscol_d, IS *iscol_o, const PetscInt *garray[])
{
  Vec             x, cmap;
  const PetscInt *is_idx;
  PetscScalar    *xarray, *cmaparray;
  PetscInt        ncols, isstart, *idx, m, rstart, count;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ *)mat->data;
  Mat             B = a->B;
  Vec             lvec = a->lvec, lcmap;
  PetscInt        i, cstart, cend, Bn = B->cmap->N;
  MPI_Comm        comm;
  PetscMPIInt     rank;
  VecScatter      Mvctx;

  PetscObjectGetComm((PetscObject)mat, &comm);
  MPI_Comm_rank(comm, &rank);
  ISGetLocalSize(iscol, &ncols);

  /* (1) iscol is a sub-column vector of mat, pad it with '-1.' to form a full vector x */
  MatCreateVecs(mat, &x, NULL);
  VecDuplicate(x, &cmap);
  VecSet(x, -1.0);
  VecSet(cmap, -1.0);

  VecDuplicate(lvec, &lcmap);

  /* Get start indices */
  MPI_Scan(&ncols, &isstart, 1, MPIU_INT, MPI_SUM, comm);
  isstart -= ncols; /* exclusive prefix sum: global index of this rank's first iscol entry */
  MatGetOwnershipRangeColumn(mat, &cstart, &cend);

  ISGetIndices(iscol, &is_idx);
  VecGetArray(x, &xarray);
  VecGetArray(cmap, &cmaparray);
  PetscMalloc1(ncols, &idx);
  for (i = 0; i < ncols; i++) {
    xarray[is_idx[i] - cstart]    = (PetscScalar)is_idx[i];
    cmaparray[is_idx[i] - cstart] = (PetscScalar)(i + isstart); /* global index of iscol[i] */
    idx[i]                        = is_idx[i] - cstart;         /* local index of iscol[i] */
  }
  VecRestoreArray(x, &xarray);
  VecRestoreArray(cmap, &cmaparray);
  ISRestoreIndices(iscol, &is_idx);

  /* Get iscol_d */
  ISCreateGeneral(PETSC_COMM_SELF, ncols, idx, PETSC_OWN_POINTER, iscol_d);
  ISGetBlockSize(iscol, &i);
  ISSetBlockSize(*iscol_d, i);

  /* Get isrow_d */
  ISGetLocalSize(isrow, &m);
  rstart = mat->rmap->rstart;
  PetscMalloc1(m, &idx);
  ISGetIndices(isrow, &is_idx);
  for (i = 0; i < m; i++) idx[i] = is_idx[i] - rstart;
  ISRestoreIndices(isrow, &is_idx);

  ISCreateGeneral(PETSC_COMM_SELF, m, idx, PETSC_OWN_POINTER, isrow_d);
  ISGetBlockSize(isrow, &i);
  ISSetBlockSize(*isrow_d, i);

  /* (2) Scatter x and cmap using aij->Mvctx to get their off-process portions (see MatMult_MPIAIJ) */
#if 0
  if (!a->Mvctx_mpi1) {
    /* a->Mvctx causes random 'count' in o-build? See src/mat/tests/runex59_2 */
    a->Mvctx_mpi1_flg = PETSC_TRUE;
    MatSetUpMultiply_MPIAIJ(mat);
  }
  Mvctx = a->Mvctx_mpi1;
#endif
  Mvctx = a->Mvctx;
  VecScatterBegin(Mvctx, x, lvec, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(Mvctx, x, lvec, INSERT_VALUES, SCATTER_FORWARD);

  VecScatterBegin(Mvctx, cmap, lcmap, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(Mvctx, cmap, lcmap, INSERT_VALUES, SCATTER_FORWARD);

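  /* After the scatters, lvec[i] >= 0 marks off-process columns of this rank that are selected
     by iscol, and lcmap[i] holds the corresponding column index in the submatrix. */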
  /* (3) create sequential iscol_o (a subset of iscol) and garray */
  /* off-process column indices */
  count = 0;
  PetscInt *cmap1;
  PetscMalloc1(Bn, &idx);
  PetscMalloc1(Bn, &cmap1);

  VecGetArray(lvec, &xarray);
  VecGetArray(lcmap, &cmaparray);
  for (i = 0; i < Bn; i++) {
    if (PetscRealPart(xarray[i]) > -1.0) {
      idx[count]   = i;                                         /* local column index in off-diagonal part B */
      cmap1[count] = (PetscInt)(PetscRealPart(cmaparray[i]));   /* column index in submat */
      count++;
    }
  }
  printf("[%d] Bn %" PetscInt_FMT ", count %" PetscInt_FMT "\n", rank, Bn, count);
  VecRestoreArray(lvec, &xarray);
  VecRestoreArray(lcmap, &cmaparray);
  if (count != 6) {
    printf("[%d] count %" PetscInt_FMT " != 6 lvec:\n", rank, count);
    VecView(lvec, 0);

    printf("[%d] count %" PetscInt_FMT " != 6 lcmap:\n", rank, count);
    VecView(lcmap, 0);
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "count %" PetscInt_FMT " != 6", count);
  }

  ISCreateGeneral(PETSC_COMM_SELF, count, idx, PETSC_COPY_VALUES, iscol_o);
  /* cannot ensure iscol_o has same blocksize as iscol! */

  PetscFree(idx);

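  /* hand cmap1 back as garray; the caller is responsible for PetscFree()ing it */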
  *garray = cmap1;

  VecDestroy(&x);
  VecDestroy(&cmap);
  VecDestroy(&lcmap);
  return 0;
}

int main(int argc, char **args)
{
  Mat         C, A;
  PetscInt    i, j, m = 3, n = 2, rstart, rend;
  PetscMPIInt size, rank;
  PetscScalar v;
  IS          isrow, iscol;

  PetscInitialize(&argc, &args, (char *)0, help);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  n = 2 * size;

  MatCreate(PETSC_COMM_WORLD, &C);
  MatSetSizes(C, PETSC_DECIDE, PETSC_DECIDE, m * n, m * n);
  MatSetFromOptions(C);
  MatSetUp(C);

  /*
     This is JUST to generate a nice dense test matrix; each process fills
     every column of its own rows. This is not something one would ever do in practice.
  */
  MatGetOwnershipRange(C, &rstart, &rend);
  for (i = rstart; i < rend; i++) {
    for (j = 0; j < m * n; j++) {
      v = i + j + 1;
      MatSetValues(C, 1, &i, 1, &j, &v, INSERT_VALUES);
    }
  }

  MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY);

  /*
     Generate a new matrix consisting of every second row and column of
     the original matrix
  */
  MatGetOwnershipRange(C, &rstart, &rend);
  /* Create parallel IS with the rows we want on THIS processor */
  ISCreateStride(PETSC_COMM_WORLD, (rend - rstart) / 2, rstart, 2, &isrow);
  /* Create parallel IS with the columns we want on THIS processor (same as rows for now) */
  ISCreateStride(PETSC_COMM_WORLD, (rend - rstart) / 2, rstart, 2, &iscol);

  IS              iscol_d, isrow_d, iscol_o;
  const PetscInt *garray;
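  /* Exercise the private helper directly; the resulting index sets and garray are freed right away */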
  ISGetSeqIS_SameColDist_Private(C, isrow, iscol, &isrow_d, &iscol_d, &iscol_o, &garray);

  ISDestroy(&isrow_d);
  ISDestroy(&iscol_d);
  ISDestroy(&iscol_o);
  PetscFree(garray);

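  /* Extract the submatrix twice: first creating it, then reusing the existing A (MAT_REUSE_MATRIX path) */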
  MatCreateSubMatrix(C, isrow, iscol, MAT_INITIAL_MATRIX, &A);
  MatCreateSubMatrix(C, isrow, iscol, MAT_REUSE_MATRIX, &A);

  ISDestroy(&isrow);
  ISDestroy(&iscol);
  MatDestroy(&A);
  MatDestroy(&C);
  PetscFinalize();
  return 0;
}