Actual source code: ex73.c


static char help[] = "Reads a PETSc matrix from a file and partitions it\n\n";

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscis.h     - index sets
     petscviewer.h - viewers

  Example of usage:
    mpiexec -n 3 ex73 -f <matfile> -mat_partitioning_type <parmetis|ptscotch> -viewer_binary_skip_info -nox
*/
#include <petscmat.h>
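
/*
  Runtime options handled below (all optional):
    -view_mats  : draw the original, partitioned, and Jacobian-structure matrices
    -view_is    : print the index sets produced by the partitioning
    -view_vecs  : print the original and mapped vectors
    -use_nd     : use nested-dissection partitioning (MatPartitioningApplyND)
    -novec_load : do not read a vector from the file; use a random one instead
*/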

int main(int argc, char **args)
{
  MatType         mtype = MATMPIAIJ;        /* matrix format */
  Mat             A, B;                     /* matrices */
  PetscViewer     fd;                       /* viewer */
  char            file[PETSC_MAX_PATH_LEN]; /* input file name */
  PetscBool       flg, viewMats, viewIS, viewVecs, useND, noVecLoad = PETSC_FALSE;
  PetscInt       *nlocal, m, n;
  PetscMPIInt     rank, size;
  MatPartitioning part;
  IS              is, isn;
  Vec             xin, xout;
  VecScatter      scat;

  PetscInitialize(&argc, &args, (char *)0, help);
  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  PetscOptionsHasName(NULL, NULL, "-view_mats", &viewMats);
  PetscOptionsHasName(NULL, NULL, "-view_is", &viewIS);
  PetscOptionsHasName(NULL, NULL, "-view_vecs", &viewVecs);
  PetscOptionsHasName(NULL, NULL, "-use_nd", &useND);
  PetscOptionsHasName(NULL, NULL, "-novec_load", &noVecLoad);

  /*
     Determine the file from which we read the matrix
  */
  PetscOptionsGetString(NULL, NULL, "-f", file, sizeof(file), &flg);
  PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_USER, "Must indicate a binary file with the -f option");

  /*
     Open binary file.  Note that we use FILE_MODE_READ to indicate
     reading from this file.
  */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD, file, FILE_MODE_READ, &fd);

  /*
     Load the matrix and vector; then destroy the viewer.
  */
  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetType(A, mtype);
  MatLoad(A, fd);
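  /* If -novec_load is given (e.g. the file contains no vector), build a
     vector compatible with A and fill it with random values instead of
     reading one from the file */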
  if (!noVecLoad) {
    VecCreate(PETSC_COMM_WORLD, &xin);
    VecLoad(xin, fd);
  } else {
    MatCreateVecs(A, &xin, NULL);
    VecSetRandom(xin, NULL);
  }
  PetscViewerDestroy(&fd);
  if (viewMats) {
    PetscPrintf(PETSC_COMM_WORLD, "Original matrix:\n");
    MatView(A, PETSC_VIEWER_DRAW_WORLD);
  }
  if (viewVecs) {
    PetscPrintf(PETSC_COMM_WORLD, "Original vector:\n");
    VecView(xin, PETSC_VIEWER_STDOUT_WORLD);
  }

  /* Partition the graph of the matrix */
  MatPartitioningCreate(PETSC_COMM_WORLD, &part);
  MatPartitioningSetAdjacency(part, A);
  MatPartitioningSetFromOptions(part);

  /* get the new owning process of each vertex */
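  /* with -use_nd a nested-dissection partitioning is computed instead of the
     plain edge-cut partitioning */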
  if (useND) {
    MatPartitioningApplyND(part, &is);
  } else {
    MatPartitioningApply(part, &is);
  }
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD, "IS1 - new processor ownership:\n");
    ISView(is, PETSC_VIEWER_STDOUT_WORLD);
  }

  /* get new global number of each old global number */
  ISPartitioningToNumbering(is, &isn);
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD, "IS2 - new global numbering:\n");
    ISView(isn, PETSC_VIEWER_STDOUT_WORLD);
  }

  /* get number of new vertices for each processor */
  PetscMalloc1(size, &nlocal);
  ISPartitioningCount(is, size, nlocal);
  ISDestroy(&is);

  /* get old global number of each new global number */
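  /* each process asks for nlocal[rank] entries of the inverse so the new
     numbering is distributed according to the computed partition; with
     nested dissection the distribution is left to PETSC_DECIDE */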
  ISInvertPermutation(isn, useND ? PETSC_DECIDE : nlocal[rank], &is);
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD, "IS3=inv(IS2) - old global number of each new global number:\n");
    ISView(is, PETSC_VIEWER_STDOUT_WORLD);
  }

  /* move the matrix rows to the new processes they have been assigned to by the permutation */
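  /* using the same index set for rows and columns applies the symmetric
     permutation, so B is A with rows and columns reordered and redistributed */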
  MatCreateSubMatrix(A, is, is, MAT_INITIAL_MATRIX, &B);
  PetscFree(nlocal);
  ISDestroy(&isn);
  MatDestroy(&A);
  MatPartitioningDestroy(&part);
  if (viewMats) {
    PetscPrintf(PETSC_COMM_WORLD, "Partitioned matrix:\n");
    MatView(B, PETSC_VIEWER_DRAW_WORLD);
  }

  /* move the vector rows to the new processes they have been assigned to */
  MatGetLocalSize(B, &m, &n);
  VecCreateMPI(PETSC_COMM_WORLD, m, PETSC_DECIDE, &xout);
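  /* scatter with "is" as the source indices: entry k of xout receives the
     entry of xin whose old global number is is[k] */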
  VecScatterCreate(xin, is, xout, NULL, &scat);
  VecScatterBegin(scat, xin, xout, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(scat, xin, xout, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterDestroy(&scat);
  if (viewVecs) {
    PetscPrintf(PETSC_COMM_WORLD, "Mapped vector:\n");
    VecView(xout, PETSC_VIEWER_STDOUT_WORLD);
  }
  VecDestroy(&xout);
  ISDestroy(&is);

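  /* Build the nonzero structure of a matrix J with two unknowns per vertex of
     B: every nonzero (i,j) of B becomes a 2x2 block in J, so J has the same
     parallel layout and communication pattern as B but twice the size.  Only
     explicit zeros are inserted; J carries structure, not values. */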
  {
    PetscInt           rstart, i, *nzd, *nzo, nzl, nzmax = 0, *ncols, nrow, j;
    Mat                J;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscScalar       *nvals;

    MatGetOwnershipRange(B, &rstart, NULL);
    PetscCalloc2(2 * m, &nzd, 2 * m, &nzo);
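    /* count, for each of the 2m local rows of J, the nonzeros falling in the
       diagonal block (nzd) and off-diagonal block (nzo): each nonzero of B
       contributes 2 entries to each of rows 2i and 2i+1 */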
    for (i = 0; i < m; i++) {
      MatGetRow(B, i + rstart, &nzl, &cols, NULL);
      for (j = 0; j < nzl; j++) {
        if (cols[j] >= rstart && cols[j] < rstart + n) {
          nzd[2 * i] += 2;
          nzd[2 * i + 1] += 2;
        } else {
          nzo[2 * i] += 2;
          nzo[2 * i + 1] += 2;
        }
      }
      nzmax = PetscMax(nzmax, nzd[2 * i] + nzo[2 * i]);
      MatRestoreRow(B, i + rstart, &nzl, &cols, NULL);
    }
    MatCreateAIJ(PETSC_COMM_WORLD, 2 * m, 2 * m, PETSC_DECIDE, PETSC_DECIDE, 0, nzd, 0, nzo, &J);
    PetscInfo(0, "Created empty Jacobian matrix\n");
    PetscFree2(nzd, nzo);
    PetscMalloc2(nzmax, &ncols, nzmax, &nvals);
    PetscArrayzero(nvals, nzmax);
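    /* for each row i of B, insert zero values into rows 2i and 2i+1 of J at
       the column pairs (2*cols[j], 2*cols[j]+1) */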
    for (i = 0; i < m; i++) {
      MatGetRow(B, i + rstart, &nzl, &cols, &vals);
      for (j = 0; j < nzl; j++) {
        ncols[2 * j]     = 2 * cols[j];
        ncols[2 * j + 1] = 2 * cols[j] + 1;
      }
      nrow = 2 * (i + rstart);
      MatSetValues(J, 1, &nrow, 2 * nzl, ncols, nvals, INSERT_VALUES);
      nrow = 2 * (i + rstart) + 1;
      MatSetValues(J, 1, &nrow, 2 * nzl, ncols, nvals, INSERT_VALUES);
      MatRestoreRow(B, i + rstart, &nzl, &cols, &vals);
    }
    MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY);
    if (viewMats) {
      PetscPrintf(PETSC_COMM_WORLD, "Jacobian matrix structure:\n");
      MatView(J, PETSC_VIEWER_DRAW_WORLD);
    }
    MatDestroy(&J);
    PetscFree2(ncols, nvals);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  MatDestroy(&B);
  VecDestroy(&xin);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 3
      requires: parmetis datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      args: -nox -f ${DATAFILESPATH}/matrices/arco1 -mat_partitioning_type parmetis -viewer_binary_skip_info -novec_load

   test:
      requires: parmetis !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_32
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: parmetis !complex double defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_64
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_32
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_64
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

TEST*/