Actual source code: ex82.c


static char help[] = "Partition a tiny grid using hierarchical partitioning.\n\n";

/*
  Include "petscmat.h" so that we can use matrices.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A;                      /* parallel adjacency matrix of the grid graph */
  PetscMPIInt     rank, size;
  PetscInt       *ia, *ja;                /* CSR row offsets and column indices */
  MatPartitioning part;
  IS              is, isn, isrows;        /* new owner ranks, new numbering, rows assigned to this rank */
  IS              coarseparts, fineparts; /* the two levels of the hierarchical partition */
  MPI_Comm        comm;

  PetscInitialize(&argc, &args, (char *)0, help);
  comm = PETSC_COMM_WORLD;
  MPI_Comm_size(comm, &size);
  /* the hard-wired adjacency data below assumes exactly 4 MPI ranks */
  if (size != 4) SETERRQ(comm, PETSC_ERR_WRONG_MPI_SIZE, "This example must be run with exactly 4 MPI processes");
  MPI_Comm_rank(comm, &rank);

  PetscMalloc1(5, &ia);
  PetscMalloc1(16, &ja);
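  /*
    Each rank supplies the CSR (compressed sparse row) adjacency of the 4
    vertices it owns in a 4 x 4 grid graph with 16 vertices: rank r owns grid
    row r, i.e. global vertices 4*r .. 4*r+3.  ia[] holds the offsets into
    ja[], and ja[] lists the global indices of each local vertex's neighbors.
  */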
  if (rank == 0) {
    ja[0] = 1;
    ja[1] = 4;
    ja[2] = 0;
    ja[3] = 2;
    ja[4] = 5;
    ja[5] = 1;
    ja[6] = 3;
    ja[7] = 6;
    ja[8] = 2;
    ja[9] = 7;
    ia[0] = 0;
    ia[1] = 2;
    ia[2] = 5;
    ia[3] = 8;
    ia[4] = 10;
  } else if (rank == 1) {
    ja[0]  = 0;
    ja[1]  = 5;
    ja[2]  = 8;
    ja[3]  = 1;
    ja[4]  = 4;
    ja[5]  = 6;
    ja[6]  = 9;
    ja[7]  = 2;
    ja[8]  = 5;
    ja[9]  = 7;
    ja[10] = 10;
    ja[11] = 3;
    ja[12] = 6;
    ja[13] = 11;
    ia[0]  = 0;
    ia[1]  = 3;
    ia[2]  = 7;
    ia[3]  = 11;
    ia[4]  = 14;
  } else if (rank == 2) {
    ja[0]  = 4;
    ja[1]  = 9;
    ja[2]  = 12;
    ja[3]  = 5;
    ja[4]  = 8;
    ja[5]  = 10;
    ja[6]  = 13;
    ja[7]  = 6;
    ja[8]  = 9;
    ja[9]  = 11;
    ja[10] = 14;
    ja[11] = 7;
    ja[12] = 10;
    ja[13] = 15;
    ia[0]  = 0;
    ia[1]  = 3;
    ia[2]  = 7;
    ia[3]  = 11;
    ia[4]  = 14;
  } else {
    ja[0] = 8;
    ja[1] = 13;
    ja[2] = 9;
    ja[3] = 12;
    ja[4] = 14;
    ja[5] = 10;
    ja[6] = 13;
    ja[7] = 15;
    ja[8] = 11;
    ja[9] = 14;
    ia[0] = 0;
    ia[1] = 2;
    ia[2] = 5;
    ia[3] = 8;
    ia[4] = 10;
  }
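  /*
    Assemble the parallel adjacency matrix.  MatCreateMPIAdj takes ownership
    of ia and ja, so the caller must not free or modify them; they are
    released when the matrix is destroyed.
  */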
  MatCreateMPIAdj(comm, 4, 16, ia, ja, NULL, &A);
  MatView(A, PETSC_VIEWER_STDOUT_WORLD);
  /*
   Partition the graph of the matrix
  */
  MatPartitioningCreate(comm, &part);
  MatPartitioningSetAdjacency(part, A);
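  /*
    Hierarchical partitioning first splits the graph into coarse parts
    (e.g. one per compute node) and then splits each coarse part into fine
    parts (e.g. one per core); 2 coarse parts x 2 fine parts gives the 4
    final parts here, one per rank.
  */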
  MatPartitioningSetType(part, MATPARTITIONINGHIERARCH);
  MatPartitioningHierarchicalSetNcoarseparts(part, 2);
  MatPartitioningHierarchicalSetNfineparts(part, 2);
  MatPartitioningSetFromOptions(part);
  /* compute the new owning process of each vertex */
  MatPartitioningApply(part, &is);
  /* coarse parts */
  MatPartitioningHierarchicalGetCoarseparts(part, &coarseparts);
  ISView(coarseparts, PETSC_VIEWER_STDOUT_WORLD);
  /* fine parts */
  MatPartitioningHierarchicalGetFineparts(part, &fineparts);
  ISView(fineparts, PETSC_VIEWER_STDOUT_WORLD);
  /* the combined partitioning */
  ISView(is, PETSC_VIEWER_STDOUT_WORLD);
  /* map each old global row number to its new global number under the partitioning */
  ISPartitioningToNumbering(is, &isn);
  ISView(isn, PETSC_VIEWER_STDOUT_WORLD);
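  /*
    Gather, on each rank, the (old) global indices of the rows that the
    partitioning assigns to it.
  */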
  ISBuildTwoSided(is, NULL, &isrows);
  ISView(isrows, PETSC_VIEWER_STDOUT_WORLD);
  ISDestroy(&is);
  ISDestroy(&coarseparts);
  ISDestroy(&fineparts);
  ISDestroy(&isrows);
  ISDestroy(&isn);
  MatPartitioningDestroy(&part);
  /*
    Free work space.  All PETSc objects should be destroyed when they
    are no longer needed.
  */
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 4
      requires: parmetis
      TODO: tests cannot use parmetis because it produces different results on different machines

TEST*/
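
A typical build-and-run session, assuming a PETSc installation configured
with ParMETIS and the standard PETSc example makefile targets (names here
follow the usual PETSc conventions and may differ for your setup):

   make ex82
   mpiexec -n 4 ./ex82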