Actual source code: ex83.c
static char help[] = "Partition tiny grid using hierarchical partitioning and increase overlap using MatIncreaseOverlapSplit.\n\n";

/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc, char **args)
{
  Mat             A, B;
  PetscMPIInt     rank, size, membershipKey;
  PetscInt       *ia, *ja, *indices_sc, isrows_localsize;
  const PetscInt *indices;
  MatPartitioning part;
  IS              is, isrows, isrows_sc;
  IS              coarseparts, fineparts;
  MPI_Comm        comm, scomm;

  PetscInitialize(&argc, &args, (char *)0, help);
  comm = PETSC_COMM_WORLD;
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
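  /* The hard-coded data below assumes exactly 4 MPI ranks, each owning 4 of the 16 vertices of a small 4 x 4 grid */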
  /* set up this rank's 4 rows of the grid graph in CSR form: ia holds row offsets, ja holds neighbor (column) indices */
  PetscMalloc1(5, &ia);
  PetscMalloc1(16, &ja);
  if (rank == 0) {
    ja[0] = 1;
    ja[1] = 4;
    ja[2] = 0;
    ja[3] = 2;
    ja[4] = 5;
    ja[5] = 1;
    ja[6] = 3;
    ja[7] = 6;
    ja[8] = 2;
    ja[9] = 7;
    ia[0] = 0;
    ia[1] = 2;
    ia[2] = 5;
    ia[3] = 8;
    ia[4] = 10;
    membershipKey = 0;
  } else if (rank == 1) {
    ja[0]  = 0;
    ja[1]  = 5;
    ja[2]  = 8;
    ja[3]  = 1;
    ja[4]  = 4;
    ja[5]  = 6;
    ja[6]  = 9;
    ja[7]  = 2;
    ja[8]  = 5;
    ja[9]  = 7;
    ja[10] = 10;
    ja[11] = 3;
    ja[12] = 6;
    ja[13] = 11;
    ia[0]  = 0;
    ia[1]  = 3;
    ia[2]  = 7;
    ia[3]  = 11;
    ia[4]  = 14;
    membershipKey = 0;
  } else if (rank == 2) {
    ja[0]  = 4;
    ja[1]  = 9;
    ja[2]  = 12;
    ja[3]  = 5;
    ja[4]  = 8;
    ja[5]  = 10;
    ja[6]  = 13;
    ja[7]  = 6;
    ja[8]  = 9;
    ja[9]  = 11;
    ja[10] = 14;
    ja[11] = 7;
    ja[12] = 10;
    ja[13] = 15;
    ia[0]  = 0;
    ia[1]  = 3;
    ia[2]  = 7;
    ia[3]  = 11;
    ia[4]  = 14;
    membershipKey = 1;
  } else {
    ja[0] = 8;
    ja[1] = 13;
    ja[2] = 9;
    ja[3] = 12;
    ja[4] = 14;
    ja[5] = 10;
    ja[6] = 13;
    ja[7] = 15;
    ja[8] = 11;
    ja[9] = 14;
    ia[0] = 0;
    ia[1] = 2;
    ia[2] = 5;
    ia[3] = 8;
    ia[4] = 10;
    membershipKey = 1;
  }
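  /*
     Wrap the per-rank CSR arrays in a parallel adjacency matrix (4 local rows,
     16 global columns, so the global graph is 16 x 16 over 4 ranks).
     MatCreateMPIAdj() takes ownership of ia and ja, which is why they are not
     freed explicitly below.
  */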
  MatCreateMPIAdj(comm, 4, 16, ia, ja, NULL, &A);
  MatView(A, PETSC_VIEWER_STDOUT_WORLD);
  /*
     Partition the graph of the matrix hierarchically: first into 2 coarse parts,
     then each coarse part into 2 fine parts
  */
  MatPartitioningCreate(comm, &part);
  MatPartitioningSetAdjacency(part, A);
  MatPartitioningSetType(part, MATPARTITIONINGHIERARCH);
  MatPartitioningHierarchicalSetNcoarseparts(part, 2);
  MatPartitioningHierarchicalSetNfineparts(part, 2);
  MatPartitioningSetFromOptions(part);
  /* get the new owning process of each local vertex */
  MatPartitioningApply(part, &is);
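  /*
     The hierarchical partitioner exposes both levels of the result: the coarse
     partition (which of the 2 groups each vertex belongs to) and the fine
     partition computed within each group, while "is" gives the final owner
     among all 4 parts.
  */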
  /* coarse parts */
  MatPartitioningHierarchicalGetCoarseparts(part, &coarseparts);
  ISView(coarseparts, PETSC_VIEWER_STDOUT_WORLD);
  /* fine parts */
  MatPartitioningHierarchicalGetFineparts(part, &fineparts);
  ISView(fineparts, PETSC_VIEWER_STDOUT_WORLD);
  /* overall partitioning */
  ISView(is, PETSC_VIEWER_STDOUT_WORLD);
  /* compute the rows that end up on this process after the repartitioning */
  ISBuildTwoSided(is, NULL, &isrows);
  ISView(isrows, PETSC_VIEWER_STDOUT_WORLD);
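  /*
     From here on the example works on sub-communicators: membershipKey was set
     to 0 on ranks 0-1 and to 1 on ranks 2-3, so the split below produces two
     sub-communicators that mirror the two coarse groups.
  */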
  /* create a sub-communicator */
  MPI_Comm_split(comm, membershipKey, rank, &scomm);
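  /*
     Copy the indices of isrows into a fresh buffer: ISGetIndices() only borrows
     the internal array, while the copy can be handed to ISCreateGeneral() with
     PETSC_OWN_POINTER below so the new IS takes ownership and frees it.
  */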
  ISGetLocalSize(isrows, &isrows_localsize);
  PetscMalloc1(isrows_localsize, &indices_sc);
  ISGetIndices(isrows, &indices);
  PetscArraycpy(indices_sc, indices, isrows_localsize);
  ISRestoreIndices(isrows, &indices);
  /* these objects on the world communicator are no longer needed */
  ISDestroy(&is);
  ISDestroy(&coarseparts);
  ISDestroy(&fineparts);
  ISDestroy(&isrows);
  MatPartitioningDestroy(&part);
  /* create a sub-IS on the sub-communicator */
  ISCreateGeneral(scomm, isrows_localsize, indices_sc, PETSC_OWN_POINTER, &isrows_sc);
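  /*
     Convert the adjacency matrix to an AIJ matrix on the full communicator,
     presumably because MatIncreaseOverlapSplit() operates on assembled AIJ-type
     matrices rather than on the MATMPIADJ graph format.
  */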
  MatConvert(A, MATMPIAIJ, MAT_INITIAL_MATRIX, &B);
  MatView(B, PETSC_VIEWER_STDOUT_WORLD);
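  /*
     MatIncreaseOverlapSplit() takes index sets that live on a sub-communicator
     together with the parallel matrix on the full communicator, and enlarges
     each index set by the requested number of levels of overlap; the added rows
     may be owned by processes outside the sub-communicator.
  */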
  /* increase the overlap by one level */
  MatIncreaseOverlapSplit(B, 1, &isrows_sc, 1);
  ISView(isrows_sc, NULL);
  ISDestroy(&isrows_sc);
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  MPI_Comm_free(&scomm);
  MatDestroy(&A);
  MatDestroy(&B);
  PetscFinalize();
  return 0;
}