Actual source code: ex14.c
static char help[] = "Test event log of VecScatter with various block sizes\n\n";

#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscInt i, j, low, high, n = 256, N, errors, tot_errors;
  PetscInt bs = 1, ix[2], iy[2];
  PetscMPIInt nproc, rank;
  PetscScalar *xval;
  const PetscScalar *yval;
  Vec x, y;
  IS isx, isy;
  VecScatter ctx;
  const PetscInt niter = 10;
#if defined(PETSC_USE_LOG)
  PetscLogStage stage1, stage2;
  PetscLogEvent event1, event2;
  PetscLogDouble numMessages, messageLength;
  PetscEventPerfInfo eventInfo;
  PetscInt tot_msg, tot_len, avg_len;
#endif

  PetscInitialize(&argc, &argv, (char *)0, help);
  PetscLogDefaultBegin();
  MPI_Comm_size(PETSC_COMM_WORLD, &nproc);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

  PetscLogStageRegister("Scatter(bs=1)", &stage1);
  PetscLogEventRegister("VecScatter(bs=1)", PETSC_OBJECT_CLASSID, &event1);
  PetscLogStageRegister("Scatter(bs=4)", &stage2);
  PetscLogEventRegister("VecScatter(bs=4)", PETSC_OBJECT_CLASSID, &event2);

  /* Create a parallel vector x and a sequential vector y */
  VecCreate(PETSC_COMM_WORLD, &x);
  VecSetSizes(x, n, PETSC_DECIDE);
  VecSetFromOptions(x);
  VecGetOwnershipRange(x, &low, &high);
  VecGetSize(x, &N);
  VecCreateSeq(PETSC_COMM_SELF, n, &y);

  /*=======================================
     test VecScatter with bs = 1
    ======================================*/

  /* The code below is set up as if we were doing 3-point stencil computations on a 1D domain x
     with periodic boundary conditions; the two ghost points are scattered to the beginning of y.
   */
  bs = 1;
  ix[0] = rank ? low - 1 : N - 1; /* ix[] contains global indices of the two ghost points */
  ix[1] = (rank != nproc - 1) ? high : 0;
  iy[0] = 0;
  iy[1] = 1;
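  /* For example, with 4 MPI ranks and the default n = 256 (so N = 1024), rank 1 owns entries
     [256, 512): its left ghost is global index 255 and its right ghost is 512; on rank 0 the
     left ghost wraps around to N - 1 = 1023, and on the last rank the right ghost wraps to 0.
     Both ghosts are placed at y[0] and y[1]. */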

  ISCreateGeneral(PETSC_COMM_SELF, 2, ix, PETSC_COPY_VALUES, &isx);
  ISCreateGeneral(PETSC_COMM_SELF, 2, iy, PETSC_COPY_VALUES, &isy);
  VecScatterCreate(x, isx, y, isy, &ctx);
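  /* Call SetUp explicitly here too, so that messages from an implicit SetUp are not counted
     in the event logged below (same reason as in the bs = 4 case further down) */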
  VecScatterSetUp(ctx);

  PetscLogStagePush(stage1);
  PetscLogEventBegin(event1, 0, 0, 0, 0);
  errors = 0;
  for (i = 0; i < niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x, &xval);
    for (j = 0; j < n; j++) xval[j] = (PetscScalar)(low + j + i);
    VecRestoreArray(x, &xval);
    /* scatter the ghosts to y */
    VecScatterBegin(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD);
    /* check if y has correct values */
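    /* since x[k] = k + i for global index k, the ghost taken from global index ix[0] must hold
       the value ix[0] + i, and likewise ix[1] + i for the second ghost */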
    VecGetArrayRead(y, &yval);
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0] + i) errors++;
    if ((PetscInt)PetscRealPart(yval[1]) != ix[1] + i) errors++;
    VecRestoreArrayRead(y, &yval);
  }
  PetscLogEventEnd(event1, 0, 0, 0, 0);
  PetscLogStagePop();

  /* check whether wrong values were found on any process */
  MPI_Allreduce(&errors, &tot_errors, 1, MPIU_INT, MPI_SUM, PETSC_COMM_WORLD);
  if (tot_errors) PetscPrintf(PETSC_COMM_WORLD, "Error: wrong values were scattered in vecscatter with bs = %" PetscInt_FMT "\n", bs);

  /* print out event log of VecScatter(bs=1) */
#if defined(PETSC_USE_LOG)
  PetscLogEventGetPerfInfo(stage1, event1, &eventInfo);
  MPI_Allreduce(&eventInfo.numMessages, &numMessages, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength, &messageLength, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, PETSC_COMM_WORLD);
  tot_msg = (PetscInt)numMessages * 0.5; /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)messageLength * 0.5;
  avg_len = tot_msg ? (PetscInt)(messageLength / numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
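  /* e.g. with the 4-rank test configuration, niter = 10 and 8-byte real scalars:
     tot_msg = 2*4*10 = 80 messages, tot_len = 80*8*1 = 640 bytes, avg_len = 8 bytes */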
  PetscPrintf(PETSC_COMM_WORLD, "VecScatter(bs=%" PetscInt_FMT ") has sent out %" PetscInt_FMT " messages, total %" PetscInt_FMT " bytes, with average length %" PetscInt_FMT " bytes\n", bs, tot_msg, tot_len, avg_len);
#endif

  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);

  /*=======================================
     test VecScatter with bs = 4
    ======================================*/

  /* similar to the 3-point stencil above, except that this time each ghost is a block */
  bs = 4; /* n needs to be a multiple of bs for the following code to work */
  ix[0] = rank ? low / bs - 1 : N / bs - 1; /* ix[] contains global indices of the two ghost blocks */
  ix[1] = (rank != nproc - 1) ? high / bs : 0;
  iy[0] = 0;
  iy[1] = 1;
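  /* e.g. with 4 ranks and n = 256 there are N/bs = 256 blocks in total and each rank owns 64;
     rank 1 owns blocks [64, 128), so its ghost blocks are 63 and 128, landing in y[0..3] and y[4..7] */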

  ISCreateBlock(PETSC_COMM_SELF, bs, 2, ix, PETSC_COPY_VALUES, &isx);
  ISCreateBlock(PETSC_COMM_SELF, bs, 2, iy, PETSC_COPY_VALUES, &isy);

  VecScatterCreate(x, isx, y, isy, &ctx);
  /* Call SetUp explicitly, otherwise messages in implicit SetUp will be counted in events below */
  VecScatterSetUp(ctx);

  PetscLogStagePush(stage2);
  PetscLogEventBegin(event2, 0, 0, 0, 0);
  errors = 0;
  for (i = 0; i < niter; i++) {
    /* set x = 0+i, 1+i, 2+i, ..., N-1+i */
    VecGetArray(x, &xval);
    for (j = 0; j < n; j++) xval[j] = (PetscScalar)(low + j + i);
    VecRestoreArray(x, &xval);
    /* scatter the ghost blocks to y */
    VecScatterBegin(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd(ctx, x, y, INSERT_VALUES, SCATTER_FORWARD);
    /* check if y has correct values */
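    /* block ix[0] covers global entries ix[0]*bs .. ix[0]*bs + bs - 1 and is placed in y[0..bs-1],
       so its first entry must hold ix[0]*bs + i; the second block starts at y[bs] with ix[1]*bs + i */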
    VecGetArrayRead(y, &yval);
    if ((PetscInt)PetscRealPart(yval[0]) != ix[0] * bs + i) errors++;
    if ((PetscInt)PetscRealPart(yval[bs]) != ix[1] * bs + i) errors++;
    VecRestoreArrayRead(y, &yval);
  }
  PetscLogEventEnd(event2, 0, 0, 0, 0);
  PetscLogStagePop();

  /* check whether wrong values were found on any process */
  MPI_Allreduce(&errors, &tot_errors, 1, MPIU_INT, MPI_SUM, PETSC_COMM_WORLD);
  if (tot_errors) PetscPrintf(PETSC_COMM_WORLD, "Error: wrong values were scattered in vecscatter with bs = %" PetscInt_FMT "\n", bs);

  /* print out event log of VecScatter(bs=4) */
#if defined(PETSC_USE_LOG)
  PetscLogEventGetPerfInfo(stage2, event2, &eventInfo);
  MPI_Allreduce(&eventInfo.numMessages, &numMessages, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, PETSC_COMM_WORLD);
  MPI_Allreduce(&eventInfo.messageLength, &messageLength, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, PETSC_COMM_WORLD);
  tot_msg = (PetscInt)numMessages * 0.5; /* two MPI calls (Send & Recv) per message */
  tot_len = (PetscInt)messageLength * 0.5;
  avg_len = tot_msg ? (PetscInt)(messageLength / numMessages) : 0;
  /* when nproc > 2, tot_msg = 2*nproc*niter, tot_len = tot_msg*sizeof(PetscScalar)*bs */
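  /* e.g. with the 4-rank test configuration: tot_msg is still 80, but with bs = 4 each message
     carries a whole block, so tot_len = 80*8*4 = 2560 bytes and avg_len = 32 bytes */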
  PetscPrintf(PETSC_COMM_WORLD, "VecScatter(bs=%" PetscInt_FMT ") has sent out %" PetscInt_FMT " messages, total %" PetscInt_FMT " bytes, with average length %" PetscInt_FMT " bytes\n", bs, tot_msg, tot_len, avg_len);
#endif

  PetscPrintf(PETSC_COMM_WORLD, "Program finished\n");
  ISDestroy(&isx);
  ISDestroy(&isy);
  VecScatterDestroy(&ctx);
  VecDestroy(&x);
  VecDestroy(&y);
  PetscFinalize();
  return 0;
}

/*TEST

   test:
      nsize: 4
      args:
      requires: double defined(PETSC_USE_LOG)

TEST*/
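
/* To run this example by hand (assuming the executable is built as ex14 in the current directory),
   something like the following reproduces the harness configuration above:
     mpiexec -n 4 ./ex14
*/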