Actual source code: ex17.c
static const char help[] = "Test PetscSF with MPI large count (more than 2 billion elements in messages)\n\n";

#include <petscsys.h>
#include <petscsf.h>
#include <petscvec.h> /* Vec, IS and VecScatter interfaces used below */

int main(int argc, char **argv)
{
  PetscSF            sf;
  PetscInt           i, nroots, nleaves;
  PetscInt           n = (1ULL << 31) + 1024; /* a little over 2G elements */
  PetscSFNode       *iremote = NULL;
  PetscMPIInt        rank, size;
  char              *rootdata = NULL, *leafdata = NULL;
  Vec                x, y;
  VecScatter         vscat;
  PetscInt           rstart, rend;
  IS                 ix;
  const PetscScalar *xv;

  PetscInitialize(&argc, &argv, NULL, help);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  MPI_Comm_size(PETSC_COMM_WORLD, &size);
  PetscCheck(size == 2, PETSC_COMM_WORLD, PETSC_ERR_WRONG_MPI_SIZE, "This test must be run with exactly two MPI ranks"); /* matches "nsize: 2" below */

  /* Test PetscSF */
  PetscSFCreate(PETSC_COMM_WORLD, &sf);
  PetscSFSetFromOptions(sf);

  if (rank == 0) {
    nroots  = n;
    nleaves = 0;
  } else {
    nroots  = 0;
    nleaves = n;
    PetscMalloc1(nleaves, &iremote);
    for (i = 0; i < nleaves; i++) {
      iremote[i].rank  = 0;
      iremote[i].index = i;
    }
  }
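  /* The star forest is a simple one-to-one pattern: rank 0 owns all n roots and rank 1 owns
     n leaves, with leaf i attached to root i on rank 0. A broadcast therefore moves more than
     2^31 chars in a single rank-0 -> rank-1 message, exercising the MPI large-count path. */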
  PetscSFSetGraph(sf, nroots, nleaves, NULL, PETSC_COPY_VALUES, iremote, PETSC_COPY_VALUES);
  PetscMalloc2(nroots, &rootdata, nleaves, &leafdata);
  if (rank == 0) {
    memset(rootdata, 11, nroots);
    rootdata[nroots - 1] = 12; /* Use a different value at the end */
  }

  PetscSFBcastBegin(sf, MPI_SIGNED_CHAR, rootdata, leafdata, MPI_REPLACE); /* rank 0->1, bcast rootdata to leafdata */
  PetscSFBcastEnd(sf, MPI_SIGNED_CHAR, rootdata, leafdata, MPI_REPLACE);
  PetscSFReduceBegin(sf, MPI_SIGNED_CHAR, leafdata, rootdata, MPI_SUM); /* rank 1->0, add leafdata to rootdata */
  PetscSFReduceEnd(sf, MPI_SIGNED_CHAR, leafdata, rootdata, MPI_SUM);
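  /* After the broadcast, leafdata on rank 1 should be 11 everywhere with 12 in its last entry;
     after the reduce, rootdata on rank 0 should be doubled to 22 (and 24 at the end). */
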
  PetscFree2(rootdata, leafdata);
  PetscFree(iremote);
  PetscSFDestroy(&sf);

  /* Test VecScatter */
  VecCreate(PETSC_COMM_WORLD, &x);
  VecCreate(PETSC_COMM_WORLD, &y);
  VecSetSizes(x, rank == 0 ? n : 64, PETSC_DECIDE);
  VecSetSizes(y, rank == 0 ? 64 : n, PETSC_DECIDE);
  VecSetFromOptions(x);
  VecSetFromOptions(y);

  VecGetOwnershipRange(x, &rstart, &rend);
  ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &ix);
  VecScatterCreate(x, ix, y, ix, &vscat);
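  /* x and y have the same global size (n + 64) but swapped local layouts, and ix selects each
     rank's owned range of x, so the scatter copies x[i] to y[i] globally. Most of that data must
     travel between the two ranks, again in messages of more than 2 billion entries. */
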
  VecSet(x, 3.0);
  VecScatterBegin(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(vscat, x, y, INSERT_VALUES, SCATTER_FORWARD);

  VecScatterBegin(vscat, y, x, ADD_VALUES, SCATTER_REVERSE);
  VecScatterEnd(vscat, y, x, ADD_VALUES, SCATTER_REVERSE);
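  /* The forward INSERT puts 3.0 into every entry of y; the reverse ADD sends those values back
     and accumulates them into x, so every entry of x should now equal 6.0. */
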
  VecGetArrayRead(x, &xv); /* fetch the local values so the result can be inspected */
  VecRestoreArrayRead(x, &xv);
  VecDestroy(&x);
  VecDestroy(&y);
  VecScatterDestroy(&vscat);
  ISDestroy(&ix);

  PetscFinalize();
  return 0;
}

/**TEST
   test:
     requires: defined(PETSC_HAVE_MPI_LARGE_COUNT) defined(PETSC_USE_64BIT_INDICES)
     TODO: need a machine with big memory (~150GB) to run the test
     nsize: 2
     args: -sf_type {{basic neighbor}}

TEST**/
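
/* Example manual invocation (a sketch, not part of the test harness; it assumes the executable
   is built as ex17 against a PETSc configured with 64-bit indices and an MPI implementation that
   supports large counts, and that enough memory is available):
     mpiexec -n 2 ./ex17 -sf_type basic
*/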