Actual source code: ex6.c

static char help[] = "  Test VecScatter with x, y on different communicators\n\n";

#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscInt           i, n = 5, rstart;
  PetscScalar       *val;
  const PetscScalar *dat;
  Vec                x, y1, y2;
  MPI_Comm           newcomm;
  PetscMPIInt        nproc, rank;
  IS                 ix;
  VecScatter         vscat1, vscat2;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &nproc));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

  /* Create MPI vectors x and y1, which are on the same communicator (i.e., MPI_IDENT) */
  PetscCall(VecCreateMPI(PETSC_COMM_WORLD, n, PETSC_DECIDE, &x));
  PetscCall(VecDuplicate(x, &y1));
  PetscCall(VecGetOwnershipRange(x, &rstart, NULL));

  /* Set x's values locally; with two ranks and n = 5, x = {0., 1., 2., ..., 9.} */
  PetscCall(VecGetArray(x, &val));
  for (i = 0; i < n; i++) val[i] = rstart + i;
  PetscCall(VecRestoreArray(x, &val));

  /* Create index set ix = {0, 1, 2, ..., 9}; each rank contributes its n contiguous global indices {rstart, ..., rstart+n-1} */
  PetscCall(ISCreateStride(PETSC_COMM_WORLD, n, rstart, 1, &ix));

  /* Create newcomm, which reverses the process order of x's communicator, then create y2 on it */
  PetscCallMPI(MPI_Comm_split(PETSC_COMM_WORLD, 0 /*color*/, -rank /*key*/, &newcomm));
  PetscCall(VecCreateMPI(newcomm, n, PETSC_DECIDE, &y2));
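  /* MPI_Comm_split orders ranks in newcomm by increasing key, so with key = -rank the
     process that is rank r in PETSC_COMM_WORLD becomes rank nproc-1-r in newcomm. With
     two ranks, world rank 0 is newcomm rank 1 and thus owns y2's global entries 5..9 */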

  /* vscat1 and vscat2 look identical, but they are not: y2 lives on a different communicator than x */
  PetscCall(VecScatterCreate(x, ix, y1, ix, &vscat1));
  PetscCall(VecScatterCreate(x, ix, y2, ix, &vscat2));

  PetscCall(VecScatterBegin(vscat1, x, y1, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterBegin(vscat2, x, y2, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat1, x, y1, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(vscat2, x, y2, INSERT_VALUES, SCATTER_FORWARD));
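  /* Both scatters copy x[i] into y[i] in the global numbering; the difference is which
     process ends up owning a given entry of the destination, since y2's layout follows newcomm */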

  /* View on rank 0 of x's communicator, which is PETSC_COMM_WORLD */
  if (rank == 0) {
    /* Print the part of x on rank 0, which is 0 1 2 3 4 */
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, x  = "));
    PetscCall(VecGetArrayRead(x, &dat));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(x, &dat));

    /* Print the part of y1 on rank 0, which is also 0 1 2 3 4 */
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, y1 = "));
    PetscCall(VecGetArrayRead(y1, &dat));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(y1, &dat));

    /* Print the part of y2 on rank 0, which is 5 6 7 8 9, since newcomm reverses the process order of PETSC_COMM_WORLD */
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "\nOn rank 0 of PETSC_COMM_WORLD, y2 = "));
    PetscCall(VecGetArrayRead(y2, &dat));
    for (i = 0; i < n; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, " %.0g", (double)PetscRealPart(dat[i])));
    PetscCall(VecRestoreArrayRead(y2, &dat));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "\n"));
  }

  PetscCall(ISDestroy(&ix));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y1));
  PetscCall(VecDestroy(&y2));
  PetscCall(VecScatterDestroy(&vscat1));
  PetscCall(VecScatterDestroy(&vscat2));
  PetscCallMPI(MPI_Comm_free(&newcomm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   test:
      nsize: 2
      requires: double
TEST*/
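
To try the example by hand, build it as usual for PETSc examples and launch it on two
ranks, e.g., mpiexec -n 2 ./ex6 (the launcher name depends on your MPI installation).
Per the comments above, rank 0 should print x = 0 1 2 3 4, y1 = 0 1 2 3 4, and
y2 = 5 6 7 8 9.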