Actual source code: ex3.c


static char help[] = "Parallel vector layout.\n\n";

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>

int main(int argc, char **argv)
{
  PetscMPIInt rank;
  PetscInt    i, istart, iend, n = 6, nlocal;
  PetscScalar v, *array;
  Vec         x;
  PetscViewer viewer;

  PetscInitialize(&argc, &argv, (char *)0, help);
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

  PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL);
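  /*
     The global vector size n can be overridden at run time via this option,
     e.g. (illustrative command line, assuming an MPI launcher named mpiexec):

         mpiexec -n 2 ./ex3 -n 12
  */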

  /*
     Create a vector, specifying only its global dimension.
     When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
     the vector format (currently parallel or sequential) is
     determined at runtime.  Also, the parallel partitioning of
     the vector is determined by PETSc at runtime.
  */
  VecCreate(PETSC_COMM_WORLD, &x);
  VecSetSizes(x, PETSC_DECIDE, n);
  VecSetFromOptions(x);
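  /*
     A hedged alternative (not part of this example's execution path): the same
     parallel vector could be created in a single call with VecCreateMPI(), at
     the cost of fixing the vector type instead of selecting it via -vec_type:

         VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, n, &x);
  */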

  /*
     PETSc parallel vectors are partitioned by
     contiguous chunks of rows across the processors.  Determine
     which vector elements are locally owned.
  */
  VecGetOwnershipRange(x, &istart, &iend);

  /* --------------------------------------------------------------------
     Set the vector elements.
      - Always specify global locations of vector entries.
      - Each processor can insert into any location, even ones it does not own.
      - In this case each processor adds values to all the entries; this is
        not practical, but it is done here merely as an example (a more
        typical, owner-only pattern is sketched after the loop).
   */
  for (i = 0; i < n; i++) {
    v = (PetscReal)(rank * i);
    VecSetValues(x, 1, &i, &v, ADD_VALUES);
  }
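  /*
     Sketch of the more common owner-only pattern (hedged alternative, not
     executed here): each rank sets just the entries it owns, so assembly
     requires no off-process communication.

         for (i = istart; i < iend; i++) {
           v = (PetscReal)(rank * i);
           VecSetValues(x, 1, &i, &v, INSERT_VALUES);
         }
  */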

  /*
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  VecAssemblyBegin(x);
  VecAssemblyEnd(x);
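  /*
     For instance (illustrative sketch only), independent local work that does
     not touch x could be placed between the two assembly calls to overlap
     computation with communication:

         VecAssemblyBegin(x);
         ... purely local computation not involving x ...
         VecAssemblyEnd(x);
  */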

  /*
     Open an X-window viewer.  Note that we specify the same communicator
     for the viewer as we used for the distributed vector (PETSC_COMM_WORLD).
       - Helpful runtime option:
            -draw_pause <pause> : sets time (in seconds) that the
                  program pauses after PetscDrawPause() has been called
                  (0 is default, -1 implies until user input).
  */
  PetscViewerDrawOpen(PETSC_COMM_WORLD, NULL, NULL, 0, 0, 300, 300, &viewer);
  PetscObjectSetName((PetscObject)viewer, "Line graph Plot");
  PetscViewerPushFormat(viewer, PETSC_VIEWER_DRAW_LG);
  /*
     View the vector
  */
  VecView(x, viewer);
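  /*
     A plain-text alternative (sketch; not required for this example) would be
     to view the vector on standard output instead of in an X window:

         VecView(x, PETSC_VIEWER_STDOUT_WORLD);
  */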

  /* --------------------------------------------------------------------
     Access the vector values directly. Each processor has access only
     to its portion of the vector. For default PETSc vectors, VecGetArray()
     does NOT involve a copy.
  */
  VecGetLocalSize(x, &nlocal);
  VecGetArray(x, &array);
  for (i = 0; i < nlocal; i++) array[i] = rank + 1;
  VecRestoreArray(x, &array);
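  /*
     When the local values are only read, the read-only access routines
     VecGetArrayRead()/VecRestoreArrayRead() are preferable, since they do not
     mark the vector as modified (hedged sketch, not executed here):

         const PetscScalar *ro;
         VecGetArrayRead(x, &ro);
         ... read ro[0 .. nlocal-1] ...
         VecRestoreArrayRead(x, &ro);
  */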

  /*
     View the vector
  */
  VecView(x, viewer);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscViewerPopFormat(viewer);
  PetscViewerDestroy(&viewer);
  VecDestroy(&x);

  PetscFinalize();
  return 0;
}

/*TEST

     test:
       nsize: 2

TEST*/