Actual source code: ex6.c

static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscao.h>

int main(int argc, char **argv)
{
  PetscMPIInt     rank;
  PetscInt        M = 3, N = 5, P = 3, s = 1, w = 2, nloc, l, i, j, k, kk, m = PETSC_DECIDE, n = PETSC_DECIDE, p = PETSC_DECIDE;
  PetscInt        Xs, Xm, Ys, Ym, Zs, Zm, iloc, *iglobal;
  const PetscInt *ltog;
  PetscInt       *lx = NULL, *ly = NULL, *lz = NULL;
  PetscBool       test_order = PETSC_FALSE;
  DM              da;
  PetscViewer     viewer;
  Vec             local, global;
  PetscScalar     value;
  DMBoundaryType  bx = DM_BOUNDARY_NONE, by = DM_BOUNDARY_NONE, bz = DM_BOUNDARY_NONE;
  DMDAStencilType stencil_type = DMDA_STENCIL_BOX;
  AO              ao;
  PetscBool       flg = PETSC_FALSE;

  PetscInitialize(&argc, &argv, (char *)0, help);
  PetscViewerDrawOpen(PETSC_COMM_WORLD, 0, "", 300, 0, 400, 300, &viewer);

  /* Read options */
  PetscOptionsGetInt(NULL, NULL, "-NX", &M, NULL);
  PetscOptionsGetInt(NULL, NULL, "-NY", &N, NULL);
  PetscOptionsGetInt(NULL, NULL, "-NZ", &P, NULL);
  PetscOptionsGetInt(NULL, NULL, "-m", &m, NULL);
  PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL);
  PetscOptionsGetInt(NULL, NULL, "-p", &p, NULL);
  PetscOptionsGetInt(NULL, NULL, "-s", &s, NULL);
  PetscOptionsGetInt(NULL, NULL, "-w", &w, NULL);
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-star", &flg, NULL);
  if (flg) stencil_type = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-box", &flg, NULL);
  if (flg) stencil_type = DMDA_STENCIL_BOX;

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-xperiodic", &flg, NULL);
  if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-xghosted", &flg, NULL);
  if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-xnonghosted", &flg, NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-yperiodic", &flg, NULL);
  if (flg) by = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-yghosted", &flg, NULL);
  if (flg) by = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-ynonghosted", &flg, NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-zperiodic", &flg, NULL);
  if (flg) bz = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-zghosted", &flg, NULL);
  if (flg) bz = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-znonghosted", &flg, NULL);

  PetscOptionsGetBool(NULL, NULL, "-testorder", &test_order, NULL);
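  /* An illustrative invocation using only the options read above, e.g.
       mpiexec -n 2 ./ex6 -NX 8 -NY 8 -NZ 8 -w 1 -s 2 -star -xperiodic
     runs an 8x8x8 grid with 1 dof per node, stencil width 2, a star stencil,
     and periodic boundaries in x. */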

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-distribute", &flg, NULL);
  if (flg) {
    /* lx/ly/lz are allocated with m, n, p entries, so those must be set explicitly
       (e.g. -m 2 -n 2 -p 1) when -distribute is used */
    if (m == PETSC_DECIDE || n == PETSC_DECIDE || p == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Must set -m, -n and -p with -distribute");
    PetscMalloc1(m, &lx);
    for (i = 0; i < m - 1; i++) lx[i] = 4;
    lx[m - 1] = M - 4 * (m - 1);
    PetscMalloc1(n, &ly);
    for (i = 0; i < n - 1; i++) ly[i] = 2;
    ly[n - 1] = N - 2 * (n - 1);
    PetscMalloc1(p, &lz);
    for (i = 0; i < p - 1; i++) lz[i] = 2;
    lz[p - 1] = P - 2 * (p - 1);
  }
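  /* For instance, -distribute -m 2 -NX 9 gives lx = {4, 5}: the first rank in x owns
     4 planes and the last owns M - 4*(m-1) = 5. The entries of lx/ly/lz must sum to
     M, N, and P respectively. */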

  /* Create distributed array and get vectors */
  DMDACreate3d(PETSC_COMM_WORLD, bx, by, bz, stencil_type, M, N, P, m, n, p, w, s, lx, ly, lz, &da);
  DMSetFromOptions(da);
  DMSetUp(da);
  PetscFree(lx);
  PetscFree(ly);
  PetscFree(lz);
  DMView(da, viewer);
  DMCreateGlobalVector(da, &global);
  DMCreateLocalVector(da, &local);
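  /* The global vector has w*M*N*P entries (90 for the default 3x5x3 grid with w = 2)
     and no ghost points; each process's local vector additionally holds the ghost
     nodes within stencil width s of its owned subdomain. */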

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  VecSet(global, value);
  DMGlobalToLocalBegin(da, global, INSERT_VALUES, local);
  DMGlobalToLocalEnd(da, global, INSERT_VALUES, local);

  /* Scale local vectors according to processor rank; pass to global vector */
  MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
  value = rank;
  VecScale(local, value);
  DMLocalToGlobalBegin(da, local, INSERT_VALUES, global);
  DMLocalToGlobalEnd(da, local, INSERT_VALUES, global);
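  /* With INSERT_VALUES each global entry is taken from its owning process (ghost
     copies are not summed), so every entry of the global vector should now equal
     the rank of the process that owns it. */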

  if (!test_order) { /* turn off printing when testing ordering mappings */
    if (M * N * P < 40) {
      PetscPrintf(PETSC_COMM_WORLD, "\nGlobal Vector:\n");
      VecView(global, PETSC_VIEWER_STDOUT_WORLD);
      PetscPrintf(PETSC_COMM_WORLD, "\n");
    }
  }

  /* Send ghost points to local vectors */
  DMGlobalToLocalBegin(da, global, INSERT_VALUES, local);
  DMGlobalToLocalEnd(da, global, INSERT_VALUES, local);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL, NULL, "-local_print", &flg, NULL);
  if (flg) {
    PetscViewer sviewer;
    PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "\nLocal Vector: processor %d\n", rank);
    PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer);
    VecView(local, sviewer);
    PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD, PETSC_COMM_SELF, &sviewer);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);
    PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);
  }
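  /* PetscViewerGetSubViewer hands each rank a sequential view of the shared stdout
     viewer, so the ghosted local vectors print one rank at a time rather than
     interleaving; the push/pop-synchronized calls keep that output ordered. */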

  /* Tests mappings between application/PETSc orderings */
  if (test_order) {
    ISLocalToGlobalMapping ltogm;

    DMGetLocalToGlobalMapping(da, &ltogm);
    ISLocalToGlobalMappingGetSize(ltogm, &nloc);
    ISLocalToGlobalMappingGetIndices(ltogm, &ltog);

    DMDAGetGhostCorners(da, &Xs, &Ys, &Zs, &Xm, &Ym, &Zm);
    DMDAGetAO(da, &ao);
    /* AOView(ao,PETSC_VIEWER_STDOUT_WORLD); */
    PetscMalloc1(nloc, &iglobal);
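    /* Xs/Ys/Zs and Xm/Ym/Zm are the start and extent of this rank's ghosted box, so
       nloc should equal w * Xm * Ym * Zm: one ltog entry per dof of every local and
       ghost node. */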

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
    kk = 0;
    for (k = Zs; k < Zs + Zm; k++) {
      for (j = Ys; j < Ys + Ym; j++) {
        for (i = Xs; i < Xs + Xm; i++) {
          iloc = w * ((k - Zs) * Xm * Ym + (j - Ys) * Xm + i - Xs);
          for (l = 0; l < w; l++) iglobal[kk++] = ltog[iloc + l];
        }
      }
    }
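    /* iloc is the first of the w interlaced dof indices of node (i,j,k) within the
       ghosted box: nodes are numbered x fastest, then y, then z, so with w = 2 the
       node at local offset q holds local dofs 2q and 2q+1. */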

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
    AOPetscToApplication(ao, nloc, iglobal);

    /* Then map the application ordering back to the PETSc DMDA ordering */
    AOApplicationToPetsc(ao, nloc, iglobal);
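    /* In the natural ordering, node (i,j,k) has index w*(k*M*N + j*M + i) + l for
       dof l (with the default 3x5x3 grid, w*(15k + 3j + i) + l). Since the two AO
       calls are inverses, iglobal should now match the original ltog entries, which
       the loop below checks entry by entry. */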

    /* Verify the mappings */
    kk = 0;
    for (k = Zs; k < Zs + Zm; k++) {
      for (j = Ys; j < Ys + Ym; j++) {
        for (i = Xs; i < Xs + Xm; i++) {
          iloc = w * ((k - Zs) * Xm * Ym + (j - Ys) * Xm + i - Xs);
          for (l = 0; l < w; l++) {
            if (iglobal[kk] != ltog[iloc + l]) {
              /* print on PETSC_COMM_SELF so the rank that detects a mismatch reports it */
              PetscPrintf(PETSC_COMM_SELF, "[%d] Problem with mapping: k=%" PetscInt_FMT ", j=%" PetscInt_FMT ", i=%" PetscInt_FMT ", l=%" PetscInt_FMT ", petsc1=%" PetscInt_FMT ", petsc2=%" PetscInt_FMT "\n", rank, k, j, i, l, ltog[iloc + l], iglobal[kk]);
            }
            kk++;
          }
        }
      }
    }
    PetscFree(iglobal);
    ISLocalToGlobalMappingRestoreIndices(ltogm, &ltog);
  }

  /* Free memory */
  PetscViewerDestroy(&viewer);
  VecDestroy(&local);
  VecDestroy(&global);
  DMDestroy(&da);
  PetscFinalize();
  return 0;
}

/*TEST

    test:
      args: -testorder -nox

 TEST*/