Actual source code: ex3.c

  1: static char help[] = "Reduced formulation of the mother problem of PDE-constrained optimisation.\n";

  3: /*F
  4:   We solve the mother problem

  6:   min 1/2 || y - y_d ||^2_2 + \alpha/2 || u ||^2_2

  8:   subject to

 10:           - \laplace y = u          on \Omega
 11:                      y = 0          on \Gamma

 13:   where u is in L^2 and y is in H^1_0.

 15:   We formulate the reduced problem solely in terms of the control
 16:   by using the state equation to express y in terms of u, and then
 17:   apply LMVM/BLMVM to the resulting reduced problem.

 19:   Mesh independence is achieved by configuring the Riesz map for the control
 20:   space.

  Example meshes where the Riesz map is crucial can be downloaded from
  http://people.maths.ox.ac.uk/~farrellp/meshes.tar.gz

 25:   Contributed by: Patrick Farrell <patrick.farrell@maths.ox.ac.uk>

 27:   Run with e.g.:
 28:   ./ex3 -laplace_ksp_type cg -laplace_pc_type hypre -mat_lmvm_ksp_type cg -mat_lmvm_pc_type gamg -laplace_ksp_monitor_true_residual -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-9 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5

 30:   and visualise in paraview with ../../../../petsc_gen_xdmf.py solution.h5.

 32:   Toggle the Riesz map (-use_riesz 0) to see the difference setting the Riesz maps makes.

 34:   TODO: broken for parallel runs
 35: F*/

 37: #include <petsc.h>
 38: #include <petscfe.h>
 39: #include <petscviewerhdf5.h>

 41: typedef struct {
 42:   DM           dm;
 43:   Mat          mass;
 44:   Vec          data;
 45:   Vec          state;
 46:   Vec          tmp1;
 47:   Vec          tmp2;
 48:   Vec          adjoint;
 49:   Mat          laplace;
 50:   KSP          ksp_laplace;
 51:   PetscInt     num_bc_dofs;
 52:   PetscInt    *bc_indices;
 53:   PetscScalar *bc_values;
 54:   PetscBool    use_riesz;
 55: } AppCtx;

/* CreateMesh - Create the computational mesh, either from options or from a
   FEniCS-generated HDF5 file.

   Reads two options: -use_riesz (stored into user->use_riesz) and
   -f <filename>.  Without -f, an empty DMPLEX is created and configured via
   DMSetFromOptions(); with -f, the FEniCS HDF5 layout is read by hand
   (Vecs "coordinates" and "topology" under the group /Mesh/mesh).

   Output: *dm holds the new mesh; user->use_riesz is set.

   NOTE(review): the HDF5 branch hard-codes 2D triangles (dim = 2,
   3 vertices per cell) and, per the TODO in the file header, is broken for
   parallel runs -- confirm before running on more than one rank. */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscBool flg;
  char      filename[2048];

  filename[0]     = '\0';
  user->use_riesz = PETSC_TRUE;

  /* flg records whether -f was actually given on the command line */
  PetscOptionsBegin(comm, "", "Poisson mother problem options", "DMPLEX");
  PetscOptionsBool("-use_riesz", "Use the Riesz map to achieve mesh independence", "ex3.c", user->use_riesz, &user->use_riesz, NULL);
  PetscOptionsString("-f", "filename to read", "ex3.c", filename, filename, sizeof(filename), &flg);
  PetscOptionsEnd();

  if (!flg) {
    /* No mesh file: build a default plex, shaped later by the -dm_* options
       consumed in DMSetFromOptions() below */
    DMCreate(comm, dm);
    DMSetType(*dm, DMPLEX);
  } else {
    /* TODO Eliminate this in favor of DMLoad() in new code */
#if defined(PETSC_HAVE_HDF5)
    const PetscInt vertices_per_cell = 3; /* FEniCS file stores triangles */
    PetscViewer    viewer;
    Vec            coordinates;
    Vec            topology;
    PetscInt       dim = 2, numCells;
    PetscInt       numVertices;
    PetscScalar   *coords;
    PetscScalar   *topo_f;
    PetscInt      *cells;
    PetscInt       j;
    DMLabel        label;

    /* Read in FEniCS HDF5 output */
    PetscViewerHDF5Open(comm, filename, FILE_MODE_READ, &viewer);

    /* create Vecs to read in the data from H5; the object names must match
       the dataset names inside the file */
    VecCreate(comm, &coordinates);
    PetscObjectSetName((PetscObject)coordinates, "coordinates");
    VecSetBlockSize(coordinates, dim);
    VecCreate(comm, &topology);
    PetscObjectSetName((PetscObject)topology, "topology");
    VecSetBlockSize(topology, vertices_per_cell);

    /* navigate to the right group */
    PetscViewerHDF5PushGroup(viewer, "/Mesh/mesh");

    /* Read the Vecs */
    VecLoad(coordinates, viewer);
    VecLoad(topology, viewer);

    /* do some ugly calculations: the datasets are flattened, so divide by
       the block size to recover the cell and vertex counts */
    VecGetSize(topology, &numCells);
    numCells = numCells / vertices_per_cell;
    VecGetSize(coordinates, &numVertices);
    numVertices = numVertices / dim;

    VecGetArray(coordinates, &coords);
    VecGetArray(topology, &topo_f);
    /* and now we have to convert the double representation to integers to pass over, argh */
    PetscMalloc1(numCells * vertices_per_cell, &cells);
    for (j = 0; j < numCells * vertices_per_cell; j++) cells[j] = (PetscInt)topo_f[j];

    /* Now create the DM */
    DMPlexCreateFromCellListPetsc(comm, dim, numCells, numVertices, vertices_per_cell, PETSC_TRUE, cells, dim, coords, dm);
    /* Check for flipped first cell: if its Jacobian determinant is negative,
       reverse it so that DMPlexOrient() propagates a positive orientation */
    {
      PetscReal v0[3], J[9], invJ[9], detJ;

      DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ);
      if (detJ < 0) {
        DMPlexOrientPoint(*dm, 0, -1);
        DMPlexComputeCellGeometryFEM(*dm, 0, NULL, v0, J, invJ, &detJ);
      }
    }
    DMPlexOrient(*dm);
    /* Mark the whole boundary with value 1 in the "marker" label; the BC
       dof harvesting in CreateCtx() relies on this label existing */
    DMCreateLabel(*dm, "marker");
    DMGetLabel(*dm, "marker", &label);
    DMPlexMarkBoundaryFaces(*dm, 1, label);
    DMPlexLabelComplete(*dm, label);

    PetscViewerDestroy(&viewer);
    VecRestoreArray(coordinates, &coords);
    VecRestoreArray(topology, &topo_f);
    PetscFree(cells);
    VecDestroy(&coordinates);
    VecDestroy(&topology);
#else
    SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Reconfigure PETSc with --download-hdf5");
#endif
  }
  DMSetFromOptions(*dm);
  DMViewFromOptions(*dm, NULL, "-dm_view");
  return 0;
}

153: void mass_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g0[])
154: {
155:   g0[0] = 1.0;
156: }

158: void laplace_kernel(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
159: {
160:   PetscInt d;
161:   for (d = 0; d < dim; ++d) g3[d * dim + d] = 1.0;
162: }

164: /* data we seek to match */
165: PetscErrorCode data_kernel(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *y, void *ctx)
166: {
167:   *y = 1.0 / (2 * PETSC_PI * PETSC_PI) * PetscSinReal(PETSC_PI * x[0]) * PetscSinReal(PETSC_PI * x[1]);
168:   /* the associated control is sin(pi*x[0])*sin(pi*x[1]) */
169:   return 0;
170: }
171: PetscErrorCode zero(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
172: {
173:   *u = 0.0;
174:   return 0;
175: }

/* CreateCtx - Assemble everything the reduced-space objective needs.

   Populates user with: the target data y_d (user->data), the mass matrix
   M (user->mass), the Dirichlet-modified Laplace operator (user->laplace)
   and its KSP (user->ksp_laplace), the list of boundary dof indices/values,
   and four scratch vectors.  Pair with DestroyCtx().

   Input:  dm - the mesh from CreateMesh(); user->dm keeps a borrowed
                reference (dm is destroyed by main(), not DestroyCtx()).

   NOTE(review): relies on the "marker" label (value 1) created in
   CreateMesh() to identify boundary points. */
PetscErrorCode CreateCtx(DM dm, AppCtx *user)
{
  DM              dm_mass;
  DM              dm_laplace;
  PetscDS         prob_mass;
  PetscDS         prob_laplace;
  PetscFE         fe;
  DMLabel         label;
  PetscSection    section;
  PetscInt        n, k, p, d;
  PetscInt        dof, off;
  IS              is;
  const PetscInt *points;
  const PetscInt  dim = 2;
  PetscErrorCode (**wtf)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);

  /* make the data we seek to match */
  PetscFECreateDefault(PetscObjectComm((PetscObject)dm), dim, 1, PETSC_TRUE, NULL, 4, &fe);

  DMSetField(dm, 0, NULL, (PetscObject)fe);
  DMCreateDS(dm);
  DMCreateGlobalVector(dm, &user->data);

  /* ugh, this is hideous */
  /* y_d = interpolate(Expression("sin(x[0]) + .."), V) */
  /* DMProjectFunction wants an array of function pointers, one per field */
  PetscMalloc(1 * sizeof(void (*)(const PetscReal[], PetscScalar *, void *)), &wtf);
  wtf[0] = data_kernel;
  DMProjectFunction(dm, 0.0, wtf, NULL, INSERT_VALUES, user->data);
  PetscFree(wtf);

  /* assemble(inner(u, v)*dx), almost: build M on a clone of dm so the
     mass and Laplace discretisations do not interfere */
  DMClone(dm, &dm_mass);
  DMCopyDisc(dm, dm_mass);
  DMSetNumFields(dm_mass, 1);
  DMPlexCopyCoordinates(dm, dm_mass); /* why do I have to do this separately? */
  DMGetDS(dm_mass, &prob_mass);
  PetscDSSetJacobian(prob_mass, 0, 0, mass_kernel, NULL, NULL, NULL);
  PetscDSSetDiscretization(prob_mass, 0, (PetscObject)fe);
  DMCreateMatrix(dm_mass, &user->mass);
  /* user->data here is only a placeholder base vector for the FEM assembly */
  DMPlexSNESComputeJacobianFEM(dm_mass, user->data, user->mass, user->mass, NULL);
  MatSetOption(user->mass, MAT_SYMMETRIC, PETSC_TRUE);
  DMDestroy(&dm_mass);

  /* inner(grad(u), grad(v))*dx with homogeneous Dirichlet boundary conditions */
  DMClone(dm, &dm_laplace);
  DMCopyDisc(dm, dm_laplace);
  DMSetNumFields(dm_laplace, 1);
  DMPlexCopyCoordinates(dm, dm_laplace);
  DMGetDS(dm_laplace, &prob_laplace);
  PetscDSSetJacobian(prob_laplace, 0, 0, NULL, NULL, NULL, laplace_kernel);
  PetscDSSetDiscretization(prob_laplace, 0, (PetscObject)fe);
  DMCreateMatrix(dm_laplace, &user->laplace);
  DMPlexSNESComputeJacobianFEM(dm_laplace, user->data, user->laplace, user->laplace, NULL);

  /* Harvest the dof indices of every point in boundary stratum 1: first
     count them (num_bc_dofs), then record their section offsets */
  DMGetLabel(dm_laplace, "marker", &label);
  DMGetLocalSection(dm_laplace, &section);
  DMLabelGetStratumSize(label, 1, &n);
  DMLabelGetStratumIS(label, 1, &is);
  ISGetIndices(is, &points);
  user->num_bc_dofs = 0;
  for (p = 0; p < n; ++p) {
    PetscSectionGetDof(section, points[p], &dof);
    user->num_bc_dofs += dof;
  }
  PetscMalloc1(user->num_bc_dofs, &user->bc_indices);
  for (p = 0, k = 0; p < n; ++p) {
    PetscSectionGetDof(section, points[p], &dof);
    PetscSectionGetOffset(section, points[p], &off);
    for (d = 0; d < dof; ++d) user->bc_indices[k++] = off + d;
  }
  ISRestoreIndices(is, &points);
  ISDestroy(&is);
  DMDestroy(&dm_laplace);

  /* This is how I handle boundary conditions. I can't figure out how to get
     plex to play with the way I want to impose the BCs. This loses symmetry,
     but not in a disastrous way. If someone can improve it, please do! */
  MatZeroRows(user->laplace, user->num_bc_dofs, user->bc_indices, 1.0, NULL, NULL);
  /* bc_values is calloc'd, i.e. all zeros: homogeneous Dirichlet data */
  PetscCalloc1(user->num_bc_dofs, &user->bc_values);

  /* also create the KSP for solving the Laplace system */
  KSPCreate(PETSC_COMM_WORLD, &user->ksp_laplace);
  KSPSetOperators(user->ksp_laplace, user->laplace, user->laplace);
  KSPSetOptionsPrefix(user->ksp_laplace, "laplace_");
  KSPSetFromOptions(user->ksp_laplace);

  /* A bit of setting up the user context */
  user->dm = dm;
  VecDuplicate(user->data, &user->state);
  VecDuplicate(user->data, &user->adjoint);
  VecDuplicate(user->data, &user->tmp1);
  VecDuplicate(user->data, &user->tmp2);

  PetscFEDestroy(&fe);

  return 0;
}

277: PetscErrorCode DestroyCtx(AppCtx *user)
278: {

281:   MatDestroy(&user->mass);
282:   MatDestroy(&user->laplace);
283:   KSPDestroy(&user->ksp_laplace);
284:   VecDestroy(&user->data);
285:   VecDestroy(&user->state);
286:   VecDestroy(&user->adjoint);
287:   VecDestroy(&user->tmp1);
288:   VecDestroy(&user->tmp2);
289:   PetscFree(user->bc_indices);
290:   PetscFree(user->bc_values);

292:   return 0;
293: }

/* ReducedFunctionGradient - TAO callback: evaluate the reduced functional
   and its gradient at the control u.

     J(u) = 1/2 (y - y_d)^T M (y - y_d) + alpha/2 u^T M u

   where the state solves the (BC-modified) system  A y = M u  and the
   adjoint solves  A lambda = M (y - y_d).  The returned gradient is the
   dual-space representation  g = alpha M u + M lambda.

   NOTE(review): per the comment in CreateCtx(), MatZeroRows made A slightly
   nonsymmetric, so reusing ksp_laplace for the adjoint solve (instead of a
   transpose solve) is a deliberate approximation. */
PetscErrorCode ReducedFunctionGradient(Tao tao, Vec u, PetscReal *func, Vec g, void *userv)
{
  AppCtx         *user  = (AppCtx *)userv;
  const PetscReal alpha = 1.0e-6; /* regularisation parameter */
  PetscReal       inner;

  /* tmp1 = M u */
  MatMult(user->mass, u, user->tmp1);
  VecDot(u, user->tmp1, &inner); /* regularisation contribution to */
  *func = alpha * 0.5 * inner;              /* the functional                 */

  VecSet(g, 0.0);
  VecAXPY(g, alpha, user->tmp1); /* regularisation contribution to the gradient */

  /* Now compute the forward state.  tmp1 = M u doubles as the RHS; its
     boundary rows are overwritten with bc_values (all zeros) to match the
     identity rows MatZeroRows left in the operator. */
  VecSetValues(user->tmp1, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES);
  VecAssemblyBegin(user->tmp1);
  VecAssemblyEnd(user->tmp1);
  KSPSolve(user->ksp_laplace, user->tmp1, user->state); /* forward solve */

  /* Now compute the adjoint state also.  tmp1 = y - y_d, tmp2 = M (y - y_d). */
  VecCopy(user->state, user->tmp1);
  VecAXPY(user->tmp1, -1.0, user->data);
  MatMult(user->mass, user->tmp1, user->tmp2);
  VecDot(user->tmp1, user->tmp2, &inner); /* misfit contribution to */
  *func += 0.5 * inner;                              /* the functional         */

  /* Same boundary treatment for the adjoint RHS */
  VecSetValues(user->tmp2, user->num_bc_dofs, user->bc_indices, user->bc_values, INSERT_VALUES);
  VecAssemblyBegin(user->tmp2);
  VecAssemblyEnd(user->tmp2);
  KSPSolve(user->ksp_laplace, user->tmp2, user->adjoint); /* adjoint solve */

  /* And bring it home with the gradient: g += M lambda. */
  MatMult(user->mass, user->adjoint, user->tmp1);
  VecAXPY(g, 1.0, user->tmp1); /* adjoint contribution to the gradient */

  return 0;
}

/* main - Solve the reduced mother problem with TAO's bound-constrained
   LMVM method (BLMVM), optionally using the mass matrix as the Riesz map
   to obtain mesh-independent convergence (-use_riesz). */
int main(int argc, char **argv)
{
  DM     dm;
  Tao    tao;
  Vec    u, lb, ub; /* control iterate and its bound constraints */
  AppCtx user;

  PetscInitialize(&argc, &argv, NULL, help);
  CreateMesh(PETSC_COMM_WORLD, &user, &dm);
  CreateCtx(dm, &user);

  /* initial guess u = 0 and simple box bounds on the control */
  DMCreateGlobalVector(dm, &u);
  VecSet(u, 0.0);
  VecDuplicate(u, &lb);
  VecDuplicate(u, &ub);
  VecSet(lb, 0.0); /* satisfied at the minimum anyway */
  VecSet(ub, 0.8); /* a nontrivial upper bound */

  TaoCreate(PETSC_COMM_WORLD, &tao);
  TaoSetSolution(tao, u);
  TaoSetObjectiveAndGradient(tao, NULL, ReducedFunctionGradient, &user);
  TaoSetVariableBounds(tao, lb, ub);
  TaoSetType(tao, TAOBLMVM);
  TaoSetFromOptions(tao);

  /* must come after TaoSetFromOptions so the LMVM matrix exists */
  if (user.use_riesz) {
    TaoLMVMSetH0(tao, user.mass); /* crucial for mesh independence */
    TaoSetGradientNorm(tao, user.mass);
  }

  TaoSolve(tao);

  DMViewFromOptions(dm, NULL, "-dm_view");
  VecViewFromOptions(u, NULL, "-sol_view");

  TaoDestroy(&tao);
  DMDestroy(&dm);
  VecDestroy(&u);
  VecDestroy(&lb);
  VecDestroy(&ub);
  DestroyCtx(&user);
  PetscFinalize();
  return 0;
}

381: /*TEST

383:     build:
384:       requires: !complex !single

386:     test:
387:       requires: hdf5 double datafilespath !defined(PETSC_USE_64BIT_INDICES) hypre !cuda
388:       args: -laplace_ksp_type cg -laplace_pc_type hypre -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 5 -mat_lmvm_ksp_type cg -mat_lmvm_ksp_rtol 1e-5 -mat_lmvm_pc_type gamg -tao_monitor -petscspace_degree 1 -tao_converged_reason -tao_gatol 1.0e-6 -dm_view hdf5:solution.h5 -sol_view hdf5:solution.h5::append -use_riesz 1 -f $DATAFILESPATH/meshes/mesh-1.h5
389:       filter: sed -e "s/-nan/nan/g"

391:     test:
392:       suffix: guess_pod
393:       requires: double triangle
394:       args: -laplace_ksp_type cg -laplace_pc_type gamg -laplace_ksp_monitor_true_residual -laplace_ksp_max_it 8 -laplace_pc_gamg_esteig_ksp_type cg -laplace_pc_gamg_esteig_ksp_max_it 5 -laplace_mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.0 -laplace_ksp_converged_reason -mat_lmvm_ksp_type cg -mat_lmvm_ksp_rtol 1e-5 -mat_lmvm_pc_type gamg -mat_lmvm_pc_gamg_esteig_ksp_type cg -mat_lmvm_pc_gamg_esteig_ksp_max_it 3 -tao_monitor -petscspace_degree 1 -tao_converged_reason -dm_refine 0 -laplace_ksp_guess_type pod -tao_gatol 1e-6 -laplace_pc_gamg_aggressive_coarsening 0
395:       filter: sed -e "s/-nan/nan/g" -e "s/-NaN/nan/g" -e "s/NaN/nan/g" -e "s/CONVERGED_RTOL iterations 9/CONVERGED_RTOL iterations 8/g" -e "s/CONVERGED_RTOL iterations 4/CONVERGED_RTOL iterations 3/g"

397: TEST*/