Actual source code: telescope.c

#include <petsc/private/petscimpl.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petscksp.h>
#include <petscdm.h>
#include "../src/ksp/pc/impls/telescope/telescope.h"

static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@inproceedings{MaySananRuppKnepleySmith2016,\n"
                               "  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
                               "  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
                               "  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
                               "  series    = {PASC '16},\n"
                               "  isbn      = {978-1-4503-4126-4},\n"
                               "  location  = {Lausanne, Switzerland},\n"
                               "  pages     = {5:1--5:12},\n"
                               "  articleno = {5},\n"
                               "  numpages  = {12},\n"
                               "  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
                               "  doi       = {10.1145/2929908.2929913},\n"
                               "  acmid     = {2929913},\n"
                               "  publisher = {ACM},\n"
                               "  address   = {New York, NY, USA},\n"
                               "  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
                               "  year      = {2016}\n"
                               "}\n";

/*
 default setup mode

 [1a] scatter to (FORWARD)
 x(comm) -> xtmp(comm)
 [1b] local copy (to) ranks with color = 0
 xred(subcomm) <- xtmp

 [2] solve on sub KSP to obtain yred(subcomm)

 [3a] local copy (from) ranks with color = 0
 yred(subcomm) -> xtmp
 [3b] scatter from (REVERSE)
 xtmp(comm) -> y(comm)
*/
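
/*
 The construction phase of the steps above (the index set, scatter and sub-communicator vectors)
 is implemented by PCTelescopeSetUp_default(); the apply phase (scatter, local copies and the
 sub KSP solve) is implemented by PCApply_Telescope() below.
*/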

/*
  Collective [comm_f]
  Notes:
   * Using comm_f = MPI_COMM_NULL will result in an error.
   * Using comm_c = MPI_COMM_NULL is valid. If all instances of comm_c are NULL, the subcomm is not valid.
   * If any non-NULL comm_c communicator cannot map any of its ranks to comm_f, the subcomm is not valid.
*/
PetscErrorCode PCTelescopeTestValidSubcomm(MPI_Comm comm_f, MPI_Comm comm_c, PetscBool *isvalid)
{
  PetscInt     valid = 1;
  MPI_Group    group_f, group_c;
  PetscMPIInt  count, k, size_f = 0, size_c = 0, size_c_sum = 0;
  PetscMPIInt *ranks_f, *ranks_c;

  MPI_Comm_group(comm_f, &group_f);
  if (comm_c != MPI_COMM_NULL) MPI_Comm_group(comm_c, &group_c);

  MPI_Comm_size(comm_f, &size_f);
  if (comm_c != MPI_COMM_NULL) MPI_Comm_size(comm_c, &size_c);

  /* check not all comm_c's are NULL */
  size_c_sum = size_c;
  MPI_Allreduce(MPI_IN_PLACE, &size_c_sum, 1, MPI_INT, MPI_SUM, comm_f);
  if (size_c_sum == 0) valid = 0;

  /* check we can map at least 1 rank in comm_c to comm_f */
  PetscMalloc1(size_f, &ranks_f);
  PetscMalloc1(size_c, &ranks_c);
  for (k = 0; k < size_f; k++) ranks_f[k] = MPI_UNDEFINED;
  for (k = 0; k < size_c; k++) ranks_c[k] = k;

  /*
   MPI_Group_translate_ranks() returns a non-zero exit code if any rank cannot be translated.
   I do not want the code to terminate immediately if this occurs, rather I want to throw
   the error later (during PCSetUp_Telescope()) via SETERRQ() with a message indicating
   that comm_c is not a valid sub-communicator.
   Hence I purposefully do not call PetscCall() after MPI_Group_translate_ranks().
  */
  count = 0;
  if (comm_c != MPI_COMM_NULL) {
    (void)MPI_Group_translate_ranks(group_c, size_c, ranks_c, group_f, ranks_f);
    for (k = 0; k < size_f; k++) {
      if (ranks_f[k] == MPI_UNDEFINED) count++;
    }
  }
  if (count == size_f) valid = 0;

  MPI_Allreduce(MPI_IN_PLACE, &valid, 1, MPIU_INT, MPI_MIN, comm_f);
  if (valid == 1) *isvalid = PETSC_TRUE;
  else *isvalid = PETSC_FALSE;

  PetscFree(ranks_f);
  PetscFree(ranks_c);
  MPI_Group_free(&group_f);
  if (comm_c != MPI_COMM_NULL) MPI_Group_free(&group_c);
  return 0;
}
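
/*
 Illustrative only (not part of this file): a coarse communicator built from the even ranks of a
 fine communicator comm_f via MPI_Comm_split() is an example of a valid sub-communicator; the
 variable names below are placeholders.

   MPI_Comm    comm_c;
   PetscMPIInt rank;
   PetscBool   ok;

   MPI_Comm_rank(comm_f, &rank);
   MPI_Comm_split(comm_f, (rank % 2 == 0) ? 0 : MPI_UNDEFINED, rank, &comm_c);
   PCTelescopeTestValidSubcomm(comm_f, comm_c, &ok);

 Here comm_c == MPI_COMM_NULL on the odd ranks (the MPI_UNDEFINED color), which is permitted,
 and ok is PETSC_TRUE on every rank of comm_f.
*/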

DM private_PCTelescopeGetSubDM(PC_Telescope sred)
{
  DM subdm = NULL;

  if (!PCTelescope_isActiveRank(sred)) {
    subdm = NULL;
  } else {
    switch (sred->sr_type) {
    case TELESCOPE_DEFAULT:
      subdm = NULL;
      break;
    case TELESCOPE_DMDA:
      subdm = ((PC_Telescope_DMDACtx *)sred->dm_ctx)->dmrepart;
      break;
    case TELESCOPE_DMPLEX:
      subdm = NULL;
      break;
    case TELESCOPE_COARSEDM:
      if (sred->ksp) KSPGetDM(sred->ksp, &subdm);
      break;
    }
  }
  return (subdm);
}

PetscErrorCode PCTelescopeSetUp_default(PC pc, PC_Telescope sred)
{
  PetscInt   m, M, bs, st, ed;
  Vec        x, xred, yred, xtmp;
  Mat        B;
  MPI_Comm   comm, subcomm;
  VecScatter scatter;
  IS         isin;
  VecType    vectype;

  PetscInfo(pc, "PCTelescope: setup (default)\n");
  comm    = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);

  PCGetOperators(pc, NULL, &B);
  MatGetSize(B, &M, NULL);
  MatGetBlockSize(B, &bs);
  MatCreateVecs(B, &x, NULL);
  MatGetVecType(B, &vectype);

  xred = NULL;
  m    = 0;
  if (PCTelescope_isActiveRank(sred)) {
    VecCreate(subcomm, &xred);
    VecSetSizes(xred, PETSC_DECIDE, M);
    VecSetBlockSize(xred, bs);
    VecSetType(xred, vectype); /* Use the preconditioner matrix's vectype by default */
    VecSetFromOptions(xred);
    VecGetLocalSize(xred, &m);
  }

  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) VecDuplicate(xred, &yred);

  VecCreate(comm, &xtmp);
  VecSetSizes(xtmp, m, PETSC_DECIDE);
  VecSetBlockSize(xtmp, bs);
  VecSetType(xtmp, vectype);

  if (PCTelescope_isActiveRank(sred)) {
    VecGetOwnershipRange(xred, &st, &ed);
    ISCreateStride(comm, (ed - st), st, 1, &isin);
  } else {
    VecGetOwnershipRange(x, &st, &ed);
    ISCreateStride(comm, 0, st, 1, &isin);
  }
  ISSetBlockSize(isin, bs);

  VecScatterCreate(x, isin, xtmp, NULL, &scatter);

  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xred    = xred;
  sred->yred    = yred;
  sred->xtmp    = xtmp;
  VecDestroy(&x);
  return 0;
}

PetscErrorCode PCTelescopeMatCreate_default(PC pc, PC_Telescope sred, MatReuse reuse, Mat *A)
{
  MPI_Comm comm, subcomm;
  Mat      Bred, B;
  PetscInt nr, nc, bs;
  IS       isrow, iscol;
  Mat      Blocal, *_Blocal;

  PetscInfo(pc, "PCTelescope: updating the redundant preconditioned operator (default)\n");
  PetscObjectGetComm((PetscObject)pc, &comm);
  subcomm = PetscSubcommChild(sred->psubcomm);
  PCGetOperators(pc, NULL, &B);
  MatGetSize(B, &nr, &nc);
  isrow = sred->isin;
  ISCreateStride(PETSC_COMM_SELF, nc, 0, 1, &iscol);
  ISSetIdentity(iscol);
  MatGetBlockSizes(B, NULL, &bs);
  ISSetBlockSize(iscol, bs);
  MatSetOption(B, MAT_SUBMAT_SINGLEIS, PETSC_TRUE);
  MatCreateSubMatrices(B, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &_Blocal);
  Blocal = *_Blocal;
  PetscFree(_Blocal);
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) Bred = *A;

    MatGetSize(Blocal, &mm, NULL);
    MatCreateMPIMatConcatenateSeqMat(subcomm, Blocal, mm, reuse, &Bred);
  }
  *A = Bred;
  ISDestroy(&iscol);
  MatDestroy(&Blocal);
  return 0;
}

static PetscErrorCode PCTelescopeSubNullSpaceCreate_Telescope(PC pc, PC_Telescope sred, MatNullSpace nullspace, MatNullSpace *sub_nullspace)
{
  PetscBool  has_const;
  const Vec *vecs;
  Vec       *sub_vecs = NULL;
  PetscInt   i, k, n = 0;
  MPI_Comm   subcomm;

  subcomm = PetscSubcommChild(sred->psubcomm);
  MatNullSpaceGetVecs(nullspace, &has_const, &n, &vecs);

  if (PCTelescope_isActiveRank(sred)) {
    if (n) VecDuplicateVecs(sred->xred, n, &sub_vecs);
  }

  /* copy entries */
  for (k = 0; k < n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt           st, ed;

    /* pull in vector x->xtmp */
    VecScatterBegin(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd(sred->scatter, vecs[k], sred->xtmp, INSERT_VALUES, SCATTER_FORWARD);
    if (sub_vecs) {
      /* copy vector entries into xred */
      VecGetArrayRead(sred->xtmp, &x_array);
      if (sub_vecs[k]) {
        VecGetOwnershipRange(sub_vecs[k], &st, &ed);
        VecGetArray(sub_vecs[k], &LA_sub_vec);
        for (i = 0; i < ed - st; i++) LA_sub_vec[i] = x_array[i];
        VecRestoreArray(sub_vecs[k], &LA_sub_vec);
      }
      VecRestoreArrayRead(sred->xtmp, &x_array);
    }
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    MatNullSpaceCreate(subcomm, has_const, n, sub_vecs, sub_nullspace);
    VecDestroyVecs(n, &sub_vecs);
  }
  return 0;
}

static PetscErrorCode PCTelescopeMatNullSpaceCreate_default(PC pc, PC_Telescope sred, Mat sub_mat)
{
  Mat B;

  PCGetOperators(pc, NULL, &B);
  /* Propagate the nullspace if it exists */
  {
    MatNullSpace nullspace, sub_nullspace;
    MatGetNullSpace(B, &nullspace);
    if (nullspace) {
      PetscInfo(pc, "PCTelescope: generating nullspace (default)\n");
      PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nullspace, &sub_nullspace);
      if (PCTelescope_isActiveRank(sred)) {
        MatSetNullSpace(sub_mat, sub_nullspace);
        MatNullSpaceDestroy(&sub_nullspace);
      }
    }
  }
  /* Propagate the near nullspace if it exists */
  {
    MatNullSpace nearnullspace, sub_nearnullspace;
    MatGetNearNullSpace(B, &nearnullspace);
    if (nearnullspace) {
      PetscInfo(pc, "PCTelescope: generating near nullspace (default)\n");
      PCTelescopeSubNullSpaceCreate_Telescope(pc, sred, nearnullspace, &sub_nearnullspace);
      if (PCTelescope_isActiveRank(sred)) {
        MatSetNearNullSpace(sub_mat, sub_nearnullspace);
        MatNullSpaceDestroy(&sub_nearnullspace);
      }
    }
  }
  return 0;
}

static PetscErrorCode PCView_Telescope(PC pc, PetscViewer viewer)
{
  PC_Telescope sred = (PC_Telescope)pc->data;
  PetscBool    iascii, isstring;
  PetscViewer  subviewer;

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring);
  if (iascii) {
    {
      MPI_Comm    comm, subcomm;
      PetscMPIInt comm_size, subcomm_size;
      DM          dm = NULL, subdm = NULL;

      PCGetDM(pc, &dm);
      subdm = private_PCTelescopeGetSubDM(sred);

      if (sred->psubcomm) {
        comm    = PetscSubcommParent(sred->psubcomm);
        subcomm = PetscSubcommChild(sred->psubcomm);
        MPI_Comm_size(comm, &comm_size);
        MPI_Comm_size(subcomm, &subcomm_size);

        PetscViewerASCIIPushTab(viewer);
        PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent comm size reduction factor = %" PetscInt_FMT "\n", sred->redfactor);
        PetscViewerASCIIPrintf(viewer, "petsc subcomm: parent_size = %d , subcomm_size = %d\n", (int)comm_size, (int)subcomm_size);
        switch (sred->subcommtype) {
        case PETSC_SUBCOMM_INTERLACED:
          PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]);
          break;
        case PETSC_SUBCOMM_CONTIGUOUS:
          PetscViewerASCIIPrintf(viewer, "petsc subcomm: type = %s\n", PetscSubcommTypes[sred->subcommtype]);
          break;
        default:
          SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "General subcomm type not supported by PCTelescope");
        }
        PetscViewerASCIIPopTab(viewer);
      } else {
        PetscObjectGetComm((PetscObject)pc, &comm);
        subcomm = sred->subcomm;
        if (!PCTelescope_isActiveRank(sred)) subcomm = PETSC_COMM_SELF;

        PetscViewerASCIIPushTab(viewer);
        PetscViewerASCIIPrintf(viewer, "subcomm: using user provided sub-communicator\n");
        PetscViewerASCIIPopTab(viewer);
      }

      PetscViewerGetSubViewer(viewer, subcomm, &subviewer);
      if (PCTelescope_isActiveRank(sred)) {
        PetscViewerASCIIPushTab(subviewer);

        if (dm && sred->ignore_dm) PetscViewerASCIIPrintf(subviewer, "ignoring DM\n");
        if (sred->ignore_kspcomputeoperators) PetscViewerASCIIPrintf(subviewer, "ignoring KSPComputeOperators\n");
        switch (sred->sr_type) {
        case TELESCOPE_DEFAULT:
          PetscViewerASCIIPrintf(subviewer, "setup type: default\n");
          break;
        case TELESCOPE_DMDA:
          PetscViewerASCIIPrintf(subviewer, "setup type: DMDA auto-repartitioning\n");
          DMView_DA_Short(subdm, subviewer);
          break;
        case TELESCOPE_DMPLEX:
          PetscViewerASCIIPrintf(subviewer, "setup type: DMPLEX auto-repartitioning\n");
          break;
        case TELESCOPE_COARSEDM:
          PetscViewerASCIIPrintf(subviewer, "setup type: coarse DM\n");
          break;
        }

        if (dm) {
          PetscObject obj = (PetscObject)dm;
          PetscViewerASCIIPrintf(subviewer, "Parent DM object:");
          PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE);
          if (obj->type_name) { PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name); }
          if (obj->name) { PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name); }
          if (obj->prefix) PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix);
          PetscViewerASCIIPrintf(subviewer, "\n");
          PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE);
        } else {
          PetscViewerASCIIPrintf(subviewer, "Parent DM object: NULL\n");
        }
        if (subdm) {
          PetscObject obj = (PetscObject)subdm;
          PetscViewerASCIIPrintf(subviewer, "Sub DM object:");
          PetscViewerASCIIUseTabs(subviewer, PETSC_FALSE);
          if (obj->type_name) { PetscViewerASCIIPrintf(subviewer, " type = %s;", obj->type_name); }
          if (obj->name) { PetscViewerASCIIPrintf(subviewer, " name = %s;", obj->name); }
          if (obj->prefix) PetscViewerASCIIPrintf(subviewer, " prefix = %s", obj->prefix);
          PetscViewerASCIIPrintf(subviewer, "\n");
          PetscViewerASCIIUseTabs(subviewer, PETSC_TRUE);
        } else {
          PetscViewerASCIIPrintf(subviewer, "Sub DM object: NULL\n");
        }

        KSPView(sred->ksp, subviewer);
        PetscViewerASCIIPopTab(subviewer);
      }
      PetscViewerRestoreSubViewer(viewer, subcomm, &subviewer);
    }
  }
  return 0;
}

static PetscErrorCode PCSetUp_Telescope(PC pc)
{
  PC_Telescope    sred = (PC_Telescope)pc->data;
  MPI_Comm        comm, subcomm = MPI_COMM_NULL;
  PCTelescopeType sr_type;

  PetscObjectGetComm((PetscObject)pc, &comm);

  /* Determine type of setup/update */
  if (!pc->setupcalled) {
    PetscBool has_dm, same;
    DM        dm;

    sr_type = TELESCOPE_DEFAULT;
    has_dm  = PETSC_FALSE;
    PCGetDM(pc, &dm);
    if (dm) has_dm = PETSC_TRUE;
    if (has_dm) {
      /* check for dmda */
      PetscObjectTypeCompare((PetscObject)dm, DMDA, &same);
      if (same) {
        PetscInfo(pc, "PCTelescope: found DMDA\n");
        sr_type = TELESCOPE_DMDA;
      }
      /* check for dmplex */
      PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &same);
      if (same) {
        PetscInfo(pc, "PCTelescope: found DMPLEX\n");
        sr_type = TELESCOPE_DMPLEX;
      }

      if (sred->use_coarse_dm) {
        PetscInfo(pc, "PCTelescope: using coarse DM\n");
        sr_type = TELESCOPE_COARSEDM;
      }

      if (sred->ignore_dm) {
        PetscInfo(pc, "PCTelescope: ignoring DM\n");
        sr_type = TELESCOPE_DEFAULT;
      }
    }
    sred->sr_type = sr_type;
  } else {
    sr_type = sred->sr_type;
  }

  /* set function pointers for repartition setup, matrix creation/update, matrix (near) nullspace, and reset functionality */
  switch (sr_type) {
  case TELESCOPE_DEFAULT:
    sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
    sred->pctelescope_reset_type              = NULL;
    break;
  case TELESCOPE_DMDA:
    pc->ops->apply                            = PCApply_Telescope_dmda;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_dmda;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_dmda;
    sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_dmda;
    sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_dmda;
    sred->pctelescope_reset_type              = PCReset_Telescope_dmda;
    break;
  case TELESCOPE_DMPLEX:
    SETERRQ(comm, PETSC_ERR_SUP, "Support for DMPLEX is currently not available");
  case TELESCOPE_COARSEDM:
    pc->ops->apply                            = PCApply_Telescope_CoarseDM;
    pc->ops->applyrichardson                  = PCApplyRichardson_Telescope_CoarseDM;
    sred->pctelescope_setup_type              = PCTelescopeSetUp_CoarseDM;
    sred->pctelescope_matcreate_type          = NULL;
    sred->pctelescope_matnullspacecreate_type = NULL; /* PCTelescopeMatNullSpaceCreate_CoarseDM; */
    sred->pctelescope_reset_type              = PCReset_Telescope_CoarseDM;
    break;
  default:
    SETERRQ(comm, PETSC_ERR_SUP, "Support only provided for: repartitioning an operator; repartitioning a DMDA; or using a coarse DM");
  }

  /* subcomm definition */
  if (!pc->setupcalled) {
    if ((sr_type == TELESCOPE_DEFAULT) || (sr_type == TELESCOPE_DMDA)) {
      if (!sred->psubcomm) {
        PetscSubcommCreate(comm, &sred->psubcomm);
        PetscSubcommSetNumber(sred->psubcomm, sred->redfactor);
        PetscSubcommSetType(sred->psubcomm, sred->subcommtype);
        sred->subcomm = PetscSubcommChild(sred->psubcomm);
      }
    } else { /* query PC for DM, check communicators */
      DM          dm, dm_coarse_partition          = NULL;
      MPI_Comm    comm_fine, comm_coarse_partition = MPI_COMM_NULL;
      PetscMPIInt csize_fine = 0, csize_coarse_partition = 0, cs[2], csg[2], cnt = 0;
      PetscBool   isvalidsubcomm;

      PCGetDM(pc, &dm);
      comm_fine = PetscObjectComm((PetscObject)dm);
      DMGetCoarseDM(dm, &dm_coarse_partition);
      if (dm_coarse_partition) cnt = 1;
      MPI_Allreduce(MPI_IN_PLACE, &cnt, 1, MPI_INT, MPI_SUM, comm_fine);
      if (cnt == 0) SETERRQ(comm_fine, PETSC_ERR_SUP, "Zero instances of a coarse DM were found");

      MPI_Comm_size(comm_fine, &csize_fine);
      if (dm_coarse_partition) {
        comm_coarse_partition = PetscObjectComm((PetscObject)dm_coarse_partition);
        MPI_Comm_size(comm_coarse_partition, &csize_coarse_partition);
      }

      cs[0] = csize_fine;
      cs[1] = csize_coarse_partition;
      MPI_Allreduce(cs, csg, 2, MPI_INT, MPI_MAX, comm_fine);
      if (csg[0] == csg[1]) SETERRQ(comm_fine, PETSC_ERR_SUP, "Coarse DM uses the same size communicator as the parent DM attached to the PC");

      PCTelescopeTestValidSubcomm(comm_fine, comm_coarse_partition, &isvalidsubcomm);
      if (!isvalidsubcomm) SETERRQ(comm_fine, PETSC_ERR_SUP, "Coarse DM communicator is not a valid sub-communicator of the parent DM communicator");
      sred->subcomm = comm_coarse_partition;
    }
  }
  subcomm = sred->subcomm;

  /* internal KSP */
  if (!pc->setupcalled) {
    const char *prefix;

    if (PCTelescope_isActiveRank(sred)) {
      KSPCreate(subcomm, &sred->ksp);
      KSPSetErrorIfNotConverged(sred->ksp, pc->erroriffailure);
      PetscObjectIncrementTabLevel((PetscObject)sred->ksp, (PetscObject)pc, 1);
      KSPSetType(sred->ksp, KSPPREONLY);
      PCGetOptionsPrefix(pc, &prefix);
      KSPSetOptionsPrefix(sred->ksp, prefix);
      KSPAppendOptionsPrefix(sred->ksp, "telescope_");
    }
  }

  /* setup */
  if (!pc->setupcalled && sred->pctelescope_setup_type) sred->pctelescope_setup_type(pc, sred);
  /* update */
  if (!pc->setupcalled) {
    if (sred->pctelescope_matcreate_type) sred->pctelescope_matcreate_type(pc, sred, MAT_INITIAL_MATRIX, &sred->Bred);
    if (sred->pctelescope_matnullspacecreate_type) sred->pctelescope_matnullspacecreate_type(pc, sred, sred->Bred);
  } else {
    if (sred->pctelescope_matcreate_type) sred->pctelescope_matcreate_type(pc, sred, MAT_REUSE_MATRIX, &sred->Bred);
  }

  /* common - no construction */
  if (PCTelescope_isActiveRank(sred)) {
    KSPSetOperators(sred->ksp, sred->Bred, sred->Bred);
    if (pc->setfromoptionscalled && !pc->setupcalled) KSPSetFromOptions(sred->ksp);
  }
  return 0;
}

static PetscErrorCode PCApply_Telescope(PC pc, Vec x, Vec y)
{
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, xred, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  PetscScalar       *array;
  const PetscScalar *x_array;

  PetscCitationsRegister(citation, &cited);

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;

  /* pull in vector x->xtmp */
  VecScatterBegin(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD);
  VecScatterEnd(scatter, x, xtmp, INSERT_VALUES, SCATTER_FORWARD);

  /* copy vector entries into xred */
  VecGetArrayRead(xtmp, &x_array);
  if (xred) {
    PetscScalar *LA_xred;
    VecGetOwnershipRange(xred, &st, &ed);
    VecGetArray(xred, &LA_xred);
    for (i = 0; i < ed - st; i++) LA_xred[i] = x_array[i];
    VecRestoreArray(xred, &LA_xred);
  }
  VecRestoreArrayRead(xtmp, &x_array);
  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    KSPSolve(sred->ksp, xred, yred);
    KSPCheckSolve(sred->ksp, pc, yred);
  }
  /* return vector */
  VecGetArray(xtmp, &array);
  if (yred) {
    const PetscScalar *LA_yred;
    VecGetOwnershipRange(yred, &st, &ed);
    VecGetArrayRead(yred, &LA_yred);
    for (i = 0; i < ed - st; i++) array[i] = LA_yred[i];
    VecRestoreArrayRead(yred, &LA_yred);
  }
  VecRestoreArray(xtmp, &array);
  VecScatterBegin(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE);
  VecScatterEnd(scatter, xtmp, y, INSERT_VALUES, SCATTER_REVERSE);
  return 0;
}

static PetscErrorCode PCApplyRichardson_Telescope(PC pc, Vec x, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_Telescope       sred = (PC_Telescope)pc->data;
  Vec                xtmp, yred;
  PetscInt           i, st, ed;
  VecScatter         scatter;
  const PetscScalar *x_array;
  PetscBool          default_init_guess_value;

  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  yred    = sred->yred;

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCApplyRichardson_Telescope only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscInfo(pc, "PCTelescope: Scattering y for non-zero initial guess\n");
    /* pull in vector y->xtmp */
    VecScatterBegin(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd(scatter, y, xtmp, INSERT_VALUES, SCATTER_FORWARD);

    /* copy vector entries into yred */
    VecGetArrayRead(xtmp, &x_array);
    if (yred) {
      PetscScalar *LA_yred;
      VecGetOwnershipRange(yred, &st, &ed);
      VecGetArray(yred, &LA_yred);
      for (i = 0; i < ed - st; i++) LA_yred[i] = x_array[i];
      VecRestoreArray(yred, &LA_yred);
    }
    VecRestoreArrayRead(xtmp, &x_array);
  }

  if (PCTelescope_isActiveRank(sred)) {
    KSPGetInitialGuessNonzero(sred->ksp, &default_init_guess_value);
    if (!zeroguess) KSPSetInitialGuessNonzero(sred->ksp, PETSC_TRUE);
  }

  PCApply_Telescope(pc, x, y);

  if (PCTelescope_isActiveRank(sred)) KSPSetInitialGuessNonzero(sred->ksp, default_init_guess_value);

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  return 0;
}

static PetscErrorCode PCReset_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  ISDestroy(&sred->isin);
  VecScatterDestroy(&sred->scatter);
  VecDestroy(&sred->xred);
  VecDestroy(&sred->yred);
  VecDestroy(&sred->xtmp);
  MatDestroy(&sred->Bred);
  KSPReset(sred->ksp);
  if (sred->pctelescope_reset_type) sred->pctelescope_reset_type(pc);
  return 0;
}

static PetscErrorCode PCDestroy_Telescope(PC pc)
{
  PC_Telescope sred = (PC_Telescope)pc->data;

  PCReset_Telescope(pc);
  KSPDestroy(&sred->ksp);
  PetscSubcommDestroy(&sred->psubcomm);
  PetscFree(sred->dm_ctx);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", NULL);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", NULL);
  PetscFree(pc->data);
  return 0;
}

static PetscErrorCode PCSetFromOptions_Telescope(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_Telescope     sred = (PC_Telescope)pc->data;
  MPI_Comm         comm;
  PetscMPIInt      size;
  PetscBool        flg;
  PetscSubcommType subcommtype;

  PetscObjectGetComm((PetscObject)pc, &comm);
  MPI_Comm_size(comm, &size);
  PetscOptionsHeadBegin(PetscOptionsObject, "Telescope options");
  PetscOptionsEnum("-pc_telescope_subcomm_type", "Subcomm type (interlaced or contiguous)", "PCTelescopeSetSubcommType", PetscSubcommTypes, (PetscEnum)sred->subcommtype, (PetscEnum *)&subcommtype, &flg);
  if (flg) PCTelescopeSetSubcommType(pc, subcommtype);
  PetscOptionsInt("-pc_telescope_reduction_factor", "Factor to reduce comm size by", "PCTelescopeSetReductionFactor", sred->redfactor, &sred->redfactor, NULL);
  if (sred->redfactor > size) SETERRQ(comm, PETSC_ERR_ARG_WRONG, "-pc_telescope_reduction_factor %" PetscInt_FMT " must be <= comm size %d", sred->redfactor, (int)size);
  PetscOptionsBool("-pc_telescope_ignore_dm", "Ignore any DM attached to the PC", "PCTelescopeSetIgnoreDM", sred->ignore_dm, &sred->ignore_dm, NULL);
  PetscOptionsBool("-pc_telescope_ignore_kspcomputeoperators", "Ignore method used to compute A", "PCTelescopeSetIgnoreKSPComputeOperators", sred->ignore_kspcomputeoperators, &sred->ignore_kspcomputeoperators, NULL);
  PetscOptionsBool("-pc_telescope_use_coarse_dm", "Define sub-communicator from the coarse DM", "PCTelescopeSetUseCoarseDM", sred->use_coarse_dm, &sred->use_coarse_dm, NULL);
  PetscOptionsHeadEnd();
  return 0;
}

/* PC implementation specific APIs */

static PetscErrorCode PCTelescopeGetKSP_Telescope(PC pc, KSP *ksp)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (ksp) *ksp = red->ksp;
  return 0;
}

static PetscErrorCode PCTelescopeGetSubcommType_Telescope(PC pc, PetscSubcommType *subcommtype)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (subcommtype) *subcommtype = red->subcommtype;
  return 0;
}

static PetscErrorCode PCTelescopeSetSubcommType_Telescope(PC pc, PetscSubcommType subcommtype)
{
  PC_Telescope red = (PC_Telescope)pc->data;

  if (red->psubcomm) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "You cannot change the subcommunicator type for PCTelescope after it has been set up");
  red->subcommtype = subcommtype;
  return 0;
}

static PetscErrorCode PCTelescopeGetReductionFactor_Telescope(PC pc, PetscInt *fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (fact) *fact = red->redfactor;
  return 0;
}

static PetscErrorCode PCTelescopeSetReductionFactor_Telescope(PC pc, PetscInt fact)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  PetscMPIInt  size;

  MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
  if (fact <= 0) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be positive", fact);
  if (fact > size) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Reduction factor of telescoping PC %" PetscInt_FMT " must be <= comm size", fact);
  red->redfactor = fact;
  return 0;
}

static PetscErrorCode PCTelescopeGetIgnoreDM_Telescope(PC pc, PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (v) *v = red->ignore_dm;
  return 0;
}

static PetscErrorCode PCTelescopeSetIgnoreDM_Telescope(PC pc, PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  red->ignore_dm = v;
  return 0;
}

static PetscErrorCode PCTelescopeGetUseCoarseDM_Telescope(PC pc, PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (v) *v = red->use_coarse_dm;
  return 0;
}

static PetscErrorCode PCTelescopeSetUseCoarseDM_Telescope(PC pc, PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  red->use_coarse_dm = v;
  return 0;
}

static PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool *v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  if (v) *v = red->ignore_kspcomputeoperators;
  return 0;
}

static PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators_Telescope(PC pc, PetscBool v)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  red->ignore_kspcomputeoperators = v;
  return 0;
}

static PetscErrorCode PCTelescopeGetDM_Telescope(PC pc, DM *dm)
{
  PC_Telescope red = (PC_Telescope)pc->data;
  *dm = private_PCTelescopeGetSubDM(red);
  return 0;
}

/*@
 PCTelescopeGetKSP - Gets the `KSP` created by the telescoping `PC`.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subksp - the `KSP` defined on the smaller set of processes
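
 Note:
 On MPI processes which are not members of the sub-communicator (and before `PCSetUp()` has been
 called on pc), the returned subksp is NULL. A minimal usage sketch (illustrative only; error
 checking omitted) which switches the sub-solver to CG is

.vb
   KSP subksp;

   PCTelescopeGetKSP(pc, &subksp);
   if (subksp) KSPSetType(subksp, KSPCG);
.ve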

 Level: advanced

.seealso: `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeGetKSP(PC pc, KSP *subksp)
{
  PetscUseMethod(pc, "PCTelescopeGetKSP_C", (PC, KSP *), (pc, subksp));
  return 0;
}

/*@
 PCTelescopeGetReductionFactor - Gets the factor by which the original number of MPI ranks has been reduced.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  fact - the reduction factor

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetReductionFactor()`
@*/
PetscErrorCode PCTelescopeGetReductionFactor(PC pc, PetscInt *fact)
{
  PetscUseMethod(pc, "PCTelescopeGetReductionFactor_C", (PC, PetscInt *), (pc, fact));
  return 0;
}

/*@
 PCTelescopeSetReductionFactor - Sets the factor by which the original number of MPI ranks will be reduced.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  fact - the reduction factor

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeGetReductionFactor()`
@*/
PetscErrorCode PCTelescopeSetReductionFactor(PC pc, PetscInt fact)
{
  PetscTryMethod(pc, "PCTelescopeSetReductionFactor_C", (PC, PetscInt), (pc, fact));
  return 0;
}

/*@
 PCTelescopeGetIgnoreDM - Get the flag indicating if any `DM` attached to the `PC` will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`
@*/
PetscErrorCode PCTelescopeGetIgnoreDM(PC pc, PetscBool *v)
{
  PetscUseMethod(pc, "PCTelescopeGetIgnoreDM_C", (PC, PetscBool *), (pc, v));
  return 0;
}

/*@
 PCTelescopeSetIgnoreDM - Set a flag to ignore any `DM` attached to the `PC`.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use `PETSC_TRUE` to ignore any `DM`

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeGetIgnoreDM()`
@*/
PetscErrorCode PCTelescopeSetIgnoreDM(PC pc, PetscBool v)
{
  PetscTryMethod(pc, "PCTelescopeSetIgnoreDM_C", (PC, PetscBool), (pc, v));
  return 0;
}

/*@
 PCTelescopeGetUseCoarseDM - Get the flag indicating if the coarse `DM` attached to the `DM` associated with the `PC` will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
@*/
PetscErrorCode PCTelescopeGetUseCoarseDM(PC pc, PetscBool *v)
{
  PetscUseMethod(pc, "PCTelescopeGetUseCoarseDM_C", (PC, PetscBool *), (pc, v));
  return 0;
}

/*@
 PCTelescopeSetUseCoarseDM - Set a flag to query the `DM` attached to the `PC` if it also has a coarse `DM`.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use `PETSC_FALSE` to ignore any coarse `DM`

 Notes:
 When you have specified to use a coarse `DM`, the communicator used to create the sub-`KSP` within `PCTELESCOPE`
 will be that of the coarse `DM`. Hence the flags -pc_telescope_reduction_factor and
 -pc_telescope_subcomm_type will no longer have any meaning.
 It is required that the communicators associated with the parent (fine) and the coarse `DM` are of different sizes.
 An error will occur if the size of the communicator associated with the coarse `DM`
 is the same as that of the parent `DM`.
 Furthermore, it is required that the communicator on the coarse `DM` is a sub-communicator of the parent.
 This will be checked at the time the preconditioner is set up, and an error will occur if
 the coarse `DM` does not define a sub-communicator of that used by the parent `DM`.

 The particular Telescope setup invoked when using a coarse `DM` is agnostic with respect to the type of
 the `DM` used (e.g. it supports `DMSHELL`, `DMPLEX`, etc).

 Support is currently only provided for the case when you are using `KSPSetComputeOperators()`.

 The user is required to compose a function with the parent `DM` to facilitate the transfer of fields (`Vec`) between the different decompositions defined by the fine and coarse `DM`s.
 In the user code, this is achieved via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeFieldScatter",your_field_scatter_method);
   }
.ve
 The signature of the user provided field scatter method is
.vb
   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
.ve
 The user must provide support for both mode = `SCATTER_FORWARD` and mode = `SCATTER_REVERSE`.
 `SCATTER_FORWARD` implies the direction of transfer is from the parent (fine) `DM` to the coarse `DM`.
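
 As a minimal sketch only: assuming the user stashed, in the application context of dm_fine, a
 `VecScatter` mapping a fine vector into a fine-communicator vector xtmp whose local length is
 zero on ranks outside the coarse communicator (the same pattern used by the default setup mode
 in this file), and assuming x_coarse is NULL on ranks which are not members of the coarse
 communicator, the method could read as below. The struct UserCtx and its members are
 hypothetical names, and error checking is omitted.
.vb
   typedef struct {
     VecScatter scatter; /* created on the fine communicator */
     Vec        xtmp;    /* fine-comm vector; empty on ranks idle on the coarse communicator */
   } UserCtx;

   PetscErrorCode your_field_scatter_method(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse)
   {
     UserCtx           *ctx;
     const PetscScalar *ra;
     PetscScalar       *wa;
     PetscInt           i,n;

     DMGetApplicationContext(dm_fine,&ctx);
     if (mode == SCATTER_FORWARD) {
       VecScatterBegin(ctx->scatter,x_fine,ctx->xtmp,INSERT_VALUES,SCATTER_FORWARD);
       VecScatterEnd(ctx->scatter,x_fine,ctx->xtmp,INSERT_VALUES,SCATTER_FORWARD);
       if (x_coarse) { /* non-NULL only on ranks of the coarse communicator */
         VecGetLocalSize(x_coarse,&n);
         VecGetArrayRead(ctx->xtmp,&ra);
         VecGetArray(x_coarse,&wa);
         for (i = 0; i < n; i++) wa[i] = ra[i];
         VecRestoreArray(x_coarse,&wa);
         VecRestoreArrayRead(ctx->xtmp,&ra);
       }
     } else { /* SCATTER_REVERSE: local copy first, then scatter back to the fine DM */
       if (x_coarse) {
         VecGetLocalSize(x_coarse,&n);
         VecGetArrayRead(x_coarse,&ra);
         VecGetArray(ctx->xtmp,&wa);
         for (i = 0; i < n; i++) wa[i] = ra[i];
         VecRestoreArray(ctx->xtmp,&wa);
         VecRestoreArrayRead(x_coarse,&ra);
       }
       VecScatterBegin(ctx->scatter,ctx->xtmp,x_fine,INSERT_VALUES,SCATTER_REVERSE);
       VecScatterEnd(ctx->scatter,ctx->xtmp,x_fine,INSERT_VALUES,SCATTER_REVERSE);
     }
     return 0;
   }
.ve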

 Optionally, the user may also compose a function with the parent `DM` to facilitate the transfer
 of state variables between the fine and coarse `DM`s.
 In the context of a finite element discretization, an example state variable might be
 values associated with quadrature points within each element.
 A user provided state scatter method is composed via
.vb
   {
     DM dm_fine;
     PetscObjectCompose((PetscObject)dm_fine,"PCTelescopeStateScatter",your_state_scatter_method);
   }
.ve
 The signature of the user provided state scatter method is
.vb
   PetscErrorCode your_state_scatter_method(DM dm_fine,ScatterMode mode,DM dm_coarse);
.ve
 `SCATTER_FORWARD` implies the direction of transfer is from the fine `DM` to the coarse `DM`.
 The user is only required to support mode = `SCATTER_FORWARD`.
 No assumption is made about the data type of the state variables.
 These must be managed by the user and must be accessible from the `DM`.

 Care must be taken in defining the user context passed to `KSPSetComputeOperators()` which is to be
 associated with the sub-`KSP` residing within `PCTELESCOPE`.
 In general, `PCTELESCOPE` assumes that the context on the fine and coarse `DM` used with
 `KSPSetComputeOperators()` should be "similar" in type or origin.
 Specifically, the following rules are used to infer what the context for the sub-`KSP` should be.

 First the contexts from the `KSP` and the fine and coarse `DM`s are retrieved.
 Note that the special case of a `DMSHELL` context is queried.

.vb
   DMKSPGetComputeOperators(dm_fine,&dmfine_kspfunc,&dmfine_kspctx);
   DMGetApplicationContext(dm_fine,&dmfine_appctx);
   DMShellGetContext(dm_fine,&dmfine_shellctx);

   DMGetApplicationContext(dm_coarse,&dmcoarse_appctx);
   DMShellGetContext(dm_coarse,&dmcoarse_shellctx);
.ve

 The following rules are then enforced:

 1. If dmfine_kspctx = NULL, then we provide a NULL pointer as the context for the sub-KSP:
 `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,NULL);

 2. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_appctx,
 check that dmcoarse_appctx is also non-NULL. If this is true, then:
 `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,dmcoarse_appctx);

 3. If dmfine_kspctx != NULL and dmfine_kspctx == dmfine_shellctx,
 check that dmcoarse_shellctx is also non-NULL. If this is true, then:
 `KSPSetComputeOperators`(sub_ksp,dmfine_kspfunc,dmcoarse_shellctx);

 If none of the above three tests pass, then `PCTELESCOPE` cannot safely determine what
 context should be provided to `KSPSetComputeOperators()` for use with the sub-`KSP`.
 In this case, an additional mechanism is provided via a composed function which will return
 the actual context to be used. To use this feature you must compose the "getter" function
 with the coarse `DM`, e.g.
.vb
   {
     DM dm_coarse;
     PetscObjectCompose((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",your_coarse_context_getter);
   }
.ve
 The signature of the user provided method is
.vb
   PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext);
.ve
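
 For instance, a getter which simply returns the application context attached to the coarse `DM`
 might read as follows (a sketch; your_coarse_context_getter is a placeholder name)
.vb
   PetscErrorCode your_coarse_context_getter(DM dm_coarse,void **your_kspcontext)
   {
     DMGetApplicationContext(dm_coarse,your_kspcontext);
     return 0;
   }
.ve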

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`
@*/
PetscErrorCode PCTelescopeSetUseCoarseDM(PC pc, PetscBool v)
{
  PetscTryMethod(pc, "PCTelescopeSetUseCoarseDM_C", (PC, PetscBool), (pc, v));
  return 0;
}

/*@
 PCTelescopeGetIgnoreKSPComputeOperators - Get the flag indicating if `KSPComputeOperators()` will be used.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  v - the flag

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeSetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeGetIgnoreKSPComputeOperators(PC pc, PetscBool *v)
{
  PetscUseMethod(pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", (PC, PetscBool *), (pc, v));
  return 0;
}

/*@
 PCTelescopeSetIgnoreKSPComputeOperators - Set a flag to ignore `KSPComputeOperators()`.

 Not Collective

 Input Parameters:
+  pc - the preconditioner context
-  v - Use `PETSC_TRUE` to ignore the method (if defined) set via `KSPSetComputeOperators()` on pc

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeSetIgnoreKSPComputeOperators(PC pc, PetscBool v)
{
  PetscTryMethod(pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", (PC, PetscBool), (pc, v));
  return 0;
}

/*@
 PCTelescopeGetDM - Get the re-partitioned `DM` attached to the sub-`KSP`.

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subdm - The re-partitioned `DM`
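
 Note:
 On ranks which are not members of the sub-communicator, or when no re-partitioned `DM` exists
 (e.g. with the default setup type), subdm is NULL. An illustrative sketch (error checking
 omitted) which views the re-partitioned `DM` after `PCSetUp()` is

.vb
   DM subdm;

   PCTelescopeGetDM(pc, &subdm);
   if (subdm) DMView(subdm, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)subdm)));
.ve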

 Level: advanced

.seealso: `PCTELESCOPE`, `PCTelescopeSetIgnoreDM()`, `PCTelescopeSetUseCoarseDM()`, `PCTelescopeGetIgnoreKSPComputeOperators()`
@*/
PetscErrorCode PCTelescopeGetDM(PC pc, DM *subdm)
{
  PetscUseMethod(pc, "PCTelescopeGetDM_C", (PC, DM *), (pc, subdm));
  return 0;
}

/*@
 PCTelescopeSetSubcommType - Set the subcommunicator type (interlaced or contiguous).

 Logically Collective

 Input Parameters:
+  pc - the preconditioner context
-  subcommtype - the subcommunicator type (see `PetscSubcommType`)

 Level: advanced

.seealso: `PetscSubcommType`, `PetscSubcomm`, `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeSetSubcommType(PC pc, PetscSubcommType subcommtype)
{
  PetscTryMethod(pc, "PCTelescopeSetSubcommType_C", (PC, PetscSubcommType), (pc, subcommtype));
  return 0;
}

/*@
 PCTelescopeGetSubcommType - Get the subcommunicator type (interlaced or contiguous).

 Not Collective

 Input Parameter:
.  pc - the preconditioner context

 Output Parameter:
.  subcommtype - the subcommunicator type (see `PetscSubcommType`)

 Level: advanced

.seealso: `PetscSubcomm`, `PetscSubcommType`, `PCTELESCOPE`
@*/
PetscErrorCode PCTelescopeGetSubcommType(PC pc, PetscSubcommType *subcommtype)
{
  PetscUseMethod(pc, "PCTelescopeGetSubcommType_C", (PC, PetscSubcommType *), (pc, subcommtype));
  return 0;
}

/*MC
   PCTELESCOPE - Runs a `KSP` solver on a sub-communicator. MPI ranks not in the sub-communicator are idle during the solve.

   Options Database Keys:
+  -pc_telescope_reduction_factor <r> - factor to reduce the communicator size by. e.g. with 64 MPI ranks and r=4, the new sub-communicator will have 64/4 = 16 ranks.
.  -pc_telescope_ignore_dm - flag to indicate whether an attached `DM` should be ignored.
.  -pc_telescope_subcomm_type <interlaced,contiguous> - defines the selection of MPI ranks on the sub-communicator. See `PetscSubcomm` for more information.
.  -pc_telescope_ignore_kspcomputeoperators - flag to indicate whether `KSPSetComputeOperators()` should be used on the sub-`KSP`.
-  -pc_telescope_use_coarse_dm - flag to indicate whether the coarse `DM` should be used to define the sub-communicator.

   Level: advanced

   Notes:
   Assuming that the parent preconditioner `PC` is defined on a communicator c, this implementation
   creates a child sub-communicator (c') containing fewer MPI ranks than c.
   The preconditioner is deemed telescopic as it only calls `KSPSolve()` on a single
   sub-communicator, in contrast with `PCREDUNDANT` which calls `KSPSolve()` on N sub-communicators.
   This means there will be MPI ranks which will be idle during the application of this preconditioner.
   Additionally, in comparison with `PCREDUNDANT`, `PCTELESCOPE` can utilize an attached `DM`.

   The default type of the sub `KSP` (the `KSP` defined on c') is `KSPPREONLY`.

   There are three setup mechanisms for `PCTELESCOPE`. The features supported by each setup type are described below.
   In the following, we will refer to the operators B and B'; these are the Bmat operators provided to the `KSP` on the
   communicators c and c' respectively.

   [1] Default setup
   The sub-communicator c' is created via `PetscSubcommCreate()`.
   Explicitly defined nullspace and near nullspace vectors will be propagated from B to B'.
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   No support is provided for `KSPSetComputeOperators()`.
   Currently there is no support for the flag -pc_use_amat.

   [2] `DM` aware setup
   If a `DM` is attached to the `PC`, it is re-partitioned on the sub-communicator c'.
   c' is created via `PetscSubcommCreate()`.
   Both the Bmat operator and the right hand side vector are permuted into the new DOF ordering defined by the re-partitioned `DM`.
   Currently only support for re-partitioning a `DMDA` is provided.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, re-partitioned and set on the re-partitioned Bmat operator (B').
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   Support is provided for `KSPSetComputeOperators()`. The user-provided function and context are propagated to the sub `KSP`.
   This is fragile since the user must ensure that their user context is valid for use on c'.
   Currently there is no support for the flag -pc_use_amat.

   [3] Coarse `DM` setup
   If a `DM` (dmfine) is attached to the `PC`, dmfine is queried for a "coarse" `DM` (call this dmcoarse) via `DMGetCoarseDM()`.
   `PCTELESCOPE` will interpret the coarse `DM` as being defined on a sub-communicator of c.
   The communicator associated with dmcoarse will define the c' to be used within `PCTELESCOPE`.
   `PCTELESCOPE` will check that c' is in fact a sub-communicator of c. If it is not, an error will be reported.
   The intention of this setup type is that `PCTELESCOPE` will use an existing (e.g. user defined) communicator hierarchy, such as would be
   available when using multigrid on unstructured meshes.
   This setup will not use the command line options -pc_telescope_reduction_factor or -pc_telescope_subcomm_type.
   Any explicitly defined nullspace or near nullspace vectors attached to the original Bmat operator (B) are extracted, scattered into the correct ordering consistent with dmcoarse and set on B'.
   Currently there is no support for defining nullspaces via a user-supplied method (e.g. as passed to `MatNullSpaceSetFunction()`).
   There is no general method to permute field orderings, hence only `KSPSetComputeOperators()` is supported.
   The user must use `PetscObjectComposeFunction()` with dmfine to define the method to scatter fields from dmfine to dmcoarse.
   Propagation of the user context for `KSPSetComputeOperators()` on the sub `KSP` is attempted by querying the `DM` contexts associated with dmfine and dmcoarse. Alternatively, the user may use `PetscObjectComposeFunction()` with dmcoarse to define a method which will return the appropriate user context for `KSPSetComputeOperators()`.
   Currently there is no support for the flag -pc_use_amat.
   This setup can be invoked by the option -pc_telescope_use_coarse_dm or by calling `PCTelescopeSetUseCoarseDM`(pc,`PETSC_TRUE`).
   Further information about the user-provided methods required by this setup type is given in `PCTelescopeSetUseCoarseDM()`.

   Developer Notes:
   During `PCSetUp()`, the B operator is scattered onto c'.
   Within `PCApply()`, the RHS vector (x) is scattered into a redundant vector, xred (defined on c').
   Then, `KSPSolve()` is executed on the c' communicator.

   The communicator used within the telescoping preconditioner is defined by a `PetscSubcomm` using the INTERLACED
   creation routine by default (this can be changed with -pc_telescope_subcomm_type). We run the sub `KSP` on only the ranks within the communicator which have a color equal to zero.

   The telescoping preconditioner is aware of nullspaces and near nullspaces which are attached to the B operator.
   In the case where B has a (near) nullspace attached, the (near) nullspace vectors are extracted from B and mapped into
   a new (near) nullspace, defined on the sub-communicator, which is attached to B' (the B operator which was scattered to c').

   The telescoping preconditioner can re-partition an attached `DM` if it is a `DMDA` (2D or 3D -
   support for 1D `DMDA`s is not provided). If a `DMDA` is found, a topologically equivalent `DMDA` is created on c'
   and this new `DM` is attached to the sub `KSP`. The design of telescope is such that it should be possible to extend support
   for re-partitioning other `DM` types (e.g. `DMPLEX`). The user can supply a flag to ignore attached `DM`s.
   Alternatively, user-provided re-partitioned `DM`s can be used via -pc_telescope_use_coarse_dm.

   With the default setup mode, B' is defined by fusing rows (in order) associated with MPI ranks common to c and c'.

   When a `DMDA` is attached to the parent preconditioner, B' is defined by: (i) performing a symmetric permutation of B
   into the ordering defined by the `DMDA` on c', (ii) extracting the local chunks via `MatCreateSubMatrices()`, (iii) fusing the
   local (sequential) matrices defined on the ranks common to c and c' into B' using `MatCreateMPIMatConcatenateSeqMat()`.

   Limitations/improvements include the following.
   `VecPlaceArray()` could be used within `PCApply()` to improve efficiency and reduce memory usage.
   A unified mechanism to query for the user contexts required by `KSPSetComputeOperators()` and `MatNullSpaceSetFunction()` is also needed.

   The symmetric permutation used when a `DMDA` is encountered is performed via explicitly assembling a permutation matrix P,
   and performing P^T.A.P. It might be more efficient to use `MatPermute()`; we opted for P^T.A.P since it appears
   `VecPermute()` does not support the use case required here. By computing P, one can permute both the operator and RHS in a
   consistent manner.

   Mapping of vectors (default setup mode) is performed in the following way.
   Suppose the parent communicator size was 4, and we set a reduction factor of 2; this would give a comm size on c' of 2.
   Using the interlaced creation routine, the ranks in c with color = 0 will be ranks 0 and 2.
   We perform the scatter to the sub-communicator in the following way.
   [1] Given a vector x defined on communicator c

.vb
   rank(c)  local values of x
   ------- ----------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0 ]
        1   [  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0 ]
        3   [ 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve

   scatter into xtmp, also defined on comm c, so that we have the following values

.vb
   rank(c)  local values of xtmp
   ------- ----------------------------------------------------------------------------
        0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
        1   [ ]
        2   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
        3   [ ]
.ve

   The entries on ranks 1 and 3 (ranks which do not have a color = 0 in c') have no values.

   [2] Copy the values from ranks 0, 2 (indices with respect to comm c) into the vector xred which is defined on communicator c'.
   Ranks 0 and 2 are the only ranks in the subcomm which have a color = 0.

.vb
   rank(c')  local values of xred
   -------- ----------------------------------------------------------------------------
         0   [  0.0,  1.0,  2.0,  3.0,  4.0,  5.0,  6.0,  7.0,  8.0,  9.0, 10.0, 11.0 ]
         1   [ 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0 ]
.ve
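
   As an illustrative example of typical usage (all solver choices here are arbitrary), a run on 16 MPI ranks
   which gathers the problem onto a single rank (reduction factor 16) and solves it there with LU could be
   launched as

.vb
   mpiexec -n 16 ./your_app -ksp_type gmres -pc_type telescope -pc_telescope_reduction_factor 16 -telescope_ksp_type preonly -telescope_pc_type lu
.ve

   The sub `KSP` inherits the options prefix of the parent `PC` with "telescope_" appended (see `PCSetUp_Telescope()`),
   hence the inner solver is controlled via the -telescope_ prefixed options.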

  Contributed by Dave May

  Reference:
  Dave A. May, Patrick Sanan, Karl Rupp, Matthew G. Knepley, and Barry F. Smith, "Extreme-Scale Multigrid Components within PETSc". 2016. In Proceedings of the Platform for Advanced Scientific Computing Conference (PASC '16). DOI: 10.1145/2929908.2929913

.seealso: `PCTelescopeGetKSP()`, `PCTelescopeGetDM()`, `PCTelescopeGetReductionFactor()`, `PCTelescopeSetReductionFactor()`, `PCTelescopeGetIgnoreDM()`, `PCTelescopeSetIgnoreDM()`, `PCREDUNDANT`
M*/
PETSC_EXTERN PetscErrorCode PCCreate_Telescope(PC pc)
{
  struct _PC_Telescope *sred;

  PetscNew(&sred);
  sred->psubcomm                   = NULL;
  sred->subcommtype                = PETSC_SUBCOMM_INTERLACED;
  sred->subcomm                    = MPI_COMM_NULL;
  sred->redfactor                  = 1;
  sred->ignore_dm                  = PETSC_FALSE;
  sred->ignore_kspcomputeoperators = PETSC_FALSE;
  sred->use_coarse_dm              = PETSC_FALSE;
  pc->data                         = (void *)sred;

  pc->ops->apply           = PCApply_Telescope;
  pc->ops->applytranspose  = NULL;
  pc->ops->applyrichardson = PCApplyRichardson_Telescope;
  pc->ops->setup           = PCSetUp_Telescope;
  pc->ops->destroy         = PCDestroy_Telescope;
  pc->ops->reset           = PCReset_Telescope;
  pc->ops->setfromoptions  = PCSetFromOptions_Telescope;
  pc->ops->view            = PCView_Telescope;

  sred->pctelescope_setup_type              = PCTelescopeSetUp_default;
  sred->pctelescope_matcreate_type          = PCTelescopeMatCreate_default;
  sred->pctelescope_matnullspacecreate_type = PCTelescopeMatNullSpaceCreate_default;
  sred->pctelescope_reset_type              = NULL;

  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetKSP_C", PCTelescopeGetKSP_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetSubcommType_C", PCTelescopeGetSubcommType_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetSubcommType_C", PCTelescopeSetSubcommType_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetReductionFactor_C", PCTelescopeGetReductionFactor_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetReductionFactor_C", PCTelescopeSetReductionFactor_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreDM_C", PCTelescopeGetIgnoreDM_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreDM_C", PCTelescopeSetIgnoreDM_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetIgnoreKSPComputeOperators_C", PCTelescopeGetIgnoreKSPComputeOperators_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetIgnoreKSPComputeOperators_C", PCTelescopeSetIgnoreKSPComputeOperators_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetDM_C", PCTelescopeGetDM_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeGetUseCoarseDM_C", PCTelescopeGetUseCoarseDM_Telescope);
  PetscObjectComposeFunction((PetscObject)pc, "PCTelescopeSetUseCoarseDM_C", PCTelescopeSetUseCoarseDM_Telescope);
  return 0;
}