Actual source code: hypre.c

  1: /*
  2:    Provides an interface to the LLNL package hypre
  3: */

  5: #include <petscpkg_version.h>
  6: #include <petsc/private/pcimpl.h>
  7: /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */
  8: #include <petsc/private/matimpl.h>
  9: #include <petsc/private/vecimpl.h>
 10: #include <../src/vec/vec/impls/hypre/vhyp.h>
 11: #include <../src/mat/impls/hypre/mhypre.h>
 12: #include <../src/dm/impls/da/hypre/mhyp.h>
 13: #include <_hypre_parcsr_ls.h>
 14: #include <petscmathypre.h>

 16: #if defined(PETSC_HAVE_HYPRE_DEVICE)
 17: #include <petsc/private/deviceimpl.h>
 18: #endif

 20: static PetscBool  cite            = PETSC_FALSE;
 21: static const char hypreCitation[] = "@manual{hypre-web-page,\n  title  = {{\\sl hypre}: High Performance Preconditioners},\n  organization = {Lawrence Livermore National Laboratory},\n  note  = "
 22:                                     "{\\url{https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods}}\n}\n";

 24: /*
 25:    Private context (data structure) for the  preconditioner.
 26: */
 27: typedef struct {
 28:   HYPRE_Solver hsolver;
 29:   Mat          hpmat; /* MatHYPRE */

 31:   HYPRE_Int (*destroy)(HYPRE_Solver);
 32:   HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
 33:   HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);

 35:   MPI_Comm comm_hypre;
 36:   char    *hypre_type;

 38:   /* options for Pilut and BoomerAMG*/
 39:   PetscInt  maxiter;
 40:   PetscReal tol;

 42:   /* options for Pilut */
 43:   PetscInt factorrowsize;

 45:   /* options for ParaSails */
 46:   PetscInt  nlevels;
 47:   PetscReal threshold;
 48:   PetscReal filter;
 49:   PetscReal loadbal;
 50:   PetscInt  logging;
 51:   PetscInt  ruse;
 52:   PetscInt  symt;

 54:   /* options for BoomerAMG */
 55:   PetscBool printstatistics;

 57:   /* options for BoomerAMG */
 58:   PetscInt  cycletype;
 59:   PetscInt  maxlevels;
 60:   PetscReal strongthreshold;
 61:   PetscReal maxrowsum;
 62:   PetscInt  gridsweeps[3];
 63:   PetscInt  coarsentype;
 64:   PetscInt  measuretype;
 65:   PetscInt  smoothtype;
 66:   PetscInt  smoothnumlevels;
 67:   PetscInt  eu_level;         /* Number of levels for ILU(k) in Euclid */
 68:   PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */
 69:   PetscInt  eu_bj;            /* Defines use of Block Jacobi ILU in Euclid */
 70:   PetscInt  relaxtype[3];
 71:   PetscReal relaxweight;
 72:   PetscReal outerrelaxweight;
 73:   PetscInt  relaxorder;
 74:   PetscReal truncfactor;
 75:   PetscBool applyrichardson;
 76:   PetscInt  pmax;
 77:   PetscInt  interptype;
 78:   PetscInt  maxc;
 79:   PetscInt  minc;
 80: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
 81:   char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG
 82: #endif
 83:   /* GPU */
 84:   PetscBool keeptranspose;
 85:   PetscInt  rap2;
 86:   PetscInt  mod_rap2;

 88:   /* AIR */
 89:   PetscInt  Rtype;
 90:   PetscReal Rstrongthreshold;
 91:   PetscReal Rfilterthreshold;
 92:   PetscInt  Adroptype;
 93:   PetscReal Adroptol;

 95:   PetscInt  agg_nl;
 96:   PetscInt  agg_interptype;
 97:   PetscInt  agg_num_paths;
 98:   PetscBool nodal_relax;
 99:   PetscInt  nodal_relax_levels;

101:   PetscInt  nodal_coarsening;
102:   PetscInt  nodal_coarsening_diag;
103:   PetscInt  vec_interp_variant;
104:   PetscInt  vec_interp_qmax;
105:   PetscBool vec_interp_smooth;
106:   PetscInt  interp_refine;

108:   /* NearNullSpace support */
109:   VecHYPRE_IJVector *hmnull;
110:   HYPRE_ParVector   *phmnull;
111:   PetscInt           n_hmnull;
112:   Vec                hmnull_constant;

114:   /* options for AS (Auxiliary Space preconditioners) */
115:   PetscInt  as_print;
116:   PetscInt  as_max_iter;
117:   PetscReal as_tol;
118:   PetscInt  as_relax_type;
119:   PetscInt  as_relax_times;
120:   PetscReal as_relax_weight;
121:   PetscReal as_omega;
122:   PetscInt  as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */
123:   PetscReal as_amg_alpha_theta;   /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */
124:   PetscInt  as_amg_beta_opts[5];  /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */
125:   PetscReal as_amg_beta_theta;    /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS)  */
126:   PetscInt  ams_cycle_type;
127:   PetscInt  ads_cycle_type;

129:   /* additional data */
130:   Mat G;             /* MatHYPRE */
131:   Mat C;             /* MatHYPRE */
132:   Mat alpha_Poisson; /* MatHYPRE */
133:   Mat beta_Poisson;  /* MatHYPRE */

135:   /* extra information for AMS */
136:   PetscInt          dim; /* geometrical dimension */
137:   VecHYPRE_IJVector coords[3];
138:   VecHYPRE_IJVector constants[3];
139:   VecHYPRE_IJVector interior;
140:   Mat               RT_PiFull, RT_Pi[3];
141:   Mat               ND_PiFull, ND_Pi[3];
142:   PetscBool         ams_beta_is_zero;
143:   PetscBool         ams_beta_is_zero_part;
144:   PetscInt          ams_proj_freq;
145: } PC_HYPRE;

147: PetscErrorCode PCHYPREGetSolver(PC pc, HYPRE_Solver *hsolver)
148: {
149:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

151:   *hsolver = jac->hsolver;
152:   return 0;
153: }

155: /*
156:   Matrices in AIJ format are created IN PLACE using the (I,J,data) arrays from BoomerAMG. Since the data format in hypre_ParCSRMatrix
157:   is different from the one used in PETSc, the original hypre_ParCSRMatrix can no longer be used after calling this routine.
158:   It is used in PCHMG. Other users should avoid using this function.
159: */
160: static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[])
161: {
162:   PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
163:   PetscBool            same = PETSC_FALSE;
164:   PetscInt             num_levels, l;
165:   Mat                 *mattmp;
166:   hypre_ParCSRMatrix **A_array;

168:   PetscStrcmp(jac->hypre_type, "boomeramg", &same);
170:   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver));
171:   PetscMalloc1(num_levels, &mattmp);
172:   A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)(jac->hsolver));
173:   for (l = 1; l < num_levels; l++) {
174:     MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[num_levels - 1 - l]));
175:     /* We want to own the data, and HYPRE cannot touch this matrix anymore */
176:     A_array[l] = NULL;
177:   }
178:   *nlevels   = num_levels;
179:   *operators = mattmp;
180:   return 0;
181: }

183: /*
184:   Matrices in AIJ format are created IN PLACE using the (I,J,data) arrays from BoomerAMG. Since the data format in hypre_ParCSRMatrix
185:   is different from the one used in PETSc, the original hypre_ParCSRMatrix can no longer be used after calling this routine.
186:   It is used in PCHMG. Other users should avoid using this function.
187: */
188: static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[])
189: {
190:   PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
191:   PetscBool            same = PETSC_FALSE;
192:   PetscInt             num_levels, l;
193:   Mat                 *mattmp;
194:   hypre_ParCSRMatrix **P_array;

196:   PetscStrcmp(jac->hypre_type, "boomeramg", &same);
198:   num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver));
199:   PetscMalloc1(num_levels, &mattmp);
200:   P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)(jac->hsolver));
201:   for (l = 1; l < num_levels; l++) {
202:     MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[l - 1]));
203:     /* We want to own the data, and HYPRE cannot touch this matrix anymore */
204:     P_array[num_levels - 1 - l] = NULL;
205:   }
206:   *nlevels        = num_levels;
207:   *interpolations = mattmp;
208:   return 0;
209: }

211: /* Resets (frees) Hypre's representation of the near null space */
212: static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc)
213: {
214:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
215:   PetscInt  i;

217:   for (i = 0; i < jac->n_hmnull; i++) VecHYPRE_IJVectorDestroy(&jac->hmnull[i]);
218:   PetscFree(jac->hmnull);
219:   PetscFree(jac->phmnull);
220:   VecDestroy(&jac->hmnull_constant);
221:   jac->n_hmnull = 0;
222:   return 0;
223: }

225: static PetscErrorCode PCSetUp_HYPRE(PC pc)
226: {
227:   PC_HYPRE          *jac = (PC_HYPRE *)pc->data;
228:   Mat_HYPRE         *hjac;
229:   HYPRE_ParCSRMatrix hmat;
230:   HYPRE_ParVector    bv, xv;
231:   PetscBool          ishypre;

233:   if (!jac->hypre_type) PCHYPRESetType(pc, "boomeramg");

235:   PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre);
236:   if (!ishypre) {
237:     MatDestroy(&jac->hpmat);
238:     MatConvert(pc->pmat, MATHYPRE, MAT_INITIAL_MATRIX, &jac->hpmat);
239:   } else {
240:     PetscObjectReference((PetscObject)pc->pmat);
241:     MatDestroy(&jac->hpmat);
242:     jac->hpmat = pc->pmat;
243:   }
244:   /* allow debug */
245:   MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view");
246:   hjac = (Mat_HYPRE *)(jac->hpmat->data);

248:   /* special case for BoomerAMG */
249:   if (jac->setup == HYPRE_BoomerAMGSetup) {
250:     MatNullSpace mnull;
251:     PetscBool    has_const;
252:     PetscInt     bs, nvec, i;
253:     const Vec   *vecs;

255:     MatGetBlockSize(pc->pmat, &bs);
256:     if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);
257:     MatGetNearNullSpace(pc->mat, &mnull);
258:     if (mnull) {
259:       PCHYPREResetNearNullSpace_Private(pc);
260:       MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs);
261:       PetscMalloc1(nvec + 1, &jac->hmnull);
262:       PetscMalloc1(nvec + 1, &jac->phmnull);
263:       for (i = 0; i < nvec; i++) {
264:         VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i]);
265:         VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i]);
266:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]);
267:       }
268:       if (has_const) {
269:         MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL);
270:         VecSet(jac->hmnull_constant, 1);
271:         VecNormalize(jac->hmnull_constant, NULL);
272:         VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec]);
273:         VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec]);
274:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]);
275:         nvec++;
276:       }
277:       PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull);
278:       jac->n_hmnull = nvec;
279:     }
280:   }

282:   /* special case for AMS */
283:   if (jac->setup == HYPRE_AMSSetup) {
284:     Mat_HYPRE         *hm;
285:     HYPRE_ParCSRMatrix parcsr;
286:     if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
287:       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()");
288:     }
289:     if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim);
290:     if (jac->constants[0]) {
291:       HYPRE_ParVector ozz, zoz, zzo = NULL;
292:       PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz));
293:       PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz));
294:       if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo));
295:       PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo);
296:     }
297:     if (jac->coords[0]) {
298:       HYPRE_ParVector coords[3];
299:       coords[0] = NULL;
300:       coords[1] = NULL;
301:       coords[2] = NULL;
302:       if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
303:       if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
304:       if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
305:       PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
306:     }
308:     hm = (Mat_HYPRE *)(jac->G->data);
309:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
310:     PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr);
311:     if (jac->alpha_Poisson) {
312:       hm = (Mat_HYPRE *)(jac->alpha_Poisson->data);
313:       PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
314:       PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr);
315:     }
316:     if (jac->ams_beta_is_zero) {
317:       PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL);
318:     } else if (jac->beta_Poisson) {
319:       hm = (Mat_HYPRE *)(jac->beta_Poisson->data);
320:       PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
321:       PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr);
322:     } else if (jac->ams_beta_is_zero_part) {
323:       if (jac->interior) {
324:         HYPRE_ParVector interior = NULL;
325:         PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior));
326:         PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior);
327:       } else {
328:         jac->ams_beta_is_zero_part = PETSC_FALSE;
329:       }
330:     }
331:     if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) {
332:       PetscInt           i;
333:       HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
334:       if (jac->ND_PiFull) {
335:         hm = (Mat_HYPRE *)(jac->ND_PiFull->data);
336:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
337:       } else {
338:         nd_parcsrfull = NULL;
339:       }
340:       for (i = 0; i < 3; ++i) {
341:         if (jac->ND_Pi[i]) {
342:           hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data);
343:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
344:         } else {
345:           nd_parcsr[i] = NULL;
346:         }
347:       }
348:       PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
349:     }
350:   }
351:   /* special case for ADS */
352:   if (jac->setup == HYPRE_ADSSetup) {
353:     Mat_HYPRE         *hm;
354:     HYPRE_ParCSRMatrix parcsr;
355:     if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) {
356:       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations()");
357:     }
360:     if (jac->coords[0]) {
361:       HYPRE_ParVector coords[3];
362:       coords[0] = NULL;
363:       coords[1] = NULL;
364:       coords[2] = NULL;
365:       if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0]));
366:       if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1]));
367:       if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2]));
368:       PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]);
369:     }
370:     hm = (Mat_HYPRE *)(jac->G->data);
371:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
372:     PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr);
373:     hm = (Mat_HYPRE *)(jac->C->data);
374:     PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr));
375:     PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr);
376:     if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) {
377:       PetscInt           i;
378:       HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3];
379:       HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3];
380:       if (jac->RT_PiFull) {
381:         hm = (Mat_HYPRE *)(jac->RT_PiFull->data);
382:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull));
383:       } else {
384:         rt_parcsrfull = NULL;
385:       }
386:       for (i = 0; i < 3; ++i) {
387:         if (jac->RT_Pi[i]) {
388:           hm = (Mat_HYPRE *)(jac->RT_Pi[i]->data);
389:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i]));
390:         } else {
391:           rt_parcsr[i] = NULL;
392:         }
393:       }
394:       if (jac->ND_PiFull) {
395:         hm = (Mat_HYPRE *)(jac->ND_PiFull->data);
396:         PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull));
397:       } else {
398:         nd_parcsrfull = NULL;
399:       }
400:       for (i = 0; i < 3; ++i) {
401:         if (jac->ND_Pi[i]) {
402:           hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data);
403:           PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i]));
404:         } else {
405:           nd_parcsr[i] = NULL;
406:         }
407:       }
408:       PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]);
409:     }
410:   }
411:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
412:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv);
413:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv);
414:   PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv);
415:   return 0;
416: }
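/*
   Hedged example of exercising the near-null-space branch of PCSetUp_HYPRE() above: attach
   the vectors to the operator with MatSetNearNullSpace() before PCSetUp(); the option shown
   is registered in PCSetFromOptions_HYPRE_BoomerAMG() below. The names nv and modes are
   placeholders for user data.

     MatNullSpace nullsp;
     MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_FALSE, nv, modes, &nullsp);
     MatSetNearNullSpace(A, nullsp);
     MatNullSpaceDestroy(&nullsp);
     // run with, e.g., -pc_type hypre -pc_hypre_type boomeramg -pc_hypre_boomeramg_vec_interp_variant 2
*/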

418: static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x)
419: {
420:   PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
421:   Mat_HYPRE         *hjac = (Mat_HYPRE *)(jac->hpmat->data);
422:   HYPRE_ParCSRMatrix hmat;
423:   HYPRE_ParVector    jbv, jxv;

425:   PetscCitationsRegister(hypreCitation, &cite);
426:   if (!jac->applyrichardson) VecSet(x, 0.0);
427:   VecHYPRE_IJVectorPushVecRead(hjac->b, b);
428:   if (jac->applyrichardson) VecHYPRE_IJVectorPushVec(hjac->x, x);
429:   else VecHYPRE_IJVectorPushVecWrite(hjac->x, x);
430:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
431:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
432:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);
433:   PetscStackCallExternalVoid(
434:     "Hypre solve", do {
435:       HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv);
436:       if (hierr) {
438:         hypre__global_error = 0;
439:       }
440:     } while (0));

442:   if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv);
443:   VecHYPRE_IJVectorPopVec(hjac->x);
444:   VecHYPRE_IJVectorPopVec(hjac->b);
445:   return 0;
446: }

448: static PetscErrorCode PCReset_HYPRE(PC pc)
449: {
450:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

452:   MatDestroy(&jac->hpmat);
453:   MatDestroy(&jac->G);
454:   MatDestroy(&jac->C);
455:   MatDestroy(&jac->alpha_Poisson);
456:   MatDestroy(&jac->beta_Poisson);
457:   MatDestroy(&jac->RT_PiFull);
458:   MatDestroy(&jac->RT_Pi[0]);
459:   MatDestroy(&jac->RT_Pi[1]);
460:   MatDestroy(&jac->RT_Pi[2]);
461:   MatDestroy(&jac->ND_PiFull);
462:   MatDestroy(&jac->ND_Pi[0]);
463:   MatDestroy(&jac->ND_Pi[1]);
464:   MatDestroy(&jac->ND_Pi[2]);
465:   VecHYPRE_IJVectorDestroy(&jac->coords[0]);
466:   VecHYPRE_IJVectorDestroy(&jac->coords[1]);
467:   VecHYPRE_IJVectorDestroy(&jac->coords[2]);
468:   VecHYPRE_IJVectorDestroy(&jac->constants[0]);
469:   VecHYPRE_IJVectorDestroy(&jac->constants[1]);
470:   VecHYPRE_IJVectorDestroy(&jac->constants[2]);
471:   VecHYPRE_IJVectorDestroy(&jac->interior);
472:   PCHYPREResetNearNullSpace_Private(pc);
473:   jac->ams_beta_is_zero      = PETSC_FALSE;
474:   jac->ams_beta_is_zero_part = PETSC_FALSE;
475:   jac->dim                   = 0;
476:   return 0;
477: }

479: static PetscErrorCode PCDestroy_HYPRE(PC pc)
480: {
481:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

483:   PCReset_HYPRE(pc);
484:   if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver);
485:   PetscFree(jac->hypre_type);
486: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
487:   PetscFree(jac->spgemm_type);
488: #endif
489:   if (jac->comm_hypre != MPI_COMM_NULL) PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre);
490:   PetscFree(pc->data);

492:   PetscObjectChangeTypeName((PetscObject)pc, 0);
493:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL);
494:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL);
495:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL);
496:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL);
497:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL);
498:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL);
499:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL);
500:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL);
501:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL);
502:   PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL);
503:   PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL);
504:   PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL);
505:   PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL);
506:   PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL);
507:   return 0;
508: }

510: static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems *PetscOptionsObject)
511: {
512:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
513:   PetscBool flag;

515:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options");
516:   PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag);
517:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter);
518:   PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag);
519:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol);
520:   PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag);
521:   if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize);
522:   PetscOptionsHeadEnd();
523:   return 0;
524: }
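/*
   Example command line exercising the Pilut options registered above (values are
   illustrative only):
     -pc_type hypre -pc_hypre_type pilut -pc_hypre_pilut_maxiter 20 -pc_hypre_pilut_tol 1e-3 -pc_hypre_pilut_factorrowsize 30
*/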

526: static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer)
527: {
528:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
529:   PetscBool iascii;

531:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
532:   if (iascii) {
533:     PetscViewerASCIIPrintf(viewer, "  HYPRE Pilut preconditioning\n");
534:     if (jac->maxiter != PETSC_DEFAULT) {
535:       PetscViewerASCIIPrintf(viewer, "    maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter);
536:     } else {
537:       PetscViewerASCIIPrintf(viewer, "    default maximum number of iterations \n");
538:     }
539:     if (jac->tol != PETSC_DEFAULT) {
540:       PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->tol);
541:     } else {
542:       PetscViewerASCIIPrintf(viewer, "    default drop tolerance \n");
543:     }
544:     if (jac->factorrowsize != PETSC_DEFAULT) {
545:       PetscViewerASCIIPrintf(viewer, "    factor row size %" PetscInt_FMT "\n", jac->factorrowsize);
546:     } else {
547:       PetscViewerASCIIPrintf(viewer, "    default factor row size \n");
548:     }
549:   }
550:   return 0;
551: }

553: static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems *PetscOptionsObject)
554: {
555:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
556:   PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE;

558:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options");
559:   PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag);
560:   if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level);

562:   PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag);
563:   if (flag) {
564:     PetscMPIInt size;

566:     MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);
568:     PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance);
569:   }

571:   PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag);
572:   if (flag) {
573:     jac->eu_bj = eu_bj ? 1 : 0;
574:     PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj);
575:   }
576:   PetscOptionsHeadEnd();
577:   return 0;
578: }
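/*
   Example command line exercising the Euclid options registered above (values are
   illustrative only):
     -pc_type hypre -pc_hypre_type euclid -pc_hypre_euclid_level 2 -pc_hypre_euclid_droptolerance 1e-4 -pc_hypre_euclid_bj
*/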

580: static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer)
581: {
582:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
583:   PetscBool iascii;

585:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
586:   if (iascii) {
587:     PetscViewerASCIIPrintf(viewer, "  HYPRE Euclid preconditioning\n");
588:     if (jac->eu_level != PETSC_DEFAULT) {
589:       PetscViewerASCIIPrintf(viewer, "    factorization levels %" PetscInt_FMT "\n", jac->eu_level);
590:     } else {
591:       PetscViewerASCIIPrintf(viewer, "    default factorization levels \n");
592:     }
593:     PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->eu_droptolerance);
594:     PetscViewerASCIIPrintf(viewer, "    use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj);
595:   }
596:   return 0;
597: }

599: static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x)
600: {
601:   PC_HYPRE          *jac  = (PC_HYPRE *)pc->data;
602:   Mat_HYPRE         *hjac = (Mat_HYPRE *)(jac->hpmat->data);
603:   HYPRE_ParCSRMatrix hmat;
604:   HYPRE_ParVector    jbv, jxv;

606:   PetscCitationsRegister(hypreCitation, &cite);
607:   VecSet(x, 0.0);
608:   VecHYPRE_IJVectorPushVecRead(hjac->x, b);
609:   VecHYPRE_IJVectorPushVecWrite(hjac->b, x);

611:   PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat);
612:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv);
613:   PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv);

615:   PetscStackCallExternalVoid(
616:     "Hypre Transpose solve", do {
617:       HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv);
618:       if (hierr) {
619:         /* error code of 1 in BoomerAMG merely means convergence not achieved */
621:         hypre__global_error = 0;
622:       }
623:     } while (0));

625:   VecHYPRE_IJVectorPopVec(hjac->x);
626:   VecHYPRE_IJVectorPopVec(hjac->b);
627:   return 0;
628: }
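/*
   Editorial note: this transpose application is what KSPSolveTranspose() reaches through
   PCApplyTranspose(); a minimal sketch with BoomerAMG as the preconditioner:
     KSPSetOperators(ksp, A, A);
     KSPSetFromOptions(ksp);               // -pc_type hypre -pc_hypre_type boomeramg
     KSPSolveTranspose(ksp, b, x);
*/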

630: static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[])
631: {
632:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
633:   PetscBool flag;

635: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
636:   if (jac->spgemm_type) {
637:     PetscStrcmp(jac->spgemm_type, name, &flag);
639:     return 0;
640:   } else {
641:     PetscStrallocpy(name, &jac->spgemm_type);
642:   }
643:   PetscStrcmp("cusparse", jac->spgemm_type, &flag);
644:   if (flag) {
645:     PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1);
646:     return 0;
647:   }
648:   PetscStrcmp("hypre", jac->spgemm_type, &flag);
649:   if (flag) {
650:     PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0);
651:     return 0;
652:   }
653:   jac->spgemm_type = NULL;
654:   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEMM type %s; Choices are cusparse, hypre", name);
655: #endif
656: }
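/*
   Illustrative ways to select the SpGEMM backend handled above (assumes hypre >= 2.23 built
   with GPU support): either the option registered in PCSetFromOptions_HYPRE_BoomerAMG(),
     -pc_mg_galerkin_mat_product_algorithm hypre
   or, from code, the public wrapper that dispatches to this routine:
     PCMGGalerkinSetMatProductAlgorithm(pc, "cusparse");
*/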

658: static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[])
659: {
660:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

663: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
664:   *spgemm = jac->spgemm_type;
665: #endif
666:   return 0;
667: }

669: static const char *HYPREBoomerAMGCycleType[]   = {"", "V", "W"};
670: static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"};
671: static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"};
672: /* The following corresponds to HYPRE_BoomerAMGSetRelaxType which has many missing numbers in the enum */
673: static const char *HYPREBoomerAMGSmoothType[] = {"Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"};
674: static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"};
675: static const char    *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"};
676: static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems *PetscOptionsObject)
677: {
678:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
679:   PetscInt    bs, n, indx, level;
680:   PetscBool   flg, tmp_truth;
681:   double      tmpdbl, twodbl[2];
682:   const char *symtlist[]           = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};
683:   const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"};

685:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options");
686:   PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg);
687:   if (flg) {
688:     jac->cycletype = indx + 1;
689:     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
690:   }
691:   PetscOptionsInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg);
692:   if (flg) {
694:     PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
695:   }
696:   PetscOptionsInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg);
697:   if (flg) {
699:     PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
700:   }
701:   PetscOptionsReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg);
702:   if (flg) {
704:     PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
705:   }
706:   bs = 1;
707:   if (pc->pmat) MatGetBlockSize(pc->pmat, &bs);
708:   PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg);
709:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs);

711:   PetscOptionsReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg);
712:   if (flg) {
714:     PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
715:   }

717:   PetscOptionsInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg);
718:   if (flg) {
720:     PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
721:   }

723:   PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels);
724:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);

726:   PetscOptionsInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg);
727:   if (flg) {
729:     PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
730:   }

732:   PetscOptionsReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg);
733:   if (flg) {
735:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
736:   }
737:   PetscOptionsReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg);
738:   if (flg) {
741:     PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
742:   }

744:   /* Grid sweeps */
745:   PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg);
746:   if (flg) {
747:     PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx);
748:     /* modify the jac structure so we can view the updated options with PC_View */
749:     jac->gridsweeps[0] = indx;
750:     jac->gridsweeps[1] = indx;
751:     /*defaults coarse to 1 */
752:     jac->gridsweeps[2] = 1;
753:   }
754:   PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg);
755:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening);
756:   PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg);
757:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag);
758:   PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg);
759:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant);
760:   PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg);
761:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax);
762:   PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg);
763:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth);
764:   PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg);
765:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine);
766:   PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg);
767:   if (flg) {
768:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1);
769:     jac->gridsweeps[0] = indx;
770:   }
771:   PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg);
772:   if (flg) {
773:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2);
774:     jac->gridsweeps[1] = indx;
775:   }
776:   PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg);
777:   if (flg) {
778:     PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3);
779:     jac->gridsweeps[2] = indx;
780:   }

782:   /* Smooth type */
783:   PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg);
784:   if (flg) {
785:     jac->smoothtype = indx;
786:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 6);
787:     jac->smoothnumlevels = 25;
788:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25);
789:   }

791:   /* Number of smoothing levels */
792:   PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg);
793:   if (flg && (jac->smoothtype != -1)) {
794:     jac->smoothnumlevels = indx;
795:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx);
796:   }

798:   /* Number of levels for ILU(k) for Euclid */
799:   PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg);
800:   if (flg && (jac->smoothtype == 3)) {
801:     jac->eu_level = indx;
802:     PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx);
803:   }

805:   /* Filter for ILU(k) for Euclid */
806:   double droptolerance;
807:   PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg);
808:   if (flg && (jac->smoothtype == 3)) {
809:     jac->eu_droptolerance = droptolerance;
810:     PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, droptolerance);
811:   }

813:   /* Use Block Jacobi ILUT for Euclid */
814:   PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg);
815:   if (flg && (jac->smoothtype == 3)) {
816:     jac->eu_bj = tmp_truth;
817:     PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj);
818:   }

820:   /* Relax type */
821:   PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg);
822:   if (flg) {
823:     jac->relaxtype[0] = jac->relaxtype[1] = indx;
824:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx);
825:     /* by default, coarse type set to 9 */
826:     jac->relaxtype[2] = 9;
827:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3);
828:   }
829:   PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg);
830:   if (flg) {
831:     jac->relaxtype[0] = indx;
832:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1);
833:   }
834:   PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg);
835:   if (flg) {
836:     jac->relaxtype[1] = indx;
837:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2);
838:   }
839:   PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg);
840:   if (flg) {
841:     jac->relaxtype[2] = indx;
842:     PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3);
843:   }

845:   /* Relaxation Weight */
846:   PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg);
847:   if (flg) {
848:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl);
849:     jac->relaxweight = tmpdbl;
850:   }

852:   n         = 2;
853:   twodbl[0] = twodbl[1] = 1.0;
854:   PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg);
855:   if (flg) {
856:     if (n == 2) {
857:       indx = (int)PetscAbsReal(twodbl[1]);
858:       PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx);
859:     } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
860:   }

862:   /* Outer relaxation Weight */
863:   PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg);
864:   if (flg) {
865:     PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl);
866:     jac->outerrelaxweight = tmpdbl;
867:   }

869:   n         = 2;
870:   twodbl[0] = twodbl[1] = 1.0;
871:   PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg);
872:   if (flg) {
873:     if (n == 2) {
874:       indx = (int)PetscAbsReal(twodbl[1]);
875:       PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx);
876:     } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
877:   }

879:   /* the Relax Order */
880:   PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg);

882:   if (flg && tmp_truth) {
883:     jac->relaxorder = 0;
884:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
885:   }
886:   PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg);
887:   if (flg) {
888:     jac->measuretype = indx;
889:     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
890:   }
891:   /* update list length 3/07 */
892:   PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg);
893:   if (flg) {
894:     jac->coarsentype = indx;
895:     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
896:   }

898:   PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg);
899:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
900:   PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg);
901:   if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
902: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
903:   // global parameter but is closely associated with BoomerAMG
904:   PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg);
905:   if (!flg) indx = 0;
906:   PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx]);
907: #endif
908:   /* AIR */
909: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
910:   PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL);
911:   PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
912:   if (jac->Rtype) {
913:     jac->interptype = 100; /* no way we can pass this with strings... Set it as default as in MFEM, then users can still customize it back to a different one */

915:     PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL);
916:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);

918:     PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL);
919:     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);

921:     PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL);
922:     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);

924:     PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL);
925:     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
926:   }
927: #endif

929: #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9)
931: #endif

933:   /* new 3/07 */
934:   PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg);
935:   if (flg || jac->Rtype) {
936:     if (flg) jac->interptype = indx;
937:     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
938:   }

940:   PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg);
941:   if (flg) {
942:     level = 3;
943:     PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL);

945:     jac->printstatistics = PETSC_TRUE;
946:     PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level);
947:   }

949:   PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg);
950:   if (flg) {
951:     level = 3;
952:     PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL);

954:     jac->printstatistics = PETSC_TRUE;
955:     PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level);
956:   }

958:   PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg);
959:   if (flg && tmp_truth) {
960:     PetscInt tmp_int;
961:     PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg);
962:     if (flg) jac->nodal_relax_levels = tmp_int;
963:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6);
964:     PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1);
965:     PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0);
966:     PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels);
967:   }

969:   PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL);
970:   PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);

972:   /* options for ParaSails solvers */
973:   PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg);
974:   if (flg) {
975:     jac->symt = indx;
976:     PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt);
977:   }

979:   PetscOptionsHeadEnd();
980:   return 0;
981: }
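/*
   Example command line combining several of the BoomerAMG options registered above
   (illustrative values; entries such as HMIS and ext+i come from the tables at the top of
   this function):
     -pc_type hypre -pc_hypre_type boomeramg -pc_hypre_boomeramg_strong_threshold 0.5
       -pc_hypre_boomeramg_coarsen_type HMIS -pc_hypre_boomeramg_interp_type ext+i
       -pc_hypre_boomeramg_P_max 4 -pc_hypre_boomeramg_agg_nl 1
*/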

983: static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
984: {
985:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
986:   HYPRE_Int oits;

988:   PetscCitationsRegister(hypreCitation, &cite);
989:   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter);
990:   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol);
991:   jac->applyrichardson = PETSC_TRUE;
992:   PCApply_HYPRE(pc, b, y);
993:   jac->applyrichardson = PETSC_FALSE;
994:   PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits);
995:   *outits = oits;
996:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
997:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
998:   PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
999:   PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1000:   return 0;
1001: }
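/*
   Editorial sketch: KSPRICHARDSON uses this routine, so hypre performs up to its*maxiter
   AMG iterations per outer Richardson iteration, e.g.
     -ksp_type richardson -pc_type hypre -pc_hypre_type boomeramg -pc_hypre_boomeramg_max_iter 2
*/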

1003: static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer)
1004: {
1005:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1006:   PetscBool iascii;

1008:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
1009:   if (iascii) {
1010:     PetscViewerASCIIPrintf(viewer, "  HYPRE BoomerAMG preconditioning\n");
1011:     PetscViewerASCIIPrintf(viewer, "    Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype]);
1012:     PetscViewerASCIIPrintf(viewer, "    Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels);
1013:     PetscViewerASCIIPrintf(viewer, "    Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter);
1014:     PetscViewerASCIIPrintf(viewer, "    Convergence tolerance PER hypre call %g\n", (double)jac->tol);
1015:     PetscViewerASCIIPrintf(viewer, "    Threshold for strong coupling %g\n", (double)jac->strongthreshold);
1016:     PetscViewerASCIIPrintf(viewer, "    Interpolation truncation factor %g\n", (double)jac->truncfactor);
1017:     PetscViewerASCIIPrintf(viewer, "    Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax);
1018:     if (jac->interp_refine) PetscViewerASCIIPrintf(viewer, "    Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine);
1019:     PetscViewerASCIIPrintf(viewer, "    Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl);
1020:     PetscViewerASCIIPrintf(viewer, "    Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths);

1022:     PetscViewerASCIIPrintf(viewer, "    Maximum row sums %g\n", (double)jac->maxrowsum);

1024:     PetscViewerASCIIPrintf(viewer, "    Sweeps down         %" PetscInt_FMT "\n", jac->gridsweeps[0]);
1025:     PetscViewerASCIIPrintf(viewer, "    Sweeps up           %" PetscInt_FMT "\n", jac->gridsweeps[1]);
1026:     PetscViewerASCIIPrintf(viewer, "    Sweeps on coarse    %" PetscInt_FMT "\n", jac->gridsweeps[2]);

1028:     PetscViewerASCIIPrintf(viewer, "    Relax down          %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]]);
1029:     PetscViewerASCIIPrintf(viewer, "    Relax up            %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]]);
1030:     PetscViewerASCIIPrintf(viewer, "    Relax on coarse     %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]]);

1032:     PetscViewerASCIIPrintf(viewer, "    Relax weight  (all)      %g\n", (double)jac->relaxweight);
1033:     PetscViewerASCIIPrintf(viewer, "    Outer relax weight (all) %g\n", (double)jac->outerrelaxweight);

1035:     if (jac->relaxorder) {
1036:       PetscViewerASCIIPrintf(viewer, "    Using CF-relaxation\n");
1037:     } else {
1038:       PetscViewerASCIIPrintf(viewer, "    Not using CF-relaxation\n");
1039:     }
1040:     if (jac->smoothtype != -1) {
1041:       PetscViewerASCIIPrintf(viewer, "    Smooth type          %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype]);
1042:       PetscViewerASCIIPrintf(viewer, "    Smooth num levels    %" PetscInt_FMT "\n", jac->smoothnumlevels);
1043:     } else {
1044:       PetscViewerASCIIPrintf(viewer, "    Not using more complex smoothers.\n");
1045:     }
1046:     if (jac->smoothtype == 3) {
1047:       PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level);
1048:       PetscViewerASCIIPrintf(viewer, "    Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance);
1049:       PetscViewerASCIIPrintf(viewer, "    Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj);
1050:     }
1051:     PetscViewerASCIIPrintf(viewer, "    Measure type        %s\n", HYPREBoomerAMGMeasureType[jac->measuretype]);
1052:     PetscViewerASCIIPrintf(viewer, "    Coarsen type        %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype]);
1053:     PetscViewerASCIIPrintf(viewer, "    Interpolation type  %s\n", jac->interptype != 100 ? HYPREBoomerAMGInterpType[jac->interptype] : "1pt");
1054:     if (jac->nodal_coarsening) PetscViewerASCIIPrintf(viewer, "    Using nodal coarsening with HYPRE_BoomerAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening);
1055:     if (jac->vec_interp_variant) {
1056:       PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant);
1057:       PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax);
1058:       PetscViewerASCIIPrintf(viewer, "    HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth);
1059:     }
1060:     if (jac->nodal_relax) PetscViewerASCIIPrintf(viewer, "    Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels);
1061: #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
1062:     PetscViewerASCIIPrintf(viewer, "    SpGEMM type         %s\n", jac->spgemm_type);
1063: #endif
1064:     /* AIR */
1065:     if (jac->Rtype) {
1066:       PetscViewerASCIIPrintf(viewer, "    Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype);
1067:       PetscViewerASCIIPrintf(viewer, "      Threshold for R %g\n", (double)jac->Rstrongthreshold);
1068:       PetscViewerASCIIPrintf(viewer, "      Filter for R %g\n", (double)jac->Rfilterthreshold);
1069:       PetscViewerASCIIPrintf(viewer, "      A drop tolerance %g\n", (double)jac->Adroptol);
1070:       PetscViewerASCIIPrintf(viewer, "      A drop type %" PetscInt_FMT "\n", jac->Adroptype);
1071:     }
1072:   }
1073:   return 0;
1074: }

1076: static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems *PetscOptionsObject)
1077: {
1078:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1079:   PetscInt    indx;
1080:   PetscBool   flag;
1081:   const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};

1083:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options");
1084:   PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of number of levels", "None", jac->nlevels, &jac->nlevels, 0);
1085:   PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag);
1086:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);

1088:   PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag);
1089:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);

1091:   PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag);
1092:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);

1094:   PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag);
1095:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);

1097:   PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag);
1098:   if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);

1100:   PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag);
1101:   if (flag) {
1102:     jac->symt = indx;
1103:     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1104:   }

1106:   PetscOptionsHeadEnd();
1107:   return 0;
1108: }

1110: static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer)
1111: {
1112:   PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
1113:   PetscBool   iascii;
1114:   const char *symt = 0;

1116:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
1117:   if (iascii) {
1118:     PetscViewerASCIIPrintf(viewer, "  HYPRE ParaSails preconditioning\n");
1119:     PetscViewerASCIIPrintf(viewer, "    nlevels %" PetscInt_FMT "\n", jac->nlevels);
1120:     PetscViewerASCIIPrintf(viewer, "    threshold %g\n", (double)jac->threshold);
1121:     PetscViewerASCIIPrintf(viewer, "    filter %g\n", (double)jac->filter);
1122:     PetscViewerASCIIPrintf(viewer, "    load balance %g\n", (double)jac->loadbal);
1123:     PetscViewerASCIIPrintf(viewer, "    reuse nonzero structure %s\n", PetscBools[jac->ruse]);
1124:     PetscViewerASCIIPrintf(viewer, "    print info to screen %s\n", PetscBools[jac->logging]);
1125:     if (!jac->symt) symt = "nonsymmetric matrix and preconditioner";
1126:     else if (jac->symt == 1) symt = "SPD matrix and preconditioner";
1127:     else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner";
1128:     else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt);
1129:     PetscViewerASCIIPrintf(viewer, "    %s\n", symt);
1130:   }
1131:   return 0;
1132: }

1134: static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems *PetscOptionsObject)
1135: {
1136:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1137:   PetscInt  n;
1138:   PetscBool flag, flag2, flag3, flag4;

1140:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options");
1141:   PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag);
1142:   if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
1143:   PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag);
1144:   if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
1145:   PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag);
1146:   if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
1147:   PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag);
1148:   if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
1149:   PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag);
1150:   PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2);
1151:   PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3);
1152:   PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4);
1153:   if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1154:   PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag);
1155:   n = 5;
1156:   PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2);
1157:   if (flag || flag2) {
1158:     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
1159:                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
1160:                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
1161:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
1162:                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
1163:   }
1164:   PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag);
1165:   n = 5;
1166:   PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2);
1167:   if (flag || flag2) {
1168:     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1169:                       jac->as_amg_beta_opts[1],                                           /* AMG agg_levels */
1170:                       jac->as_amg_beta_opts[2],                                           /* AMG relax_type */
1171:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                   /* AMG interp_type */
1172:                       jac->as_amg_beta_opts[4]);                                          /* AMG Pmax */
1173:   }
1174:   PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag);
1175:   if (flag) { /* override HYPRE's default only if the option is used */
1176:     PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq);
1177:   }
1178:   PetscOptionsHeadEnd();
1179:   return 0;
1180: }

1182: static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer)
1183: {
1184:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1185:   PetscBool iascii;

1187:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
1188:   if (iascii) {
1189:     PetscViewerASCIIPrintf(viewer, "  HYPRE AMS preconditioning\n");
1190:     PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter);
1191:     PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type);
1192:     PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol);
1193:     PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type);
1194:     PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times);
1195:     PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight);
1196:     PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega);
1197:     if (jac->alpha_Poisson) {
1198:       PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (passed in by user)\n");
1199:     } else {
1200:       PetscViewerASCIIPrintf(viewer, "    vector Poisson solver (computed) \n");
1201:     }
1202:     PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]);
1203:     PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]);
1204:     PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]);
1205:     PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]);
1206:     PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]);
1207:     PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta);
1208:     if (!jac->ams_beta_is_zero) {
1209:       if (jac->beta_Poisson) {
1210:         PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (passed in by user)\n");
1211:       } else {
1212:         PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver (computed) \n");
1213:       }
1214:       PetscViewerASCIIPrintf(viewer, "        boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]);
1215:       PetscViewerASCIIPrintf(viewer, "        boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]);
1216:       PetscViewerASCIIPrintf(viewer, "        boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]);
1217:       PetscViewerASCIIPrintf(viewer, "        boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]);
1218:       PetscViewerASCIIPrintf(viewer, "        boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]);
1219:       PetscViewerASCIIPrintf(viewer, "        boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta);
1220:       if (jac->ams_beta_is_zero_part) PetscViewerASCIIPrintf(viewer, "        compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq);
1221:     } else {
1222:       PetscViewerASCIIPrintf(viewer, "    scalar Poisson solver not used (zero-conductivity everywhere) \n");
1223:     }
1224:   }
1225:   return 0;
1226: }

1228: static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems *PetscOptionsObject)
1229: {
1230:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1231:   PetscInt  n;
1232:   PetscBool flag, flag2, flag3, flag4;

1234:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options");
1235:   PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag);
1236:   if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
1237:   PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag);
1238:   if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
1239:   PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag);
1240:   if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type);
1241:   PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag);
1242:   if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
1243:   PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag);
1244:   PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2);
1245:   PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3);
1246:   PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4);
1247:   if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1248:   PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag);
1249:   n = 5;
1250:   PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2);
1251:   PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3);
1252:   if (flag || flag2 || flag3) {
1253:     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */
1254:                       jac->as_amg_alpha_opts[0],                                 /* AMG coarsen type */
1255:                       jac->as_amg_alpha_opts[1],                                 /* AMG agg_levels */
1256:                       jac->as_amg_alpha_opts[2],                                 /* AMG relax_type */
1257:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],        /* AMG interp_type */
1258:                       jac->as_amg_alpha_opts[4]);                                /* AMG Pmax */
1259:   }
1260:   PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag);
1261:   n = 5;
1262:   PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2);
1263:   if (flag || flag2) {
1264:     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
1265:                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
1266:                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
1267:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
1268:                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
1269:   }
1270:   PetscOptionsHeadEnd();
1271:   return 0;
1272: }

1274: static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer)
1275: {
1276:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1277:   PetscBool iascii;

1279:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
1280:   if (iascii) {
1281:     PetscViewerASCIIPrintf(viewer, "  HYPRE ADS preconditioning\n");
1282:     PetscViewerASCIIPrintf(viewer, "    subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter);
1283:     PetscViewerASCIIPrintf(viewer, "    subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type);
1284:     PetscViewerASCIIPrintf(viewer, "    subspace iteration tolerance %g\n", (double)jac->as_tol);
1285:     PetscViewerASCIIPrintf(viewer, "    smoother type %" PetscInt_FMT "\n", jac->as_relax_type);
1286:     PetscViewerASCIIPrintf(viewer, "    number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times);
1287:     PetscViewerASCIIPrintf(viewer, "    smoother weight %g\n", (double)jac->as_relax_weight);
1288:     PetscViewerASCIIPrintf(viewer, "    smoother omega %g\n", (double)jac->as_omega);
1289:     PetscViewerASCIIPrintf(viewer, "    AMS solver using boomerAMG\n");
1290:     PetscViewerASCIIPrintf(viewer, "        subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type);
1291:     PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]);
1292:     PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]);
1293:     PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]);
1294:     PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]);
1295:     PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]);
1296:     PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_alpha_theta);
1297:     PetscViewerASCIIPrintf(viewer, "    vector Poisson solver using boomerAMG\n");
1298:     PetscViewerASCIIPrintf(viewer, "        coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]);
1299:     PetscViewerASCIIPrintf(viewer, "        levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]);
1300:     PetscViewerASCIIPrintf(viewer, "        relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]);
1301:     PetscViewerASCIIPrintf(viewer, "        interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]);
1302:     PetscViewerASCIIPrintf(viewer, "        max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]);
1303:     PetscViewerASCIIPrintf(viewer, "        strength threshold %g\n", (double)jac->as_amg_beta_theta);
1304:   }
1305:   return 0;
1306: }

1308: static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G)
1309: {
1310:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1311:   PetscBool ishypre;

1313:   PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre);
1314:   if (ishypre) {
1315:     PetscObjectReference((PetscObject)G);
1316:     MatDestroy(&jac->G);
1317:     jac->G = G;
1318:   } else {
1319:     MatDestroy(&jac->G);
1320:     MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G);
1321:   }
1322:   return 0;
1323: }

1325: /*@
1326:    PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads

1328:    Collective

1330:    Input Parameters:
1331: +  pc - the preconditioning context
1332: -  G - the discrete gradient

1334:    Level: intermediate

1336:    Notes:
1337:     G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh

1339:     Each row of G has 2 nonzeros, with column indexes being the global indexes of the edge's endpoints: matrix entries are +1 and -1 depending on edge orientation
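
   Example Usage:
   A minimal sketch (illustrative only; nedges_local, nvertices_local, and the mesh queries edge_tail()/edge_head() are hypothetical and stand in for whatever the application's mesh provides):
.vb
   Mat      G;
   PetscInt estart, eend;
   // one row per edge, one column per vertex, 2 nonzeros per row
   MatCreateAIJ(PETSC_COMM_WORLD, nedges_local, nvertices_local, PETSC_DETERMINE, PETSC_DETERMINE, 2, NULL, 2, NULL, &G);
   MatGetOwnershipRange(G, &estart, &eend);
   for (PetscInt e = estart; e < eend; e++) {
     PetscInt    cols[2] = {edge_tail(e), edge_head(e)};  // hypothetical mesh queries
     PetscScalar vals[2] = {-1.0, 1.0};                   // sign follows the edge orientation
     MatSetValues(G, 1, &e, 2, cols, vals, INSERT_VALUES);
   }
   MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY);
   PCHYPRESetDiscreteGradient(pc, G);
.ve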

1341:    Developer Note:
1342:    This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1344: .seealso: `PCHYPRE`, `PCHYPRESetDiscreteCurl()`
1345: @*/
1346: PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G)
1347: {
1351:   PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G));
1352:   return 0;
1353: }

1355: static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C)
1356: {
1357:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1358:   PetscBool ishypre;

1360:   PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre);
1361:   if (ishypre) {
1362:     PetscObjectReference((PetscObject)C);
1363:     MatDestroy(&jac->C);
1364:     jac->C = C;
1365:   } else {
1366:     MatDestroy(&jac->C);
1367:     MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C);
1368:   }
1369:   return 0;
1370: }

1372: /*@
1373:    PCHYPRESetDiscreteCurl - Set discrete curl matrix for `PCHYPRE` type of ads

1375:    Collective

1377:    Input Parameters:
1378: +  pc - the preconditioning context
1379: -  C - the discrete curl

1381:    Level: intermediate

1383:    Notes:
1384:     C should have as many rows as the number of faces and as many columns as the number of edges in the mesh

1386:     Each row of C has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge: matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation
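
   Example Usage:
   A minimal sketch of the ADS case (G and C are assumed to have been assembled by the application; ADS also needs the discrete gradient):
.vb
   PCSetType(pc, PCHYPRE);
   PCHYPRESetType(pc, "ads");
   PCHYPRESetDiscreteGradient(pc, G);
   PCHYPRESetDiscreteCurl(pc, C);
.ve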

1388:    Developer Note:
1389:    This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1391:    If this is only for the `PCHYPRE` type of ads, it should be called `PCHYPREADSSetDiscreteCurl()`

1393: .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`
1394: @*/
1395: PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C)
1396: {
1400:   PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C));
1401:   return 0;
1402: }

1404: static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1405: {
1406:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1407:   PetscBool ishypre;
1408:   PetscInt  i;

1410:   MatDestroy(&jac->RT_PiFull);
1411:   MatDestroy(&jac->ND_PiFull);
1412:   for (i = 0; i < 3; ++i) {
1413:     MatDestroy(&jac->RT_Pi[i]);
1414:     MatDestroy(&jac->ND_Pi[i]);
1415:   }

1417:   jac->dim = dim;
1418:   if (RT_PiFull) {
1419:     PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre);
1420:     if (ishypre) {
1421:       PetscObjectReference((PetscObject)RT_PiFull);
1422:       jac->RT_PiFull = RT_PiFull;
1423:     } else {
1424:       MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull);
1425:     }
1426:   }
1427:   if (RT_Pi) {
1428:     for (i = 0; i < dim; ++i) {
1429:       if (RT_Pi[i]) {
1430:         PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre);
1431:         if (ishypre) {
1432:           PetscObjectReference((PetscObject)RT_Pi[i]);
1433:           jac->RT_Pi[i] = RT_Pi[i];
1434:         } else {
1435:           MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i]);
1436:         }
1437:       }
1438:     }
1439:   }
1440:   if (ND_PiFull) {
1441:     PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre);
1442:     if (ishypre) {
1443:       PetscObjectReference((PetscObject)ND_PiFull);
1444:       jac->ND_PiFull = ND_PiFull;
1445:     } else {
1446:       MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull);
1447:     }
1448:   }
1449:   if (ND_Pi) {
1450:     for (i = 0; i < dim; ++i) {
1451:       if (ND_Pi[i]) {
1452:         PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre);
1453:         if (ishypre) {
1454:           PetscObjectReference((PetscObject)ND_Pi[i]);
1455:           jac->ND_Pi[i] = ND_Pi[i];
1456:         } else {
1457:           MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i]);
1458:         }
1459:       }
1460:     }
1461:   }

1463:   return 0;
1464: }

1466: /*@
1467:    PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads

1469:    Collective

1471:    Input Parameters:
1472: +  pc - the preconditioning context
1473: .  dim - the dimension of the problem, only used in AMS
1474: .  RT_PiFull - Raviart-Thomas interpolation matrix
1475: .  RT_Pi - x/y/z component of Raviart-Thomas interpolation matrix
1476: .  ND_PiFull - Nedelec interpolation matrix
1477: -  ND_Pi - x/y/z component of Nedelec interpolation matrix

1479:    Level: intermediate

1481:    Notes:
1482:     For AMS, only the Nedelec interpolation matrices are needed; the Raviart-Thomas interpolation matrices can be set to NULL.

1484:     For ADS, both types of interpolation matrices are needed.
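
   Example Usage:
   A minimal sketch of the AMS case (the Nedelec interpolation matrices ND_PiFull and ND_Pi[] are assumed to have been assembled elsewhere, e.g. by the discretization package):
.vb
   Mat ND_PiFull;   // full Nedelec interpolation
   Mat ND_Pi[3];    // x/y/z components of the Nedelec interpolation
   // ... assemble ND_PiFull and ND_Pi[0 .. dim-1] ...
   PCHYPRESetInterpolations(pc, dim, NULL, NULL, ND_PiFull, ND_Pi);  // RT matrices not needed for AMS
.ve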

1486:    Developer Note:
1487:    This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1489: .seealso: `PCHYPRE`
1490: @*/
1491: PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
1492: {
1493:   PetscInt i;

1496:   if (RT_PiFull) {
1499:   }
1500:   if (RT_Pi) {
1502:     for (i = 0; i < dim; ++i) {
1503:       if (RT_Pi[i]) {
1506:       }
1507:     }
1508:   }
1509:   if (ND_PiFull) {
1512:   }
1513:   if (ND_Pi) {
1515:     for (i = 0; i < dim; ++i) {
1516:       if (ND_Pi[i]) {
1519:       }
1520:     }
1521:   }
1522:   PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi));
1523:   return 0;
1524: }

1526: static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha)
1527: {
1528:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1529:   PetscBool ishypre;

1531:   PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre);
1532:   if (ishypre) {
1533:     if (isalpha) {
1534:       PetscObjectReference((PetscObject)A);
1535:       MatDestroy(&jac->alpha_Poisson);
1536:       jac->alpha_Poisson = A;
1537:     } else {
1538:       if (A) {
1539:         PetscObjectReference((PetscObject)A);
1540:       } else {
1541:         jac->ams_beta_is_zero = PETSC_TRUE;
1542:       }
1543:       MatDestroy(&jac->beta_Poisson);
1544:       jac->beta_Poisson = A;
1545:     }
1546:   } else {
1547:     if (isalpha) {
1548:       MatDestroy(&jac->alpha_Poisson);
1549:       MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson);
1550:     } else {
1551:       if (A) {
1552:         MatDestroy(&jac->beta_Poisson);
1553:         MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson);
1554:       } else {
1555:         MatDestroy(&jac->beta_Poisson);
1556:         jac->ams_beta_is_zero = PETSC_TRUE;
1557:       }
1558:     }
1559:   }
1560:   return 0;
1561: }

1563: /*@
1564:    PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams

1566:    Collective

1568:    Input Parameters:
1569: +  pc - the preconditioning context
1570: -  A - the matrix

1572:    Level: intermediate

1574:    Note:
1575:     A should be obtained by discretizing the vector-valued Poisson problem with linear finite elements

1577:    Developer Note:
1578:    This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1580:    If this is only for the `PCHYPRE` type of ams, it should be called `PCHYPREAMSSetAlphaPoissonMatrix()`

1582: .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()`
1583: @*/
1584: PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A)
1585: {
1589:   PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE));
1590:   return 0;
1591: }

1593: /*@
1594:    PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams

1596:    Collective

1598:    Input Parameters:
1599: +  pc - the preconditioning context
1600: -  A - the matrix, or NULL to turn it off

1602:    Level: intermediate

1604:    Note:
1605:    A should be obtained by discretizing the Poisson problem with linear finite elements.

1607:    Developer Note:
1608:    This automatically converts the matrix to `MATHYPRE` if it is not already of that type

1610:    If this is only for the `PCHYPRE` type of ams, it should be called `PCHYPREAMSSetBetaPoissonMatrix()`

1612: .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1613: @*/
1614: PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A)
1615: {
1617:   if (A) {
1620:   }
1621:   PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE));
1622:   return 0;
1623: }

1625: static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo)
1626: {
1627:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

1629:   /* throw away any vector if already set */
1630:   VecHYPRE_IJVectorDestroy(&jac->constants[0]);
1631:   VecHYPRE_IJVectorDestroy(&jac->constants[1]);
1632:   VecHYPRE_IJVectorDestroy(&jac->constants[2]);
1633:   VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0]);
1634:   VecHYPRE_IJVectorCopy(ozz, jac->constants[0]);
1635:   VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1]);
1636:   VecHYPRE_IJVectorCopy(zoz, jac->constants[1]);
1637:   jac->dim = 2;
1638:   if (zzo) {
1639:     VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2]);
1640:     VecHYPRE_IJVectorCopy(zzo, jac->constants[2]);
1641:     jac->dim++;
1642:   }
1643:   return 0;
1644: }

1646: /*@
1647:    PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams

1649:    Collective

1651:    Input Parameters:
1652: +  pc - the preconditioning context
1653: -  ozz - vector representing (1,0,0) (or (1,0) in 2D)
1654: -  zoz - vector representing (0,1,0) (or (0,1) in 2D)
1655: -  zzo - vector representing (0,0,1) (use NULL in 2D)

1657:    Level: intermediate
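
   Example Usage:
   One common construction, sketched here under the assumption that G is the discrete gradient passed to `PCHYPRESetDiscreteGradient()` and that x, y, z are vertex-coordinate vectors compatible with the columns of G (for lowest-order edge elements the coefficients of a constant field are the differences of the corresponding coordinate at the edge endpoints):
.vb
   Vec ozz, zoz, zzo;
   MatCreateVecs(G, NULL, &ozz);   // edge-sized vector (row space of G)
   VecDuplicate(ozz, &zoz);
   VecDuplicate(ozz, &zzo);
   MatMult(G, x, ozz);             // representation of (1,0,0)
   MatMult(G, y, zoz);             // representation of (0,1,0)
   MatMult(G, z, zzo);             // representation of (0,0,1)
   PCHYPRESetEdgeConstantVectors(pc, ozz, zoz, zzo);
.ve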

1659:    Developer Note:
1660:    If this is only for the `PCHYPRE` type of ams, it should be called `PCHYPREAMSSetEdgeConstantVectors()`

1662: .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1663: @*/
1664: PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo)
1665: {
1673:   PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo));
1674:   return 0;
1675: }

1677: static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior)
1678: {
1679:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

1681:   VecHYPRE_IJVectorDestroy(&jac->interior);
1682:   VecHYPRE_IJVectorCreate(interior->map, &jac->interior);
1683:   VecHYPRE_IJVectorCopy(interior, jac->interior);
1684:   jac->ams_beta_is_zero_part = PETSC_TRUE;
1685:   return 0;
1686: }

1688: /*@
1689:   PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type ams

1691:    Collective

1693:    Input Parameters:
1694: +  pc - the preconditioning context
1695: -  interior - vector; a node is interior if its entry in the vector is 1.0

1697:    Level: intermediate

1699:    Note:
1700:    This calls `HYPRE_AMSSetInteriorNodes()`
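
   Example Usage:
   A minimal sketch (nvertices_local and the predicate node_is_in_zero_conductivity_region() are hypothetical; the vector must be laid out like the nodes of the problem):
.vb
   Vec      interior;
   PetscInt vstart, vend;
   VecCreateMPI(PETSC_COMM_WORLD, nvertices_local, PETSC_DETERMINE, &interior);
   VecGetOwnershipRange(interior, &vstart, &vend);
   for (PetscInt v = vstart; v < vend; v++) {
     PetscScalar val = node_is_in_zero_conductivity_region(v) ? 1.0 : 0.0;
     VecSetValue(interior, v, val, INSERT_VALUES);
   }
   VecAssemblyBegin(interior);
   VecAssemblyEnd(interior);
   PCHYPREAMSSetInteriorNodes(pc, interior);
.ve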

1702:    Developer Note:
1703:    Unlike the other AMS setter routines, this one already follows the `PCHYPREAMS` naming convention

1705: .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
1706: @*/
1707: PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior)
1708: {
1712:   PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior));
1713:   return 0;
1714: }

1716: static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
1717: {
1718:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1719:   Vec       tv;
1720:   PetscInt  i;

1722:   /* throw away any coordinate vector if already set */
1723:   VecHYPRE_IJVectorDestroy(&jac->coords[0]);
1724:   VecHYPRE_IJVectorDestroy(&jac->coords[1]);
1725:   VecHYPRE_IJVectorDestroy(&jac->coords[2]);
1726:   jac->dim = dim;

1728:   /* compute IJ vector for coordinates */
1729:   VecCreate(PetscObjectComm((PetscObject)pc), &tv);
1730:   VecSetType(tv, VECSTANDARD);
1731:   VecSetSizes(tv, nloc, PETSC_DECIDE);
1732:   for (i = 0; i < dim; i++) {
1733:     PetscScalar *array;
1734:     PetscInt     j;

1736:     VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i]);
1737:     VecGetArrayWrite(tv, &array);
1738:     for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i];
1739:     VecRestoreArrayWrite(tv, &array);
1740:     VecHYPRE_IJVectorCopy(tv, jac->coords[i]);
1741:   }
1742:   VecDestroy(&tv);
1743:   return 0;
1744: }

1746: static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[])
1747: {
1748:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;

1750:   *name = jac->hypre_type;
1751:   return 0;
1752: }

1754: static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[])
1755: {
1756:   PC_HYPRE *jac = (PC_HYPRE *)pc->data;
1757:   PetscBool flag;

1759:   if (jac->hypre_type) {
1760:     PetscStrcmp(jac->hypre_type, name, &flag);
1762:     return 0;
1763:   } else {
1764:     PetscStrallocpy(name, &jac->hypre_type);
1765:   }

1767:   jac->maxiter         = PETSC_DEFAULT;
1768:   jac->tol             = PETSC_DEFAULT;
1769:   jac->printstatistics = PetscLogPrintInfo;

1771:   PetscStrcmp("pilut", jac->hypre_type, &flag);
1772:   if (flag) {
1773:     PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre);
1774:     PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver);
1775:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut;
1776:     pc->ops->view           = PCView_HYPRE_Pilut;
1777:     jac->destroy            = HYPRE_ParCSRPilutDestroy;
1778:     jac->setup              = HYPRE_ParCSRPilutSetup;
1779:     jac->solve              = HYPRE_ParCSRPilutSolve;
1780:     jac->factorrowsize      = PETSC_DEFAULT;
1781:     return 0;
1782:   }
1783:   PetscStrcmp("euclid", jac->hypre_type, &flag);
1784:   if (flag) {
1785: #if defined(PETSC_USE_64BIT_INDICES)
1786:     SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64 bit indices");
1787: #endif
1788:     PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre);
1789:     PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver);
1790:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid;
1791:     pc->ops->view           = PCView_HYPRE_Euclid;
1792:     jac->destroy            = HYPRE_EuclidDestroy;
1793:     jac->setup              = HYPRE_EuclidSetup;
1794:     jac->solve              = HYPRE_EuclidSolve;
1795:     jac->factorrowsize      = PETSC_DEFAULT;
1796:     jac->eu_level           = PETSC_DEFAULT; /* default */
1797:     return 0;
1798:   }
1799:   PetscStrcmp("parasails", jac->hypre_type, &flag);
1800:   if (flag) {
1801:     PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre);
1802:     PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver);
1803:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails;
1804:     pc->ops->view           = PCView_HYPRE_ParaSails;
1805:     jac->destroy            = HYPRE_ParaSailsDestroy;
1806:     jac->setup              = HYPRE_ParaSailsSetup;
1807:     jac->solve              = HYPRE_ParaSailsSolve;
1808:     /* initialize */
1809:     jac->nlevels   = 1;
1810:     jac->threshold = .1;
1811:     jac->filter    = .1;
1812:     jac->loadbal   = 0;
1813:     if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE;
1814:     else jac->logging = (int)PETSC_FALSE;

1816:     jac->ruse = (int)PETSC_FALSE;
1817:     jac->symt = 0;
1818:     PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);
1819:     PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);
1820:     PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);
1821:     PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);
1822:     PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);
1823:     PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
1824:     return 0;
1825:   }
1826:   PetscStrcmp("boomeramg", jac->hypre_type, &flag);
1827:   if (flag) {
1828:     PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver);
1829:     pc->ops->setfromoptions  = PCSetFromOptions_HYPRE_BoomerAMG;
1830:     pc->ops->view            = PCView_HYPRE_BoomerAMG;
1831:     pc->ops->applytranspose  = PCApplyTranspose_HYPRE_BoomerAMG;
1832:     pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG;
1833:     PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG);
1834:     PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG);
1835:     jac->destroy         = HYPRE_BoomerAMGDestroy;
1836:     jac->setup           = HYPRE_BoomerAMGSetup;
1837:     jac->solve           = HYPRE_BoomerAMGSolve;
1838:     jac->applyrichardson = PETSC_FALSE;
1839:     /* these defaults match the hypre defaults */
1840:     jac->cycletype       = 1;
1841:     jac->maxlevels       = 25;
1842:     jac->maxiter         = 1;
1843:     jac->tol             = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */
1844:     jac->truncfactor     = 0.0;
1845:     jac->strongthreshold = .25;
1846:     jac->maxrowsum       = .9;
1847:     jac->coarsentype     = 6;
1848:     jac->measuretype     = 0;
1849:     jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1;
1850:     jac->smoothtype                                              = -1; /* Not set by default */
1851:     jac->smoothnumlevels                                         = 25;
1852:     jac->eu_level                                                = 0;
1853:     jac->eu_droptolerance                                        = 0;
1854:     jac->eu_bj                                                   = 0;
1855:     jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */
1856:     jac->relaxtype[2]                     = 9; /* G.E. (Gaussian elimination on the coarsest level) */
1857:     jac->relaxweight                      = 1.0;
1858:     jac->outerrelaxweight                 = 1.0;
1859:     jac->relaxorder                       = 1;
1860:     jac->interptype                       = 0;
1861:     jac->Rtype                            = 0;
1862:     jac->Rstrongthreshold                 = 0.25;
1863:     jac->Rfilterthreshold                 = 0.0;
1864:     jac->Adroptype                        = -1;
1865:     jac->Adroptol                         = 0.0;
1866:     jac->agg_nl                           = 0;
1867:     jac->agg_interptype                   = 4;
1868:     jac->pmax                             = 0;
1869:     jac->truncfactor                      = 0.0;
1870:     jac->agg_num_paths                    = 1;
1871:     jac->maxc                             = 9;
1872:     jac->minc                             = 1;
1873:     jac->nodal_coarsening                 = 0;
1874:     jac->nodal_coarsening_diag            = 0;
1875:     jac->vec_interp_variant               = 0;
1876:     jac->vec_interp_qmax                  = 0;
1877:     jac->vec_interp_smooth                = PETSC_FALSE;
1878:     jac->interp_refine                    = 0;
1879:     jac->nodal_relax                      = PETSC_FALSE;
1880:     jac->nodal_relax_levels               = 1;
1881:     jac->rap2                             = 0;

1883:     /* GPU defaults
1884:          from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options
1885:          and /src/parcsr_ls/par_amg.c */
1886: #if defined(PETSC_HAVE_HYPRE_DEVICE)
1887:     jac->keeptranspose  = PETSC_TRUE;
1888:     jac->mod_rap2       = 1;
1889:     jac->coarsentype    = 8;
1890:     jac->relaxorder     = 0;
1891:     jac->interptype     = 6;
1892:     jac->relaxtype[0]   = 18;
1893:     jac->relaxtype[1]   = 18;
1894:     jac->agg_interptype = 7;
1895: #else
1896:     jac->keeptranspose = PETSC_FALSE;
1897:     jac->mod_rap2      = 0;
1898: #endif
1899:     PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype);
1900:     PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels);
1901:     PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
1902:     PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
1903:     PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor);
1904:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold);
1905:     PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum);
1906:     PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
1907:     PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
1908:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
1909:     PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
1910:     PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl);
1911:     PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype);
1912:     PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax);
1913:     PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths);
1914:     PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]);  /* defaults coarse to 9 */
1915:     PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */
1916:     PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
1917:     PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
1918:     /* GPU */
1919: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
1920:     PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);
1921:     PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2);
1922:     PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2);
1923: #endif

1925:     /* AIR */
1926: #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
1927:     PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
1928:     PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);
1929:     PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);
1930:     PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);
1931:     PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
1932: #endif
1933:     return 0;
1934:   }
1935:   PetscStrcmp("ams", jac->hypre_type, &flag);
1936:   if (flag) {
1937:     PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver);
1938:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS;
1939:     pc->ops->view           = PCView_HYPRE_AMS;
1940:     jac->destroy            = HYPRE_AMSDestroy;
1941:     jac->setup              = HYPRE_AMSSetup;
1942:     jac->solve              = HYPRE_AMSSolve;
1943:     jac->coords[0]          = NULL;
1944:     jac->coords[1]          = NULL;
1945:     jac->coords[2]          = NULL;
1946:     jac->interior           = NULL;
1947:     /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE AMS */
1948:     jac->as_print       = 0;
1949:     jac->as_max_iter    = 1;  /* used as a preconditioner */
1950:     jac->as_tol         = 0.; /* used as a preconditioner */
1951:     jac->ams_cycle_type = 13;
1952:     /* Smoothing options */
1953:     jac->as_relax_type   = 2;
1954:     jac->as_relax_times  = 1;
1955:     jac->as_relax_weight = 1.0;
1956:     jac->as_omega        = 1.0;
1957:     /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
1958:     jac->as_amg_alpha_opts[0] = 10;
1959:     jac->as_amg_alpha_opts[1] = 1;
1960:     jac->as_amg_alpha_opts[2] = 6;
1961:     jac->as_amg_alpha_opts[3] = 6;
1962:     jac->as_amg_alpha_opts[4] = 4;
1963:     jac->as_amg_alpha_theta   = 0.25;
1964:     /* Scalar Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
1965:     jac->as_amg_beta_opts[0] = 10;
1966:     jac->as_amg_beta_opts[1] = 1;
1967:     jac->as_amg_beta_opts[2] = 6;
1968:     jac->as_amg_beta_opts[3] = 6;
1969:     jac->as_amg_beta_opts[4] = 4;
1970:     jac->as_amg_beta_theta   = 0.25;
1971:     PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
1972:     PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
1973:     PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
1974:     PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
1975:     PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
1976:     PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
1977:                       jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
1978:                       jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
1979:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
1980:                       jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
1981:     PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0],   /* AMG coarsen type */
1982:                       jac->as_amg_beta_opts[1],                                             /* AMG agg_levels */
1983:                       jac->as_amg_beta_opts[2],                                             /* AMG relax_type */
1984:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                     /* AMG interp_type */
1985:                       jac->as_amg_beta_opts[4]);                                            /* AMG Pmax */
1986:     /* Zero conductivity */
1987:     jac->ams_beta_is_zero      = PETSC_FALSE;
1988:     jac->ams_beta_is_zero_part = PETSC_FALSE;
1989:     return 0;
1990:   }
1991:   PetscStrcmp("ads", jac->hypre_type, &flag);
1992:   if (flag) {
1993:     PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver);
1994:     pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS;
1995:     pc->ops->view           = PCView_HYPRE_ADS;
1996:     jac->destroy            = HYPRE_ADSDestroy;
1997:     jac->setup              = HYPRE_ADSSetup;
1998:     jac->solve              = HYPRE_ADSSolve;
1999:     jac->coords[0]          = NULL;
2000:     jac->coords[1]          = NULL;
2001:     jac->coords[2]          = NULL;
2002:     /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE ADS */
2003:     jac->as_print       = 0;
2004:     jac->as_max_iter    = 1;  /* used as a preconditioner */
2005:     jac->as_tol         = 0.; /* used as a preconditioner */
2006:     jac->ads_cycle_type = 13;
2007:     /* Smoothing options */
2008:     jac->as_relax_type   = 2;
2009:     jac->as_relax_times  = 1;
2010:     jac->as_relax_weight = 1.0;
2011:     jac->as_omega        = 1.0;
2012:     /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */
2013:     jac->ams_cycle_type       = 14;
2014:     jac->as_amg_alpha_opts[0] = 10;
2015:     jac->as_amg_alpha_opts[1] = 1;
2016:     jac->as_amg_alpha_opts[2] = 6;
2017:     jac->as_amg_alpha_opts[3] = 6;
2018:     jac->as_amg_alpha_opts[4] = 4;
2019:     jac->as_amg_alpha_theta   = 0.25;
2020:     /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */
2021:     jac->as_amg_beta_opts[0] = 10;
2022:     jac->as_amg_beta_opts[1] = 1;
2023:     jac->as_amg_beta_opts[2] = 6;
2024:     jac->as_amg_beta_opts[3] = 6;
2025:     jac->as_amg_beta_opts[4] = 4;
2026:     jac->as_amg_beta_theta   = 0.25;
2027:     PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print);
2028:     PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter);
2029:     PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type);
2030:     PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
2031:     PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
2032:     PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type,      /* AMS cycle type */
2033:                       jac->as_amg_alpha_opts[0],                                      /* AMG coarsen type */
2034:                       jac->as_amg_alpha_opts[1],                                      /* AMG agg_levels */
2035:                       jac->as_amg_alpha_opts[2],                                      /* AMG relax_type */
2036:                       jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],             /* AMG interp_type */
2037:                       jac->as_amg_alpha_opts[4]);                                     /* AMG Pmax */
2038:     PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
2039:                       jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
2040:                       jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
2041:                       jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
2042:                       jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
2043:     return 0;
2044:   }
2045:   PetscFree(jac->hypre_type);

2047:   jac->hypre_type = NULL;
2048:   SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are euclid, pilut, parasails, boomeramg, ams, ads", name);
2049: }

2051: /*
2052:     It only gets here if the HYPRE type has not been set before the call to
2053:    ...SetFromOptions(), which is actually most of the time
2054: */
2055: PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptionsObject)
2056: {
2057:   PetscInt    indx;
2058:   const char *type[] = {"euclid", "pilut", "parasails", "boomeramg", "ams", "ads"};
2059:   PetscBool   flg;

2061:   PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options");
2062:   PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg);
2063:   if (flg) {
2064:     PCHYPRESetType_HYPRE(pc, type[indx]);
2065:   } else {
2066:     PCHYPRESetType_HYPRE(pc, "boomeramg");
2067:   }
2068:   PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject);
2069:   PetscOptionsHeadEnd();
2070:   return 0;
2071: }

2073: /*@C
2074:      PCHYPRESetType - Sets which hypre preconditioner you wish to use

2076:    Input Parameters:
2077: +     pc - the preconditioner context
2078: -     name - either  euclid, pilut, parasails, boomeramg, ams, ads

2080:    Options Database Key:
2081: .  -pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads

2083:    Level: intermediate
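
   Example Usage:
   A minimal sketch:
.vb
   KSP ksp;
   PC  pc;
   KSPCreate(PETSC_COMM_WORLD, &ksp);
   KSPGetPC(ksp, &pc);
   PCSetType(pc, PCHYPRE);
   PCHYPRESetType(pc, "boomeramg");
.ve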

2085: .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE`
2086: @*/
2087: PetscErrorCode PCHYPRESetType(PC pc, const char name[])
2088: {
2091:   PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name));
2092:   return 0;
2093: }

2095: /*@C
2096:      PCHYPREGetType - Gets which hypre preconditioner you are using

2098:    Input Parameter:
2099: .     pc - the preconditioner context

2101:    Output Parameter:
2102: .     name - either  euclid, pilut, parasails, boomeramg, ams, ads

2104:    Level: intermediate

2106: .seealso: `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE`
2107: @*/
2108: PetscErrorCode PCHYPREGetType(PC pc, const char *name[])
2109: {
2112:   PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name));
2113:   return 0;
2114: }

2116: /*@C
2117:    PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs

2119:    Logically Collective

2121:    Input Parameters:
2122: +  pc - the hypre context
2123: -  type - one of 'cusparse', 'hypre'

2125:    Options Database Key:
2126: .  -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre

2128:    Level: intermediate

2130:    Developer Note:
2131:    Why does the name start with `PCMG`? Should it not be `PCHYPREBoomerAMG`?

2133: .seealso: `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()`
2134: @*/
2135: PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[])
2136: {
2138:   PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name));
2139:   return 0;
2140: }

2142: /*@C
2143:    PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs

2145:    Not Collective

2147:    Input Parameter:
2148: .  pc - the multigrid context

2150:    Output Parameter:
2151: .  name - one of 'cusparse', 'hypre'

2153:    Level: intermediate

2155: .seealso: `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()`
2156: @*/
2157: PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[])
2158: {
2160:   PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name));
2161:   return 0;
2162: }

2164: /*MC
2165:      PCHYPRE - Allows you to use the matrix element based preconditioners in the LLNL package hypre as PETSc `PC`

2167:    Options Database Keys:
2168: +   -pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads
2169: .   -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BoomerAMGSetNodal()`)
2170: .   -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`)
2171: -   Many others, run with -pc_type hypre -pc_hypre_type XXX -help to see options for the XXX preconditioner

2173:    Level: intermediate

2175:    Notes:
2176:     Apart from pc_hypre_type (for which there is `PCHYPRESetType()`),
2177:           the many hypre options can ONLY be set via the options database (e.g. the command line
2178:           or with `PetscOptionsSetValue()`); there are no functions to set them

2180:           The options -pc_hypre_boomeramg_max_iter and -pc_hypre_boomeramg_tol refer to the number of iterations
2181:           (V-cycles) and tolerance that boomeramg does EACH time it is called. So for example, if
2182:           -pc_hypre_boomeramg_max_iter is set to 2 then 2-V-cycles are being used to define the preconditioner
2183:           (-pc_hypre_boomeramg_tol should be set to 0.0 - the default - to strictly use a fixed number of
2184:           iterations per hypre call). -ksp_max_it and -ksp_rtol STILL determine the total number of iterations
2185:           and tolerance for the Krylov solver. For example, if -pc_hypre_boomeramg_max_iter is 2 and -ksp_max_it is 10
2186:           then AT MOST twenty V-cycles of boomeramg will be called.
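
          That scenario corresponds, for example, to the following illustrative command line (not taken from a specific PETSc test):
.vb
   -ksp_type cg -ksp_max_it 10 -pc_type hypre -pc_hypre_type boomeramg -pc_hypre_boomeramg_max_iter 2 -pc_hypre_boomeramg_tol 0.0
.ve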

2188:            Note that the option -pc_hypre_boomeramg_relax_type_all defaults to symmetric relaxation
2189:            (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry.
2190:            Otherwise, you may want to use -pc_hypre_boomeramg_relax_type_all SOR/Jacobi.
2191:           If you wish to use BoomerAMG WITHOUT a Krylov method use -ksp_type richardson NOT -ksp_type preonly
2192:           and use -ksp_max_it to control the number of V-cycles.
2193:           (see the PETSc FAQ.html at the PETSc website under the Documentation tab).

2195:           `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use
2196:           the following two options: ``-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>``

2198:           See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers

2200:           For the `PCHYPRE` type of ams or ads, auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`,
2201:           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2202:           `PCHYPREAMSSetInteriorNodes()`

2204:    PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems

2206:    GPU Notes:
2207:      To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda
2208:      Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers.

2210:      To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip
2211:      Then pass `VECHIP` vectors to the solvers and PETSc will automatically utilize hypre's GPU solvers.

2213: .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`,
2214:           `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
2215:           `PCHYPREAMSSetInteriorNodes()`
2216: M*/

2218: PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc)
2219: {
2220:   PC_HYPRE *jac;

2222:   PetscNew(&jac);

2224:   pc->data                = jac;
2225:   pc->ops->reset          = PCReset_HYPRE;
2226:   pc->ops->destroy        = PCDestroy_HYPRE;
2227:   pc->ops->setfromoptions = PCSetFromOptions_HYPRE;
2228:   pc->ops->setup          = PCSetUp_HYPRE;
2229:   pc->ops->apply          = PCApply_HYPRE;
2230:   jac->comm_hypre         = MPI_COMM_NULL;
2231:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE);
2232:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE);
2233:   PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE);
2234:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE);
2235:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE);
2236:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE);
2237:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE);
2238:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE);
2239:   PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE);
2240:   PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG);
2241:   PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG);
2242: #if defined(PETSC_HAVE_HYPRE_DEVICE)
2243:   #if defined(HYPRE_USING_HIP)
2244:   PetscDeviceInitialize(PETSC_DEVICE_HIP);
2245:   #endif
2246:   #if defined(HYPRE_USING_CUDA)
2247:   PetscDeviceInitialize(PETSC_DEVICE_CUDA);
2248:   #endif
2249: #endif
2250:   return 0;
2251: }

2253: typedef struct {
2254:   MPI_Comm           hcomm; /* does not share the communicator with the HYPRE_StructMatrix because the solver must be created before the matrix is available */
2255:   HYPRE_StructSolver hsolver;

2257:   /* keep copy of PFMG options used so may view them */
2258:   PetscInt  its;
2259:   double    tol;
2260:   PetscInt  relax_type;
2261:   PetscInt  rap_type;
2262:   PetscInt  num_pre_relax, num_post_relax;
2263:   PetscInt  max_levels;
2264:   PetscInt  skip_relax;
2265:   PetscBool print_statistics;
2266: } PC_PFMG;

2268: PetscErrorCode PCDestroy_PFMG(PC pc)
2269: {
2270:   PC_PFMG *ex = (PC_PFMG *)pc->data;

2272:   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2273:   PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm);
2274:   PetscFree(pc->data);
2275:   return 0;
2276: }

2278: static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"};
2279: static const char *PFMGRAPType[]   = {"Galerkin", "non-Galerkin"};

2281: PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer)
2282: {
2283:   PetscBool iascii;
2284:   PC_PFMG  *ex = (PC_PFMG *)pc->data;

2286:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
2287:   if (iascii) {
2288:     PetscViewerASCIIPrintf(viewer, "  HYPRE PFMG preconditioning\n");
2289:     PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its);
2290:     PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol);
2291:     PetscViewerASCIIPrintf(viewer, "    relax type %s\n", PFMGRelaxType[ex->relax_type]);
2292:     PetscViewerASCIIPrintf(viewer, "    RAP type %s\n", PFMGRAPType[ex->rap_type]);
2293:     PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax);
2294:     PetscViewerASCIIPrintf(viewer, "    max levels %" PetscInt_FMT "\n", ex->max_levels);
2295:     PetscViewerASCIIPrintf(viewer, "    skip relax %" PetscInt_FMT "\n", ex->skip_relax);
2296:   }
2297:   return 0;
2298: }

2300: PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems *PetscOptionsObject)
2301: {
2302:   PC_PFMG *ex = (PC_PFMG *)pc->data;

2304:   PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options");
2305:   PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL);
2306:   PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL);
2307:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2308:   PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL);
2309:   PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2310:   PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL);
2311:   PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);

2313:   PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL);
2314:   PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);

2316:   PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL);
2317:   PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2318:   PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL);
2319:   PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2320:   PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL);
2321:   PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
2322:   PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL);
2323:   PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax);
2324:   PetscOptionsHeadEnd();
2325:   return 0;
2326: }

2328: PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y)
2329: {
2330:   PC_PFMG           *ex = (PC_PFMG *)pc->data;
2331:   PetscScalar       *yy;
2332:   const PetscScalar *xx;
2333:   PetscInt           ilower[3], iupper[3];
2334:   HYPRE_Int          hlower[3], hupper[3];
2335:   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)(pc->pmat->data);

2337:   PetscCitationsRegister(hypreCitation, &cite);
2338:   DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]);
2339:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is 32-bit, so the PetscInt corner indices must be cast explicitly */
2340:   iupper[0] += ilower[0] - 1;
2341:   iupper[1] += ilower[1] - 1;
2342:   iupper[2] += ilower[2] - 1;
2343:   hlower[0] = (HYPRE_Int)ilower[0];
2344:   hlower[1] = (HYPRE_Int)ilower[1];
2345:   hlower[2] = (HYPRE_Int)ilower[2];
2346:   hupper[0] = (HYPRE_Int)iupper[0];
2347:   hupper[1] = (HYPRE_Int)iupper[1];
2348:   hupper[2] = (HYPRE_Int)iupper[2];

2350:   /* copy x values over to hypre */
2351:   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
2352:   VecGetArrayRead(x, &xx);
2353:   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
2354:   VecRestoreArrayRead(x, &xx);
2355:   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
2356:   PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);

2358:   /* copy solution values back to PETSc */
2359:   VecGetArray(y, &yy);
2360:   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
2361:   VecRestoreArray(y, &yy);
2362:   return 0;
2363: }

2365: static PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2366: {
2367:   PC_PFMG  *jac = (PC_PFMG *)pc->data;
2368:   HYPRE_Int oits;

2370:   PetscCitationsRegister(hypreCitation, &cite);
2371:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its);
2372:   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol);

2374:   PCApply_PFMG(pc, b, y);
2375:   PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits);
2376:   *outits = oits;
2377:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2378:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2379:   PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol);
2380:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its);
2381:   return 0;
2382: }

2384: PetscErrorCode PCSetUp_PFMG(PC pc)
2385: {
2386:   PC_PFMG         *ex = (PC_PFMG *)pc->data;
2387:   Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data);
2388:   PetscBool        flg;

2390:   PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg);
2391:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "PCPFMG can only be used with the MATHYPRESTRUCT matrix type");

2393:   /* create the hypre solver object and set its information */
2394:   if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
2395:   PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);

2397:   // Print Hypre statistics about the solve process
2398:   if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3);

2400:   // The hypre options must be repeated here because the StructPFMG was destroyed and recreated
2401:   PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
2402:   PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2403:   PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
2404:   PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);
2405:   PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
2406:   PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
2407:   PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);

2409:   PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2410:   PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver);
2411:   return 0;
2412: }

2414: /*MC
2415:      PCPFMG - the hypre PFMG multigrid solver

2417:    Options Database Keys:
2418: + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner
2419: . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve
2420: . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve
2421: . -pc_pfmg_tol <tol> - tolerance of PFMG
2422: . -pc_pfmg_relax_type <Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles
2423: . -pc_pfmg_rap_type <Galerkin,non-Galerkin> - type of coarse matrix generation
2424: - -pc_pfmg_skip_relax <0,1> - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating
2425:                               unnecessary relaxations when the underlying problem is isotropic

2427:    Level: advanced

2429:    Notes:
2430:    This is for CELL-centered discretizations

2432:    See `PCSYSPFMG` for a version suitable for systems of PDEs, and see also `PCSMG`

2434:    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver

2436:    This must be used with the `MATHYPRESTRUCT` matrix type.

2438:    This provides only some of the functionality of PFMG; it supports only one block per process, defined by a PETSc `DMDA`.
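
   A sketch of the run-time options for a `DMDA`-based application that calls DMSetFromOptions() and KSPSetFromOptions() (assumed here):
     -dm_mat_type hyprestruct -pc_type pfmg -pc_pfmg_num_pre_relax 2 -pc_pfmg_num_post_relax 2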

2440: .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`
2441: M*/

2443: PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc)
2444: {
2445:   PC_PFMG *ex;

2447:   PetscNew(&ex);
2448:   pc->data = ex;

2450:   ex->its              = 1;
2451:   ex->tol              = 1.e-8;
2452:   ex->relax_type       = 1;
2453:   ex->rap_type         = 0;
2454:   ex->num_pre_relax    = 1;
2455:   ex->num_post_relax   = 1;
2456:   ex->max_levels       = 0;
2457:   ex->skip_relax       = 0;
2458:   ex->print_statistics = PETSC_FALSE;

2460:   pc->ops->setfromoptions  = PCSetFromOptions_PFMG;
2461:   pc->ops->view            = PCView_PFMG;
2462:   pc->ops->destroy         = PCDestroy_PFMG;
2463:   pc->ops->apply           = PCApply_PFMG;
2464:   pc->ops->applyrichardson = PCApplyRichardson_PFMG;
2465:   pc->ops->setup           = PCSetUp_PFMG;

2467:   PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm);
2468:   PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver);
2469:   return 0;
2470: }

2472: /* we know we are working with a HYPRE_SStructMatrix */
2473: typedef struct {
2474:   MPI_Comm            hcomm; /* does not share the communicator with the HYPRE_SStructMatrix because the solver must be created before the matrix is available */
2475:   HYPRE_SStructSolver ss_solver;

2477:   /* keep copy of SYSPFMG options used so may view them */
2478:   PetscInt its;
2479:   double   tol;
2480:   PetscInt relax_type;
2481:   PetscInt num_pre_relax, num_post_relax;
2482: } PC_SysPFMG;

2484: PetscErrorCode PCDestroy_SysPFMG(PC pc)
2485: {
2486:   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;

2488:   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2489:   PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm);
2490:   PetscFree(pc->data);
2491:   return 0;
2492: }

2494: static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"};

2496: PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer)
2497: {
2498:   PetscBool   iascii;
2499:   PC_SysPFMG *ex = (PC_SysPFMG *)pc->data;

2501:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
2502:   if (iascii) {
2503:     PetscViewerASCIIPrintf(viewer, "  HYPRE SysPFMG preconditioning\n");
2504:     PetscViewerASCIIPrintf(viewer, "  max iterations %" PetscInt_FMT "\n", ex->its);
2505:     PetscViewerASCIIPrintf(viewer, "  tolerance %g\n", ex->tol);
2506:     PetscViewerASCIIPrintf(viewer, "  relax type %s\n", SysPFMGRelaxType[ex->relax_type]);
2507:     PetscViewerASCIIPrintf(viewer, "  number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax);
2508:   }
2509:   return 0;
2510: }

2512: PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems *PetscOptionsObject)
2513: {
2514:   PC_SysPFMG *ex  = (PC_SysPFMG *)pc->data;
2515:   PetscBool   flg = PETSC_FALSE;

2517:   PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options");
2518:   PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL);
2519:   if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3);
2520:   PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL);
2521:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its);
2522:   PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL);
2523:   PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax);
2524:   PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL);
2525:   PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax);

2527:   PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL);
2528:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol);
2529:   PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL);
2530:   PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type);
2531:   PetscOptionsHeadEnd();
2532:   return 0;
2533: }

2535: PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y)
2536: {
2537:   PC_SysPFMG        *ex = (PC_SysPFMG *)pc->data;
2538:   PetscScalar       *yy;
2539:   const PetscScalar *xx;
2540:   PetscInt           ilower[3], iupper[3];
2541:   HYPRE_Int          hlower[3], hupper[3];
2542:   Mat_HYPRESStruct  *mx       = (Mat_HYPRESStruct *)(pc->pmat->data);
2543:   PetscInt           ordering = mx->dofs_order;
2544:   PetscInt           nvars    = mx->nvars;
2545:   PetscInt           part     = 0;
2546:   PetscInt           size;
2547:   PetscInt           i;

2549:   PetscCitationsRegister(hypreCitation, &cite);
2550:   DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]);
2551:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is 32-bit, so the PetscInt corner indices must be cast explicitly */
2552:   iupper[0] += ilower[0] - 1;
2553:   iupper[1] += ilower[1] - 1;
2554:   iupper[2] += ilower[2] - 1;
2555:   hlower[0] = (HYPRE_Int)ilower[0];
2556:   hlower[1] = (HYPRE_Int)ilower[1];
2557:   hlower[2] = (HYPRE_Int)ilower[2];
2558:   hupper[0] = (HYPRE_Int)iupper[0];
2559:   hupper[1] = (HYPRE_Int)iupper[1];
2560:   hupper[2] = (HYPRE_Int)iupper[2];

2562:   size = 1;
2563:   for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1);

2565:   /* copy x values over to hypre for variable ordering */
2566:   if (ordering) {
2567:     PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
2568:     VecGetArrayRead(x, &xx);
2569:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i)));
2570:     VecRestoreArrayRead(x, &xx);
2571:     PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
2572:     PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x);
2573:     PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

2575:     /* copy solution values back to PETSc */
2576:     VecGetArray(y, &yy);
2577:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i)));
2578:     VecRestoreArray(y, &yy);
2579:   } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
2580:     PetscScalar *z;
2581:     PetscInt     j, k;

2583:     PetscMalloc1(nvars * size, &z);
2584:     PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
2585:     VecGetArrayRead(x, &xx);

2587:     /* transform nodal to hypre's variable ordering for sys_pfmg */
2588:     for (i = 0; i < size; i++) {
2589:       k = i * nvars;
2590:       for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j];
2591:     }
2592:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
2593:     VecRestoreArrayRead(x, &xx);
2594:     PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
2595:     PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

2597:     /* copy solution values back to PETSc */
2598:     VecGetArray(y, &yy);
2599:     for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
2600:     /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
2601:     for (i = 0; i < size; i++) {
2602:       k = i * nvars;
2603:       for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i];
2604:     }
2605:     VecRestoreArray(y, &yy);
2606:     PetscFree(z);
2607:   }
2608:   return 0;
2609: }

2611: static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2612: {
2613:   PC_SysPFMG *jac = (PC_SysPFMG *)pc->data;
2614:   HYPRE_Int   oits;

2616:   PetscCitationsRegister(hypreCitation, &cite);
2617:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its);
2618:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol);
2619:   PCApply_SysPFMG(pc, b, y);
2620:   PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits);
2621:   *outits = oits;
2622:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2623:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2624:   PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol);
2625:   PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its);
2626:   return 0;
2627: }

2629: PetscErrorCode PCSetUp_SysPFMG(PC pc)
2630: {
2631:   PC_SysPFMG       *ex = (PC_SysPFMG *)pc->data;
2632:   Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(pc->pmat->data);
2633:   PetscBool         flg;

2635:   PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg);
2636:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "PCSYSPFMG can only be used with the MATHYPRESSTRUCT matrix type");

2638:   /* create the hypre sstruct solver object and set its information */
2639:   if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver);
2640:   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
2641:   PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver);
2642:   PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);
2643:   return 0;
2644: }

2646: /*MC
2647:      PCSYSPFMG - the hypre SysPFMG multigrid solver

2649:    Level: advanced

2651:    Options Database Keys:
2652: + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner
2653: . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid
2654: . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid
2655: . -pc_syspfmg_tol <tol> - tolerance of SysPFMG
2656: - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles

2658:    Notes:
2659:    See `PCPFMG` for hypre's PFMG, which works for a scalar PDE, and see also `PCSMG`

2661:    See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver

2663:    This is for CELL-centered discretizations

2665:    This must be used with the `MATHYPRESSTRUCT` matrix type.

2667:    This does not give access to all the functionality of hypre's SysPFMG; it supports only one part, and one block per process, defined by a PETSc `DMDA`.
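
   A sketch of the run-time options for a `DMDA`-based application that calls DMSetFromOptions() and KSPSetFromOptions() (assumed here):
     -dm_mat_type hypresstruct -pc_type syspfmg -pc_syspfmg_relax_type Red/Black-Gauss-Seidel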

2669: .seealso: `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG`
2670: M*/

2672: PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc)
2673: {
2674:   PC_SysPFMG *ex;

2676:   PetscNew(&ex);
2677:   pc->data = ex;

2679:   ex->its            = 1;
2680:   ex->tol            = 1.e-8;
2681:   ex->relax_type     = 1;
2682:   ex->num_pre_relax  = 1;
2683:   ex->num_post_relax = 1;

2685:   pc->ops->setfromoptions  = PCSetFromOptions_SysPFMG;
2686:   pc->ops->view            = PCView_SysPFMG;
2687:   pc->ops->destroy         = PCDestroy_SysPFMG;
2688:   pc->ops->apply           = PCApply_SysPFMG;
2689:   pc->ops->applyrichardson = PCApplyRichardson_SysPFMG;
2690:   pc->ops->setup           = PCSetUp_SysPFMG;

2692:   PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm);
2693:   PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
2694:   return 0;
2695: }

2697: /* PC SMG */
2698: typedef struct {
2699:   MPI_Comm           hcomm; /* does not share the communicator with the HYPRE_StructMatrix because the solver must be created before the matrix is available */
2700:   HYPRE_StructSolver hsolver;
2701:   PetscInt           its; /* keep copy of SMG options used so may view them */
2702:   double             tol;
2703:   PetscBool          print_statistics;
2704:   PetscInt           num_pre_relax, num_post_relax;
2705: } PC_SMG;

2707: PetscErrorCode PCDestroy_SMG(PC pc)
2708: {
2709:   PC_SMG *ex = (PC_SMG *)pc->data;

2711:   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
2712:   PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm);
2713:   PetscFree(pc->data);
2714:   return 0;
2715: }

2717: PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer)
2718: {
2719:   PetscBool iascii;
2720:   PC_SMG   *ex = (PC_SMG *)pc->data;

2722:   PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
2723:   if (iascii) {
2724:     PetscViewerASCIIPrintf(viewer, "  HYPRE SMG preconditioning\n");
2725:     PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its);
2726:     PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol);
2727:     PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax);
2728:   }
2729:   return 0;
2730: }

2732: PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems *PetscOptionsObject)
2733: {
2734:   PC_SMG *ex = (PC_SMG *)pc->data;

2736:   PetscOptionsHeadBegin(PetscOptionsObject, "SMG options");

2738:   PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL);
2739:   PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL);
2740:   PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL);
2741:   PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL);

2743:   PetscOptionsHeadEnd();
2744:   return 0;
2745: }

2747: PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y)
2748: {
2749:   PC_SMG            *ex = (PC_SMG *)pc->data;
2750:   PetscScalar       *yy;
2751:   const PetscScalar *xx;
2752:   PetscInt           ilower[3], iupper[3];
2753:   HYPRE_Int          hlower[3], hupper[3];
2754:   Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)(pc->pmat->data);

2756:   PetscCitationsRegister(hypreCitation, &cite);
2757:   DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]);
2758:   /* when HYPRE_MIXEDINT is defined, HYPRE_Int is 32-bit, so the PetscInt corner indices must be cast explicitly */
2759:   iupper[0] += ilower[0] - 1;
2760:   iupper[1] += ilower[1] - 1;
2761:   iupper[2] += ilower[2] - 1;
2762:   hlower[0] = (HYPRE_Int)ilower[0];
2763:   hlower[1] = (HYPRE_Int)ilower[1];
2764:   hlower[2] = (HYPRE_Int)ilower[2];
2765:   hupper[0] = (HYPRE_Int)iupper[0];
2766:   hupper[1] = (HYPRE_Int)iupper[1];
2767:   hupper[2] = (HYPRE_Int)iupper[2];

2769:   /* copy x values over to hypre */
2770:   PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
2771:   VecGetArrayRead(x, &xx);
2772:   PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
2773:   VecRestoreArrayRead(x, &xx);
2774:   PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
2775:   PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);

2777:   /* copy solution values back to PETSc */
2778:   VecGetArray(y, &yy);
2779:   PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
2780:   VecRestoreArray(y, &yy);
2781:   return 0;
2782: }

2784: static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
2785: {
2786:   PC_SMG   *jac = (PC_SMG *)pc->data;
2787:   HYPRE_Int oits;

2789:   PetscCitationsRegister(hypreCitation, &cite);
2790:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its);
2791:   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol);

2793:   PCApply_SMG(pc, b, y);
2794:   PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits);
2795:   *outits = oits;
2796:   if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
2797:   else *reason = PCRICHARDSON_CONVERGED_RTOL;
2798:   PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol);
2799:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its);
2800:   return 0;
2801: }

2803: PetscErrorCode PCSetUp_SMG(PC pc)
2804: {
2805:   PetscInt         i, dim;
2806:   PC_SMG          *ex = (PC_SMG *)pc->data;
2807:   Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data);
2808:   PetscBool        flg;
2809:   DMBoundaryType   p[3];
2810:   PetscInt         M[3];

2812:   PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg);
2813:   PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "PCSMG can only be used with the MATHYPRESTRUCT matrix type");

2815:   DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0);
2816:   // Check if power of 2 in periodic directions
2817:   for (i = 0; i < dim; i++) {
2818:     if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) {
2819:       SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]);
2820:     }
2821:   }

2823:   /* create the hypre solver object and set its information */
2824:   if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, (ex->hsolver));
2825:   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
2826:   // The hypre options must be set here, not in SetFromOptions, because the solver is created here
2827:   PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its);
2828:   PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
2829:   PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
2830:   PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol);

2832:   PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
2833:   PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver);
2834:   return 0;
2835: }

2837: /*MC
2838:      PCSMG - the hypre (structured grid) SMG multigrid solver

2840:    Level: advanced

2842:    Options Database Keys:
2843: + -pc_smg_its <its> - number of iterations of SMG to use as preconditioner
2844: . -pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid
2845: . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid
2846: - -pc_smg_tol <tol> - tolerance of SMG

2848:    Notes:
2849:    This is for CELL-centered discretizations

2851:    This must be used with the `MATHYPRESTRUCT` `MatType`.

2853:    This does not provide all the functionality of hypre's SMG solver; it supports only one block per process, defined by a PETSc `DMDA`.

2855:    See `PCSYSPFMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners
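
   A sketch of the run-time options for a `DMDA`-based application that calls DMSetFromOptions() and KSPSetFromOptions() (assumed here):
     -dm_mat_type hyprestruct -pc_type smg -pc_smg_num_pre_relax 2 -pc_smg_num_post_relax 2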

2857: .seealso:  `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG`
2858: M*/

2860: PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc)
2861: {
2862:   PC_SMG *ex;

2864:   PetscNew(&ex);
2865:   pc->data = ex;

2867:   ex->its            = 1;
2868:   ex->tol            = 1.e-8;
2869:   ex->num_pre_relax  = 1;
2870:   ex->num_post_relax = 1;

2872:   pc->ops->setfromoptions  = PCSetFromOptions_SMG;
2873:   pc->ops->view            = PCView_SMG;
2874:   pc->ops->destroy         = PCDestroy_SMG;
2875:   pc->ops->apply           = PCApply_SMG;
2876:   pc->ops->applyrichardson = PCApplyRichardson_SMG;
2877:   pc->ops->setup           = PCSetUp_SMG;

2879:   PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm);
2880:   PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
2881:   return 0;
2882: }