Actual source code: tfs.c

/*
        Provides an interface to the Tufo-Fischer parallel direct solver
*/

#include <petsc/private/pcimpl.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/ksp/pc/impls/tfs/tfs.h>

typedef struct {
  xxt_ADT  xxt;       /* XXT factorization context, used when the matrix is symmetric */
  xyt_ADT  xyt;       /* XYT factorization context, used otherwise */
  Vec      b, xd, xo; /* work vectors that wrap raw arrays during the local matrix-vector product */
  PetscInt nd;        /* number of local columns of the diagonal block */
} PC_TFS;

static PetscErrorCode PCDestroy_TFS(PC pc)
{
  PC_TFS *tfs = (PC_TFS *)pc->data;

  PetscFunctionBegin;
  /* free the XXT/XYT data structures */
  if (tfs->xxt) PetscCall(XXT_free(tfs->xxt));
  if (tfs->xyt) PetscCall(XYT_free(tfs->xyt));
  PetscCall(VecDestroy(&tfs->b));
  PetscCall(VecDestroy(&tfs->xd));
  PetscCall(VecDestroy(&tfs->xo));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_TFS_XXT(PC pc, Vec x, Vec y)
{
  PC_TFS            *tfs = (PC_TFS *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCall(VecGetArray(y, &yy));
  PetscCall(XXT_solve(tfs->xxt, yy, (PetscScalar *)xx));
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_TFS_XYT(PC pc, Vec x, Vec y)
{
  PC_TFS            *tfs = (PC_TFS *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCall(VecGetArray(y, &yy));
  PetscCall(XYT_solve(tfs->xyt, yy, (PetscScalar *)xx));
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

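/*
   Local matrix-vector product callback handed to XXT_factor()/XYT_factor(): the first
   tfs->nd entries of xin multiply the diagonal block a->A and the remaining entries
   multiply the off-diagonal block a->B, with the result accumulated into xout.
*/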
static PetscErrorCode PCTFSLocalMult_TFS(PC pc, PetscScalar *xin, PetscScalar *xout)
{
  PC_TFS     *tfs = (PC_TFS *)pc->data;
  Mat         A   = pc->pmat;
  Mat_MPIAIJ *a   = (Mat_MPIAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(VecPlaceArray(tfs->b, xout));
  PetscCall(VecPlaceArray(tfs->xd, xin));
  PetscCall(VecPlaceArray(tfs->xo, xin + tfs->nd));
  PetscCall(MatMult(a->A, tfs->xd, tfs->b));
  PetscCall(MatMultAdd(a->B, tfs->xo, tfs->b, tfs->b));
  PetscCall(VecResetArray(tfs->b));
  PetscCall(VecResetArray(tfs->xd));
  PetscCall(VecResetArray(tfs->xo));
  PetscFunctionReturn(PETSC_SUCCESS);
}

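/*
   Builds the 1-based local-to-global column map expected by the XXT/XYT libraries,
   creates the work vectors whose arrays are supplied at multiply time, and factors
   the operator with XXT (symmetric case) or XYT (nonsymmetric case).
*/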
static PetscErrorCode PCSetUp_TFS(PC pc)
{
  PC_TFS     *tfs = (PC_TFS *)pc->data;
  Mat         A   = pc->pmat;
  Mat_MPIAIJ *a   = (Mat_MPIAIJ *)A->data;
  PetscInt   *localtoglobal, ncol, i;
  PetscBool   ismpiaij;

  /*
  PetscBool issymmetric;
  PetscReal tol = 0.0;
  */

  PetscFunctionBegin;
  PetscCheck(A->cmap->N == A->rmap->N, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "matrix must be square");
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPIAIJ, &ismpiaij));
  PetscCheck(ismpiaij, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Currently only supports MPIAIJ matrices");

  /* generate the local to global mapping (1-based, as required by XXT/XYT) */
  ncol = a->A->cmap->n + a->B->cmap->n;
  PetscCall(PetscMalloc1(ncol, &localtoglobal));
  for (i = 0; i < a->A->cmap->n; i++) localtoglobal[i] = A->cmap->rstart + i + 1;
  for (i = 0; i < a->B->cmap->n; i++) localtoglobal[i + a->A->cmap->n] = a->garray[i] + 1;

  /* generate the vectors needed for the local solves */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->A->rmap->n, NULL, &tfs->b));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->A->cmap->n, NULL, &tfs->xd));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->B->cmap->n, NULL, &tfs->xo));
  tfs->nd = a->A->cmap->n;

  /*  MatIsSymmetric(A,tol,&issymmetric); */
  /*  if (issymmetric) { */
  PetscCall(PetscBarrier((PetscObject)pc));
  if (A->symmetric == PETSC_BOOL3_TRUE) {
    tfs->xxt = XXT_new();
    PetscCall(XXT_factor(tfs->xxt, localtoglobal, A->rmap->n, ncol, (PetscErrorCode(*)(void *, PetscScalar *, PetscScalar *))PCTFSLocalMult_TFS, pc));
    pc->ops->apply = PCApply_TFS_XXT;
  } else {
    tfs->xyt = XYT_new();
    PetscCall(XYT_factor(tfs->xyt, localtoglobal, A->rmap->n, ncol, (PetscErrorCode(*)(void *, PetscScalar *, PetscScalar *))PCTFSLocalMult_TFS, pc));
    pc->ops->apply = PCApply_TFS_XYT;
  }

  PetscCall(PetscFree(localtoglobal));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_TFS(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_TFS(PC pc, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
     PCTFS - A parallel direct solver intended for problems with very few unknowns (such as the
         coarse grid in multigrid). Performs a Cholesky or LU factorization of a matrix defined by
         its local matrix-vector product.

   Level: beginner

   Notes:
    Only implemented for `MATMPIAIJ` matrices

    Only works on a solver object that lives on all of `PETSC_COMM_WORLD`!

    Only works for real numbers (is not built if `PetscScalar` is complex)

   Implemented by Henry M. Tufo III and Paul Fischer originally for Nek5000 and called XXT or XYT

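   Example usage, a minimal sketch (assumes a square `MATMPIAIJ` matrix A and assembled vectors b, x on `PETSC_COMM_WORLD`):
.vb
    KSPCreate(PETSC_COMM_WORLD, &ksp);
    KSPSetOperators(ksp, A, A);
    KSPGetPC(ksp, &pc);
    PCSetType(pc, PCTFS);
    KSPSolve(ksp, b, x);
.ve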
.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`
M*/
PETSC_EXTERN PetscErrorCode PCCreate_TFS(PC pc)
{
  PC_TFS     *tfs;
  PetscMPIInt cmp;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PETSC_COMM_WORLD, PetscObjectComm((PetscObject)pc), &cmp));
  PetscCheck(cmp == MPI_IDENT || cmp == MPI_CONGRUENT, PETSC_COMM_SELF, PETSC_ERR_SUP, "TFS only works with PETSC_COMM_WORLD objects");
  PetscCall(PetscNew(&tfs));

  tfs->xxt = NULL;
  tfs->xyt = NULL;
  tfs->b   = NULL;
  tfs->xd  = NULL;
  tfs->xo  = NULL;
  tfs->nd  = 0;

  pc->ops->apply               = NULL;
  pc->ops->applytranspose      = NULL;
  pc->ops->setup               = PCSetUp_TFS;
  pc->ops->destroy             = PCDestroy_TFS;
  pc->ops->setfromoptions      = PCSetFromOptions_TFS;
  pc->ops->view                = PCView_TFS;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->data                     = (void *)tfs;
  PetscFunctionReturn(PETSC_SUCCESS);
}