Actual source code: sliced.c

  1: #include <petscdmsliced.h>
  2: #include <petscmat.h>
  3: #include <petsc/private/dmimpl.h>

  5: /* CSR storage of the nonzero structure of a bs*bs matrix */
  6: typedef struct {
  7:   PetscInt bs, nz, *i, *j;
  8: } DMSlicedBlockFills;
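
/* Editorial example (not part of the PETSc source): a dense 0/1 fill pattern for a bs = 3 block such as
 *    {1, 0, 1,
 *     0, 1, 0,
 *     1, 1, 1}
 * is stored in the CSR fields above as bs = 3, nz = 6, i = {0, 2, 3, 6}, j = {0, 2, 1, 0, 1, 2};
 * the conversion is performed by DMSlicedSetBlockFills_Private() further down in this file. */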

 10: typedef struct {
 11:   PetscInt            bs, n, N, Nghosts, *ghosts;
 12:   PetscInt            d_nz, o_nz, *d_nnz, *o_nnz;
 13:   DMSlicedBlockFills *dfill, *ofill;
 14: } DM_Sliced;

 16: PetscErrorCode DMCreateMatrix_Sliced(DM dm, Mat *J)
 17: {
 18:   PetscInt              *globals, *sd_nnz, *so_nnz, rstart, bs, i;
 19:   ISLocalToGlobalMapping lmap;
 20:   void (*aij)(void) = NULL;
 21:   DM_Sliced *slice  = (DM_Sliced *)dm->data;

 23:   bs = slice->bs;
 24:   MatCreate(PetscObjectComm((PetscObject)dm), J);
 25:   MatSetSizes(*J, slice->n * bs, slice->n * bs, PETSC_DETERMINE, PETSC_DETERMINE);
 26:   MatSetBlockSize(*J, bs);
 27:   MatSetType(*J, dm->mattype);
 28:   MatSeqBAIJSetPreallocation(*J, bs, slice->d_nz, slice->d_nnz);
 29:   MatMPIBAIJSetPreallocation(*J, bs, slice->d_nz, slice->d_nnz, slice->o_nz, slice->o_nnz);
 30:   /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices, so we check whether it will do any
 31:   * good before going on with it. */
 32:   PetscObjectQueryFunction((PetscObject)*J, "MatMPIAIJSetPreallocation_C", &aij);
 33:   if (!aij) PetscObjectQueryFunction((PetscObject)*J, "MatSeqAIJSetPreallocation_C", &aij);
 34:   if (aij) {
 35:     if (bs == 1) {
 36:       MatSeqAIJSetPreallocation(*J, slice->d_nz, slice->d_nnz);
 37:       MatMPIAIJSetPreallocation(*J, slice->d_nz, slice->d_nnz, slice->o_nz, slice->o_nnz);
 38:     } else if (!slice->d_nnz) {
 39:       MatSeqAIJSetPreallocation(*J, slice->d_nz * bs, NULL);
 40:       MatMPIAIJSetPreallocation(*J, slice->d_nz * bs, NULL, slice->o_nz * bs, NULL);
 41:     } else {
 42:       /* The user has provided preallocation per block row; convert it to per scalar row, respecting DMSlicedSetBlockFills() if applicable (see the worked example after this function) */
 43:       PetscMalloc2(slice->n * bs, &sd_nnz, (!!slice->o_nnz) * slice->n * bs, &so_nnz);
 44:       for (i = 0; i < slice->n * bs; i++) {
 45:         sd_nnz[i] = (slice->d_nnz[i / bs] - 1) * (slice->ofill ? slice->ofill->i[i % bs + 1] - slice->ofill->i[i % bs] : bs) + (slice->dfill ? slice->dfill->i[i % bs + 1] - slice->dfill->i[i % bs] : bs);
 46:         if (so_nnz) so_nnz[i] = slice->o_nnz[i / bs] * (slice->ofill ? slice->ofill->i[i % bs + 1] - slice->ofill->i[i % bs] : bs);
 47:       }
 48:       MatSeqAIJSetPreallocation(*J, slice->d_nz * bs, sd_nnz);
 49:       MatMPIAIJSetPreallocation(*J, slice->d_nz * bs, sd_nnz, slice->o_nz * bs, so_nnz);
 50:       PetscFree2(sd_nnz, so_nnz);
 51:     }
 52:   }

 54:   /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
 55:   PetscMalloc1(slice->n + slice->Nghosts, &globals);
 56:   MatGetOwnershipRange(*J, &rstart, NULL);
 57:   for (i = 0; i < slice->n; i++) globals[i] = rstart / bs + i;

 59:   for (i = 0; i < slice->Nghosts; i++) globals[slice->n + i] = slice->ghosts[i];
 60:   ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, bs, slice->n + slice->Nghosts, globals, PETSC_OWN_POINTER, &lmap);
 61:   MatSetLocalToGlobalMapping(*J, lmap, lmap);
 62:   ISLocalToGlobalMappingDestroy(&lmap);
 63:   MatSetDM(*J, dm);
 64:   return 0;
 65: }
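
/* Editorial worked example (not part of the PETSc source) for the block-row to scalar-row conversion
 * above.  Assume bs = 2, dfill rows with 2 and 1 nonzeros (dfill->i = {0, 2, 3}) and ofill rows with
 * 1 nonzero each (ofill->i = {0, 1, 2}).  For a block row k with d_nnz[k] = 3 and o_nnz[k] = 2, the
 * first scalar row (i % bs == 0) is preallocated as
 *    sd_nnz = (3 - 1) * (ofill->i[1] - ofill->i[0]) + (dfill->i[1] - dfill->i[0]) = 2 * 1 + 2 = 4
 *    so_nnz = 2 * (ofill->i[1] - ofill->i[0])                                     = 2 * 1 = 2
 * that is, the diagonal block contributes its dfill row, while the remaining d_nnz[k] - 1 on-process
 * blocks and the o_nnz[k] off-process blocks each contribute the ofill row for that scalar row. */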

 67: /*@C
 68:     DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
 69:       be ghosts on this process

 71:     Not Collective

 73:     Input Parameters:
 74: +    dm - the DM object
 75: .    bs - block size
 76: .    nlocal - number of local (owned, non-ghost) blocks
 77: .    Nghosts - number of ghost blocks on this process
 78: -    ghosts - global indices of each ghost block

 80:     Level: advanced

 82: .seealso: `DMDestroy()`, `DMCreateGlobalVector()`

 84: @*/
 85: PetscErrorCode DMSlicedSetGhosts(DM dm, PetscInt bs, PetscInt nlocal, PetscInt Nghosts, const PetscInt ghosts[])
 86: {
 87:   DM_Sliced *slice = (DM_Sliced *)dm->data;

 90:   PetscFree(slice->ghosts);
 91:   PetscMalloc1(Nghosts, &slice->ghosts);
 92:   PetscArraycpy(slice->ghosts, ghosts, Nghosts);
 93:   slice->bs      = bs;
 94:   slice->n       = nlocal;
 95:   slice->Nghosts = Nghosts;
 96:   return 0;
 97: }
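
/* Editorial usage sketch (not part of the PETSc source), written in the same error-check-free style as
 * this listing: a hypothetical caller declares block size 2, 4 owned blocks, and 2 ghost blocks whose
 * global block indices are illustrative values only (they would normally differ per rank). */
static PetscErrorCode ExampleSetGhosts_Sliced(DM dm)
{
  const PetscInt ghosts[2] = {7, 9}; /* hypothetical global indices of the ghost blocks */

  DMSlicedSetGhosts(dm, 2, 4, 2, ghosts);
  return 0;
}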

 99: /*@C
100:     DMSlicedSetPreallocation - Sets the matrix memory preallocation for matrices computed by DMSLICED

102:     Not Collective

104:     Input Parameters:
105: +    dm    - the DM object
106: .    d_nz  - number of block nonzeros per block row in the diagonal portion of the local
107:            submatrix (same for all local block rows)
108: .    d_nnz - array containing the number of block nonzeros in the various block rows
109:            of the diagonal portion of the local submatrix (possibly different for each block
110:            row), or NULL
111: .    o_nz  - number of block nonzeros per block row in the off-diagonal portion of the local
112:            submatrix (same for all local block rows)
113: -    o_nnz - array containing the number of block nonzeros in the various block rows of the
114:            off-diagonal portion of the local submatrix (possibly different for
115:            each block row), or NULL

117:     Notes:
118:     See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
119:     obtained with DMCreateMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().

121:     Level: advanced

123: .seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `MatMPIAIJSetPreallocation()`,
124:          `MatMPIBAIJSetPreallocation()`, `DMCreateMatrix()`, `DMSlicedSetBlockFills()`

126: @*/
127: PetscErrorCode DMSlicedSetPreallocation(DM dm, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
128: {
129:   DM_Sliced *slice = (DM_Sliced *)dm->data;

132:   slice->d_nz  = d_nz;
133:   slice->d_nnz = (PetscInt *)d_nnz;
134:   slice->o_nz  = o_nz;
135:   slice->o_nnz = (PetscInt *)o_nnz;
136:   return 0;
137: }
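
/* Editorial usage sketch (not part of the PETSc source): per-block-row preallocation for the 4 owned
 * blocks of the previous sketch.  The counts are hypothetical; note that DMSlicedSetPreallocation()
 * only stores the pointers, so the arrays must remain valid until the matrix is created. */
static PetscErrorCode ExampleSetPreallocation_Sliced(DM dm)
{
  static const PetscInt d_nnz[4] = {3, 2, 2, 3}; /* block nonzeros per block row, diagonal portion     */
  static const PetscInt o_nnz[4] = {1, 0, 0, 1}; /* block nonzeros per block row, off-diagonal portion */

  /* the scalar d_nz/o_nz arguments are ignored by the Mat preallocation routines when per-row arrays are given */
  DMSlicedSetPreallocation(dm, 0, d_nnz, 0, o_nnz);
  return 0;
}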

139: static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs, const PetscInt *fill, DMSlicedBlockFills **inf)
140: {
141:   PetscInt            i, j, nz, *fi, *fj;
142:   DMSlicedBlockFills *f;

145:   if (*inf) PetscFree3(*inf, (*inf)->i, (*inf)->j);
146:   if (!fill) return 0;
147:   for (i = 0, nz = 0; i < bs * bs; i++)
148:     if (fill[i]) nz++;
149:   PetscMalloc3(1, &f, bs + 1, &fi, nz, &fj);
150:   f->bs = bs;
151:   f->nz = nz;
152:   f->i  = fi;
153:   f->j  = fj;
154:   for (i = 0, nz = 0; i < bs; i++) {
155:     fi[i] = nz;
156:     for (j = 0; j < bs; j++)
157:       if (fill[i * bs + j]) fj[nz++] = j;
158:   }
159:   fi[i] = nz;
160:   *inf  = f;
161:   return 0;
162: }

164: /*@C
165:     DMSlicedSetBlockFills - Sets the fill pattern in each block of the matrix returned by
166:     DMCreateMatrix() for a multi-component problem.

168:     Logically Collective on dm

170:     Input Parameters:
171: +   dm - the DM object
172: .   dfill - the fill pattern in the diagonal block (may be NULL, meaning a dense block is used)
173: -   ofill - the fill pattern in the off-diagonal blocks (may be NULL, meaning dense blocks are used)

175:     Notes:
176:     This only makes sense for multi-component problems using scalar matrix formats (AIJ).
177:     See DMDASetBlockFills() for example usage, and the sketch following this function.

179:     Level: advanced

181: .seealso: `DMCreateMatrix()`, `DMDASetBlockFills()`
182: @*/
183: PetscErrorCode DMSlicedSetBlockFills(DM dm, const PetscInt *dfill, const PetscInt *ofill)
184: {
185:   DM_Sliced *slice = (DM_Sliced *)dm->data;

188:   DMSlicedSetBlockFills_Private(slice->bs, dfill, &slice->dfill);
189:   DMSlicedSetBlockFills_Private(slice->bs, ofill, &slice->ofill);
190:   return 0;
191: }
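
/* Editorial usage sketch (not part of the PETSc source): fill patterns for a hypothetical 2-component
 * problem.  Within the diagonal block the first component couples to both components and the second only
 * to itself; across blocks only the first component couples.  The block size must already have been set
 * with DMSlicedSetGhosts(), since the fill arrays are interpreted as row-major bs*bs tables. */
static PetscErrorCode ExampleSetBlockFills_Sliced(DM dm)
{
  const PetscInt dfill[4] = {1, 1,
                             0, 1};
  const PetscInt ofill[4] = {1, 0,
                             0, 0};

  DMSlicedSetBlockFills(dm, dfill, ofill);
  return 0;
}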

193: static PetscErrorCode DMDestroy_Sliced(DM dm)
194: {
195:   DM_Sliced *slice = (DM_Sliced *)dm->data;

197:   PetscFree(slice->ghosts);
198:   if (slice->dfill) PetscFree3(slice->dfill, slice->dfill->i, slice->dfill->j);
199:   if (slice->ofill) PetscFree3(slice->ofill, slice->ofill->i, slice->ofill->j);
200:   /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
201:   PetscFree(slice);
202:   return 0;
203: }

205: static PetscErrorCode DMCreateGlobalVector_Sliced(DM dm, Vec *gvec)
206: {
207:   DM_Sliced *slice = (DM_Sliced *)dm->data;

211:   *gvec = NULL;
212:   VecCreateGhostBlock(PetscObjectComm((PetscObject)dm), slice->bs, slice->n * slice->bs, PETSC_DETERMINE, slice->Nghosts, slice->ghosts, gvec);
213:   VecSetDM(*gvec, dm);
214:   return 0;
215: }
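
/* Editorial usage sketch (not part of the PETSc source): the vector created above is ghosted, so its
 * local representation (owned entries followed by ghost entries) is reached through the local form and
 * the ghost entries are refreshed with the VecGhostUpdate calls; see also the notes of DMSlicedCreate(). */
static PetscErrorCode ExampleAccessGhosts_Sliced(DM dm)
{
  Vec                gvec, lform;
  const PetscScalar *array;

  DMCreateGlobalVector(dm, &gvec);
  VecSet(gvec, 1.0);
  VecGhostUpdateBegin(gvec, INSERT_VALUES, SCATTER_FORWARD); /* scatter owned values into the ghost slots */
  VecGhostUpdateEnd(gvec, INSERT_VALUES, SCATTER_FORWARD);
  VecGhostGetLocalForm(gvec, &lform);
  VecGetArrayRead(lform, &array); /* n*bs owned entries followed by Nghosts*bs ghost entries */
  VecRestoreArrayRead(lform, &array);
  VecGhostRestoreLocalForm(gvec, &lform);
  VecDestroy(&gvec);
  return 0;
}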

217: static PetscErrorCode DMGlobalToLocalBegin_Sliced(DM da, Vec g, InsertMode mode, Vec l)
218: {
219:   PetscBool flg;

221:   VecGhostIsLocalForm(g, l, &flg);
223:   VecGhostUpdateEnd(g, mode, SCATTER_FORWARD);
224:   VecGhostUpdateBegin(g, mode, SCATTER_FORWARD);
225:   return 0;
226: }

228: static PetscErrorCode DMGlobalToLocalEnd_Sliced(DM da, Vec g, InsertMode mode, Vec l)
229: {
230:   PetscBool flg;

232:   VecGhostIsLocalForm(g, l, &flg);
234:   VecGhostUpdateEnd(g, mode, SCATTER_FORWARD);
235:   return 0;
236: }

238: /*MC
239:    DMSLICED = "sliced" - A DM object that is used to manage data for a general graph; it uses VecCreateGhost() ghosted vectors to store the fields.

241:    See DMSlicedCreate() for details.

243:   Level: intermediate

245: .seealso: `DMType`, `DMCOMPOSITE`, `DMSlicedCreate()`, `DMCreate()`
246: M*/
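
/* Editorial sketch (not part of the PETSc source): besides DMSlicedCreate() below, a DMSLICED can be
 * built through the generic constructor path, after which DMSlicedSetGhosts() and friends are called. */
static PetscErrorCode ExampleCreateByType_Sliced(MPI_Comm comm, DM *dm)
{
  DMCreate(comm, dm);
  DMSetType(*dm, DMSLICED);
  return 0;
}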

248: PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
249: {
250:   DM_Sliced *slice;

252:   PetscNew(&slice);
253:   p->data = slice;

255:   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
256:   p->ops->creatematrix       = DMCreateMatrix_Sliced;
257:   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
258:   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
259:   p->ops->destroy            = DMDestroy_Sliced;
260:   return 0;
261: }

263: /*@C
264:     DMSlicedCreate - Creates a DM object used to manage data for an unstructured problem

266:     Collective

268:     Input Parameters:
269: +   comm - the processors that will share the global vector
270: .   bs - the block size
271: .   nlocal - number of local (owned, non-ghost) blocks on this process
272: .   Nghosts - number of ghost blocks needed on this process
273: .   ghosts - global indices of each ghost block for this process
274: .   d_nnz - matrix preallocation information representing coupling within this process, or NULL
275: -   o_nnz - matrix preallocation information representing coupling between this process and other processes

277:     Output Parameter:
278: .   dm - the sliced DM object

280:     Notes:
281:         This DM does not support DMCreateLocalVector(); instead one directly uses
282:         VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
283:         the ghost points.

285:         One can use DMGlobalToLocalBegin() and DMGlobalToLocalEnd() in place of VecGhostUpdateBegin() and VecGhostUpdateEnd(); for this DM they wrap those calls and require the local vector to be the local form of the global vector.

287:     Level: advanced

289: .seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `DMSetType()`, `DMSLICED`, `DMSlicedSetGhosts()`, `DMSlicedSetPreallocation()`, `VecGhostUpdateBegin()`, `VecGhostUpdateEnd()`,
290:          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`

292: @*/
293: PetscErrorCode DMSlicedCreate(MPI_Comm comm, PetscInt bs, PetscInt nlocal, PetscInt Nghosts, const PetscInt ghosts[], const PetscInt d_nnz[], const PetscInt o_nnz[], DM *dm)
294: {
296:   DMCreate(comm, dm);
297:   DMSetType(*dm, DMSLICED);
298:   DMSlicedSetGhosts(*dm, bs, nlocal, Nghosts, ghosts);
299:   if (d_nnz) DMSlicedSetPreallocation(*dm, 0, d_nnz, 0, o_nnz);
300:   return 0;
301: }
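
/* Editorial usage sketch (not part of the PETSc source): a minimal end-to-end workflow with hypothetical
 * sizes (each process owns 4 blocks of size 2 and ghosts 2 blocks owned elsewhere; the ghost indices
 * would normally differ per rank).  DMCreateMatrix() dispatches to DMCreateMatrix_Sliced() above, so the
 * matrix comes preallocated and carries the block local-to-global mapping set up there. */
static PetscErrorCode ExampleWorkflow_Sliced(MPI_Comm comm)
{
  DM             dm;
  Mat            J;
  const PetscInt ghosts[2] = {7, 9};       /* hypothetical global indices of the ghost blocks */
  const PetscInt d_nnz[4]  = {3, 2, 2, 3}; /* hypothetical per-block-row preallocation        */
  const PetscInt o_nnz[4]  = {1, 0, 0, 1};

  DMSlicedCreate(comm, 2, 4, 2, ghosts, d_nnz, o_nnz, &dm);
  DMCreateMatrix(dm, &J);
  /* insert entries with MatSetValuesBlockedLocal() using local block indices 0..3 for owned blocks
   * and 4..5 for ghost blocks, then assemble */
  MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY);
  MatDestroy(&J);
  DMDestroy(&dm);
  return 0;
}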