Actual source code: dmredundant.c

#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>

typedef struct {
  PetscMPIInt rank; /* owner */
  PetscInt    N;    /* total number of dofs */
  PetscInt    n;    /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;
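
/*
   Worked example (illustrative, not part of the PETSc source): for a redundant DM with
   N = 4 dofs owned by rank 1 on a 3-process communicator, the fields above are

      rank 0:  rank = 1, N = 4, n = 0
      rank 1:  rank = 1, N = 4, n = 4
      rank 2:  rank = 1, N = 4, n = 0

   so the global vector stores all 4 entries on rank 1 only, while the local vector on
   every rank holds all 4 entries.
*/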

static PetscErrorCode DMCreateMatrix_Redundant(DM dm, Mat *J)
{
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;
  PetscInt               i, rstart, rend, *cols;
  PetscScalar           *vals;

  MatCreate(PetscObjectComm((PetscObject)dm), J);
  MatSetSizes(*J, red->n, red->n, red->N, red->N);
  MatSetType(*J, dm->mattype);
  MatSeqAIJSetPreallocation(*J, red->n, NULL);
  MatSeqBAIJSetPreallocation(*J, 1, red->n, NULL);
  MatMPIAIJSetPreallocation(*J, red->n, NULL, red->N - red->n, NULL);
  MatMPIBAIJSetPreallocation(*J, 1, red->n, NULL, red->N - red->n, NULL);

  DMGetLocalToGlobalMapping(dm, &ltog);
  MatSetLocalToGlobalMapping(*J, ltog, ltog);
  MatSetDM(*J, dm);

  /* The redundant dofs are densely coupled: the owning rank holds a full N x N block, explicitly set to zero */
  PetscMalloc2(red->N, &cols, red->N, &vals);
  for (i = 0; i < red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  MatGetOwnershipRange(*J, &rstart, &rend);
  for (i = rstart; i < rend; i++) MatSetValues(*J, 1, &i, red->N, cols, vals, INSERT_VALUES);
  PetscFree2(cols, vals);
  MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY);
  return 0;
}
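
/*
   Usage sketch (illustrative, not part of the PETSc source; `dm` is assumed to come from
   DMRedundantCreate()): DMCreateMatrix() on a redundant DM yields a dense N x N matrix
   whose rows all live on the owning rank; any rank may add entries through the local
   numbering and they are communicated during assembly.

      Mat         J;
      PetscInt    row = 0, col = 0;
      PetscScalar v   = 1.0;

      DMCreateMatrix(dm, &J);
      MatSetValuesLocal(J, 1, &row, 1, &col, &v, ADD_VALUES);
      MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY);
      MatDestroy(&J);
*/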

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", NULL);
  PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", NULL);
  PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", NULL);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscFree(dm->data);
  return 0;
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm, Vec *gvec)
{
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;

  /* The global vector stores all N entries on the owning rank and none elsewhere */
  *gvec = NULL;
  VecCreate(PetscObjectComm((PetscObject)dm), gvec);
  VecSetSizes(*gvec, red->n, red->N);
  VecSetType(*gvec, dm->vectype);
  DMGetLocalToGlobalMapping(dm, &ltog);
  VecSetLocalToGlobalMapping(*gvec, ltog);
  VecSetDM(*gvec, dm);
  return 0;
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm, Vec *lvec)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;

  /* The local vector is a sequential vector holding all N entries on every rank */
  *lvec = NULL;
  VecCreate(PETSC_COMM_SELF, lvec);
  VecSetSizes(*lvec, red->N, red->N);
  VecSetType(*lvec, dm->vectype);
  VecSetDM(*lvec, dm);
  return 0;
}
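
/*
   Usage sketch (illustrative, not part of the PETSc source; `dm` is assumed to come from
   DMRedundantCreate(comm, 0, N, &dm)): the global and local vectors differ only in layout.

      Vec      g, l;
      PetscInt ng, nl;

      DMCreateGlobalVector(dm, &g);
      DMCreateLocalVector(dm, &l);
      VecGetLocalSize(g, &ng); // N on rank 0, 0 on every other rank
      VecGetLocalSize(l, &nl); // N on every rank
      VecDestroy(&l);
      VecDestroy(&g);
*/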

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm, Vec l, InsertMode imode, Vec g)
{
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt        rank;

  MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank);
  VecGetArrayRead(l, &lv);
  VecGetArray(g, &gv);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES: {
    void        *source;
    PetscScalar *buffer;
    PetscInt     i;
    if (rank == red->rank) {
      /* Combine the owner's local contribution in place, then reduce the other ranks' contributions onto it */
      buffer = gv;
      source = MPI_IN_PLACE;
      if (imode == ADD_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = PetscMax(gv[i], lv[i]);
#endif
    } else source = (void *)lv;
    MPI_Reduce(source, gv, red->N, MPIU_SCALAR, (imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX, red->rank, PetscObjectComm((PetscObject)dm));
  } break;
  case INSERT_VALUES:
    PetscArraycpy(gv, lv, red->n);
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  VecRestoreArrayRead(l, &lv);
  VecRestoreArray(g, &gv);
  return 0;
}
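
/*
   Usage sketch (illustrative, not part of the PETSc source; `dm`, `g`, and `l` are assumed
   to be the objects from the sketch above): with ADD_VALUES, every rank's local copy is
   summed onto the owner's global vector by the MPI_Reduce above.

      VecSet(l, 1.0); // each rank contributes 1.0 per dof
      VecSet(g, 0.0);
      DMLocalToGlobalBegin(dm, l, ADD_VALUES, g);
      DMLocalToGlobalEnd(dm, l, ADD_VALUES, g);
      // on the owner, every global entry now equals the communicator size
*/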

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm, Vec l, InsertMode imode, Vec g)
{
  return 0;
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm, Vec g, InsertMode imode, Vec l)
{
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  VecGetArrayRead(g, &gv);
  VecGetArray(l, &lv);
  switch (imode) {
  case INSERT_VALUES:
    /* Copy on the owner (where n = N), then broadcast the values into every rank's local vector */
    if (red->n) PetscArraycpy(lv, gv, red->n);
    MPI_Bcast(lv, red->N, MPIU_SCALAR, red->rank, PetscObjectComm((PetscObject)dm));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  VecRestoreArrayRead(g, &gv);
  VecRestoreArray(l, &lv);
  return 0;
}
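
/*
   Usage sketch (illustrative, not part of the PETSc source; `dm`, `g`, and `l` as in the
   earlier sketches): with INSERT_VALUES, the owner's global values are broadcast into
   every rank's local vector by the MPI_Bcast above.

      VecSet(g, 3.0); // only the owner actually stores entries
      DMGlobalToLocalBegin(dm, g, INSERT_VALUES, l);
      DMGlobalToLocalEnd(dm, g, INSERT_VALUES, l);
      // all N local entries on every rank now hold 3.0
*/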

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm, Vec g, InsertMode imode, Vec l)
{
  return 0;
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  return 0;
}

static PetscErrorCode DMView_Redundant(DM dm, PetscViewer viewer)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscBool     iascii;

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) PetscViewerASCIIPrintf(viewer, "redundant: rank=%d N=%" PetscInt_FMT "\n", red->rank, red->N);
  return 0;
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm, ISColoringType ctype, ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant *)dm->data;
  PetscInt         i, nloc;
  ISColoringValue *colors;

  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Unknown ISColoringType %d", (int)ctype);
  }
  /* Every dof is coupled to every other dof, so each dof gets its own color */
  PetscMalloc1(nloc, &colors);
  for (i = 0; i < nloc; i++) colors[i] = i;
  ISColoringCreate(PetscObjectComm((PetscObject)dm), red->N, nloc, colors, PETSC_OWN_POINTER, coloring);
  ISColoringSetType(*coloring, ctype);
  return 0;
}

static PetscErrorCode DMRefine_Redundant(DM dmc, MPI_Comm comm, DM *dmf)
{
  PetscMPIInt   flag;
  DM_Redundant *redc = (DM_Redundant *)dmc->data;

  if (comm == MPI_COMM_NULL) PetscObjectGetComm((PetscObject)dmc, &comm);
  MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), comm, &flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "cannot change communicators");
  /* Refinement of a redundant DM is another redundant DM with the same owner and size */
  DMRedundantCreate(comm, redc->rank, redc->N, dmf);
  return 0;
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf, MPI_Comm comm, DM *dmc)
{
  PetscMPIInt   flag;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;

  if (comm == MPI_COMM_NULL) PetscObjectGetComm((PetscObject)dmf, &comm);
  MPI_Comm_compare(PetscObjectComm((PetscObject)dmf), comm, &flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  /* Coarsening likewise produces an identical redundant DM */
  DMRedundantCreate(comm, redf->rank, redf->N, dmc);
  return 0;
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc, DM dmf, Mat *P, Vec *scale)
{
  DM_Redundant *redc = (DM_Redundant *)dmc->data;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;
  PetscMPIInt   flag;
  PetscInt      i, rstart, rend;

  MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), PetscObjectComm((PetscObject)dmf), &flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmc), PETSC_ERR_ARG_INCOMP, "Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmc), PETSC_ERR_ARG_INCOMP, "Global size does not match");

  /* Coarse and fine spaces are identical, so the interpolation is the identity matrix */
  MatCreate(PetscObjectComm((PetscObject)dmc), P);
  MatSetSizes(*P, redc->n, redc->n, redc->N, redc->N);
  MatSetType(*P, MATAIJ);
  MatSeqAIJSetPreallocation(*P, 1, NULL);
  MatMPIAIJSetPreallocation(*P, 1, NULL, 0, NULL);
  MatGetOwnershipRange(*P, &rstart, &rend);
  for (i = rstart; i < rend; i++) MatSetValue(*P, i, i, 1.0, INSERT_VALUES);
  MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY);
  if (scale) DMCreateInterpolationScale(dmc, dmf, *P, scale);
  return 0;
}

/*@
  DMRedundantSetSize - Sets the size of a densely coupled redundant object

  Collective on dm

  Input Parameters:
+ dm   - redundant DM
. rank - rank of the process that owns the redundant degrees of freedom
- N    - total number of redundant degrees of freedom

  Level: advanced

.seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantGetSize()`
@*/
PetscErrorCode DMRedundantSetSize(DM dm, PetscMPIInt rank, PetscInt N)
{
  PetscTryMethod(dm, "DMRedundantSetSize_C", (DM, PetscMPIInt, PetscInt), (dm, rank, N));
  return 0;
}

/*@
  DMRedundantGetSize - Gets the size of a densely coupled redundant object

  Not Collective

  Input Parameter:
. dm - redundant DM

  Output Parameters:
+ rank - rank of the process that owns the redundant degrees of freedom (or NULL)
- N    - total number of redundant degrees of freedom (or NULL)

  Level: advanced

.seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantSetSize()`
@*/
PetscErrorCode DMRedundantGetSize(DM dm, PetscMPIInt *rank, PetscInt *N)
{
  PetscUseMethod(dm, "DMRedundantGetSize_C", (DM, PetscMPIInt *, PetscInt *), (dm, rank, N));
  return 0;
}
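
/*
   Usage sketch (illustrative, not part of the PETSc source; `comm` is an assumed MPI
   communicator): the type-generic way to build a redundant DM and query it back.

      DM          dm;
      PetscMPIInt owner;
      PetscInt    N;

      DMCreate(comm, &dm);
      DMSetType(dm, DMREDUNDANT);
      DMRedundantSetSize(dm, 0, 5);       // rank 0 owns 5 redundant dofs
      DMSetUp(dm);
      DMRedundantGetSize(dm, &owner, &N); // owner == 0, N == 5
      DMDestroy(&dm);
*/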

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm, PetscMPIInt rank, PetscInt N)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscMPIInt   myrank;
  PetscInt      i, *globals;

  MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &myrank);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* The local-to-global mapping is set up here: local dofs 0..N-1 on every rank map to global dofs 0..N-1 */
  PetscMalloc1(red->N, &globals);
  for (i = 0; i < red->N; i++) globals[i] = i;
  ISLocalToGlobalMappingDestroy(&dm->ltogmap);
  ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm), 1, red->N, globals, PETSC_OWN_POINTER, &dm->ltogmap);
  return 0;
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm, PetscMPIInt *rank, PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;

  if (rank) *rank = red->rank;
  if (N) *N = red->N;
  return 0;
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  return 0;
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
   In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes
   have no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all
   needed for each process's local computations).

   This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

   Level: intermediate

.seealso: `DMType`, `DMCOMPOSITE`, `DMCreate()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
M*/
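
/*
   Usage sketch (illustrative, not part of the PETSc source; `comm` and the mesh DM
   `dmmesh` are assumed to exist in the application): packing a DMREDUNDANT holding two
   continuation parameters next to a mesh DM inside a DMCOMPOSITE.

      DM packer, dmred;

      DMCompositeCreate(comm, &packer);
      DMCompositeAddDM(packer, dmmesh);      // the application's mesh DM
      DMRedundantCreate(comm, 0, 2, &dmred); // 2 parameters owned by rank 0
      DMCompositeAddDM(packer, dmred);
      // ... work with DMCreateGlobalVector(packer, ...) etc. ...
      DMDestroy(&dmred);
      DMDestroy(&packer);
*/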

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  DM_Redundant *red;

  PetscNew(&red);
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", DMRedundantSetSize_Redundant);
  PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", DMRedundantGetSize_Redundant);
  PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", DMSetUpGLVisViewer_Redundant);
  return 0;
}

/*@C
  DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

  Collective

  Input Parameters:
+ comm - the processes that will share the global vector
. rank - rank of the process that owns the redundant values
- N    - total number of degrees of freedom

  Output Parameter:
. dm - the redundant DM

  Level: advanced

.seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `DMCreateMatrix()`, `DMCompositeAddDM()`, `DMREDUNDANT`, `DMSetType()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
@*/

PetscErrorCode DMRedundantCreate(MPI_Comm comm, PetscMPIInt rank, PetscInt N, DM *dm)
{
  DMCreate(comm, dm);
  DMSetType(*dm, DMREDUNDANT);
  DMRedundantSetSize(*dm, rank, N);
  DMSetUp(*dm);
  return 0;
}
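
/*
   End-to-end sketch (illustrative, not part of the PETSc source): create a redundant DM
   with 3 dofs owned by rank 0, set the global vector, and broadcast it into every rank's
   local vector. The program below is a standalone example, not code from this file.

      #include <petscdmredundant.h>

      int main(int argc, char **argv)
      {
        DM  dm;
        Vec g, l;

        PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
        PetscCall(DMRedundantCreate(PETSC_COMM_WORLD, 0, 3, &dm));
        PetscCall(DMCreateGlobalVector(dm, &g)); // 3 entries on rank 0, 0 elsewhere
        PetscCall(DMCreateLocalVector(dm, &l));  // 3 entries on every rank
        PetscCall(VecSet(g, 42.0));              // only the owner actually stores global entries
        PetscCall(DMGlobalToLocalBegin(dm, g, INSERT_VALUES, l));
        PetscCall(DMGlobalToLocalEnd(dm, g, INSERT_VALUES, l));
        PetscCall(VecDestroy(&l));
        PetscCall(VecDestroy(&g));
        PetscCall(DMDestroy(&dm));
        PetscCall(PetscFinalize());
        return 0;
      }
*/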