Actual source code: isltog.c
2: #include <petsc/private/isimpl.h>
3: #include <petsc/private/hashmapi.h>
4: #include <petscsf.h>
5: #include <petscviewer.h>
7: PetscClassId IS_LTOGM_CLASSID;
8: static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping, PetscInt *, PetscInt **, PetscInt **, PetscInt ***);
10: typedef struct {
11: PetscInt *globals;
12: } ISLocalToGlobalMapping_Basic;
14: typedef struct {
15: PetscHMapI globalht;
16: } ISLocalToGlobalMapping_Hash;
18: /*@C
19: ISGetPointRange - Returns a description of the points in an `IS` suitable for traversal
21: Not collective
23: Input Parameter:
24: . pointIS - The `IS` object
26: Output Parameters:
27: + pStart - The first index, see notes
28: . pEnd - One past the last index, see notes
29: - points - The indices, see notes
31: Level: intermediate
33: Notes:
34: If the `IS` contains contiguous indices in an `ISSTRIDE`, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
35: .vb
36: ISGetPointRange(is, &pStart, &pEnd, &points);
37: for (p = pStart; p < pEnd; ++p) {
38: const PetscInt point = points ? points[p] : p;
39: }
40: ISRestorePointRange(is, &pStart, &pEnd, &points);
41: .ve
43: .seealso: [](sec_scatter), `IS`, `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
44: @*/
45: PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
46: {
47: PetscInt numCells, step = 1;
48: PetscBool isStride;
51: *pStart = 0;
52: *points = NULL;
53: ISGetLocalSize(pointIS, &numCells);
54: PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride);
55: if (isStride) ISStrideGetInfo(pointIS, pStart, &step);
56: *pEnd = *pStart + numCells;
57: if (!isStride || step != 1) ISGetIndices(pointIS, points);
58: return 0;
59: }
61: /*@C
62: ISRestorePointRange - Destroys the traversal description created with `ISGetPointRange()`
64: Not collective
66: Input Parameters:
67: + pointIS - The `IS` object
68: . pStart - The first index, from `ISGetPointRange()`
69: . pEnd - One past the last index, from `ISGetPointRange()`
70: - points - The indices, from `ISGetPointRange()`
72: Level: intermediate
74: .seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
75: @*/
76: PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
77: {
78: PetscInt step = 1;
79: PetscBool isStride;
82: PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride);
83: if (isStride) ISStrideGetInfo(pointIS, pStart, &step);
84: if (!isStride || step != 1) ISGetIndices(pointIS, points);
85: return 0;
86: }
88: /*@C
89: ISGetPointSubrange - Configures the input `IS` to be a subrange for the traversal information given
91: Not collective
93: Input Parameters:
94: + subpointIS - The `IS` object to be configured
95: . pStart - The first index of the subrange
96: . pEnd - One past the last index for the subrange
97: - points - The indices for the entire range, from `ISGetPointRange()`
99: Output Parameter:
100: . subpointIS - The `IS` object now configured to be a subrange
102: Level: intermediate
104: Note:
105: The input `IS` will now respond properly to calls to `ISGetPointRange()` and return the subrange.
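
Example:
A minimal sketch; the names pointIS and subIS are illustrative, and subIS is assumed to be an existing `IS` that may be reconfigured in place (here a placeholder created with `ISCreateStride()`)
.vb
  IS              pointIS, subIS;
  PetscInt        pStart, pEnd;
  const PetscInt *points;

  ISGetPointRange(pointIS, &pStart, &pEnd, &points);
  ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &subIS);                        /* placeholder to be reconfigured */
  ISGetPointSubrange(subIS, pStart, pStart + (pEnd - pStart) / 2, points); /* first half of the range */
  /* traverse the subrange through subIS, e.g. with ISGetPointRange(subIS, ...) */
  ISDestroy(&subIS);
  ISRestorePointRange(pointIS, &pStart, &pEnd, &points);
.ve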
107: .seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()`
108: @*/
109: PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
110: {
112: if (points) {
113: ISSetType(subpointIS, ISGENERAL);
114: ISGeneralSetIndices(subpointIS, pEnd - pStart, &points[pStart], PETSC_USE_POINTER);
115: } else {
116: ISSetType(subpointIS, ISSTRIDE);
117: ISStrideSetStride(subpointIS, pEnd - pStart, pStart, 1);
118: }
119: return 0;
120: }
122: /* -----------------------------------------------------------------------------------------*/
124: /*
125: Creates the global mapping information in the ISLocalToGlobalMapping structure
127: If the user has not selected how to handle the global to local mapping, then HASH is used for "large" problems
128: */
129: static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
130: {
131: PetscInt i, *idx = mapping->indices, n = mapping->n, end, start;
133: if (mapping->data) return 0;
134: end = 0;
135: start = PETSC_MAX_INT;
137: for (i = 0; i < n; i++) {
138: if (idx[i] < 0) continue;
139: if (idx[i] < start) start = idx[i];
140: if (idx[i] > end) end = idx[i];
141: }
142: if (start > end) {
143: start = 0;
144: end = -1;
145: }
146: mapping->globalstart = start;
147: mapping->globalend = end;
148: if (!((PetscObject)mapping)->type_name) {
149: if ((end - start) > PetscMax(4 * n, 1000000)) {
150: ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH);
151: } else {
152: ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGBASIC);
153: }
154: }
155: PetscTryTypeMethod(mapping, globaltolocalmappingsetup);
156: return 0;
157: }
159: static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
160: {
161: PetscInt i, *idx = mapping->indices, n = mapping->n, end, start, *globals;
162: ISLocalToGlobalMapping_Basic *map;
164: start = mapping->globalstart;
165: end = mapping->globalend;
166: PetscNew(&map);
167: PetscMalloc1(end - start + 2, &globals);
168: map->globals = globals;
169: for (i = 0; i < end - start + 1; i++) globals[i] = -1;
170: for (i = 0; i < n; i++) {
171: if (idx[i] < 0) continue;
172: globals[idx[i] - start] = i;
173: }
174: mapping->data = (void *)map;
175: return 0;
176: }
178: static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
179: {
180: PetscInt i, *idx = mapping->indices, n = mapping->n;
181: ISLocalToGlobalMapping_Hash *map;
183: PetscNew(&map);
184: PetscHMapICreate(&map->globalht);
185: for (i = 0; i < n; i++) {
186: if (idx[i] < 0) continue;
187: PetscHMapISet(map->globalht, idx[i], i);
188: }
189: mapping->data = (void *)map;
190: return 0;
191: }
193: static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
194: {
195: ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data;
197: if (!map) return 0;
198: PetscFree(map->globals);
199: PetscFree(mapping->data);
200: return 0;
201: }
203: static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
204: {
205: ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash *)mapping->data;
207: if (!map) return 0;
208: PetscHMapIDestroy(&map->globalht);
209: PetscFree(mapping->data);
210: return 0;
211: }
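
/*
   The four inclusions of isltog.h below instantiate the global-to-local apply routines for the
   Basic and Hash types, in both scalar and block form; each instantiation translates a global
   index g through the GTOL() macro defined immediately before it. As a rough illustration (the
   numbers are made up): for the scalar Basic variant with bs = 3 and g = 14, the macro looks up
   block 14 / 3 = 4, and if map->globals[4 - start] is 7 the local index returned is
   3 * 7 + (14 % 3) = 23; a stored value of -1 marks a global index with no local counterpart.
*/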
213: #define GTOLTYPE _Basic
214: #define GTOLNAME _Basic
215: #define GTOLBS mapping->bs
216: #define GTOL(g, local) \
217: do { \
218: local = map->globals[g / bs - start]; \
219: if (local >= 0) local = bs * local + (g % bs); \
220: } while (0)
222: #include <../src/vec/is/utils/isltog.h>
224: #define GTOLTYPE _Basic
225: #define GTOLNAME Block_Basic
226: #define GTOLBS 1
227: #define GTOL(g, local) \
228: do { \
229: local = map->globals[g - start]; \
230: } while (0)
231: #include <../src/vec/is/utils/isltog.h>
233: #define GTOLTYPE _Hash
234: #define GTOLNAME _Hash
235: #define GTOLBS mapping->bs
236: #define GTOL(g, local) \
237: do { \
238: (void)PetscHMapIGet(map->globalht, g / bs, &local); \
239: if (local >= 0) local = bs * local + (g % bs); \
240: } while (0)
241: #include <../src/vec/is/utils/isltog.h>
243: #define GTOLTYPE _Hash
244: #define GTOLNAME Block_Hash
245: #define GTOLBS 1
246: #define GTOL(g, local) \
247: do { \
248: (void)PetscHMapIGet(map->globalht, g, &local); \
249: } while (0)
250: #include <../src/vec/is/utils/isltog.h>
252: /*@
253: ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
255: Not Collective
257: Input Parameter:
258: . ltog - local to global mapping
260: Output Parameter:
261: . nltog - the duplicated local to global mapping
263: Level: advanced
265: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
266: @*/
267: PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog, ISLocalToGlobalMapping *nltog)
268: {
269: ISLocalToGlobalMappingType l2gtype;
272: ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog), ltog->bs, ltog->n, ltog->indices, PETSC_COPY_VALUES, nltog);
273: ISLocalToGlobalMappingGetType(ltog, &l2gtype);
274: ISLocalToGlobalMappingSetType(*nltog, l2gtype);
275: return 0;
276: }
278: /*@
279: ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
281: Not Collective
283: Input Parameter:
284: . ltog - local to global mapping
286: Output Parameter:
287: . n - the number of entries in the local mapping, `ISLocalToGlobalMappingGetIndices()` returns an array of this length
289: Level: advanced
291: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
292: @*/
293: PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, PetscInt *n)
294: {
297: *n = mapping->bs * mapping->n;
298: return 0;
299: }
301: /*@C
302: ISLocalToGlobalMappingViewFromOptions - View an `ISLocalToGlobalMapping` based on values in the options database
304: Collective
306: Input Parameters:
307: + A - the local to global mapping object
308: . obj - Optional object
309: - name - command line option
311: Level: intermediate
313: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()`
314: @*/
315: PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[])
316: {
318: PetscObjectViewFromOptions((PetscObject)A, obj, name);
319: return 0;
320: }
322: /*@C
323: ISLocalToGlobalMappingView - View a local to global mapping
325: Not Collective
327: Input Parameters:
328: + ltog - local to global mapping
329: - viewer - viewer
331: Level: advanced
333: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
334: @*/
335: PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping, PetscViewer viewer)
336: {
337: PetscInt i;
338: PetscMPIInt rank;
339: PetscBool iascii;
342: if (!viewer) PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping), &viewer);
345: MPI_Comm_rank(PetscObjectComm((PetscObject)mapping), &rank);
346: PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
347: if (iascii) {
348: PetscObjectPrintClassNamePrefixType((PetscObject)mapping, viewer);
349: PetscViewerASCIIPushSynchronized(viewer);
350: for (i = 0; i < mapping->n; i++) PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n", rank, i, mapping->indices[i]);
351: PetscViewerFlush(viewer);
352: PetscViewerASCIIPopSynchronized(viewer);
353: }
354: return 0;
355: }
357: /*@
358: ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
359: ordering and a global parallel ordering.
361: Not collective
363: Input Parameter:
364: . is - index set containing the global numbers for each local number
366: Output Parameter:
367: . mapping - new mapping data structure
369: Level: advanced
371: Note:
372: The block size of the `IS` determines the block size of the mapping.
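
Example:
A minimal sketch with illustrative global numbers; a non-block `IS` yields a mapping with block size 1
.vb
  IS                     is;
  ISLocalToGlobalMapping ltog;
  const PetscInt         globals[] = {10, 11, 30, 31};

  ISCreateGeneral(PETSC_COMM_SELF, 4, globals, PETSC_COPY_VALUES, &is);
  ISLocalToGlobalMappingCreateIS(is, &ltog);
  ISDestroy(&is);
  /* ... use ltog ... */
  ISLocalToGlobalMappingDestroy(&ltog);
.ve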
374: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()`
375: @*/
376: PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *mapping)
377: {
378: PetscInt n, bs;
379: const PetscInt *indices;
380: MPI_Comm comm;
381: PetscBool isblock;
386: PetscObjectGetComm((PetscObject)is, &comm);
387: ISGetLocalSize(is, &n);
388: PetscObjectTypeCompare((PetscObject)is, ISBLOCK, &isblock);
389: if (!isblock) {
390: ISGetIndices(is, &indices);
391: ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, mapping);
392: ISRestoreIndices(is, &indices);
393: } else {
394: ISGetBlockSize(is, &bs);
395: ISBlockGetIndices(is, &indices);
396: ISLocalToGlobalMappingCreate(comm, bs, n / bs, indices, PETSC_COPY_VALUES, mapping);
397: ISBlockRestoreIndices(is, &indices);
398: }
399: return 0;
400: }
402: /*@C
403: ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
404: ordering and a global parallel ordering.
406: Collective
408: Input Parameters:
409: + sf - star forest mapping contiguous local indices to (rank, offset)
410: - start - first global index on this process, or `PETSC_DECIDE` to compute contiguous global numbering automatically
412: Output Parameter:
413: . mapping - new mapping data structure
415: Level: advanced
417: Note:
418: If any processor calls this with start = `PETSC_DECIDE` then all processors must; otherwise the program will hang.
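
Example:
A minimal sketch, assuming sf is an existing star forest describing the ghost layout of the local points
.vb
  PetscSF                sf;    /* assumed to exist */
  ISLocalToGlobalMapping ltog;

  ISLocalToGlobalMappingCreateSF(sf, PETSC_DECIDE, &ltog);  /* contiguous global numbering computed from the root counts */
  /* ... use ltog ... */
  ISLocalToGlobalMappingDestroy(&ltog);
.ve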
420: .seealso: [](sec_scatter), `PetscSF`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`
421: @*/
422: PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf, PetscInt start, ISLocalToGlobalMapping *mapping)
423: {
424: PetscInt i, maxlocal, nroots, nleaves, *globals, *ltog;
425: MPI_Comm comm;
429: PetscObjectGetComm((PetscObject)sf, &comm);
430: PetscSFGetGraph(sf, &nroots, &nleaves, NULL, NULL);
431: if (start == PETSC_DECIDE) {
432: start = 0;
433: MPI_Exscan(&nroots, &start, 1, MPIU_INT, MPI_SUM, comm);
434: }
435: PetscSFGetLeafRange(sf, NULL, &maxlocal);
436: ++maxlocal;
437: PetscMalloc1(nroots, &globals);
438: PetscMalloc1(maxlocal, &ltog);
439: for (i = 0; i < nroots; i++) globals[i] = start + i;
440: for (i = 0; i < maxlocal; i++) ltog[i] = -1;
441: PetscSFBcastBegin(sf, MPIU_INT, globals, ltog, MPI_REPLACE);
442: PetscSFBcastEnd(sf, MPIU_INT, globals, ltog, MPI_REPLACE);
443: ISLocalToGlobalMappingCreate(comm, 1, maxlocal, ltog, PETSC_OWN_POINTER, mapping);
444: PetscFree(globals);
445: return 0;
446: }
448: /*@
449: ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
451: Not collective
453: Input Parameters:
454: + mapping - mapping data structure
455: - bs - the blocksize
457: Level: advanced
459: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
460: @*/
461: PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping, PetscInt bs)
462: {
463: PetscInt *nid;
464: const PetscInt *oid;
465: PetscInt i, cn, on, obs, nn;
469: if (bs == mapping->bs) return 0;
470: on = mapping->n;
471: obs = mapping->bs;
472: oid = mapping->indices;
473: nn = (on * obs) / bs;
476: PetscMalloc1(nn, &nid);
477: ISLocalToGlobalMappingGetIndices(mapping, &oid);
478: for (i = 0; i < nn; i++) {
479: PetscInt j;
480: for (j = 0, cn = 0; j < bs - 1; j++) {
481: if (oid[i * bs + j] < 0) {
482: cn++;
483: continue;
484: }
486: }
487: if (oid[i * bs + j] < 0) cn++;
488: if (cn) {
490: nid[i] = -1;
491: } else {
492: nid[i] = oid[i * bs] / bs;
493: }
494: }
495: ISLocalToGlobalMappingRestoreIndices(mapping, &oid);
497: mapping->n = nn;
498: mapping->bs = bs;
499: PetscFree(mapping->indices);
500: mapping->indices = nid;
501: mapping->globalstart = 0;
502: mapping->globalend = 0;
504: /* reset the cached information */
505: PetscFree(mapping->info_procs);
506: PetscFree(mapping->info_numprocs);
507: if (mapping->info_indices) {
508: PetscInt i;
510: PetscFree((mapping->info_indices)[0]);
511: for (i = 1; i < mapping->info_nproc; i++) PetscFree(mapping->info_indices[i]);
512: PetscFree(mapping->info_indices);
513: }
514: mapping->info_cached = PETSC_FALSE;
516: PetscTryTypeMethod(mapping, destroy);
517: return 0;
518: }
520: /*@
521: ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
524: Not Collective
526: Input Parameter:
527: . mapping - mapping data structure
529: Output Parameter:
530: . bs - the blocksize
532: Level: advanced
534: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
535: @*/
536: PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping, PetscInt *bs)
537: {
539: *bs = mapping->bs;
540: return 0;
541: }
543: /*@
544: ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
545: ordering and a global parallel ordering.
547: Not Collective, but communicator may have more than one process
549: Input Parameters:
550: + comm - MPI communicator
551: . bs - the block size
552: . n - the number of local elements divided by the block size, or equivalently the number of block indices
553: . indices - the global index for each local element; these do not need to be in increasing order (sorted), and they should not be scaled (i.e. multiplied) by the blocksize bs
554: - mode - see PetscCopyMode
556: Output Parameter:
557: . mapping - new mapping data structure
559: Level: advanced
561: Notes:
562: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
564: For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType`
565: of `ISLOCALTOGLOBALMAPPINGBASIC` will be used; this uses more memory but is faster; this approach is not scalable for extremely large mappings.
567: For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
568: Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
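
Example:
A minimal sketch with illustrative numbers: with bs = 2 and block indices {0, 3}, local entries 0..3 map to global entries 0, 1, 6, 7
.vb
  ISLocalToGlobalMapping ltog;
  const PetscInt         blockindices[] = {0, 3};
  const PetscInt         in[]           = {0, 1, 2, 3};
  PetscInt               out[4];

  ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 2, 2, blockindices, PETSC_COPY_VALUES, &ltog);
  ISLocalToGlobalMappingApply(ltog, 4, in, out);   /* out = {0, 1, 6, 7} */
  ISLocalToGlobalMappingDestroy(&ltog);
.ve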
570: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
571: `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
572: @*/
573: PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscInt indices[], PetscCopyMode mode, ISLocalToGlobalMapping *mapping)
574: {
575: PetscInt *in;
580: *mapping = NULL;
581: ISInitializePackage();
583: PetscHeaderCreate(*mapping, IS_LTOGM_CLASSID, "ISLocalToGlobalMapping", "Local to global mapping", "IS", comm, ISLocalToGlobalMappingDestroy, ISLocalToGlobalMappingView);
584: (*mapping)->n = n;
585: (*mapping)->bs = bs;
586: if (mode == PETSC_COPY_VALUES) {
587: PetscMalloc1(n, &in);
588: PetscArraycpy(in, indices, n);
589: (*mapping)->indices = in;
590: (*mapping)->dealloc_indices = PETSC_TRUE;
591: } else if (mode == PETSC_OWN_POINTER) {
592: (*mapping)->indices = (PetscInt *)indices;
593: (*mapping)->dealloc_indices = PETSC_TRUE;
594: } else if (mode == PETSC_USE_POINTER) {
595: (*mapping)->indices = (PetscInt *)indices;
596: } else SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid mode %d", mode);
597: return 0;
598: }
600: PetscFunctionList ISLocalToGlobalMappingList = NULL;
602: /*@
603: ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
605: Not collective
607: Input Parameter:
608: . mapping - mapping data structure
610: Level: advanced
612: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
613: `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
614: @*/
615: PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
616: {
617: char type[256];
618: ISLocalToGlobalMappingType defaulttype = "Not set";
619: PetscBool flg;
622: ISLocalToGlobalMappingRegisterAll();
623: PetscObjectOptionsBegin((PetscObject)mapping);
624: PetscOptionsFList("-islocaltoglobalmapping_type", "ISLocalToGlobalMapping method", "ISLocalToGlobalMappingSetType", ISLocalToGlobalMappingList, (char *)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype, type, 256, &flg);
625: if (flg) ISLocalToGlobalMappingSetType(mapping, type);
626: PetscOptionsEnd();
627: return 0;
628: }
630: /*@
631: ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
632: ordering and a global parallel ordering.
634: Not Collective
636: Input Parameter:
637: . mapping - mapping data structure
639: Level: advanced
641: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()`
642: @*/
643: PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
644: {
645: if (!*mapping) return 0;
647: if (--((PetscObject)(*mapping))->refct > 0) {
648: *mapping = NULL;
649: return 0;
650: }
651: if ((*mapping)->dealloc_indices) PetscFree((*mapping)->indices);
652: PetscFree((*mapping)->info_procs);
653: PetscFree((*mapping)->info_numprocs);
654: if ((*mapping)->info_indices) {
655: PetscInt i;
657: PetscFree(((*mapping)->info_indices)[0]);
658: for (i = 1; i < (*mapping)->info_nproc; i++) PetscFree(((*mapping)->info_indices)[i]);
659: PetscFree((*mapping)->info_indices);
660: }
661: if ((*mapping)->info_nodei) PetscFree(((*mapping)->info_nodei)[0]);
662: PetscFree2((*mapping)->info_nodec, (*mapping)->info_nodei);
663: if ((*mapping)->ops->destroy) (*(*mapping)->ops->destroy)(*mapping);
664: PetscHeaderDestroy(mapping);
665: *mapping = NULL;
666: return 0;
667: }
669: /*@
670: ISLocalToGlobalMappingApplyIS - Creates from an `IS` in the local numbering
671: a new index set using the global numbering defined in an `ISLocalToGlobalMapping`
672: context.
674: Collective
676: Input Parameters:
677: + mapping - mapping between local and global numbering
678: - is - index set in local numbering
680: Output Parameter:
681: . newis - index set in global numbering
683: Level: advanced
685: Note:
686: The output `IS` will have the same communicator as the input `IS`.
688: .seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
689: `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()`
690: @*/
691: PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping, IS is, IS *newis)
692: {
693: PetscInt n, *idxout;
694: const PetscInt *idxin;
700: ISGetLocalSize(is, &n);
701: ISGetIndices(is, &idxin);
702: PetscMalloc1(n, &idxout);
703: ISLocalToGlobalMappingApply(mapping, n, idxin, idxout);
704: ISRestoreIndices(is, &idxin);
705: ISCreateGeneral(PetscObjectComm((PetscObject)is), n, idxout, PETSC_OWN_POINTER, newis);
706: return 0;
707: }
709: /*@
710: ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
711: and converts them to the global numbering.
713: Not collective
715: Input Parameters:
716: + mapping - the local to global mapping context
717: . N - number of integers
718: - in - input indices in local numbering
720: Output Parameter:
721: . out - indices in global numbering
723: Level: advanced
725: Note:
726: The in and out array parameters may be identical.
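
Example:
A minimal sketch with illustrative numbers, applying the mapping in place; negative input indices are passed through unchanged
.vb
  ISLocalToGlobalMapping ltog;
  const PetscInt         globals[] = {20, 21, 35, 36};
  PetscInt               idx[]     = {2, -1, 0};

  ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1, 4, globals, PETSC_COPY_VALUES, &ltog);
  ISLocalToGlobalMappingApply(ltog, 3, idx, idx);   /* idx becomes {35, -1, 20} */
  ISLocalToGlobalMappingDestroy(&ltog);
.ve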
728: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
729: `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
730: `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
731: @*/
732: PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[])
733: {
734: PetscInt i, bs, Nmax;
737: bs = mapping->bs;
738: Nmax = bs * mapping->n;
739: if (bs == 1) {
740: const PetscInt *idx = mapping->indices;
741: for (i = 0; i < N; i++) {
742: if (in[i] < 0) {
743: out[i] = in[i];
744: continue;
745: }
747: out[i] = idx[in[i]];
748: }
749: } else {
750: const PetscInt *idx = mapping->indices;
751: for (i = 0; i < N; i++) {
752: if (in[i] < 0) {
753: out[i] = in[i];
754: continue;
755: }
757: out[i] = idx[in[i] / bs] * bs + (in[i] % bs);
758: }
759: }
760: return 0;
761: }
763: /*@
764: ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
766: Not collective
768: Input Parameters:
769: + mapping - the local to global mapping context
770: . N - number of integers
771: - in - input indices in local block numbering
773: Output Parameter:
774: . out - indices in global block numbering
776: Level: advanced
778: Note:
779: The in and out array parameters may be identical.
781: Example:
782: If the index values are {0,1,6,7} set with a call to `ISLocalToGlobalMappingCreate`(`PETSC_COMM_SELF`,2,2,{0,3}) then the mapping applied to 0
783: (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
785: .seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
786: `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
787: `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
788: @*/
789: PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[])
790: {
791: PetscInt i, Nmax;
792: const PetscInt *idx;
795: Nmax = mapping->n;
796: idx = mapping->indices;
797: for (i = 0; i < N; i++) {
798: if (in[i] < 0) {
799: out[i] = in[i];
800: continue;
801: }
803: out[i] = idx[in[i]];
804: }
805: return 0;
806: }
808: /*@
809: ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
810: specified with a global numbering.
812: Not collective
814: Input Parameters:
815: + mapping - mapping between local and global numbering
816: . type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
817: `IS_GTOLM_DROP` - drops the indices with no local value from the output list
818: . n - number of global indices to map
819: - idx - global indices to map
821: Output Parameters:
822: + nout - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
823: - idxout - local index of each global index, one must pass in an array long enough
824: to hold all the indices. You can call `ISGlobalToLocalMappingApply()` with
825: idxout == NULL to determine the required length (returned in nout)
826: and then allocate the required space and call `ISGlobalToLocalMappingApply()`
827: a second time to set the values.
829: Level: advanced
831: Notes:
832: Either nout or idxout may be NULL. idx and idxout may be identical.
834: For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
835: this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
836: Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
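
Example:
A minimal sketch of the two-pass pattern with `IS_GTOLM_DROP` (illustrative numbers; ltog is assumed to exist)
.vb
  ISLocalToGlobalMapping ltog;                      /* assumed to exist */
  const PetscInt         globals[] = {7, 8, 100};
  PetscInt               nout, *local;

  ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, 3, globals, &nout, NULL);  /* first pass: count */
  PetscMalloc1(nout, &local);
  ISGlobalToLocalMappingApply(ltog, IS_GTOLM_DROP, 3, globals, &nout, local); /* second pass: fill */
  /* ... use local[0 .. nout-1] ... */
  PetscFree(local);
.ve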
838: Developer Note:
839: The manual page states that idx and idxout may be identical but the calling
840: sequence declares idx as const so it cannot be the same as idxout.
842: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`,
843: `ISLocalToGlobalMappingDestroy()`
844: @*/
845: PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
846: {
848: if (!mapping->data) ISGlobalToLocalMappingSetUp(mapping);
849: PetscUseTypeMethod(mapping, globaltolocalmappingapply, type, n, idx, nout, idxout);
850: return 0;
851: }
853: /*@
854: ISGlobalToLocalMappingApplyIS - Creates from an `IS` in the global numbering
855: a new index set using the local numbering defined in an `ISLocalToGlobalMapping`
856: context.
858: Not collective
860: Input Parameters:
861: + mapping - mapping between local and global numbering
862: . type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
863: `IS_GTOLM_DROP` - drops the indices with no local value from the output list
864: - is - index set in global numbering
866: Output Parameters:
867: . newis - index set in local numbering
869: Level: advanced
871: Note:
872: The output `IS` will be sequential, as it encodes a purely local operation
874: .seealso: [](sec_scatter), `ISGlobalToLocalMapping`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
875: `ISLocalToGlobalMappingDestroy()`
876: @*/
877: PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, IS is, IS *newis)
878: {
879: PetscInt n, nout, *idxout;
880: const PetscInt *idxin;
886: ISGetLocalSize(is, &n);
887: ISGetIndices(is, &idxin);
888: if (type == IS_GTOLM_MASK) {
889: PetscMalloc1(n, &idxout);
890: } else {
891: ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, NULL);
892: PetscMalloc1(nout, &idxout);
893: }
894: ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, idxout);
895: ISRestoreIndices(is, &idxin);
896: ISCreateGeneral(PETSC_COMM_SELF, nout, idxout, PETSC_OWN_POINTER, newis);
897: return 0;
898: }
900: /*@
901: ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
902: specified with a block global numbering.
904: Not collective
906: Input Parameters:
907: + mapping - mapping between local and global numbering
908: . type - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
909: `IS_GTOLM_DROP` - drops the indices with no local value from the output list
910: . n - number of global indices to map
911: - idx - global indices to map
913: Output Parameters:
914: + nout - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
915: - idxout - local index of each global index, one must pass in an array long enough
916: to hold all the indices. You can call `ISGlobalToLocalMappingApplyBlock()` with
917: idxout == NULL to determine the required length (returned in nout)
918: and then allocate the required space and call `ISGlobalToLocalMappingApplyBlock()`
919: a second time to set the values.
921: Level: advanced
923: Notes:
924: Either nout or idxout may be NULL. idx and idxout may be identical.
926: For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
927: this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
928: Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
930: Developer Note:
931: The manual page states that idx and idxout may be identical but the calling
932: sequence declares idx as const so it cannot be the same as idxout.
934: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
935: `ISLocalToGlobalMappingDestroy()`
936: @*/
937: PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
938: {
940: if (!mapping->data) ISGlobalToLocalMappingSetUp(mapping);
941: PetscUseTypeMethod(mapping, globaltolocalmappingapplyblock, type, n, idx, nout, idxout);
942: return 0;
943: }
945: /*@C
946: ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
947: each index shared by more than one processor
949: Collective
951: Input Parameter:
952: . mapping - the mapping from local to global indexing
954: Output Parameters:
955: + nproc - number of processors that are connected to this one
956: . proc - neighboring processors
957: . numproc - number of indices for each subdomain (processor)
958: - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
960: Level: advanced
962: Fortran Usage:
963: .vb
964: ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping, PetscInt nproc, PetscInt numprocmax, ierr) followed by
965: ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping, PetscInt nproc, PetscInt procs[nproc], PetscInt numprocs[nproc],
966: PetscInt indices[nproc][numprocmax], ierr)
967: .ve
969: Fortran Note:
970: There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that procs[], numprocs[] and
971: indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
973: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
974: `ISLocalToGlobalMappingRestoreInfo()`
975: @*/
976: PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
977: {
979: if (mapping->info_cached) {
980: *nproc = mapping->info_nproc;
981: *procs = mapping->info_procs;
982: *numprocs = mapping->info_numprocs;
983: *indices = mapping->info_indices;
984: } else {
985: ISLocalToGlobalMappingGetBlockInfo_Private(mapping, nproc, procs, numprocs, indices);
986: }
987: return 0;
988: }
990: static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
991: {
992: PetscMPIInt size, rank, tag1, tag2, tag3, *len, *source, imdex;
993: PetscInt i, n = mapping->n, Ng, ng, max = 0, *lindices = mapping->indices;
994: PetscInt *nprocs, *owner, nsends, *sends, j, *starts, nmax, nrecvs, *recvs, proc;
995: PetscInt cnt, scale, *ownedsenders, *nownedsenders, rstart;
996: PetscInt node, nownedm, nt, *sends2, nsends2, *starts2, *lens2, *dest, nrecvs2, *starts3, *recvs2, k, *bprocs, *tmp;
997: PetscInt first_procs, first_numprocs, *first_indices;
998: MPI_Request *recv_waits, *send_waits;
999: MPI_Status recv_status, *send_status, *recv_statuses;
1000: MPI_Comm comm;
1001: PetscBool debug = PETSC_FALSE;
1003: PetscObjectGetComm((PetscObject)mapping, &comm);
1004: MPI_Comm_size(comm, &size);
1005: MPI_Comm_rank(comm, &rank);
1006: if (size == 1) {
1007: *nproc = 0;
1008: *procs = NULL;
1009: PetscNew(numprocs);
1010: (*numprocs)[0] = 0;
1011: PetscNew(indices);
1012: (*indices)[0] = NULL;
1013: /* save info for reuse */
1014: mapping->info_nproc = *nproc;
1015: mapping->info_procs = *procs;
1016: mapping->info_numprocs = *numprocs;
1017: mapping->info_indices = *indices;
1018: mapping->info_cached = PETSC_TRUE;
1019: return 0;
1020: }
1022: PetscOptionsGetBool(((PetscObject)mapping)->options, NULL, "-islocaltoglobalmappinggetinfo_debug", &debug, NULL);
1024: /*
1025: Notes on ISLocalToGlobalMappingGetBlockInfo
1027: globally owned node - the nodes that have been assigned to this processor in global
1028: numbering, just for this routine.
1030: nontrivial globally owned node - node assigned to this processor that is on a subdomain
1031: boundary (i.e. it has more than one local owner)
1033: locally owned node - node that exists on this processors subdomain
1035: nontrivial locally owned node - node that is not in the interior (i.e. has more than one
1036: local subdomain)
1037: */
1038: PetscObjectGetNewTag((PetscObject)mapping, &tag1);
1039: PetscObjectGetNewTag((PetscObject)mapping, &tag2);
1040: PetscObjectGetNewTag((PetscObject)mapping, &tag3);
1042: for (i = 0; i < n; i++) {
1043: if (lindices[i] > max) max = lindices[i];
1044: }
1045: MPIU_Allreduce(&max, &Ng, 1, MPIU_INT, MPI_MAX, comm);
1046: Ng++;
1047: MPI_Comm_size(comm, &size);
1048: MPI_Comm_rank(comm, &rank);
1049: scale = Ng / size + 1;
1050: ng = scale;
1051: if (rank == size - 1) ng = Ng - scale * (size - 1);
1052: ng = PetscMax(1, ng);
1053: rstart = scale * rank;
1055: /* determine ownership ranges of global indices */
1056: PetscMalloc1(2 * size, &nprocs);
1057: PetscArrayzero(nprocs, 2 * size);
1059: /* determine owners of each local node */
1060: PetscMalloc1(n, &owner);
1061: for (i = 0; i < n; i++) {
1062: proc = lindices[i] / scale; /* processor that globally owns this index */
1063: nprocs[2 * proc + 1] = 1; /* processor globally owns at least one of ours */
1064: owner[i] = proc;
1065: nprocs[2 * proc]++; /* count of how many that processor globally owns of ours */
1066: }
1067: nsends = 0;
1068: for (i = 0; i < size; i++) nsends += nprocs[2 * i + 1];
1069: PetscInfo(mapping, "Number of global owners for my local data %" PetscInt_FMT "\n", nsends);
1071: /* inform other processors of number of messages and max length*/
1072: PetscMaxSum(comm, nprocs, &nmax, &nrecvs);
1073: PetscInfo(mapping, "Number of local owners for my global data %" PetscInt_FMT "\n", nrecvs);
1075: /* post receives for owned rows */
1076: PetscMalloc1((2 * nrecvs + 1) * (nmax + 1), &recvs);
1077: PetscMalloc1(nrecvs + 1, &recv_waits);
1078: for (i = 0; i < nrecvs; i++) MPI_Irecv(recvs + 2 * nmax * i, 2 * nmax, MPIU_INT, MPI_ANY_SOURCE, tag1, comm, recv_waits + i);
1080: /* pack messages containing lists of local nodes to owners */
1081: PetscMalloc1(2 * n + 1, &sends);
1082: PetscMalloc1(size + 1, &starts);
1083: starts[0] = 0;
1084: for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
1085: for (i = 0; i < n; i++) {
1086: sends[starts[owner[i]]++] = lindices[i];
1087: sends[starts[owner[i]]++] = i;
1088: }
1089: PetscFree(owner);
1090: starts[0] = 0;
1091: for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
1093: /* send the messages */
1094: PetscMalloc1(nsends + 1, &send_waits);
1095: PetscMalloc1(nsends + 1, &dest);
1096: cnt = 0;
1097: for (i = 0; i < size; i++) {
1098: if (nprocs[2 * i]) {
1099: MPI_Isend(sends + starts[i], 2 * nprocs[2 * i], MPIU_INT, i, tag1, comm, send_waits + cnt);
1100: dest[cnt] = i;
1101: cnt++;
1102: }
1103: }
1104: PetscFree(starts);
1106: /* wait on receives */
1107: PetscMalloc1(nrecvs + 1, &source);
1108: PetscMalloc1(nrecvs + 1, &len);
1109: cnt = nrecvs;
1110: PetscCalloc1(ng + 1, &nownedsenders);
1111: while (cnt) {
1112: MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status);
1113: /* unpack receives into our local space */
1114: MPI_Get_count(&recv_status, MPIU_INT, &len[imdex]);
1115: source[imdex] = recv_status.MPI_SOURCE;
1116: len[imdex] = len[imdex] / 2;
1117: /* count how many local owners for each of my global owned indices */
1118: for (i = 0; i < len[imdex]; i++) nownedsenders[recvs[2 * imdex * nmax + 2 * i] - rstart]++;
1119: cnt--;
1120: }
1121: PetscFree(recv_waits);
1123: /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
1124: nownedm = 0;
1125: for (i = 0; i < ng; i++) {
1126: if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
1127: }
1129: /* create single array to contain rank of all local owners of each globally owned index */
1130: PetscMalloc1(nownedm + 1, &ownedsenders);
1131: PetscMalloc1(ng + 1, &starts);
1132: starts[0] = 0;
1133: for (i = 1; i < ng; i++) {
1134: if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
1135: else starts[i] = starts[i - 1];
1136: }
1138: /* for each nontrivial globally owned node list all arriving processors */
1139: for (i = 0; i < nrecvs; i++) {
1140: for (j = 0; j < len[i]; j++) {
1141: node = recvs[2 * i * nmax + 2 * j] - rstart;
1142: if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
1143: }
1144: }
1146: if (debug) { /* ----------------------------------- */
1147: starts[0] = 0;
1148: for (i = 1; i < ng; i++) {
1149: if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
1150: else starts[i] = starts[i - 1];
1151: }
1152: for (i = 0; i < ng; i++) {
1153: if (nownedsenders[i] > 1) {
1154: PetscSynchronizedPrintf(comm, "[%d] global node %" PetscInt_FMT " local owner processors: ", rank, i + rstart);
1155: for (j = 0; j < nownedsenders[i]; j++) PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", ownedsenders[starts[i] + j]);
1156: PetscSynchronizedPrintf(comm, "\n");
1157: }
1158: }
1159: PetscSynchronizedFlush(comm, PETSC_STDOUT);
1160: } /* ----------------------------------- */
1162: /* wait on original sends */
1163: if (nsends) {
1164: PetscMalloc1(nsends, &send_status);
1165: MPI_Waitall(nsends, send_waits, send_status);
1166: PetscFree(send_status);
1167: }
1168: PetscFree(send_waits);
1169: PetscFree(sends);
1170: PetscFree(nprocs);
1172: /* pack messages to send back to local owners */
1173: starts[0] = 0;
1174: for (i = 1; i < ng; i++) {
1175: if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
1176: else starts[i] = starts[i - 1];
1177: }
1178: nsends2 = nrecvs;
1179: PetscMalloc1(nsends2 + 1, &nprocs); /* length of each message */
1180: for (i = 0; i < nrecvs; i++) {
1181: nprocs[i] = 1;
1182: for (j = 0; j < len[i]; j++) {
1183: node = recvs[2 * i * nmax + 2 * j] - rstart;
1184: if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
1185: }
1186: }
1187: nt = 0;
1188: for (i = 0; i < nsends2; i++) nt += nprocs[i];
1190: PetscMalloc1(nt + 1, &sends2);
1191: PetscMalloc1(nsends2 + 1, &starts2);
1193: starts2[0] = 0;
1194: for (i = 1; i < nsends2; i++) starts2[i] = starts2[i - 1] + nprocs[i - 1];
1195: /*
1196: Each message is 1 + nprocs[i] long, and consists of
1197: (0) the number of nodes being sent back
1198: (1) the local node number,
1199: (2) the number of processors sharing it,
1200: (3) the processors sharing it
1201: */
1202: for (i = 0; i < nsends2; i++) {
1203: cnt = 1;
1204: sends2[starts2[i]] = 0;
1205: for (j = 0; j < len[i]; j++) {
1206: node = recvs[2 * i * nmax + 2 * j] - rstart;
1207: if (nownedsenders[node] > 1) {
1208: sends2[starts2[i]]++;
1209: sends2[starts2[i] + cnt++] = recvs[2 * i * nmax + 2 * j + 1];
1210: sends2[starts2[i] + cnt++] = nownedsenders[node];
1211: PetscArraycpy(&sends2[starts2[i] + cnt], &ownedsenders[starts[node]], nownedsenders[node]);
1212: cnt += nownedsenders[node];
1213: }
1214: }
1215: }
1217: /* receive the message lengths */
1218: nrecvs2 = nsends;
1219: PetscMalloc1(nrecvs2 + 1, &lens2);
1220: PetscMalloc1(nrecvs2 + 1, &starts3);
1221: PetscMalloc1(nrecvs2 + 1, &recv_waits);
1222: for (i = 0; i < nrecvs2; i++) MPI_Irecv(&lens2[i], 1, MPIU_INT, dest[i], tag2, comm, recv_waits + i);
1224: /* send the message lengths */
1225: for (i = 0; i < nsends2; i++) MPI_Send(&nprocs[i], 1, MPIU_INT, source[i], tag2, comm);
1227: /* wait on receives of lens */
1228: if (nrecvs2) {
1229: PetscMalloc1(nrecvs2, &recv_statuses);
1230: MPI_Waitall(nrecvs2, recv_waits, recv_statuses);
1231: PetscFree(recv_statuses);
1232: }
1233: PetscFree(recv_waits);
1235: starts3[0] = 0;
1236: nt = 0;
1237: for (i = 0; i < nrecvs2 - 1; i++) {
1238: starts3[i + 1] = starts3[i] + lens2[i];
1239: nt += lens2[i];
1240: }
1241: if (nrecvs2) nt += lens2[nrecvs2 - 1];
1243: PetscMalloc1(nt + 1, &recvs2);
1244: PetscMalloc1(nrecvs2 + 1, &recv_waits);
1245: for (i = 0; i < nrecvs2; i++) MPI_Irecv(recvs2 + starts3[i], lens2[i], MPIU_INT, dest[i], tag3, comm, recv_waits + i);
1247: /* send the messages */
1248: PetscMalloc1(nsends2 + 1, &send_waits);
1249: for (i = 0; i < nsends2; i++) MPI_Isend(sends2 + starts2[i], nprocs[i], MPIU_INT, source[i], tag3, comm, send_waits + i);
1251: /* wait on receives */
1252: if (nrecvs2) {
1253: PetscMalloc1(nrecvs2, &recv_statuses);
1254: MPI_Waitall(nrecvs2, recv_waits, recv_statuses);
1255: PetscFree(recv_statuses);
1256: }
1257: PetscFree(recv_waits);
1258: PetscFree(nprocs);
1260: if (debug) { /* ----------------------------------- */
1261: cnt = 0;
1262: for (i = 0; i < nrecvs2; i++) {
1263: nt = recvs2[cnt++];
1264: for (j = 0; j < nt; j++) {
1265: PetscSynchronizedPrintf(comm, "[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ", rank, recvs2[cnt], recvs2[cnt + 1]);
1266: for (k = 0; k < recvs2[cnt + 1]; k++) PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", recvs2[cnt + 2 + k]);
1267: cnt += 2 + recvs2[cnt + 1];
1268: PetscSynchronizedPrintf(comm, "\n");
1269: }
1270: }
1271: PetscSynchronizedFlush(comm, PETSC_STDOUT);
1272: } /* ----------------------------------- */
1274: /* count the number of subdomains for each local node */
1275: PetscCalloc1(size, &nprocs);
1276: cnt = 0;
1277: for (i = 0; i < nrecvs2; i++) {
1278: nt = recvs2[cnt++];
1279: for (j = 0; j < nt; j++) {
1280: for (k = 0; k < recvs2[cnt + 1]; k++) nprocs[recvs2[cnt + 2 + k]]++;
1281: cnt += 2 + recvs2[cnt + 1];
1282: }
1283: }
1284: nt = 0;
1285: for (i = 0; i < size; i++) nt += (nprocs[i] > 0);
1286: *nproc = nt;
1287: PetscMalloc1(nt + 1, procs);
1288: PetscMalloc1(nt + 1, numprocs);
1289: PetscMalloc1(nt + 1, indices);
1290: for (i = 0; i < nt + 1; i++) (*indices)[i] = NULL;
1291: PetscMalloc1(size, &bprocs);
1292: cnt = 0;
1293: for (i = 0; i < size; i++) {
1294: if (nprocs[i] > 0) {
1295: bprocs[i] = cnt;
1296: (*procs)[cnt] = i;
1297: (*numprocs)[cnt] = nprocs[i];
1298: PetscMalloc1(nprocs[i], &(*indices)[cnt]);
1299: cnt++;
1300: }
1301: }
1303: /* make the list of subdomains for each nontrivial local node */
1304: PetscArrayzero(*numprocs, nt);
1305: cnt = 0;
1306: for (i = 0; i < nrecvs2; i++) {
1307: nt = recvs2[cnt++];
1308: for (j = 0; j < nt; j++) {
1309: for (k = 0; k < recvs2[cnt + 1]; k++) (*indices)[bprocs[recvs2[cnt + 2 + k]]][(*numprocs)[bprocs[recvs2[cnt + 2 + k]]]++] = recvs2[cnt];
1310: cnt += 2 + recvs2[cnt + 1];
1311: }
1312: }
1313: PetscFree(bprocs);
1314: PetscFree(recvs2);
1316: /* sort the node indexing by their global numbers */
1317: nt = *nproc;
1318: for (i = 0; i < nt; i++) {
1319: PetscMalloc1((*numprocs)[i], &tmp);
1320: for (j = 0; j < (*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1321: PetscSortIntWithArray((*numprocs)[i], tmp, (*indices)[i]);
1322: PetscFree(tmp);
1323: }
1325: if (debug) { /* ----------------------------------- */
1326: nt = *nproc;
1327: for (i = 0; i < nt; i++) {
1328: PetscSynchronizedPrintf(comm, "[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ", rank, (*procs)[i], (*numprocs)[i]);
1329: for (j = 0; j < (*numprocs)[i]; j++) PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", (*indices)[i][j]);
1330: PetscSynchronizedPrintf(comm, "\n");
1331: }
1332: PetscSynchronizedFlush(comm, PETSC_STDOUT);
1333: } /* ----------------------------------- */
1335: /* wait on sends */
1336: if (nsends2) {
1337: PetscMalloc1(nsends2, &send_status);
1338: MPI_Waitall(nsends2, send_waits, send_status);
1339: PetscFree(send_status);
1340: }
1342: PetscFree(starts3);
1343: PetscFree(dest);
1344: PetscFree(send_waits);
1346: PetscFree(nownedsenders);
1347: PetscFree(ownedsenders);
1348: PetscFree(starts);
1349: PetscFree(starts2);
1350: PetscFree(lens2);
1352: PetscFree(source);
1353: PetscFree(len);
1354: PetscFree(recvs);
1355: PetscFree(nprocs);
1356: PetscFree(sends2);
1358: /* put the information about myself as the first entry in the list */
1359: first_procs = (*procs)[0];
1360: first_numprocs = (*numprocs)[0];
1361: first_indices = (*indices)[0];
1362: for (i = 0; i < *nproc; i++) {
1363: if ((*procs)[i] == rank) {
1364: (*procs)[0] = (*procs)[i];
1365: (*numprocs)[0] = (*numprocs)[i];
1366: (*indices)[0] = (*indices)[i];
1367: (*procs)[i] = first_procs;
1368: (*numprocs)[i] = first_numprocs;
1369: (*indices)[i] = first_indices;
1370: break;
1371: }
1372: }
1374: /* save info for reuse */
1375: mapping->info_nproc = *nproc;
1376: mapping->info_procs = *procs;
1377: mapping->info_numprocs = *numprocs;
1378: mapping->info_indices = *indices;
1379: mapping->info_cached = PETSC_TRUE;
1380: return 0;
1381: }
1383: /*@C
1384: ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetBlockInfo()`
1386: Collective
1388: Input Parameter:
1389: . mapping - the mapping from local to global indexing
1391: Output Parameters:
1392: + nproc - number of processors that are connected to this one
1393: . proc - neighboring processors
1394: . numproc - number of indices for each processor
1395: - indices - indices of local nodes shared with neighbor (sorted by global numbering)
1397: Level: advanced
1399: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1400: `ISLocalToGlobalMappingGetInfo()`
1401: @*/
1402: PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
1403: {
1405: if (mapping->info_free) {
1406: PetscFree(*numprocs);
1407: if (*indices) {
1408: PetscInt i;
1410: PetscFree((*indices)[0]);
1411: for (i = 1; i < *nproc; i++) PetscFree((*indices)[i]);
1412: PetscFree(*indices);
1413: }
1414: }
1415: *nproc = 0;
1416: *procs = NULL;
1417: *numprocs = NULL;
1418: *indices = NULL;
1419: return 0;
1420: }
1422: /*@C
1423: ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1424: each index shared by more than one processor
1426: Collective
1428: Input Parameter:
1429: . mapping - the mapping from local to global indexing
1431: Output Parameters:
1432: + nproc - number of processors that are connected to this one
1433: . proc - neighboring processors
1434: . numproc - number of indices for each subdomain (processor)
1435: - indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1437: Level: advanced
1439: Note:
1440: The user needs to call `ISLocalToGlobalMappingRestoreInfo()` when the data is no longer needed.
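
Example:
A minimal sketch printing, for each neighboring process, the local indices shared with it (ltog is assumed to exist)
.vb
  ISLocalToGlobalMapping ltog;                      /* assumed to exist */
  PetscInt               nproc, *procs, *numprocs, **indices, i, j;

  ISLocalToGlobalMappingGetInfo(ltog, &nproc, &procs, &numprocs, &indices);
  for (i = 0; i < nproc; i++) {
    PetscPrintf(PETSC_COMM_SELF, "neighbor %" PetscInt_FMT ":", procs[i]);
    for (j = 0; j < numprocs[i]; j++) PetscPrintf(PETSC_COMM_SELF, " %" PetscInt_FMT, indices[i][j]);
    PetscPrintf(PETSC_COMM_SELF, "\n");
  }
  ISLocalToGlobalMappingRestoreInfo(ltog, &nproc, &procs, &numprocs, &indices);
.ve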
1442: Fortran Usage:
1443: .vb
1444: ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping, PetscInt nproc, PetscInt numprocmax, ierr) followed by
1445: ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping, PetscInt nproc, PetscInt procs[nproc], PetscInt numprocs[nproc],
1446: PetscInt indices[nproc][numprocmax], ierr)
1447: .ve
1449: Fortran Note:
1450: There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that procs[], numprocs[] and
1451: indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1453: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1454: `ISLocalToGlobalMappingRestoreInfo()`
1455: @*/
1456: PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
1457: {
1458: PetscInt **bindices = NULL, *bnumprocs = NULL, bs, i, j, k;
1461: bs = mapping->bs;
1462: ISLocalToGlobalMappingGetBlockInfo(mapping, nproc, procs, &bnumprocs, &bindices);
1463: if (bs > 1) { /* we need to expand the cached info */
1464: PetscCalloc1(*nproc, &*indices);
1465: PetscCalloc1(*nproc, &*numprocs);
1466: for (i = 0; i < *nproc; i++) {
1467: PetscMalloc1(bs * bnumprocs[i], &(*indices)[i]);
1468: for (j = 0; j < bnumprocs[i]; j++) {
1469: for (k = 0; k < bs; k++) (*indices)[i][j * bs + k] = bs * bindices[i][j] + k;
1470: }
1471: (*numprocs)[i] = bnumprocs[i] * bs;
1472: }
1473: mapping->info_free = PETSC_TRUE;
1474: } else {
1475: *numprocs = bnumprocs;
1476: *indices = bindices;
1477: }
1478: return 0;
1479: }
1481: /*@C
1482: ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetInfo()`
1484: Collective
1486: Input Parameter:
1487: . mapping - the mapping from local to global indexing
1489: Output Parameters:
1490: + nproc - number of processors that are connected to this one
1491: . proc - neighboring processors
1492: . numproc - number of indices for each processor
1493: - indices - indices of local nodes shared with neighbor (sorted by global numbering)
1495: Level: advanced
1497: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1498: `ISLocalToGlobalMappingGetInfo()`
1499: @*/
1500: PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
1501: {
1502: ISLocalToGlobalMappingRestoreBlockInfo(mapping, nproc, procs, numprocs, indices);
1503: return 0;
1504: }
1506: /*@C
1507: ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each MPI rank
1509: Collective
1511: Input Parameter:
1512: . mapping - the mapping from local to global indexing
1514: Output Parameters:
1515: + nnodes - number of local nodes (same as returned by `ISLocalToGlobalMappingGetSize()`)
1516: . count - number of neighboring processors per node
1517: - indices - indices of processes sharing the node (sorted)
1519: Level: advanced
1521: Note:
1522: The user needs to call `ISLocalToGlobalMappingRestoreNodeInfo()` when the data is no longer needed.
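
Example:
A minimal sketch; for each local node the count of sharing processes and their (sorted) ranks are inspected (ltog is assumed to exist)
.vb
  ISLocalToGlobalMapping ltog;                      /* assumed to exist */
  PetscInt               nnodes, *count, **ranks, i;

  ISLocalToGlobalMappingGetNodeInfo(ltog, &nnodes, &count, &ranks);
  for (i = 0; i < nnodes; i++) {
    /* node i is shared by count[i] processes, listed in ranks[i][0 .. count[i]-1] */
  }
  ISLocalToGlobalMappingRestoreNodeInfo(ltog, &nnodes, &count, &ranks);
.ve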
1524: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1525: `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
1526: @*/
1527: PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
1528: {
1529: PetscInt n;
1532: ISLocalToGlobalMappingGetSize(mapping, &n);
1533: if (!mapping->info_nodec) {
1534: PetscInt i, m, n_neigh, *neigh, *n_shared, **shared;
1536: PetscMalloc2(n + 1, &mapping->info_nodec, n, &mapping->info_nodei);
1537: ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared);
1538: for (i = 0; i < n; i++) mapping->info_nodec[i] = 1;
1539: m = n;
1540: mapping->info_nodec[n] = 0;
1541: for (i = 1; i < n_neigh; i++) {
1542: PetscInt j;
1544: m += n_shared[i];
1545: for (j = 0; j < n_shared[i]; j++) mapping->info_nodec[shared[i][j]] += 1;
1546: }
1547: if (n) PetscMalloc1(m, &mapping->info_nodei[0]);
1548: for (i = 1; i < n; i++) mapping->info_nodei[i] = mapping->info_nodei[i - 1] + mapping->info_nodec[i - 1];
1549: PetscArrayzero(mapping->info_nodec, n);
1550: for (i = 0; i < n; i++) {
1551: mapping->info_nodec[i] = 1;
1552: mapping->info_nodei[i][0] = neigh[0];
1553: }
1554: for (i = 1; i < n_neigh; i++) {
1555: PetscInt j;
1557: for (j = 0; j < n_shared[i]; j++) {
1558: PetscInt k = shared[i][j];
1560: mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
1561: mapping->info_nodec[k] += 1;
1562: }
1563: }
1564: for (i = 0; i < n; i++) PetscSortRemoveDupsInt(&mapping->info_nodec[i], mapping->info_nodei[i]);
1565: ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared);
1566: }
1567: if (nnodes) *nnodes = n;
1568: if (count) *count = mapping->info_nodec;
1569: if (indices) *indices = mapping->info_nodei;
1570: return 0;
1571: }
1573: /*@C
1574: ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetNodeInfo()`
1576: Collective
1578: Input Parameter:
1579: . mapping - the mapping from local to global indexing
1581: Output Parameters:
1582: + nnodes - number of local nodes
1583: . count - number of neighboring processors per node
1584: - indices - indices of processes sharing the node (sorted)
1586: Level: advanced
1588: .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1589: `ISLocalToGlobalMappingGetInfo()`
1590: @*/
1591: PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
1592: {
1594: if (nnodes) *nnodes = 0;
1595: if (count) *count = NULL;
1596: if (indices) *indices = NULL;
1597: return 0;
1598: }
1600: /*@C
1601: ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1603: Not Collective
1605: Input Parameter:
1606: . ltog - local to global mapping
1608: Output Parameter:
1609: . array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`
1611: Level: advanced
1613: Note:
1614: `ISLocalToGlobalMappingGetSize()` returns the length of this array
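
Example:
A minimal sketch (ltog is assumed to exist)
.vb
  ISLocalToGlobalMapping ltog;                      /* assumed to exist */
  const PetscInt        *globals;
  PetscInt               n;

  ISLocalToGlobalMappingGetSize(ltog, &n);
  ISLocalToGlobalMappingGetIndices(ltog, &globals);
  /* globals[0 .. n-1] holds the global index of each local point */
  ISLocalToGlobalMappingRestoreIndices(ltog, &globals);
.ve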
1616: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1617: @*/
1618: PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1619: {
1622: if (ltog->bs == 1) {
1623: *array = ltog->indices;
1624: } else {
1625: PetscInt *jj, k, i, j, n = ltog->n, bs = ltog->bs;
1626: const PetscInt *ii;
1628: PetscMalloc1(bs * n, &jj);
1629: *array = jj;
1630: k = 0;
1631: ii = ltog->indices;
1632: for (i = 0; i < n; i++)
1633: for (j = 0; j < bs; j++) jj[k++] = bs * ii[i] + j;
1634: }
1635: return 0;
1636: }
1638: /*@C
1639: ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with `ISLocalToGlobalMappingGetIndices()`
1641: Not Collective
1643: Input Parameters:
1644: + ltog - local to global mapping
1645: - array - array of indices
1647: Level: advanced
1649: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1650: @*/
1651: PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1652: {
1657: if (ltog->bs > 1) PetscFree(*(void **)array);
1658: return 0;
1659: }
1661: /*@C
1662: ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1664: Not Collective
1666: Input Parameter:
1667: . ltog - local to global mapping
1669: Output Parameter:
1670: . array - array of indices
1672: Level: advanced
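
   Sample usage:
   A brief sketch, assuming `ltog` is an existing mapping (error checking omitted); the array has one entry per local block, i.e. `ISLocalToGlobalMappingGetSize()` divided by the block size:
.vb
   const PetscInt *bidx;
   PetscInt        len, bs, i;

   ISLocalToGlobalMappingGetSize(ltog, &len);
   ISLocalToGlobalMappingGetBlockSize(ltog, &bs);
   ISLocalToGlobalMappingGetBlockIndices(ltog, &bidx);
   for (i = 0; i < len / bs; i++) {
     /* bidx[i] is the global block index of local block i */
   }
   ISLocalToGlobalMappingRestoreBlockIndices(ltog, &bidx);
.ve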
1674: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1675: @*/
1676: PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1677: {
1680: *array = ltog->indices;
1681: return 0;
1682: }
1684: /*@C
1685: ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with `ISLocalToGlobalMappingGetBlockIndices()`
1687: Not Collective
1689: Input Parameters:
1690: + ltog - local to global mapping
1691: - array - array of indices
1693: Level: advanced
1695: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1696: @*/
1697: PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1698: {
1702: *array = NULL;
1703: return 0;
1704: }
1706: /*@C
1707: ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1709: Not Collective
1711: Input Parameters:
1712: + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1713: . n - number of mappings to concatenate
1714: - ltogs - local to global mappings
1716: Output Parameter:
1717: . ltogcat - new mapping
1719: Level: advanced
1721: Note:
1722: This currently always returns a mapping with a block size of 1
1724: Developer Note:
1725: If all the input mappings have the same block size, we could easily handle that as a special case
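
   Sample usage:
   A sketch concatenating two hypothetical existing mappings, `ltogA` and `ltogB` (error checking omitted):
.vb
   ISLocalToGlobalMapping ltogs[2] = {ltogA, ltogB}, ltogcat;

   ISLocalToGlobalMappingConcatenate(PETSC_COMM_WORLD, 2, ltogs, &ltogcat);
   /* ltogcat has block size 1 and local size equal to the sum of the two input sizes */
   ISLocalToGlobalMappingDestroy(&ltogcat);
.ve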
1727: .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()`
1728: @*/
1729: PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm, PetscInt n, const ISLocalToGlobalMapping ltogs[], ISLocalToGlobalMapping *ltogcat)
1730: {
1731: PetscInt i, cnt, m, *idx;
1737: for (cnt = 0, i = 0; i < n; i++) {
1738: ISLocalToGlobalMappingGetSize(ltogs[i], &m);
1739: cnt += m;
1740: }
1741: PetscMalloc1(cnt, &idx);
1742: for (cnt = 0, i = 0; i < n; i++) {
1743: const PetscInt *subidx;
1744: ISLocalToGlobalMappingGetSize(ltogs[i], &m);
1745: ISLocalToGlobalMappingGetIndices(ltogs[i], &subidx);
1746: PetscArraycpy(&idx[cnt], subidx, m);
1747: ISLocalToGlobalMappingRestoreIndices(ltogs[i], &subidx);
1748: cnt += m;
1749: }
1750: ISLocalToGlobalMappingCreate(comm, 1, cnt, idx, PETSC_OWN_POINTER, ltogcat);
1751: return 0;
1752: }
1754: /*MC
1755: ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is
1756: used, this is appropriate only for small and moderate size problems.
1758: Options Database Keys:
1759: . -islocaltoglobalmapping_type basic - select this method
1761: Level: beginner
1763: Developer Note:
1764: This stores all the mapping information on each MPI rank.
1766: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
1767: M*/
1768: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1769: {
1770: ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Basic;
1771: ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Basic;
1772: ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1773: ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Basic;
1774: return 0;
1775: }
1777: /*MC
1778: ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is
1779: used, this is appropriate for large problems where memory usage is a concern.
1781: Options Database Keys:
1782: . -islocaltoglobalmapping_type hash - select this method
1784: Level: beginner
1786: Note:
1787: This is selected automatically for large problems if the user does not set the type.
1789: .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGBASIC`
1790: M*/
1791: PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1792: {
1793: ltog->ops->globaltolocalmappingapply = ISGlobalToLocalMappingApply_Hash;
1794: ltog->ops->globaltolocalmappingsetup = ISGlobalToLocalMappingSetUp_Hash;
1795: ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1796: ltog->ops->destroy = ISLocalToGlobalMappingDestroy_Hash;
1797: return 0;
1798: }
1800: /*@C
1801: ISLocalToGlobalMappingRegister - Registers a method for applying a global to local mapping with an `ISLocalToGlobalMapping`
1803: Not Collective
1805: Input Parameters:
1806: + sname - name of a new method
1807: - function - routine to create method context
1809: Sample usage:
1810: .vb
1811: ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1812: .ve
1814: Then, your mapping can be chosen with the procedural interface via
1815: $ ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1816: or at runtime via the option
1817: $ -islocaltoglobalmapping_type my_mapper
1819: Level: advanced
1821: Note:
1822: `ISLocalToGlobalMappingRegister()` may be called multiple times to add several user-defined mappings.
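
   The creation routine itself is typically small; a sketch with hypothetical routine names (modeled on the built-in creation functions later in this file) is
.vb
   static PetscErrorCode MyCreate(ISLocalToGlobalMapping ltog)
   {
     ltog->ops->globaltolocalmappingapply      = MyGlobalToLocalApply;      /* hypothetical */
     ltog->ops->globaltolocalmappingsetup      = MyGlobalToLocalSetUp;      /* hypothetical */
     ltog->ops->globaltolocalmappingapplyblock = MyGlobalToLocalApplyBlock; /* hypothetical */
     ltog->ops->destroy                        = MyDestroy;                 /* hypothetical */
     return 0;
   }
.ve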
1824: .seealso: [](sec_scatter), `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`,
1825: `ISLOCALTOGLOBALMAPPINGHASH`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`
1826: @*/
1827: PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode (*function)(ISLocalToGlobalMapping))
1828: {
1829: ISInitializePackage();
1830: PetscFunctionListAdd(&ISLocalToGlobalMappingList, sname, function);
1831: return 0;
1832: }
1834: /*@C
1835: ISLocalToGlobalMappingSetType - Sets the implementation type `ISLocalToGlobalMapping` will use
1837: Logically Collective
1839: Input Parameters:
1840: + ltog - the `ISLocalToGlobalMapping` object
1841: - type - a known method
1843: Options Database Key:
1844: . -islocaltoglobalmapping_type <method> - Sets the method; use -help for a list of available methods (for instance, basic or hash)
1846: Level: intermediate
1848: Notes:
1849: See "petsc/include/petscis.h" for available methods
1851: Normally, it is best to use the `ISLocalToGlobalMappingSetFromOptions()` command and
1852: then set the `ISLocalToGlobalMappingType` from the options database rather than by using
1853: this routine.
1855: Developer Note:
1856: `ISLocalToGlobalMappingRegister()` is used to add new types to `ISLocalToGlobalMappingList` from which they
1857: are accessed by `ISLocalToGlobalMappingSetType()`.
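
   Sample usage:
   A minimal sketch forcing the hash implementation on an existing mapping `ltog`, instead of relying on the automatic choice (error checking omitted):
.vb
   ISLocalToGlobalMappingSetType(ltog, ISLOCALTOGLOBALMAPPINGHASH);
.ve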
1859: .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()`
1860: @*/
1861: PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1862: {
1863: PetscBool match;
1864: PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;
1869: PetscObjectTypeCompare((PetscObject)ltog, type, &match);
1870: if (match) return 0;
1872: /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
1873: if (type) {
1874: PetscFunctionListFind(ISLocalToGlobalMappingList, type, &r);
1876: }
1877: /* Destroy the previous private LTOG context */
1878: PetscTryTypeMethod(ltog, destroy);
1879: ltog->ops->destroy = NULL;
1881: PetscObjectChangeTypeName((PetscObject)ltog, type);
1882: if (r) (*r)(ltog);
1883: return 0;
1884: }
1886: /*@C
1887: ISLocalToGlobalMappingGetType - Get the type of the `ISLocalToGlobalMapping`
1889: Not Collective
1891: Input Parameter:
1892: . ltog - the `ISLocalToGlobalMapping` object
1894: Output Parameter:
1895: . type - the type
1897: Level: intermediate
1899: .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`
1900: @*/
1901: PetscErrorCode ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
1902: {
1905: *type = ((PetscObject)ltog)->type_name;
1906: return 0;
1907: }
1909: PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1911: /*@C
1912: ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the `IS` package.
1914: Not Collective
1916: Level: advanced
1918: .seealso: [](sec_scatter), `ISRegister()`, `ISLocalToGlobalMappingRegister()`
1919: @*/
1920: PetscErrorCode ISLocalToGlobalMappingRegisterAll(void)
1921: {
1922: if (ISLocalToGlobalMappingRegisterAllCalled) return 0;
1923: ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1924: ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);
1925: ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);
1926: return 0;
1927: }