Actual source code: psplit.c
#include <petscsys.h>
/*@
  PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
  (or global) length via a simple formula. Splits so that each process's local size
  is divisible by the block size.

  Collective (if N is `PETSC_DECIDE`)

  Input Parameters:
+ comm - MPI communicator that shares the object being divided
. bs - block size
. n - local length (or `PETSC_DECIDE` to have it set)
- N - global length (or `PETSC_DECIDE`)

  Level: developer

  Notes:
  n and N cannot both be `PETSC_DECIDE`

  If one processor calls this with N of `PETSC_DECIDE` then all processors
  must; otherwise the program will hang.

.seealso: `PetscSplitOwnership()`, `PetscSplitOwnershipEqual()`
@*/
PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm, PetscInt bs, PetscInt *n, PetscInt *N)
{
  PetscMPIInt size, rank;

  /* enforce the restriction stated in the notes above */
  PetscCheck(*N != PETSC_DECIDE || *n != PETSC_DECIDE, comm, PETSC_ERR_ARG_WRONGSTATE, "Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    /* the supplied local size must already be block aligned before it is summed */
    PetscCheck(*n % bs == 0, comm, PETSC_ERR_ARG_WRONG, "Local size must be divisible by the block size");
    MPIU_Allreduce(n, N, 1, MPIU_INT, MPI_SUM, comm);
  } else if (*n == PETSC_DECIDE) {
    PetscInt Nbs = *N / bs; /* number of blocks to distribute */
    MPI_Comm_size(comm, &size);
    MPI_Comm_rank(comm, &rank);
    /* each rank gets Nbs/size blocks; the first Nbs%size ranks get one extra block */
    *n = bs * (Nbs / size + ((Nbs % size) > rank));
  }
  return 0;
}
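
/*
   Illustrative usage sketch, not part of the original psplit.c: it shows one way a
   caller might let PetscSplitOwnershipBlock() choose a block-aligned local size. The
   helper name ExampleBlockSplit and the sizes N = 20, bs = 2 are assumptions made for
   this example only. On 3 ranks, Nbs = 10 blocks are split as 4, 3, 3, giving local
   sizes 8, 6, 6.
*/
static PetscErrorCode ExampleBlockSplit(MPI_Comm comm)
{
  PetscInt bs = 2, n = PETSC_DECIDE, N = 20; /* assumed sizes for illustration */

  PetscSplitOwnershipBlock(comm, bs, &n, &N); /* sets n to a multiple of bs on every rank */
  PetscPrintf(PETSC_COMM_SELF, "local length %d of global length %d\n", (int)n, (int)N);
  return 0;
}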
/*@
  PetscSplitOwnership - Given a global (or local) length, determines a local
  (or global) length via a simple formula

  Collective (if n or N is `PETSC_DECIDE`)

  Input Parameters:
+ comm - MPI communicator that shares the object being divided
. n - local length (or `PETSC_DECIDE` to have it set)
- N - global length (or `PETSC_DECIDE`)

  Level: developer

  Notes:
  n and N cannot both be `PETSC_DECIDE`

  If one processor calls this with n or N of `PETSC_DECIDE` then all processors
  must. Otherwise, an error is thrown in debug mode, while the program will hang
  in optimized (i.e. configured with --with-debugging=0) mode.

.seealso: `PetscSplitOwnershipBlock()`, `PetscSplitOwnershipEqual()`
@*/
PetscErrorCode PetscSplitOwnership(MPI_Comm comm, PetscInt *n, PetscInt *N)
{
  PetscMPIInt size, rank;

  /* enforce the restriction stated in the notes above */
  PetscCheck(*N != PETSC_DECIDE || *n != PETSC_DECIDE, comm, PETSC_ERR_ARG_WRONGSTATE, "Both n and N cannot be PETSC_DECIDE");

  if (PetscDefined(USE_DEBUG)) {
    /* in debug mode, verify that every rank passed PETSC_DECIDE for the same arguments */
    PetscMPIInt l[2], g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    MPI_Comm_size(comm, &size);
    MPIU_Allreduce(l, g, 2, MPI_INT, MPI_SUM, comm);
    PetscCheck(!g[0] || g[0] == size, comm, PETSC_ERR_ARG_INCOMP, "All processes must supply PETSC_DECIDE for this parameter");
    PetscCheck(!g[1] || g[1] == size, comm, PETSC_ERR_ARG_INCOMP, "All processes must supply PETSC_DECIDE for this parameter");
  }

  if (*N == PETSC_DECIDE) {
    /* sum the local lengths in 64-bit to detect overflow of PetscInt */
    PetscInt64 m = *n, M;
    MPIU_Allreduce(&m, &M, 1, MPIU_INT64, MPI_SUM, comm);
    PetscCheck(M <= PETSC_MAX_INT, comm, PETSC_ERR_INT_OVERFLOW, "Global length is too large for PetscInt; consider configuring with --with-64-bit-indices");
    *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    MPI_Comm_size(comm, &size);
    MPI_Comm_rank(comm, &rank);
    /* the first N % size ranks get one extra entry */
    *n = *N / size + ((*N % size) > rank);
  } else if (PetscDefined(USE_DEBUG)) {
    /* in debug mode, verify that the given local lengths sum to the given global length */
    PetscInt tmp;
    MPIU_Allreduce(n, &tmp, 1, MPIU_INT, MPI_SUM, comm);
    PetscCheck(tmp == *N, comm, PETSC_ERR_ARG_SIZ, "Sum of local lengths does not equal global length");
  }
  return 0;
}
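
/*
   Illustrative usage sketch, not part of the original psplit.c: the formula above gives
   each of the first N % size ranks one extra entry. The helper name ExampleSplit and the
   size N = 10 are assumptions made for this example only; on 3 ranks the local lengths
   come out as 4, 3, 3.
*/
static PetscErrorCode ExampleSplit(MPI_Comm comm)
{
  PetscInt n = PETSC_DECIDE, N = 10; /* assumed global length for illustration */

  PetscSplitOwnership(comm, &n, &N); /* on 3 ranks: n = 4, 3, 3; N is left at 10 */
  PetscPrintf(PETSC_COMM_SELF, "local length %d of global length %d\n", (int)n, (int)N);
  return 0;
}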
/*@
  PetscSplitOwnershipEqual - Given a global (or local) length, determines a local
  (or global) length via a simple formula, trying to have all local lengths equal

  Collective (if n or N is `PETSC_DECIDE`)

  Input Parameters:
+ comm - MPI communicator that shares the object being divided
. n - local length (or `PETSC_DECIDE` to have it set)
- N - global length (or `PETSC_DECIDE`)

  Level: developer

  Notes:
  This is intended to be used with `MATSCALAPACK`, where the local size must
  be equal in all processes (except possibly the last one). For instance,
  the local sizes when splitting N=50 over 6 processes are 9, 9, 9, 9, 9, 5

  n and N cannot both be `PETSC_DECIDE`

  If one processor calls this with n or N of `PETSC_DECIDE` then all processors
  must. Otherwise, an error is thrown in debug mode, while the program will hang
  in optimized (i.e. configured with --with-debugging=0) mode.

.seealso: `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`
@*/
PetscErrorCode PetscSplitOwnershipEqual(MPI_Comm comm, PetscInt *n, PetscInt *N)
{
  PetscMPIInt size, rank;

  /* enforce the restriction stated in the notes above */
  PetscCheck(*N != PETSC_DECIDE || *n != PETSC_DECIDE, comm, PETSC_ERR_ARG_WRONGSTATE, "Both n and N cannot be PETSC_DECIDE");

  if (PetscDefined(USE_DEBUG)) {
    /* in debug mode, verify that every rank passed PETSC_DECIDE for the same arguments */
    PetscMPIInt l[2], g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    MPI_Comm_size(comm, &size);
    MPIU_Allreduce(l, g, 2, MPI_INT, MPI_SUM, comm);
    PetscCheck(!g[0] || g[0] == size, comm, PETSC_ERR_ARG_INCOMP, "All processes must supply PETSC_DECIDE for this parameter");
    PetscCheck(!g[1] || g[1] == size, comm, PETSC_ERR_ARG_INCOMP, "All processes must supply PETSC_DECIDE for this parameter");
  }

  if (*N == PETSC_DECIDE) {
    /* sum the local lengths in 64-bit to detect overflow of PetscInt */
    PetscInt64 m = *n, M;
    MPIU_Allreduce(&m, &M, 1, MPIU_INT64, MPI_SUM, comm);
    PetscCheck(M <= PETSC_MAX_INT, comm, PETSC_ERR_INT_OVERFLOW, "Global length is too large for PetscInt; consider configuring with --with-64-bit-indices");
    *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    MPI_Comm_size(comm, &size);
    MPI_Comm_rank(comm, &rank);
    *n = *N / size;
    if (*N % size) {
      /* give ceil(N/size) entries to as many leading ranks as possible, the remainder
         to the next rank, and zero to any ranks after that */
      if ((rank + 1) * (*n + 1) <= *N) *n = *n + 1;
      else if (rank * (*n + 1) <= *N) *n = *N - rank * (*n + 1);
      else *n = 0;
    }
  } else if (PetscDefined(USE_DEBUG)) {
    /* in debug mode, verify that the given local lengths sum to the given global length */
    PetscInt tmp;
    MPIU_Allreduce(n, &tmp, 1, MPIU_INT, MPI_SUM, comm);
    PetscCheck(tmp == *N, comm, PETSC_ERR_ARG_SIZ, "Sum of local lengths does not equal global length");
  }
  return 0;
}
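
/*
   Illustrative usage sketch, not part of the original psplit.c: it reproduces the N = 50,
   6-rank example from the notes above, where the local lengths come out as 9, 9, 9, 9, 9, 5
   (equal on all ranks except the last). The helper name ExampleEqualSplit is an assumption
   made for this example only.
*/
static PetscErrorCode ExampleEqualSplit(MPI_Comm comm)
{
  PetscInt n = PETSC_DECIDE, N = 50; /* assumed global length for illustration */

  PetscSplitOwnershipEqual(comm, &n, &N); /* on 6 ranks: n = 9 on ranks 0-4 and 5 on rank 5 */
  PetscPrintf(PETSC_COMM_SELF, "local length %d\n", (int)n);
  return 0;
}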