Actual source code: shvec.c
/*
   This file contains routines for parallel vector operations that use shared memory
*/
#include <../src/vec/vec/impls/mpi/pvecimpl.h>

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm, PetscInt, PetscInt, void **);

PetscErrorCode VecDuplicate_Shared(Vec win, Vec *v)
{
  Vec_MPI     *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  /* first processor allocates entire array and sends its address to the others */
  PetscSharedMalloc(PetscObjectComm((PetscObject)win), win->map->n * sizeof(PetscScalar), win->map->N * sizeof(PetscScalar), (void **)&array);

  VecCreate(PetscObjectComm((PetscObject)win), v);
  VecSetSizes(*v, win->map->n, win->map->N);
  VecCreate_MPI_Private(*v, PETSC_FALSE, w->nghost, array);
  PetscLayoutReference(win->map, &(*v)->map);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)*v)->olist);
  PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)*v)->qlist);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  return 0;
}
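
/*
   A minimal usage sketch (hypothetical sizes; error checking omitted). VecDuplicate()
   on a shared vector dispatches here through the ops table set above, so the
   duplicate gets its own shared-memory segment with the same layout:

     Vec x, y;
     VecCreateShared(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &x);
     VecDuplicate(x, &y);
     VecDestroy(&y);
     VecDestroy(&x);
*/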

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscScalar *array;

  PetscSplitOwnership(PetscObjectComm((PetscObject)vv), &vv->map->n, &vv->map->N);
  PetscSharedMalloc(PetscObjectComm((PetscObject)vv), vv->map->n * sizeof(PetscScalar), vv->map->N * sizeof(PetscScalar), (void **)&array);

  VecCreate_MPI_Private(vv, PETSC_FALSE, 0, array);
  vv->ops->duplicate = VecDuplicate_Shared;
  return 0;
}

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>

static PetscMPIInt Petsc_ShmComm_keyval = MPI_KEYVAL_INVALID;

/*
  Private routine to delete internal storage when a communicator is freed.
  This is called by MPI, not by users.

  The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
  it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm, PetscMPIInt keyval, void *attr_val, void *extra_state)
{
  PetscFree(attr_val);
  return MPI_SUCCESS;
}
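
/*
   The keyval above is registered elsewhere (not shown in this excerpt). A sketch of
   how such a registration would typically look, assuming MPI-2 attribute caching:

     if (Petsc_ShmComm_keyval == MPI_KEYVAL_INVALID) {
       MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, Petsc_DeleteShared, &Petsc_ShmComm_keyval, NULL);
     }
*/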

/*
    This routine is still incomplete and needs work.

    For this to work on Apple Mac OS X you will likely need to add something like the
    following to the file /etc/sysctl.conf:

      kern.sysv.shmmax=67108864
      kern.sysv.shmmin=1
      kern.sysv.shmmni=32
      kern.sysv.shmseg=512
      kern.sysv.shmall=1024

    This does not currently free the shared memory after the program runs. Use the Unix
    command ipcs to list the shared memory in use and ipcrm to remove it.
*/
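
/*
   For example, after a run (the shmid below is hypothetical; values differ per system):

     $ ipcs -m            lists the System V shared-memory segments
     $ ipcrm -m 65538     removes the segment with shmid 65538
*/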
PetscErrorCode PetscSharedMalloc(MPI_Comm comm, PetscInt llen, PetscInt len, void **result)
{
  PetscInt    shift;
  PetscMPIInt rank;
  int         id, key = 0;
  char       *value;

  *result = NULL;

  /* exclusive prefix sum of the local lengths gives this rank's byte offset into the segment */
  MPI_Scan(&llen, &shift, 1, MPIU_INT, MPI_SUM, comm);
  shift -= llen;

  MPI_Comm_rank(comm, &rank);
  if (rank == 0) {
    /* rank 0 creates the segment; the other ranks attach using the same key */
    id = shmget(key, len, 0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to malloc shared memory");
    }
  } else {
    id = shmget(key, len, 0666);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to malloc shared memory");
    }
  }
  value = (char *)shmat(id, (void *)0, 0);
  if (value == (char *)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to access shared memory allocated");
  }
  *result = (void *)(value + shift);
  return 0;
}
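
/*
   A worked instance of the offset computation above (a sketch, assuming three ranks
   with llen = 16, 24, 32 bytes):

     MPI_Scan gives the inclusive prefix sums   shift = 16, 40, 72
     subtracting llen gives the exclusive sums  shift =  0, 16, 40

   so each rank's portion starts at value + (sum of llen over the lower ranks).
*/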

#else

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscMPIInt size;

  MPI_Comm_size(PetscObjectComm((PetscObject)vv), &size);
  if (size > 1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP_SYS, "Not supported for parallel vectors without shared memory support");
  VecCreate_Seq(vv);
  return 0;
}

#endif

/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n    - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N    - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   Currently VecCreateShared() is available only on systems configured with Unix
   System V shared memory support (PETSC_USE_SHARED_MEMORY); elsewhere this routine
   creates a standard sequential vector.

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecCreateMPI()`, `VecDuplicate()`, `VecDuplicateVecs()`,
          `VecCreateGhost()`, `VecCreateMPIWithArray()`, `VecCreateGhostWithArray()`
@*/
PetscErrorCode VecCreateShared(MPI_Comm comm, PetscInt n, PetscInt N, Vec *v)
{
  VecCreate(comm, v);
  VecSetSizes(*v, n, N);
  VecSetType(*v, VECSHARED);
  return 0;
}
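
/*
   Example usage (a minimal sketch; error checking omitted): create a shared vector of
   global length 100 and let PETSc split the ownership, then fill it with ones:

     Vec         x;
     PetscScalar one = 1.0;

     VecCreateShared(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &x);
     VecSet(x, one);
     VecDestroy(&x);
*/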