xref: /petsc/src/vec/vec/impls/shared/shvec.c (revision e84e3fd21fa5912dca3017339ab4b3699e3a9c51)

/*
   This file contains routines for parallel vector operations that use shared memory
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* first process allocates the entire array and sends its address to the others */
  PetscCall(PetscSharedMalloc(PetscObjectComm((PetscObject)win),win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array));

  PetscCall(VecCreate(PetscObjectComm((PetscObject)win),v));
  PetscCall(VecSetSizes(*v,win->map->n,win->map->N));
  PetscCall(VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array));
  PetscCall(PetscLayoutReference(win->map,&(*v)->map));

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist));

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscScalar    *array;

  PetscFunctionBegin;
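  /* determine the local and global sizes, then allocate the entire vector in shared memory;
     PetscSharedMalloc() returns a pointer already offset to this process's local block */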
  PetscCall(PetscSplitOwnership(PetscObjectComm((PetscObject)vv),&vv->map->n,&vv->map->N));
  PetscCall(PetscSharedMalloc(PetscObjectComm((PetscObject)vv),vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array));

  PetscCall(VecCreate_MPI_Private(vv,PETSC_FALSE,0,array));
  vv->ops->duplicate = VecDuplicate_Shared;
  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>

static PetscMPIInt Petsc_ShmComm_keyval = MPI_KEYVAL_INVALID;
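/* MPI attribute keyval intended for caching shared-memory bookkeeping on a communicator */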

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscInt keyval,void *attr_val,void *extra_state)
{
  PetscFunctionBegin;
  PetscCall(PetscFree(attr_val));
  PetscFunctionReturn(MPI_SUCCESS);
}

/*

    This routine is still incomplete and needs work.

    For this to work on Apple Mac OS X you will likely need to add something like the following
    to the file /etc/sysctl.conf:

      kern.sysv.shmmax=67108864
      kern.sysv.shmmin=1
      kern.sysv.shmmni=32
      kern.sysv.shmseg=512
      kern.sysv.shmall=1024

    This does not currently free the shared memory after the program runs. Use the Unix command
    ipcs to see the shared memory still in use and ipcrm to remove it.

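    One possible way (not done here) to have the segment released automatically would be for each
    process to mark it for removal right after attaching, e.g.

      if (shmctl(id,IPC_RMID,NULL) == -1) perror("Unable to mark shared memory for removal");

    since with IPC_RMID the segment is destroyed once the last attached process detaches or exits.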
*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscInt       shift;
  PetscMPIInt    rank;
  int            id,key = 0;
  char           *value;

  PetscFunctionBegin;
  *result = 0;

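  /* prefix sum of the local lengths (in bytes) gives this process's offset into the shared array */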
  PetscCallMPI(MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm));
  shift -= llen;

  PetscCallMPI(MPI_Comm_rank(comm,&rank));
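  /* rank 0 creates the System V shared memory segment; the other processes look up the same key without IPC_CREAT */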
  if (rank == 0) {
    id = shmget(key,len,0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to allocate shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to allocate shared memory");
    }
  } else {
    id = shmget(key,len,0666);
    if (id == -1) {
      perror("Unable to allocate shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to allocate shared memory");
    }
  }
  value = (char*)shmat(id,(void*)0,0);
  if (value == (char*)-1) {
    perror("Unable to attach to allocated shared memory");
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to attach to allocated shared memory");
  }
  *result = (void*)(value + shift);
  PetscFunctionReturn(0);
}

#else

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscMPIInt    size;

  PetscFunctionBegin;
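  /* shared-memory support was not compiled in, so a "shared" vector is only available on a
     single process, where it is simply a sequential vector */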
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)vv),&size));
  PetscCheck(size <= 1,PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Shared memory vector objects are not supported on this machine");
  PetscCall(VecCreate_Seq(vv));
  PetscFunctionReturn(0);
}

#endif

/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   Currently VecCreateShared() is available only on machines where PETSc was configured with
   shared memory support; otherwise, this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

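   Example Usage:
   A minimal sketch (assuming PetscInitialize() has already been called) that creates a shared
   vector of global length 100 spread across the processes of PETSC_COMM_WORLD and fills it with ones:
.vb
   Vec x;
   PetscCall(VecCreateShared(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &x));
   PetscCall(VecSet(x, 1.0));
   /* ... use x like any other parallel vector ... */
   PetscCall(VecDestroy(&x));
.ve
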
   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscFunctionBegin;
  PetscCall(VecCreate(comm,v));
  PetscCall(VecSetSizes(*v,n,N));
  PetscCall(VecSetType(*v,VECSHARED));
  PetscFunctionReturn(0);
}
190