xref: /petsc/src/vec/vec/impls/shared/shvec.c (revision 4d25b1e8fcd3ad4ffb67bd2c1c0590886f5a9001)
1 
2 /*
3    This file contains routines for Parallel vector operations that use shared memory
4  */
5 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
6 
7 #if defined(PETSC_USE_SHARED_MEMORY)
8 
9 extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);
10 
11 #undef __FUNCT__
12 #define __FUNCT__ "VecDuplicate_Shared"
13 PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
14 {
15   PetscErrorCode ierr;
16   Vec_MPI        *w = (Vec_MPI *)win->data;
17   PetscScalar    *array;
18 
19   PetscFunctionBegin;
20   /* first processor allocates entire array and sends it's address to the others */
21   ierr = PetscSharedMalloc(((PetscObject)win)->comm,win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);
22 
23   ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
24   ierr = VecSetSizes(*v,win->map->n,win->map->N);CHKERRQ(ierr);
25   ierr = VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);CHKERRQ(ierr);
26   ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);
27 
28   /* New vector should inherit stashing property of parent */
29   (*v)->stash.donotstash   = win->stash.donotstash;
30   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
31 
32   ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);CHKERRQ(ierr);
33   ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);CHKERRQ(ierr);
34 
35   (*v)->ops->duplicate = VecDuplicate_Shared;
36   (*v)->bstash.bs = win->bstash.bs;
37   PetscFunctionReturn(0);
38 }
39 
40 
41 EXTERN_C_BEGIN
42 #undef __FUNCT__
43 #define __FUNCT__ "VecCreate_Shared"
44 PetscErrorCode  VecCreate_Shared(Vec vv)
45 {
46   PetscErrorCode ierr;
47   PetscScalar    *array;
48 
49   PetscFunctionBegin;
50   ierr = PetscSplitOwnership(((PetscObject)vv)->comm,&vv->map->n,&vv->map->N);CHKERRQ(ierr);
51   ierr = PetscSharedMalloc(((PetscObject)vv)->comm,vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);
52 
53   ierr = VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
54   vv->ops->duplicate = VecDuplicate_Shared;
55 
56   PetscFunctionReturn(0);
57 }
58 EXTERN_C_END
59 
60 
61 /* ----------------------------------------------------------------------------------------
62      Code to manage shared memory allocation using standard Unix shared memory
63 */
64 #include <petscsys.h>
65 #if defined(PETSC_HAVE_PWD_H)
66 #include <pwd.h>
67 #endif
68 #include <ctype.h>
69 #include <sys/types.h>
70 #include <sys/stat.h>
71 #if defined(PETSC_HAVE_UNISTD_H)
72 #include <unistd.h>
73 #endif
74 #if defined(PETSC_HAVE_STDLIB_H)
75 #include <stdlib.h>
76 #endif
77 #if defined(PETSC_HAVE_SYS_PARAM_H)
78 #include <sys/param.h>
79 #endif
80 #if defined(PETSC_HAVE_SYS_UTSNAME_H)
81 #include <sys/utsname.h>
82 #endif
83 #include <fcntl.h>
84 #include <time.h>
85 #if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
86 #include <sys/systeminfo.h>
87 #endif
88 #include <sys/shm.h>
89 #include <sys/mman.h>
90 
91 
92 static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;
93 
94 #undef __FUNCT__
95 #define __FUNCT__ "Petsc_DeleteShared"
96 /*
97    Private routine to delete internal storage when a communicator is freed.
98   This is called by MPI, not by users.
99 
100   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
101   it was MPI_Comm *comm.
102 */
103 static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscInt keyval,void* attr_val,void* extra_state)
104 {
105   PetscErrorCode ierr;
106 
107   PetscFunctionBegin;
108   ierr = PetscFree(attr_val);CHKERRQ(ierr);
109   PetscFunctionReturn(MPI_SUCCESS);
110 }
111 
112 #undef __FUNCT__
113 #define __FUNCT__ "PetscSharedMalloc"
114 /*
115 
116     This routine is still incomplete and needs work.
117 
118     For this to work on the Apple Mac OS X you will likely need to add something like the following to the file /etc/sysctl.conf
119 cat /etc/sysctl.conf
120 kern.sysv.shmmax=67108864
121 kern.sysv.shmmin=1
122 kern.sysv.shmmni=32
123 kern.sysv.shmseg=512
124 kern.sysv.shmall=1024
125 
126   This does not currently free the shared memory after the program runs. Use the Unix command ipcs to see the shared memory in use and
127 ipcrm to remove the shared memory in use.
128 
129 */
130 PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
131 {
132   PetscErrorCode ierr;
133   PetscInt       shift;
134   PetscMPIInt    rank,flag;
135   int            *arena,id,key = 0;
136   char           *value;
137 
138   PetscFunctionBegin;
139   *result = 0;
140 
141   ierr   = MPI_Scan(&llen,&shift,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
142   shift -= llen;
143 
144   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
145   if (!rank) {
146     id = shmget(key,len, 0666 |IPC_CREAT);
147     if (id == -1) {
148       perror("Unable to malloc shared memory");
149       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
150     }
151   } else {
152     id = shmget(key,len, 0666);
153     if (id == -1) {
154       perror("Unable to malloc shared memory");
155       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
156     }
157   }
158   value = shmat(id,(void*)0,0);
159   if (value == (char*)-1) {
160     perror("Unable to access shared memory allocated");
161     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
162   }
163   *result = (void*) (value + shift);
164 
165   PetscFunctionReturn(0);
166 }
167 
168 #else
169 
170 EXTERN_C_BEGIN
171 extern PetscErrorCode  VecCreate_Seq(Vec);
172 EXTERN_C_END
173 
174 EXTERN_C_BEGIN
175 #undef __FUNCT__
176 #define __FUNCT__ "VecCreate_Shared"
177 PetscErrorCode  VecCreate_Shared(Vec vv)
178 {
179   PetscErrorCode ierr;
180   PetscMPIInt    size;
181 
182   PetscFunctionBegin;
183   ierr = MPI_Comm_size(((PetscObject)vv)->comm,&size);CHKERRQ(ierr);
184   if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"No supported for shared memory vector objects on this machine");
185   ierr = VecCreate_Seq(vv);CHKERRQ(ierr);
186   PetscFunctionReturn(0);
187 }
188 EXTERN_C_END
189 
190 #endif
191 
192 #undef __FUNCT__
193 #define __FUNCT__ "VecCreateShared"
194 /*@
195    VecCreateShared - Creates a parallel vector that uses shared memory.
196 
197    Input Parameters:
198 +  comm - the MPI communicator to use
199 .  n - local vector length (or PETSC_DECIDE to have calculated if N is given)
200 -  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
201 
202    Output Parameter:
203 .  vv - the vector
204 
205    Collective on MPI_Comm
206 
207    Notes:
208    Currently VecCreateShared() is available only on the SGI; otherwise,
209    this routine is the same as VecCreateMPI().
210 
211    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
212    same type as an existing vector.
213 
214    Level: advanced
215 
216    Concepts: vectors^creating with shared memory
217 
218 .seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
219           VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()
220 
221 @*/
222 PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
223 {
224   PetscErrorCode ierr;
225 
226   PetscFunctionBegin;
227   ierr = VecCreate(comm,v);CHKERRQ(ierr);
228   ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
229   ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
230   PetscFunctionReturn(0);
231 }
232 
233 
234 
235 
236 
237