xref: /petsc/src/vec/vec/impls/shared/shvec.c (revision 2d50711886017447cb8c1ab06d8b61e8775efc0e)
1 
2 /*
3    This file contains routines for Parallel vector operations that use shared memory
4  */
5 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
6 
7 #if defined(PETSC_USE_SHARED_MEMORY)
8 
9 extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);
10 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_Shared"
/*
   VecDuplicate_Shared - Duplicates a shared-memory parallel vector.

   The duplicate gets its own shared-memory arena (obtained through
   PetscSharedMalloc()) but references the parent's layout and inherits its
   stash options, block size, and any composed objects/functions.
*/
PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;

  /* first processor allocates entire array and sends its address to the others */
  ierr = PetscSharedMalloc(((PetscObject)win)->comm,win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  /* create the new vector on the same communicator with the same sizes,
     backed by this process's slice of the shared arena */
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map->n,win->map->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* carry over objects and functions composed with the parent vector */
  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);CHKERRQ(ierr);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}
40 
41 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Shared"
/*
   VecCreate_Shared - Creates a parallel vector whose entries live in a
   shared-memory arena accessible to all processes in the vector's
   communicator (see PetscSharedMalloc() below).  This is the VECSHARED
   implementation used when PETSC_USE_SHARED_MEMORY is defined.
*/
PetscErrorCode  VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* resolve local/global sizes first (either may still be PETSC_DECIDE) */
  ierr = PetscSplitOwnership(((PetscObject)vv)->comm,&vv->map->n,&vv->map->N);CHKERRQ(ierr);
  ierr = PetscSharedMalloc(((PetscObject)vv)->comm,vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  /* build an ordinary MPI vector (no ghost points) on top of the shared arena */
  ierr = VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  vv->ops->duplicate = VecDuplicate_Shared;

  PetscFunctionReturn(0);
}
EXTERN_C_END
60 
61 
62 /* ----------------------------------------------------------------------------------------
63      Code to manage shared memory allocation using standard Unix shared memory
64 */
65 #include <petscsys.h>
66 #if defined(PETSC_HAVE_PWD_H)
67 #include <pwd.h>
68 #endif
69 #include <ctype.h>
70 #include <sys/types.h>
71 #include <sys/stat.h>
72 #if defined(PETSC_HAVE_UNISTD_H)
73 #include <unistd.h>
74 #endif
75 #if defined(PETSC_HAVE_STDLIB_H)
76 #include <stdlib.h>
77 #endif
78 #if defined(PETSC_HAVE_SYS_PARAM_H)
79 #include <sys/param.h>
80 #endif
81 #if defined(PETSC_HAVE_SYS_UTSNAME_H)
82 #include <sys/utsname.h>
83 #endif
84 #include <fcntl.h>
85 #include <time.h>
86 #if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
87 #include <sys/systeminfo.h>
88 #endif
89 #include <sys/shm.h>
90 #include <sys/mman.h>
91 
92 
93 static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;
94 
#undef __FUNCT__
#define __FUNCT__ "Petsc_DeleteShared"
/*
   Private routine to delete internal storage when a communicator is freed.
  This is called by MPI, not by users.

  The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
  it was MPI_Comm *comm.

  Note: comm, keyval, and extra_state are required by the MPI attribute
  delete-callback signature but are intentionally unused here; attr_val is
  assumed to have been allocated with PetscMalloc() by whoever attached it.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscInt keyval,void* attr_val,void* extra_state)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(attr_val);CHKERRQ(ierr);
  /* MPI delete callbacks must return an MPI error code, not a PETSc one */
  PetscFunctionReturn(MPI_SUCCESS);
}
112 
113 #undef __FUNCT__
114 #define __FUNCT__ "PetscSharedMalloc"
115 /*
116 
117     This routine is still incomplete and needs work.
118 
    For this to work on Apple Mac OS X you will likely need to add something like the following to the file /etc/sysctl.conf:
120 cat /etc/sysctl.conf
121 kern.sysv.shmmax=67108864
122 kern.sysv.shmmin=1
123 kern.sysv.shmmni=32
124 kern.sysv.shmseg=512
125 kern.sysv.shmall=1024
126 
127   This does not currently free the shared memory after the program runs. Use the Unix command ipcs to see the shared memory in use and
128 ipcrm to remove the shared memory in use.
129 
130 */
131 PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
132 {
133   PetscErrorCode ierr;
134   PetscInt       shift;
135   PetscMPIInt    rank,flag;
136   int            *arena,id,key = 0;
137   char           *value;
138 
139   PetscFunctionBegin;
140   *result = 0;
141 
142   ierr   = MPI_Scan(&llen,&shift,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
143   shift -= llen;
144 
145   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
146   if (!rank) {
147     id = shmget(key,len, 0666 |IPC_CREAT);
148     if (id == -1) {
149       perror("Unable to malloc shared memory");
150       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
151     }
152   } else {
153     id = shmget(key,len, 0666);
154     if (id == -1) {
155       perror("Unable to malloc shared memory");
156       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
157     }
158   }
159   value = shmat(id,(void*)0,0);
160   if (value == (char*)-1) {
161     perror("Unable to access shared memory allocated");
162     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
163   }
164   *result = (void*) (value + shift);
165 
166   PetscFunctionReturn(0);
167 }
168 
169 #else
170 
171 EXTERN_C_BEGIN
172 extern PetscErrorCode  VecCreate_Seq(Vec);
173 EXTERN_C_END
174 
175 EXTERN_C_BEGIN
176 #undef __FUNCT__
177 #define __FUNCT__ "VecCreate_Shared"
178 PetscErrorCode  VecCreate_Shared(Vec vv)
179 {
180   PetscErrorCode ierr;
181   PetscMPIInt    size;
182 
183   PetscFunctionBegin;
184   ierr = MPI_Comm_size(((PetscObject)vv)->comm,&size);CHKERRQ(ierr);
185   if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"No supported for shared memory vector objects on this machine");
186   ierr = VecCreate_Seq(vv);CHKERRQ(ierr);
187   PetscFunctionReturn(0);
188 }
189 EXTERN_C_END
190 
191 #endif
192 
193 #undef __FUNCT__
194 #define __FUNCT__ "VecCreateShared"
195 /*@
196    VecCreateShared - Creates a parallel vector that uses shared memory.
197 
198    Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
202 
203    Output Parameter:
204 .  vv - the vector
205 
206    Collective on MPI_Comm
207 
208    Notes:
209    Currently VecCreateShared() is available only on the SGI; otherwise,
210    this routine is the same as VecCreateMPI().
211 
212    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
213    same type as an existing vector.
214 
215    Level: advanced
216 
217    Concepts: vectors^creating with shared memory
218 
219 .seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
220           VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()
221 
222 @*/
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* standard create / set-sizes / set-type sequence; setting VECSHARED
     dispatches to whichever VecCreate_Shared() variant was compiled in */
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
233 
234 
235 
236 
237 
238