#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations that use shared memory.
 */
#include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

/*
     Could not get the include files to work properly on the SGI with
  the C++ compiler.
*/
#if defined(PETSC_USE_SHARED_MEMORY) && !defined(__cplusplus)

EXTERN PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_Shared"
PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;

  /* first processor allocates entire array and sends its address to the others */
  ierr = PetscSharedMalloc(win->comm,win->map.n*sizeof(PetscScalar),win->map.N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,array,&win->map);CHKERRQ(ierr);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);

  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Shared"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = PetscSplitOwnership(vv->comm,&vv->map.n,&vv->map.N);CHKERRQ(ierr);
  ierr = PetscSharedMalloc(vv->comm,vv->map.n*sizeof(PetscScalar),vv->map.N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(vv,0,array,PETSC_NULL);CHKERRQ(ierr);
  vv->ops->duplicate = VecDuplicate_Shared;

  PetscFunctionReturn(0);
}
EXTERN_C_END

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation under the SGI with MPI

   We associate a shared memory "arena" with each communicator; memory may then be
   shmalloced from that arena.
*/
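
/*
   The code below uses the standard MPI-1 attribute-caching pattern: a keyval is
   created once per process, a pointer to the arena is cached on the communicator
   with MPI_Attr_put(), and MPI invokes the delete callback when the communicator
   is freed.  A minimal sketch of the pattern, with a hypothetical payload (not
   the actual code used below):

      static PetscMPIInt keyval = MPI_KEYVAL_INVALID;
      void               *payload;
      PetscMPIInt        flag;

      if (keyval == MPI_KEYVAL_INVALID) {
        MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DeleteShared,&keyval,0);
      }
      MPI_Attr_get(comm,keyval,(void**)&payload,&flag);
      if (!flag) {
        payload = ...allocated here...;
        MPI_Attr_put(comm,keyval,payload);    cached; freed by the delete callback
      }
*/
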
#include "petscsys.h"
#include "petscfix.h"
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/types.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_STDLIB_H)
#include <stdlib.h>
#endif
#if defined(PETSC_HAVE_SYS_PARAM_H)
#include <sys/param.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include "petscfix.h"

static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;
static PetscInt    Petsc_Shared_size   = 100000000;

#undef __FUNCT__
#define __FUNCT__ "Petsc_DeleteShared"
/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscMPIInt keyval,void *attr_val,void *extra_state)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(attr_val);CHKERRQ(ierr);
  PetscFunctionReturn(MPI_SUCCESS);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSharedMemorySetSize"
PetscErrorCode PetscSharedMemorySetSize(PetscInt s)
{
  PetscFunctionBegin;
  Petsc_Shared_size = s;
  PetscFunctionReturn(0);
}

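/*
   Note: to take effect, PetscSharedMemorySetSize() should be called before the first
   shared vector is created on a communicator; PetscSharedInitialize() passes the size
   to usconfig() once, when the arena is built, and the -shared_size option (read there)
   overrides this value.
*/
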
#include "petscfix.h"

#include <ulocks.h>

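/*
   The allocator below is built on the SGI/IRIX shared-arena API from <ulocks.h>:
   usconfig() sets arena creation parameters, usinit() creates or attaches to a
   file-backed arena, and usmalloc()/usfree() allocate and release memory inside
   it.  A minimal sketch of that API; the path and sizes here are illustrative only:

      usconfig(CONF_INITSIZE,1024*1024);          request a 1 MB arena
      usptr_t *arena = usinit("/tmp/myarena");    create or attach to the arena file
      void    *p     = usmalloc(256,arena);       allocate 256 bytes from the arena
      usfree(p,arena);                            return the memory to the arena
*/
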
#undef __FUNCT__
#define __FUNCT__ "PetscSharedInitialize"
PetscErrorCode PetscSharedInitialize(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,flag;
  char           filename[PETSC_MAX_PATH_LEN];
  usptr_t        **arena;

  PetscFunctionBegin;

  if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
    /*
       The calling sequence of the 2nd argument to this function changed
       between MPI standard 1.0 and revision 1.1. Here we match the new
       standard; if you are using an MPI implementation that follows the
       older version you will get a compiler warning about the next line,
       but it is only a warning and does no harm.
    */
    ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DeleteShared,&Petsc_Shared_keyval,0);CHKERRQ(ierr);
  }

  ierr = MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);CHKERRQ(ierr);

  if (!flag) {
    /* This communicator does not yet have a shared memory arena */
    ierr = PetscMalloc(sizeof(usptr_t*),&arena);CHKERRQ(ierr);

    /* rank 0 creates a unique file to back the arena; the name is broadcast so all ranks attach to it */
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscStrcpy(filename,"/tmp/PETScArenaXXXXXX");CHKERRQ(ierr);
#if defined(PETSC_HAVE_MKSTEMP)
      if (mkstemp(filename) < 0) {
        SETERRQ1(PETSC_ERR_FILE_OPEN,"Unable to open temporary file %s",filename);
      }
#else
      if (!mktemp(filename)) {
        SETERRQ1(PETSC_ERR_FILE_OPEN,"Unable to open temporary file %s",filename);
      }
#endif
    }
    ierr = MPI_Bcast(filename,PETSC_MAX_PATH_LEN,MPI_CHAR,0,comm);CHKERRQ(ierr);
    ierr = PetscOptionsGetInt(PETSC_NULL,"-shared_size",&Petsc_Shared_size,&flag);CHKERRQ(ierr);
    usconfig(CONF_INITSIZE,Petsc_Shared_size);
    *arena = usinit(filename);
    if (!*arena) SETERRQ1(PETSC_ERR_LIB,"Unable to create shared memory arena from file %s",filename);
    ierr = MPI_Attr_put(comm,Petsc_Shared_keyval,arena);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSharedMalloc"
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  char           *value;
  PetscErrorCode ierr;
  PetscInt       shift;
  PetscMPIInt    rank,flag;
  usptr_t        **arena;

  PetscFunctionBegin;
  *result = 0;
  if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
    ierr = PetscSharedInitialize(comm);CHKERRQ(ierr);
  }
  ierr = MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);CHKERRQ(ierr);
  if (!flag) {
    ierr = PetscSharedInitialize(comm);CHKERRQ(ierr);
    ierr = MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);CHKERRQ(ierr);
    if (!flag) SETERRQ(PETSC_ERR_LIB,"Unable to initialize shared memory");
  }

  /* prefix sum of the local lengths gives this process's byte offset into the shared block */
  ierr   = MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  shift -= llen;

  /* rank 0 allocates the entire block from the arena; the others receive its address */
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (!rank) {
    value = (char*)usmalloc((size_t)len,*arena);
    if (!value) {
      (*PetscErrorPrintf)("Unable to allocate shared memory location\n");
      (*PetscErrorPrintf)("Run with option -shared_size <size>\n");
      (*PetscErrorPrintf)("with size > %d\n",(int)(1.2*(Petsc_Shared_size+len)));
      SETERRQ(PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  }
  ierr    = MPI_Bcast(&value,sizeof(char*),MPI_BYTE,0,comm);CHKERRQ(ierr);
  value  += shift;
  *result = value;

  PetscFunctionReturn(0);
}

#else

EXTERN_C_BEGIN
extern PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Seq(Vec);
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Shared"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(vv->comm,&size);CHKERRQ(ierr);
  if (size > 1) {
    SETERRQ(PETSC_ERR_SUP_SYS,"No support for shared memory vector objects on this machine");
  }
  ierr = VecCreate_Seq(vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#endif

#undef __FUNCT__
#define __FUNCT__ "VecCreateShared"
/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

   Concepts: vectors^creating with shared memory

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}