xref: /petsc/src/vec/vec/impls/shared/shvec.c (revision 05b42c5fbeb981b578319140e7e668b42ba3bbd1)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations that use shared memory
4  */
5 #include "src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
7 /*
8      Could not get the include files to work properly on the SGI with
9   the C++ compiler.
10 */
11 #if defined(PETSC_USE_SHARED_MEMORY) && !defined(__cplusplus)
12 
13 EXTERN PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);
14 
15 #undef __FUNCT__
16 #define __FUNCT__ "VecDuplicate_Shared"
17 PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
18 {
19   PetscErrorCode ierr;
20   Vec_MPI        *w = (Vec_MPI *)win->data;
21   PetscScalar    *array;
22 
23   PetscFunctionBegin;
24 
25   /* first processor allocates entire array and sends it's address to the others */
26   ierr = PetscSharedMalloc(win->comm,win->map.n*sizeof(PetscScalar),win->map.N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);
27 
28   ierr = VecCreate(win->comm,v);CHKERRQ(ierr);
29   ierr = VecSetSizes(*v,win->map.n,win->map.N);CHKERRQ(ierr);
30   ierr = VecCreate_MPI_Private(*v,w->nghost,array,win->map);CHKERRQ(ierr);
31 
32   /* New vector should inherit stashing property of parent */
33   (*v)->stash.donotstash = win->stash.donotstash;
34 
35   ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
36   ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
37 
38   if (win->mapping) {
39     (*v)->mapping = win->mapping;
40     ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
41   }
42   (*v)->ops->duplicate = VecDuplicate_Shared;
43   (*v)->map.bs    = win->map.bs;
44   (*v)->bstash.bs = win->bstash.bs;
45   PetscFunctionReturn(0);
46 }
47 
48 
49 EXTERN_C_BEGIN
50 #undef __FUNCT__
51 #define __FUNCT__ "VecCreate_Shared"
52 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Shared(Vec vv)
53 {
54   PetscErrorCode ierr;
55   PetscScalar    *array;
56 
57   PetscFunctionBegin;
58   ierr = PetscSplitOwnership(vv->comm,&vv->map.n,&vv->map.N);CHKERRQ(ierr);
59   ierr = PetscSharedMalloc(vv->comm,vv->map.n*sizeof(PetscScalar),vv->map.N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);
60 
61   ierr = VecCreate_MPI_Private(vv,0,array,PETSC_NULL);CHKERRQ(ierr);
62   vv->ops->duplicate = VecDuplicate_Shared;
63 
64   PetscFunctionReturn(0);
65 }
66 EXTERN_C_END
67 
68 
69 /* ----------------------------------------------------------------------------------------
70      Code to manage shared memory allocation under the SGI with MPI
71 
72   We associate with a communicator a shared memory "arena" from which memory may be shmalloced.
73 */
74 #include "petscsys.h"
75 #include "petscfix.h"
76 #if defined(PETSC_HAVE_PWD_H)
77 #include <pwd.h>
78 #endif
79 #include <ctype.h>
80 #include <sys/types.h>
81 #include <sys/stat.h>
82 #if defined(PETSC_HAVE_UNISTD_H)
83 #include <unistd.h>
84 #endif
85 #if defined(PETSC_HAVE_STDLIB_H)
86 #include <stdlib.h>
87 #endif
88 #if defined(PETSC_HAVE_SYS_PARAM_H)
89 #include <sys/param.h>
90 #endif
91 #if defined(PETSC_HAVE_SYS_UTSNAME_H)
92 #include <sys/utsname.h>
93 #endif
94 #include <fcntl.h>
95 #include <time.h>
96 #if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
97 #include <sys/systeminfo.h>
98 #endif
99 #include "petscfix.h"
100 
101 static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;
102 static PetscInt Petsc_Shared_size   = 100000000;
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "Petsc_DeleteShared"
106 /*
107    Private routine to delete internal storage when a communicator is freed.
108   This is called by MPI, not by users.
109 
110   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
111   it was MPI_Comm *comm.
112 */
113 static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscInt keyval,void* attr_val,void* extra_state)
114 {
115   PetscErrorCode ierr;
116 
117   PetscFunctionBegin;
118   ierr = PetscFree(attr_val);CHKERRQ(ierr);
119   PetscFunctionReturn(MPI_SUCCESS);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "PetscSharedMemorySetSize"
124 PetscErrorCode PetscSharedMemorySetSize(PetscInt s)
125 {
126   PetscFunctionBegin;
127   Petsc_Shared_size = s;
128   PetscFunctionReturn(0);
129 }
130 
131 #include "petscfix.h"
132 
133 #include <ulocks.h>
134 
135 #undef __FUNCT__
136 #define __FUNCT__ "PetscSharedInitialize"
137 PetscErrorCode PetscSharedInitialize(MPI_Comm comm)
138 {
139   PetscErrorCode ierr;
140   PetscMPIInt    rank,flag;
141   char           filename[PETSC_MAX_PATH_LEN];
142   usptr_t        **arena;
143 
144   PetscFunctionBegin;
145 
146   if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
147     /*
148        The calling sequence of the 2nd argument to this function changed
149        between MPI Standard 1.0 and the revisions 1.1 Here we match the
150        new standard, if you are using an MPI implementation that uses
151        the older version you will get a warning message about the next line;
152        it is only a warning message and should do no harm.
153     */
154     ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DeleteShared,&Petsc_Shared_keyval,0);CHKERRQ(ierr);
155   }
156 
157   ierr = MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);CHKERRQ(ierr);
158 
159   if (!flag) {
160     /* This communicator does not yet have a shared memory areana */
161     ierr = PetscMalloc(sizeof(usptr_t*),&arena);CHKERRQ(ierr);
162 
163     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
164     if (!rank) {
165       ierr = PetscStrcpy(filename,"/tmp/PETScArenaXXXXXX");CHKERRQ(ierr);
166 #ifdef PETSC_HAVE_MKSTEMP
167       if (mkstemp(filename) < 0) {
168         SETERRQ1(PETSC_ERR_FILE_OPEN, "Unable to open temporary file %s", filename);
169       }
170 #else
171       if (!mktemp(filename)) {
172         SETERRQ1(PETSC_ERR_FILE_OPEN, "Unable to open temporary file %s", filename);
173       }
174 #endif
175     }
176     ierr     = MPI_Bcast(filename,PETSC_MAX_PATH_LEN,MPI_CHAR,0,comm);CHKERRQ(ierr);
177     ierr     = PetscOptionsGetInt(PETSC_NULL,"-shared_size",&Petsc_Shared_size,&flag);CHKERRQ(ierr);
178     usconfig(CONF_INITSIZE,Petsc_Shared_size);
179     *arena   = usinit(filename);
180     ierr     = MPI_Attr_put(comm,Petsc_Shared_keyval,arena);CHKERRQ(ierr);
181   }
182 
183   PetscFunctionReturn(0);
184 }
185 
186 #undef __FUNCT__
187 #define __FUNCT__ "PetscSharedMalloc"
188 PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
189 {
190   char           *value;
191   PetscErrorCode ierr;
192   PetscInt       shift;
193   PetscMPIInt    rank,flag;
194   usptr_t        **arena;
195 
196   PetscFunctionBegin;
197   *result = 0;
198   if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
199     ierr = PetscSharedInitialize(comm);CHKERRQ(ierr);
200   }
201   ierr = MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);CHKERRQ(ierr);
202   if (!flag) {
203     ierr = PetscSharedInitialize(comm);CHKERRQ(ierr);
204     ierr = MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);CHKERRQ(ierr);
205     if (!flag) SETERRQ(PETSC_ERR_LIB,"Unable to initialize shared memory");
206   }
207 
208   ierr   = MPI_Scan(&llen,&shift,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
209   shift -= llen;
210 
211   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
212   if (!rank) {
213     value = (char*)usmalloc((size_t) len,*arena);
214     if (!value) {
215       (*PetscErrorPrintf)("Unable to allocate shared memory location\n");
216       (*PetscErrorPrintf)("Run with option -shared_size <size> \n");
217       (*PetscErrorPrintf)("with size > %d \n",(int)(1.2*(Petsc_Shared_size+len)));
218       SETERRQ(PETSC_ERR_LIB,"Unable to malloc shared memory");
219     }
220   }
221   ierr = MPI_Bcast(&value,8,MPI_BYTE,0,comm);CHKERRQ(ierr);
222   value += shift;
223 
224   PetscFunctionReturn(0);
225 }
226 
227 #else
228 
229 EXTERN_C_BEGIN
230 extern PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Seq(Vec);
231 EXTERN_C_END
232 
233 EXTERN_C_BEGIN
234 #undef __FUNCT__
235 #define __FUNCT__ "VecCreate_Shared"
236 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Shared(Vec vv)
237 {
238   PetscErrorCode ierr;
239   PetscMPIInt    size;
240 
241   PetscFunctionBegin;
242   ierr = MPI_Comm_size(vv->comm,&size);CHKERRQ(ierr);
243   if (size > 1) {
244     SETERRQ(PETSC_ERR_SUP_SYS,"No supported for shared memory vector objects on this machine");
245   }
246   ierr = VecCreate_Seq(vv);CHKERRQ(ierr);
247   PetscFunctionReturn(0);
248 }
249 EXTERN_C_END
250 
251 #endif
252 
253 #undef __FUNCT__
254 #define __FUNCT__ "VecCreateShared"
255 /*@
256    VecCreateShared - Creates a parallel vector that uses shared memory.
257 
258    Input Parameters:
259 .  comm - the MPI communicator to use
260 .  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
261 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
262 
263    Output Parameter:
264 .  vv - the vector
265 
266    Collective on MPI_Comm
267 
268    Notes:
269    Currently VecCreateShared() is available only on the SGI; otherwise,
270    this routine is the same as VecCreateMPI().
271 
272    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
273    same type as an existing vector.
274 
275    Level: advanced
276 
277    Concepts: vectors^creating with shared memory
278 
279 .seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
280           VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()
281 
282 @*/
283 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
284 {
285   PetscErrorCode ierr;
286 
287   PetscFunctionBegin;
288   ierr = VecCreate(comm,v);CHKERRQ(ierr);
289   ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
290   ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
291   PetscFunctionReturn(0);
292 }
293 
294 
295 
296 
297 
298