#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations that use shared memory
 */
#include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

/*
     Could not get the include files to work properly on the SGI with
  the C++ compiler.
*/
/* #define PETSC_USE_SHARED_MEMORY */
#if defined(PETSC_USE_SHARED_MEMORY) && !defined(__cplusplus)

EXTERN PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_Shared"
PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;

  /* first processor allocates entire array and sends its address to the others */
  ierr = PetscSharedMalloc(((PetscObject)win)->comm,win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map->n,win->map->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);CHKERRQ(ierr);

  /* new vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);CHKERRQ(ierr);

  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Shared"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = PetscSplitOwnership(((PetscObject)vv)->comm,&vv->map->n,&vv->map->N);CHKERRQ(ierr);
  ierr = PetscSharedMalloc(((PetscObject)vv)->comm,vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  vv->ops->duplicate = VecDuplicate_Shared;

  PetscFunctionReturn(0);
}
EXTERN_C_END

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation under the SGI with MPI

  We associate with a communicator a shared memory "arena" from which memory may be allocated.
*/
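
/*
   For reference, a minimal sketch of the System V shared memory calls that
   PetscSharedMalloc() below is built on; the key, size, and lack of error
   checking here are illustrative only:

      int   id = shmget(IPC_PRIVATE,4096,0666 | IPC_CREAT);   create a segment
      char *p  = (char*)shmat(id,(void*)0,0);                 map it into this process
      shmdt(p);                                               unmap it
      shmctl(id,IPC_RMID,NULL);                               mark the segment for removal
*/
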
#include "petscsys.h"
#include "petscfix.h"
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/types.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_STDLIB_H)
#include <stdlib.h>
#endif
#if defined(PETSC_HAVE_SYS_PARAM_H)
#include <sys/param.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>
static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;

#undef __FUNCT__
#define __FUNCT__ "Petsc_DeleteShared"
/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscMPIInt keyval,void *attr_val,void *extra_state)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(attr_val);CHKERRQ(ierr);
  PetscFunctionReturn(MPI_SUCCESS);
}
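
/*
   Petsc_Shared_keyval above is intended to be created once, with this routine
   registered as the delete callback; a sketch of the MPI-1 style setup (not
   performed in this file) is

      if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
        ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DeleteShared,&Petsc_Shared_keyval,(void*)0);CHKERRQ(ierr);
      }
*/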
#undef __FUNCT__
#define __FUNCT__ "PetscSharedMalloc"
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscErrorCode ierr;
  PetscInt       shift;
  PetscMPIInt    rank;
  int            id,key = 0;
  char           *value;

  PetscFunctionBegin;
  *result = 0;

  /* compute this process's byte offset into the shared block */
  ierr   = MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  shift -= llen;

  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (!rank) {
    /* first process creates the shared memory segment */
    id = shmget(key,len,0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to allocate shared memory");
      SETERRQ(PETSC_ERR_LIB,"Unable to allocate shared memory");
    }
  }
  /* every process must attach the same segment, so broadcast its id from process 0 */
  ierr = MPI_Bcast(&id,1,MPI_INT,0,comm);CHKERRQ(ierr);
  value = shmat(id,(void*)0,0);
  if (value == (char*)-1) {
    perror("Unable to access allocated shared memory");
    SETERRQ(PETSC_ERR_LIB,"Unable to access allocated shared memory");
  }
  *result = (void*)(value + shift);

  PetscFunctionReturn(0);
}
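
/*
   Usage sketch for PetscSharedMalloc() (hypothetical sizes): with two
   processes each owning 50 entries of a 100 entry array,

      PetscScalar *array;
      ierr = PetscSharedMalloc(comm,50*sizeof(PetscScalar),100*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

   both processes attach the same shared block, and the MPI_Scan() above
   shifts the returned pointer so that array[0] on each process is that
   process's first local entry.
*/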

#else

EXTERN_C_BEGIN
extern PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Seq(Vec);
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Shared"
PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)vv)->comm,&size);CHKERRQ(ierr);
  if (size > 1) {
    SETERRQ(PETSC_ERR_SUP_SYS,"Shared memory vector objects are not supported on this machine");
  }
  ierr = VecCreate_Seq(vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#endif

#undef __FUNCT__
#define __FUNCT__ "VecCreateShared"
/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

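   Example:
   A minimal usage sketch (the global length 100 is arbitrary):
.vb
     Vec v;
     ierr = VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);CHKERRQ(ierr);
     ierr = VecSet(v,1.0);CHKERRQ(ierr);
     ierr = VecDestroy(v);CHKERRQ(ierr);
.ve
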
   Level: advanced

   Concepts: vectors^creating with shared memory

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}