/*
   This file contains routines for parallel vector operations that use shared memory
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* the first process allocates the entire array and shares its address with the others */
  ierr = PetscSharedMalloc(PetscObjectComm((PetscObject)win),win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map->n,win->map->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);CHKERRQ(ierr);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = PetscSplitOwnership(PetscObjectComm((PetscObject)vv),&vv->map->n,&vv->map->N);CHKERRQ(ierr);
  ierr = PetscSharedMalloc(PetscObjectComm((PetscObject)vv),vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  vv->ops->duplicate = VecDuplicate_Shared;
  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>

static PetscMPIInt Petsc_ShmComm_keyval = MPI_KEYVAL_INVALID;

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscMPIInt keyval,void *attr_val,void *extra_state)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(attr_val);CHKERRQ(ierr);
  PetscFunctionReturn(MPI_SUCCESS);
}
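
/*
   A minimal sketch (guarded out with #if 0) of how Petsc_ShmComm_keyval could be
   registered on first use with the MPI-2 attribute-caching API. This file never
   actually creates the keyval, so the helper name and the registration call below
   are an assumption about the intended usage, not existing behavior.
*/
#if 0
static PetscErrorCode PetscShmCommKeyvalCreate(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (Petsc_ShmComm_keyval == MPI_KEYVAL_INVALID) {
    /* the cast adapts Petsc_DeleteShared() to MPI's delete-callback binding */
    ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,(MPI_Comm_delete_attr_function*)Petsc_DeleteShared,&Petsc_ShmComm_keyval,NULL);CHKERRMPI(ierr);
  }
  PetscFunctionReturn(0);
}
#endif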

/*
    This routine is still incomplete and needs work.

    For this to work on Apple Mac OS X you will likely need to add something like the
    following to the file /etc/sysctl.conf:

      kern.sysv.shmmax=67108864
      kern.sysv.shmmin=1
      kern.sysv.shmmni=32
      kern.sysv.shmseg=512
      kern.sysv.shmall=1024

    This does not currently free the shared memory after the program runs (see the cleanup
    sketch after this routine). Use the Unix command ipcs to list the shared memory segments
    in use and ipcrm to remove them.
*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscErrorCode ierr;
  PetscInt       shift;
  PetscMPIInt    rank;
  int            id;
  char           *value;

  PetscFunctionBegin;
  *result = NULL;

  /* the prefix sum of the local lengths gives this process's byte offset into the segment */
  ierr   = MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  shift -= llen;

  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (!rank) {
    /* the first process creates the segment; IPC_PRIVATE guarantees a fresh one */
    id = shmget(IPC_PRIVATE,len,0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  }
  /* broadcast the segment id so that every process attaches the same segment */
  ierr  = MPI_Bcast(&id,1,MPI_INT,0,comm);CHKERRMPI(ierr);
  value = shmat(id,(void*)0,0);
  if (value == (char*)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
  }
  *result = (void*)(value + shift);
  PetscFunctionReturn(0);
}
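
/*
   Nothing above detaches or removes the System V segment (see the note before
   PetscSharedMalloc()). A minimal sketch of the missing cleanup, guarded out with
   #if 0; the name PetscSharedFree and the idea of passing back the segment id and
   the original attach address are hypothetical, not part of this file.
*/
#if 0
static PetscErrorCode PetscSharedFree(MPI_Comm comm,int id,void *attachaddr)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  /* attachaddr must be the address returned by shmat(), not the shifted user pointer */
  if (shmdt(attachaddr)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to detach shared memory");
  /* every process must detach before the segment is marked for removal */
  ierr = MPI_Barrier(comm);CHKERRMPI(ierr);
  if (!rank && shmctl(id,IPC_RMID,NULL)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to remove shared memory");
  PetscFunctionReturn(0);
}
#endif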

#else

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)vv),&size);CHKERRMPI(ierr);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"No support for shared memory vector objects on this machine");
  ierr = VecCreate_Seq(vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#endif

/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   Currently VecCreateShared() is available only on machines where PETSc was
   configured to use System V shared memory (PETSC_USE_SHARED_MEMORY); otherwise
   this routine is equivalent to VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

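   Example Usage:
   A minimal sketch; on systems built without shared-memory support the vector
   behaves exactly like one obtained from VecCreateMPI().
.vb
   Vec x;
   ierr = VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
   ierr = VecSet(x,1.0);CHKERRQ(ierr);
   ierr = VecDestroy(&x);CHKERRQ(ierr);
.ve
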
   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
205