
/*
   This file contains routines for parallel vector operations that use shared memory
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* the first process allocates the entire array and sends its address to the others */
  ierr = PetscSharedMalloc(PetscObjectComm((PetscObject)win),win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map->n,win->map->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);CHKERRQ(ierr);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = PetscSplitOwnership(PetscObjectComm((PetscObject)vv),&vv->map->n,&vv->map->N);CHKERRQ(ierr);
  ierr = PetscSharedMalloc(PetscObjectComm((PetscObject)vv),vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  vv->ops->duplicate = VecDuplicate_Shared;
  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>

static PetscMPIInt Petsc_ShmComm_keyval = MPI_KEYVAL_INVALID;

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscMPIInt keyval,void *attr_val,void *extra_state)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(attr_val);CHKERRQ(ierr);
  PetscFunctionReturn(MPI_SUCCESS);
}
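
/*
   For reference, a delete callback like the one above would typically be attached
   when the keyval is created; a minimal sketch using standard MPI-2 calls (an
   assumption about intended use, not code from this file):

     if (Petsc_ShmComm_keyval == MPI_KEYVAL_INVALID) {
       ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,(MPI_Comm_delete_attr_function*)Petsc_DeleteShared,&Petsc_ShmComm_keyval,NULL);CHKERRMPI(ierr);
     }

   MPI then calls Petsc_DeleteShared automatically when any communicator carrying
   the attribute is freed.
*/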

/*
    This routine is still incomplete and needs work.

    For this to work on Apple Mac OS X you will likely need to add something like the
    following to the file /etc/sysctl.conf

      kern.sysv.shmmax=67108864
      kern.sysv.shmmin=1
      kern.sysv.shmmni=32
      kern.sysv.shmseg=512
      kern.sysv.shmall=1024

    This does not currently free the shared memory when the program ends. Use the Unix
    command ipcs (for example, ipcs -m) to list the shared memory segments in use and
    ipcrm to remove them.
*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscErrorCode ierr;
  PetscInt       shift;
  PetscMPIInt    rank;
  int            id;
  char           *value;

  PetscFunctionBegin;
  *result = NULL;

  /* compute this process's byte offset into the shared arena */
  ierr   = MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  shift -= llen;

  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (rank == 0) {
    /* a key of 0 is IPC_PRIVATE, so only the first process creates the segment;
       the resulting id is broadcast below so the others attach to the same segment */
    id = shmget(IPC_PRIVATE,len,0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to allocate shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to allocate shared memory");
    }
  }
  ierr  = MPI_Bcast(&id,1,MPI_INT,0,comm);CHKERRMPI(ierr);
  value = (char*)shmat(id,NULL,0);
  if (value == (char*)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
  }
  *result = (void*)(value + shift);
  PetscFunctionReturn(0);
}
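
/*
   A minimal usage sketch of PetscSharedMalloc (illustrative only, not code from this
   file): every process in the communicator calls it collectively with its local byte
   count and the global byte count, and each receives a pointer to its own slice of
   the common segment.

     PetscScalar *slice;
     ierr = PetscSharedMalloc(comm,n*sizeof(PetscScalar),N*sizeof(PetscScalar),(void**)&slice);CHKERRQ(ierr);
     slice[0] = 42.0;  // visible to every process attached to the segment

   Here comm, n, and N stand for the communicator and the local/global lengths, as in
   the callers above.
*/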

#else

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)vv),&size);CHKERRMPI(ierr);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"No support for shared memory vector objects on this machine");
  ierr = VecCreate_Seq(vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#endif

/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   VecCreateShared() is available only when PETSc is configured with shared-memory
   support (PETSC_USE_SHARED_MEMORY). Without it, this routine raises an error on
   communicators with more than one process and creates a standard sequential vector
   on a single process.

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
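
/*
   A minimal usage sketch (illustrative only, not part of this file):

     Vec      x;
     PetscInt N = 100;

     ierr = VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,N,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/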
198 
199