/*
   This file contains routines for parallel vector operations that use shared memory
*/
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_Shared"
PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  /* first processor allocates entire array and sends its address to the others */
  ierr = PetscSharedMalloc(PetscObjectComm((PetscObject)win),win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,win->map->n,win->map->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);CHKERRQ(ierr);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreate_Shared"
PETSC_EXTERN_C PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = PetscSplitOwnership(PetscObjectComm((PetscObject)vv),&vv->map->n,&vv->map->N);CHKERRQ(ierr);
  ierr = PetscSharedMalloc(PetscObjectComm((PetscObject)vv),vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);CHKERRQ(ierr);

  vv->ops->duplicate = VecDuplicate_Shared;
  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>

/* not currently used in this file */
static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;

#undef __FUNCT__
#define __FUNCT__ "Petsc_DeleteShared"
/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscMPIInt keyval,void *attr_val,void *extra_state)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(attr_val);CHKERRQ(ierr);
  PetscFunctionReturn(MPI_SUCCESS);
}

#undef __FUNCT__
#define __FUNCT__ "PetscSharedMalloc"
/*
    This routine is still incomplete and needs work.
    For this to work on Apple Mac OS X you will likely need to add something like the
    following to the file /etc/sysctl.conf:
      kern.sysv.shmmax=67108864
      kern.sysv.shmmin=1
      kern.sysv.shmmni=32
      kern.sysv.shmseg=512
      kern.sysv.shmall=1024

    This does not currently free the shared memory after the program runs. Use the Unix
    command ipcs to see the shared memory segments in use and ipcrm to remove them.

*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscErrorCode ierr;
  PetscInt       shift;
  PetscMPIInt    rank;
  int            id,key = 0;
  char           *value;

  PetscFunctionBegin;
  *result = 0;

  /* compute this process's byte offset into the shared arena */
  ierr   = MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  shift -= llen;

  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (!rank) {
    id = shmget(key,len,0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  }
  /* key 0 is IPC_PRIVATE, so the segment cannot be looked up by key on the other
     ranks; broadcast the id obtained by rank 0 so that everyone can attach to it */
  ierr  = MPI_Bcast(&id,1,MPI_INT,0,comm);CHKERRQ(ierr);
  value = shmat(id,(void*)0,0);
  if (value == (char*)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
  }
  *result = (void*)(value + shift);
  PetscFunctionReturn(0);
}

#else

PETSC_EXTERN_C PetscErrorCode VecCreate_Seq(Vec);

#undef __FUNCT__
#define __FUNCT__ "VecCreate_Shared"
PETSC_EXTERN_C PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)vv),&size);CHKERRQ(ierr);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Shared memory vector objects are not supported on this machine");
  ierr = VecCreate_Seq(vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#endif

#undef __FUNCT__
#define __FUNCT__ "VecCreateShared"
/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

   Concepts: vectors^creating with shared memory

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
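/*
   A minimal usage sketch for VecCreateShared() (kept in a comment so it is not
   compiled as part of this file; it is not taken from the PETSc examples, and the
   global length of 100 is an arbitrary choice): creates a shared memory vector over
   PETSC_COMM_WORLD, sets every entry, views it, and cleans up.

   #include <petscvec.h>

   int main(int argc,char **argv)
   {
     Vec            x;
     PetscErrorCode ierr;

     ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
     ierr = VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = PetscFinalize();
     return ierr;
   }
*/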