1 2 /* 3 Code for manipulating distributed regular arrays in parallel. 4 */ 5 6 #include <petsc-private/daimpl.h> /*I "petscdmda.h" I*/ 7 8 #undef __FUNCT__ 9 #define __FUNCT__ "DMCreateCoordinateDM_DA" 10 PetscErrorCode DMCreateCoordinateDM_DA(DM dm, DM *cdm) 11 { 12 DM_DA *da = (DM_DA *) dm->data; 13 PetscMPIInt size; 14 PetscErrorCode ierr; 15 16 PetscFunctionBegin; 17 ierr = MPI_Comm_size(((PetscObject) dm)->comm, &size);CHKERRQ(ierr); 18 if (da->dim == 1) { 19 PetscInt s,m,*lc,l; 20 DMDABoundaryType bx; 21 22 ierr = DMDAGetInfo(dm,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);CHKERRQ(ierr); 23 ierr = DMDAGetCorners(dm,0,0,0,&l,0,0);CHKERRQ(ierr); 24 ierr = PetscMalloc(size * sizeof(PetscInt), &lc);CHKERRQ(ierr); 25 ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr); 26 ierr = DMDACreate1d(((PetscObject)dm)->comm,bx,m,1,s,lc,cdm);CHKERRQ(ierr); 27 ierr = PetscFree(lc);CHKERRQ(ierr); 28 } else if (da->dim == 2) { 29 PetscInt i,s,m,*lc,*ld,l,k,n,M,N; 30 DMDABoundaryType bx,by; 31 32 ierr = DMDAGetInfo(dm,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);CHKERRQ(ierr); 33 ierr = DMDAGetCorners(dm,0,0,0,&l,&k,0);CHKERRQ(ierr); 34 ierr = PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);CHKERRQ(ierr); 35 /* only first M values in lc matter */ 36 ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr); 37 /* every Mth value in ld matters */ 38 ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr); 39 for(i = 0; i < N; ++i) { 40 ld[i] = ld[M*i]; 41 } 42 if (bx == DMDA_BOUNDARY_MIRROR || by == DMDA_BOUNDARY_MIRROR) { 43 ierr = DMDACreate2d(((PetscObject)dm)->comm,bx,by,DMDA_STENCIL_STAR,m,n,M,N,2,s,lc,ld,cdm);CHKERRQ(ierr); 44 } else { 45 ierr = DMDACreate2d(((PetscObject)dm)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,cdm);CHKERRQ(ierr); 46 } 47 ierr = PetscFree2(lc,ld);CHKERRQ(ierr); 48 } else if (da->dim == 3) { 49 PetscInt i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p; 50 DMDABoundaryType bx,by,bz; 51 52 ierr = DMDAGetInfo(dm,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);CHKERRQ(ierr); 53 ierr = DMDAGetCorners(dm,0,0,0,&l,&k,&q);CHKERRQ(ierr); 54 ierr = PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);CHKERRQ(ierr); 55 /* only first M values in lc matter */ 56 ierr = MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr); 57 /* every Mth value in ld matters */ 58 ierr = MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr); 59 for(i = 0; i < N; ++i) { 60 ld[i] = ld[M*i]; 61 } 62 ierr = MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr); 63 for(i = 0; i < P; ++i) { 64 le[i] = le[M*N*i]; 65 } 66 ierr = DMDACreate3d(((PetscObject)dm)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,cdm);CHKERRQ(ierr); 67 ierr = PetscFree3(lc,ld,le);CHKERRQ(ierr); 68 } 69 PetscFunctionReturn(0); 70 } 71 72 #undef __FUNCT__ 73 #define __FUNCT__ "DMDASetFieldName" 74 /*@C 75 DMDASetFieldName - Sets the names of individual field components in multicomponent 76 vectors associated with a DMDA. 77 78 Not Collective 79 80 Input Parameters: 81 + da - the distributed array 82 . nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the 83 number of degrees of freedom per node within the DMDA 84 - names - the name of the field (component) 85 86 Level: intermediate 87 88 .keywords: distributed array, get, component name 89 90 .seealso: DMDAGetFieldName() 91 @*/ 92 PetscErrorCode DMDASetFieldName(DM da,PetscInt nf,const char name[]) 93 { 94 PetscErrorCode ierr; 95 DM_DA *dd = (DM_DA*)da->data; 96 97 PetscFunctionBegin; 98 PetscValidHeaderSpecific(da,DM_CLASSID,1); 99 if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf); 100 ierr = PetscFree(dd->fieldname[nf]);CHKERRQ(ierr); 101 ierr = PetscStrallocpy(name,&dd->fieldname[nf]);CHKERRQ(ierr); 102 PetscFunctionReturn(0); 103 } 104 105 #undef __FUNCT__ 106 #define __FUNCT__ "DMDAGetFieldName" 107 /*@C 108 DMDAGetFieldName - Gets the names of individual field components in multicomponent 109 vectors associated with a DMDA. 110 111 Not Collective 112 113 Input Parameter: 114 + da - the distributed array 115 - nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the 116 number of degrees of freedom per node within the DMDA 117 118 Output Parameter: 119 . names - the name of the field (component) 120 121 Level: intermediate 122 123 .keywords: distributed array, get, component name 124 125 .seealso: DMDASetFieldName() 126 @*/ 127 PetscErrorCode DMDAGetFieldName(DM da,PetscInt nf,const char **name) 128 { 129 DM_DA *dd = (DM_DA*)da->data; 130 131 PetscFunctionBegin; 132 PetscValidHeaderSpecific(da,DM_CLASSID,1); 133 PetscValidPointer(name,3); 134 if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf); 135 *name = dd->fieldname[nf]; 136 PetscFunctionReturn(0); 137 } 138 139 #undef __FUNCT__ 140 #define __FUNCT__ "DMDAGetCorners" 141 /*@ 142 DMDAGetCorners - Returns the global (x,y,z) indices of the lower left 143 corner of the local region, excluding ghost points. 144 145 Not Collective 146 147 Input Parameter: 148 . da - the distributed array 149 150 Output Parameters: 151 + x,y,z - the corner indices (where y and z are optional; these are used 152 for 2D and 3D problems) 153 - m,n,p - widths in the corresponding directions (where n and p are optional; 154 these are used for 2D and 3D problems) 155 156 Note: 157 The corner information is independent of the number of degrees of 158 freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and 159 m, n, p can be thought of as coordinates on a logical grid, where each 160 grid point has (potentially) several degrees of freedom. 161 Any of y, z, n, and p can be passed in as PETSC_NULL if not needed. 162 163 Level: beginner 164 165 .keywords: distributed array, get, corners, nodes, local indices 166 167 .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges() 168 @*/ 169 PetscErrorCode DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p) 170 { 171 PetscInt w; 172 DM_DA *dd = (DM_DA*)da->data; 173 174 PetscFunctionBegin; 175 PetscValidHeaderSpecific(da,DM_CLASSID,1); 176 /* since the xs, xe ... have all been multiplied by the number of degrees 177 of freedom per cell, w = dd->w, we divide that out before returning.*/ 178 w = dd->w; 179 if (x) *x = dd->xs/w + dd->xo; if (m) *m = (dd->xe - dd->xs)/w; 180 /* the y and z have NOT been multiplied by w */ 181 if (y) *y = dd->ys + dd->yo; if (n) *n = (dd->ye - dd->ys); 182 if (z) *z = dd->zs + dd->zo; if (p) *p = (dd->ze - dd->zs); 183 PetscFunctionReturn(0); 184 } 185 186 #undef __FUNCT__ 187 #define __FUNCT__ "DMDAGetLocalBoundingBox" 188 /*@ 189 DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA. 190 191 Not Collective 192 193 Input Parameter: 194 . da - the distributed array 195 196 Output Parameters: 197 + lmin - local minimum coordinates (length dim, optional) 198 - lmax - local maximim coordinates (length dim, optional) 199 200 Level: beginner 201 202 .keywords: distributed array, get, coordinates 203 204 .seealso: DMDAGetCoordinateDA(), DMGetCoordinates(), DMDAGetBoundingBox() 205 @*/ 206 PetscErrorCode DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[]) 207 { 208 PetscErrorCode ierr; 209 Vec coords = PETSC_NULL; 210 PetscInt dim,i,j; 211 const PetscScalar *local_coords; 212 PetscReal min[3]={PETSC_MAX_REAL,PETSC_MAX_REAL,PETSC_MAX_REAL},max[3]={PETSC_MIN_REAL,PETSC_MIN_REAL,PETSC_MIN_REAL}; 213 PetscInt N,Ni; 214 DM_DA *dd = (DM_DA*)da->data; 215 216 PetscFunctionBegin; 217 PetscValidHeaderSpecific(da,DM_CLASSID,1); 218 dim = dd->dim; 219 ierr = DMGetCoordinates(da,&coords);CHKERRQ(ierr); 220 if (coords) { 221 ierr = VecGetArrayRead(coords,&local_coords);CHKERRQ(ierr); 222 ierr = VecGetLocalSize(coords,&N);CHKERRQ(ierr); 223 Ni = N/dim; 224 for (i=0; i<Ni; i++) { 225 for (j=0; j<3; j++) { 226 min[j] = j < dim ? PetscMin(min[j],PetscRealPart(local_coords[i*dim+j])) : 0; 227 max[j] = j < dim ? PetscMax(min[j],PetscRealPart(local_coords[i*dim+j])) : 0; 228 } 229 } 230 ierr = VecRestoreArrayRead(coords,&local_coords);CHKERRQ(ierr); 231 } else { /* Just use grid indices */ 232 DMDALocalInfo info; 233 ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr); 234 min[0] = info.xs; 235 min[1] = info.ys; 236 min[2] = info.zs; 237 max[0] = info.xs + info.xm-1; 238 max[1] = info.ys + info.ym-1; 239 max[2] = info.zs + info.zm-1; 240 } 241 if (lmin) {ierr = PetscMemcpy(lmin,min,dim*sizeof(PetscReal));CHKERRQ(ierr);} 242 if (lmax) {ierr = PetscMemcpy(lmax,max,dim*sizeof(PetscReal));CHKERRQ(ierr);} 243 PetscFunctionReturn(0); 244 } 245 246 #undef __FUNCT__ 247 #define __FUNCT__ "DMDAGetBoundingBox" 248 /*@ 249 DMDAGetBoundingBox - Returns the global bounding box for the DMDA. 250 251 Collective on DMDA 252 253 Input Parameter: 254 . da - the distributed array 255 256 Output Parameters: 257 + gmin - global minimum coordinates (length dim, optional) 258 - gmax - global maximim coordinates (length dim, optional) 259 260 Level: beginner 261 262 .keywords: distributed array, get, coordinates 263 264 .seealso: DMDAGetCoordinateDA(), DMGetCoordinates(), DMDAGetLocalBoundingBox() 265 @*/ 266 PetscErrorCode DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[]) 267 { 268 PetscErrorCode ierr; 269 PetscMPIInt count; 270 PetscReal lmin[3],lmax[3]; 271 DM_DA *dd = (DM_DA*)da->data; 272 273 PetscFunctionBegin; 274 PetscValidHeaderSpecific(da,DM_CLASSID,1); 275 count = PetscMPIIntCast(dd->dim); 276 ierr = DMDAGetLocalBoundingBox(da,lmin,lmax);CHKERRQ(ierr); 277 if (gmin) {ierr = MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPIU_MIN,((PetscObject)da)->comm);CHKERRQ(ierr);} 278 if (gmax) {ierr = MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPIU_MAX,((PetscObject)da)->comm);CHKERRQ(ierr);} 279 PetscFunctionReturn(0); 280 } 281 282 #undef __FUNCT__ 283 #define __FUNCT__ "DMDAGetReducedDA" 284 /*@ 285 DMDAGetReducedDA - Gets the DMDA with the same layout but with fewer or more fields 286 287 Collective on DMDA 288 289 Input Parameter: 290 + da - the distributed array 291 . nfields - number of fields in new DMDA 292 293 Output Parameter: 294 . nda - the new DMDA 295 296 Level: intermediate 297 298 .keywords: distributed array, get, corners, nodes, local indices, coordinates 299 300 .seealso: DMDAGetGhostCorners(), DMSetCoordinates(), DMDASetUniformCoordinates(), DMGetCoordinates(), DMDAGetGhostedCoordinates() 301 @*/ 302 PetscErrorCode DMDAGetReducedDA(DM da,PetscInt nfields,DM *nda) 303 { 304 PetscErrorCode ierr; 305 DM_DA *dd = (DM_DA*)da->data; 306 PetscInt s,m,n,p,M,N,P,dim; 307 const PetscInt *lx,*ly,*lz; 308 DMDABoundaryType bx,by,bz; 309 DMDAStencilType stencil_type; 310 311 PetscFunctionBegin; 312 ierr = DMDAGetInfo(da,&dim,&M,&N,&P,&m,&n,&p,0,&s,&bx,&by,&bz,&stencil_type);CHKERRQ(ierr); 313 ierr = DMDAGetOwnershipRanges(da,&lx,&ly,&lz);CHKERRQ(ierr); 314 if (dim == 1) { 315 ierr = DMDACreate1d(((PetscObject)da)->comm,bx,M,nfields,s,dd->lx,nda);CHKERRQ(ierr); 316 } else if (dim == 2) { 317 ierr = DMDACreate2d(((PetscObject)da)->comm,bx,by,stencil_type,M,N,m,n,nfields,s,lx,ly,nda);CHKERRQ(ierr); 318 } else if (dim == 3) { 319 ierr = DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,stencil_type,M,N,P,m,n,p,nfields,s,lx,ly,lz,nda);CHKERRQ(ierr); 320 } 321 if (da->coordinates) { 322 ierr = PetscObjectReference((PetscObject)da->coordinates);CHKERRQ(ierr); 323 (*nda)->coordinates = da->coordinates; 324 } 325 PetscFunctionReturn(0); 326 } 327 328