#define PETSCDM_DLL

#include "petscda.h"     /*I      "petscda.h"     I*/
#include "petscmat.h"    /*I      "petscmat.h"    I*/
#include "private/dmimpl.h"

/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;
} DMSlicedBlockFills;

typedef struct {
  Vec                 globalvector;
  PetscInt            bs,n,N,Nghosts,*ghosts;
  PetscInt            d_nz,o_nz,*d_nnz,*o_nnz;
  DMSlicedBlockFills  *dfill,*ofill;
} DM_Sliced;

#undef __FUNCT__
#define __FUNCT__ "DMGetMatrix_Sliced"
PetscErrorCode PETSCDM_DLLEXPORT DMGetMatrix_Sliced(DM dm,const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap,blmap;
  void                   (*aij)(void) = PETSC_NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs = slice->bs;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
   * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL,slice->o_nz*bs,PETSC_NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row; convert it to per scalar-row, respecting DMSlicedSetBlockFills() if applicable */
      ierr = PetscMalloc2(slice->n*bs,PetscInt,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,PetscInt,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                    + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc((slice->n+slice->Nghosts)*bs*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<slice->n*bs; i++) {
    globals[i] = rstart + i;
  }
  for (i=0; i<slice->Nghosts*bs; i++) {
    globals[slice->n*bs+i] = slice->ghosts[i/bs]*bs + i%bs;
  }
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,(slice->n+slice->Nghosts)*bs,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(lmap,bs,&blmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,blmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(blmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
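
/*
   Illustrative sketch (not part of this source): how an application might assemble the matrix produced by
   the routine above, assuming it is obtained through the generic DMGetMatrix() entry point that dispatches
   to DMGetMatrix_Sliced().  MATMPIAIJ could be passed instead of MATMPIBAIJ for a scalar format.  The names
   dm, nblocks, row, ncols, cols, and vals are hypothetical application data; local indices are block
   indices, with owned blocks first followed by ghost blocks, matching the mapping constructed above.

     Mat      J;
     PetscInt i;
     ierr = DMGetMatrix(dm,MATMPIBAIJ,&J);CHKERRQ(ierr);
     for (i=0; i<nblocks; i++) {
       ierr = MatSetValuesBlockedLocal(J,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
     }
     ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/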

#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetGhosts"
/*@C
    DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
      be ghosts on this process

    Not Collective

    Input Parameters:
+    dm - the DM object
.    bs - block size
.    nlocal - number of local (owned, non-ghost) blocks
.    Nghosts - number of ghost blocks on this process
-    ghosts - global indices of each ghost block

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  ierr = PetscMalloc(Nghosts*sizeof(PetscInt),&slice->ghosts);CHKERRQ(ierr);
  ierr = PetscMemcpy(slice->ghosts,ghosts,Nghosts*sizeof(PetscInt));CHKERRQ(ierr);
  slice->bs      = bs;
  slice->n       = nlocal;
  slice->Nghosts = Nghosts;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetPreallocation"
/*@C
    DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced

    Not Collective

    Input Parameters:
+    dm - the DM object
.    d_nz - number of block nonzeros per block row in the diagonal portion of the local
           submatrix (same for all local rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           of the diagonal portion of the local submatrix (possibly different for each block
           row) or PETSC_NULL.  You must leave room for the diagonal entry even if it is zero.
.    o_nz - number of block nonzeros per block row in the off-diagonal portion of the local
           submatrix (same for all local rows)
-    o_nnz - array containing the number of block nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row) or PETSC_NULL

    Notes:
    See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
    obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices(), MatMPIAIJSetPreallocation(),
         MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  DM_Sliced *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  slice->d_nz  = d_nz;
  slice->d_nnz = (PetscInt*)d_nnz;
  slice->o_nz  = o_nz;
  slice->o_nnz = (PetscInt*)o_nnz;
  PetscFunctionReturn(0);
}
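
/*
   Illustrative sketch (not part of this source): a typical setup sequence, using only the routines in this
   file, before asking the sliced DM for vectors or matrices.  The block size, local size, and ghost list
   are hypothetical; the ghost entries are global block indices owned by other processes.

     DM             dm;
     const PetscInt ghosts[] = {500,1200};
     ierr = DMSlicedCreate(PETSC_COMM_WORLD,&dm);CHKERRQ(ierr);
     ierr = DMSlicedSetGhosts(dm,3,100,2,ghosts);CHKERRQ(ierr);
     ierr = DMSlicedSetPreallocation(dm,5,PETSC_NULL,1,PETSC_NULL);CHKERRQ(ierr);

   Here the block size is 3, this process owns 100 blocks and has 2 ghost blocks, and each block row is
   preallocated for 5 blocks in the diagonal portion and 1 block in the off-diagonal portion.
*/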

#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetBlockFills_Private"
static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
{
  PetscErrorCode     ierr;
  PetscInt           i,j,nz,*fi,*fj;
  DMSlicedBlockFills *f;

  PetscFunctionBegin;
  PetscValidPointer(inf,3);
  if (*inf) {ierr = PetscFree3((*inf)->i,(*inf)->j,*inf);CHKERRQ(ierr);}
  if (!fill) PetscFunctionReturn(0);
  for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
  ierr = PetscMalloc3(1,DMSlicedBlockFills,&f,bs+1,PetscInt,&fi,nz,PetscInt,&fj);CHKERRQ(ierr);
  f->bs = bs;
  f->nz = nz;
  f->i  = fi;
  f->j  = fj;
  for (i=0,nz=0; i<bs; i++) {
    fi[i] = nz;
    for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
  }
  fi[i] = nz;
  *inf  = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetBlockFills"
/*@C
    DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
    of the matrix returned by DMSlicedGetMatrix().

    Logically Collective on DM

    Input Parameters:
+    dm - the DM object
.    dfill - the fill pattern in the diagonal block (may be PETSC_NULL, meaning a dense block)
-    ofill - the fill pattern in the off-diagonal blocks

    Notes:
    This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
    See DASetBlockFills() for example usage.

    Level: advanced

.seealso DMSlicedGetMatrix(), DASetBlockFills()
@*/
PetscErrorCode PETSCDM_DLLEXPORT DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
  ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
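
/*
   Illustrative sketch (not part of this source): the dfill/ofill arguments are row-major bs-by-bs arrays of
   0/1 flags.  For a hypothetical 3-component problem whose diagonal block is dense while off-diagonal
   blocks couple only to the first component of the neighboring block, one could write

     const PetscInt dfill[9] = {1,1,1,
                                1,1,1,
                                1,1,1};
     const PetscInt ofill[9] = {1,0,0,
                                1,0,0,
                                1,0,0};
     ierr = DMSlicedSetBlockFills(dm,dfill,ofill);CHKERRQ(ierr);

   With this ofill, each scalar row of an AIJ matrix from DMGetMatrix_Sliced() is preallocated for one
   off-diagonal entry per neighboring block instead of bs entries.
*/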

extern PetscErrorCode DMDestroy_Private(DM,PetscBool *);

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Sliced"
PetscErrorCode PETSCDM_DLLEXPORT DMDestroy_Sliced(DM dm)
{
  PetscErrorCode ierr;
  PetscBool      done;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  ierr = DMDestroy_Private(dm,&done);CHKERRQ(ierr);
  if (!done) PetscFunctionReturn(0);

  if (slice->globalvector) {ierr = VecDestroy(slice->globalvector);CHKERRQ(ierr);}
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
  if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Sliced"
PetscErrorCode PETSCDM_DLLEXPORT DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
{
  PetscErrorCode ierr;
  PetscInt       bs,cnt;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  if (slice->globalvector) {
    ierr = PetscObjectGetReference((PetscObject)slice->globalvector,&cnt);CHKERRQ(ierr);
    if (cnt == 1) {             /* Nobody else has a reference so we can just reference it and give it away */
      *gvec = slice->globalvector;
      ierr  = PetscObjectReference((PetscObject)*gvec);CHKERRQ(ierr);
      ierr  = VecZeroEntries(*gvec);CHKERRQ(ierr);
    } else {                    /* Someone else has a reference so we duplicate the global vector */
      ierr = VecDuplicate(slice->globalvector,gvec);CHKERRQ(ierr);
    }
  } else {
    bs = slice->bs;
    ierr = VecCreateGhostBlock(((PetscObject)dm)->comm,bs,slice->n*bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,&slice->globalvector);CHKERRQ(ierr);
    *gvec = slice->globalvector;
    ierr  = PetscObjectReference((PetscObject)*gvec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSlicedCreate"
/*@C
    DMSlicedCreate - Creates a DM object, used to manage data for an unstructured problem

    Collective on MPI_Comm

    Input Parameter:
.    comm - the processors that will share the global vector

    Output Parameter:
.    dm - the new sliced DM object

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMSlicedCreate(MPI_Comm comm,DM *dm)
{
  PetscErrorCode ierr;
  DM             p;
  DM_Sliced      *slice;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(p,_p_DM,struct _DMOps,DM_CLASSID,0,"DM",comm,DMDestroy,DMView);CHKERRQ(ierr);
  ierr = PetscNewLog(p,DM_Sliced,&slice);CHKERRQ(ierr);
  p->data = slice;

  ierr = PetscObjectChangeTypeName((PetscObject)p,"Sliced");CHKERRQ(ierr);
  p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
  p->ops->getmatrix          = DMGetMatrix_Sliced;
  p->ops->destroy            = DMDestroy_Sliced;
  *dm = p;
  PetscFunctionReturn(0);
}
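
/*
   Illustrative sketch (not part of this source): obtaining the global vector managed by a sliced DM.  The
   vector is a ghosted block vector with the owned entries first; dm is a hypothetical, fully set up DM.
   VecDestroy() here only drops the caller's reference, since the DM keeps its own reference to the vector.

     Vec x;
     ierr = DMCreateGlobalVector(dm,&x);CHKERRQ(ierr);
     ierr = VecSet(x,0.0);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
*/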

#undef __FUNCT__
#define __FUNCT__ "DMSlicedGetGlobalIndices"
/*@C
    DMSlicedGetGlobalIndices - Gets the global indices for all the local entries

    Collective on DM

    Input Parameter:
.    dm - the sliced DM object

    Output Parameter:
.    idx - the individual indices for each packed vector/array

    Level: advanced

    Notes:
    The idx array should be freed by the calling routine with PetscFree().

.seealso DMSlicedDestroy(), DMSlicedCreateGlobalVector(), DMSlicedCreate()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMSlicedGetGlobalIndices(DM dm,PetscInt *idx[])
{
  PetscFunctionBegin;
  /* Not yet implemented: idx is not set */
  PetscFunctionReturn(0);
}

/* Explanation of the missing functions for DA-style handling of the local vector:

     DMSlicedCreateLocalVector()
     DMSlicedGlobalToLocalBegin()
     DMSlicedGlobalToLocalEnd()

   There is no way to get the global form from a local form, so DMSlicedCreateLocalVector() would be a memory leak without
   external accounting for the global vector.  Also, DMSliced intends the user to work with the VecGhost interface, since the
   ghosts are already ordered after the owned entries.  Contrast this to a DA, where the local vector has a special
   ordering described by the structured grid, hence it cannot share memory with the global form.  For this reason, users
   of DMSliced should work with the global vector and use

     VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
     VecGhostUpdateBegin(), VecGhostUpdateEnd()

   rather than the missing DA-style functions.  This is conceptually simpler and offers better performance than is
   possible with the DA-style interface.
*/
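
/*
   Illustrative sketch (not part of this source): using the VecGhost interface recommended above to update
   and access ghost values of a global vector obtained from a sliced DM.  dm is a hypothetical, fully set
   up DM.

     Vec x,xlocal;
     ierr = DMCreateGlobalVector(dm,&x);CHKERRQ(ierr);
     (set the owned entries of x here)
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);
     (xlocal now holds the owned entries followed by the ghost entries)
     ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
*/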