1 #include <petsc/private/dmdaimpl.h> /*I "petscdmda.h" I*/ 2 3 /*@ 4 DMDASetSizes - Sets the number of grid points in the three dimensional directions 5 6 Logically Collective on da 7 8 Input Parameters: 9 + da - the DMDA 10 . M - the global X size 11 . N - the global Y size 12 - P - the global Z size 13 14 Level: intermediate 15 16 Developer Notes: 17 Since the dimension may not yet have been set the code cannot error check for non-positive Y and Z number of grid points 18 19 .seealso: `PetscSplitOwnership()` 20 @*/ 21 PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P) 22 { 23 DM_DA *dd = (DM_DA*)da->data; 24 25 PetscFunctionBegin; 26 PetscValidHeaderSpecificType(da, DM_CLASSID, 1,DMDA); 27 PetscValidLogicalCollectiveInt(da,M,2); 28 PetscValidLogicalCollectiveInt(da,N,3); 29 PetscValidLogicalCollectiveInt(da,P,4); 30 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 31 PetscCheck(M >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in X direction must be positive"); 32 PetscCheck(N >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in Y direction must be positive"); 33 PetscCheck(P >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Number of grid points in Z direction must be positive"); 34 35 dd->M = M; 36 dd->N = N; 37 dd->P = P; 38 PetscFunctionReturn(0); 39 } 40 41 /*@ 42 DMDASetNumProcs - Sets the number of processes in each dimension 43 44 Logically Collective on da 45 46 Input Parameters: 47 + da - the DMDA 48 . m - the number of X procs (or PETSC_DECIDE) 49 . 
n - the number of Y procs (or PETSC_DECIDE) 50 - p - the number of Z procs (or PETSC_DECIDE) 51 52 Level: intermediate 53 54 .seealso: `DMDASetSizes()`, `PetscSplitOwnership()` 55 @*/ 56 PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p) 57 { 58 DM_DA *dd = (DM_DA*)da->data; 59 60 PetscFunctionBegin; 61 PetscValidHeaderSpecificType(da, DM_CLASSID, 1,DMDA); 62 PetscValidLogicalCollectiveInt(da,m,2); 63 PetscValidLogicalCollectiveInt(da,n,3); 64 PetscValidLogicalCollectiveInt(da,p,4); 65 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 66 dd->m = m; 67 dd->n = n; 68 dd->p = p; 69 if (da->dim == 2) { 70 PetscMPIInt size; 71 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da),&size)); 72 if ((dd->m > 0) && (dd->n < 0)) { 73 dd->n = size/dd->m; 74 PetscCheck(dd->n*dd->m == size,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%" PetscInt_FMT " processes in X direction not divisible into comm size %d",m,size); 75 } 76 if ((dd->n > 0) && (dd->m < 0)) { 77 dd->m = size/dd->n; 78 PetscCheck(dd->n*dd->m == size,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"%" PetscInt_FMT " processes in Y direction not divisible into comm size %d",n,size); 79 } 80 } 81 PetscFunctionReturn(0); 82 } 83 84 /*@ 85 DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries. 
86 87 Not collective 88 89 Input Parameters: 90 + da - The DMDA 91 - bx,by,bz - One of DM_BOUNDARY_NONE, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_PERIODIC 92 93 Level: intermediate 94 95 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA`, `DMBoundaryType` 96 @*/ 97 PetscErrorCode DMDASetBoundaryType(DM da,DMBoundaryType bx,DMBoundaryType by,DMBoundaryType bz) 98 { 99 DM_DA *dd = (DM_DA*)da->data; 100 101 PetscFunctionBegin; 102 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 103 PetscValidLogicalCollectiveEnum(da,bx,2); 104 PetscValidLogicalCollectiveEnum(da,by,3); 105 PetscValidLogicalCollectiveEnum(da,bz,4); 106 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 107 dd->bx = bx; 108 dd->by = by; 109 dd->bz = bz; 110 PetscFunctionReturn(0); 111 } 112 113 /*@ 114 DMDASetDof - Sets the number of degrees of freedom per vertex 115 116 Not collective 117 118 Input Parameters: 119 + da - The DMDA 120 - dof - Number of degrees of freedom 121 122 Level: intermediate 123 124 .seealso: `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`, `DMDA` 125 @*/ 126 PetscErrorCode DMDASetDof(DM da, PetscInt dof) 127 { 128 DM_DA *dd = (DM_DA*)da->data; 129 130 PetscFunctionBegin; 131 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 132 PetscValidLogicalCollectiveInt(da,dof,2); 133 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 134 dd->w = dof; 135 da->bs = dof; 136 PetscFunctionReturn(0); 137 } 138 139 /*@ 140 DMDAGetDof - Gets the number of degrees of freedom per vertex 141 142 Not collective 143 144 Input Parameter: 145 . da - The DMDA 146 147 Output Parameter: 148 . 
dof - Number of degrees of freedom 149 150 Level: intermediate 151 152 .seealso: `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`, `DMDA` 153 @*/ 154 PetscErrorCode DMDAGetDof(DM da, PetscInt *dof) 155 { 156 DM_DA *dd = (DM_DA *) da->data; 157 158 PetscFunctionBegin; 159 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 160 PetscValidIntPointer(dof,2); 161 *dof = dd->w; 162 PetscFunctionReturn(0); 163 } 164 165 /*@ 166 DMDAGetOverlap - Gets the size of the per-processor overlap. 167 168 Not collective 169 170 Input Parameter: 171 . da - The DMDA 172 173 Output Parameters: 174 + x - Overlap in the x direction 175 . y - Overlap in the y direction 176 - z - Overlap in the z direction 177 178 Level: intermediate 179 180 .seealso: `DMCreateDomainDecomposition()`, `DMDASetOverlap()`, `DMDA` 181 @*/ 182 PetscErrorCode DMDAGetOverlap(DM da,PetscInt *x,PetscInt *y,PetscInt *z) 183 { 184 DM_DA *dd = (DM_DA*)da->data; 185 186 PetscFunctionBegin; 187 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 188 if (x) *x = dd->xol; 189 if (y) *y = dd->yol; 190 if (z) *z = dd->zol; 191 PetscFunctionReturn(0); 192 } 193 194 /*@ 195 DMDASetOverlap - Sets the size of the per-processor overlap. 196 197 Not collective 198 199 Input Parameters: 200 + da - The DMDA 201 . x - Overlap in the x direction 202 . 
y - Overlap in the y direction 203 - z - Overlap in the z direction 204 205 Level: intermediate 206 207 .seealso: `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`, `DMDA` 208 @*/ 209 PetscErrorCode DMDASetOverlap(DM da,PetscInt x,PetscInt y,PetscInt z) 210 { 211 DM_DA *dd = (DM_DA*)da->data; 212 213 PetscFunctionBegin; 214 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 215 PetscValidLogicalCollectiveInt(da,x,2); 216 PetscValidLogicalCollectiveInt(da,y,3); 217 PetscValidLogicalCollectiveInt(da,z,4); 218 dd->xol = x; 219 dd->yol = y; 220 dd->zol = z; 221 PetscFunctionReturn(0); 222 } 223 224 /*@ 225 DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition. 226 227 Not collective 228 229 Input Parameters: 230 . da - The DMDA 231 232 Output Parameters: 233 . Nsub - Number of local subdomains created upon decomposition 234 235 Level: intermediate 236 237 .seealso: `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`, `DMDA` 238 @*/ 239 PetscErrorCode DMDAGetNumLocalSubDomains(DM da,PetscInt *Nsub) 240 { 241 DM_DA *dd = (DM_DA*)da->data; 242 243 PetscFunctionBegin; 244 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 245 if (Nsub) *Nsub = dd->Nsub; 246 PetscFunctionReturn(0); 247 } 248 249 /*@ 250 DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition. 251 252 Not collective 253 254 Input Parameters: 255 + da - The DMDA 256 - Nsub - The number of local subdomains requested 257 258 Level: intermediate 259 260 .seealso: `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`, `DMDA` 261 @*/ 262 PetscErrorCode DMDASetNumLocalSubDomains(DM da,PetscInt Nsub) 263 { 264 DM_DA *dd = (DM_DA*)da->data; 265 266 PetscFunctionBegin; 267 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 268 PetscValidLogicalCollectiveInt(da,Nsub,2); 269 dd->Nsub = Nsub; 270 PetscFunctionReturn(0); 271 } 272 273 /*@ 274 DMDASetOffset - Sets the index offset of the DA. 
275 276 Collective on DA 277 278 Input Parameters: 279 + da - The DMDA 280 . xo - The offset in the x direction 281 . yo - The offset in the y direction 282 - zo - The offset in the z direction 283 284 Level: intermediate 285 286 Notes: 287 This is used primarily to overlap a computation on a local DA with that on a global DA without 288 changing boundary conditions or subdomain features that depend upon the global offsets. 289 290 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 291 @*/ 292 PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po) 293 { 294 DM_DA *dd = (DM_DA*)da->data; 295 296 PetscFunctionBegin; 297 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 298 PetscValidLogicalCollectiveInt(da,xo,2); 299 PetscValidLogicalCollectiveInt(da,yo,3); 300 PetscValidLogicalCollectiveInt(da,zo,4); 301 PetscValidLogicalCollectiveInt(da,Mo,5); 302 PetscValidLogicalCollectiveInt(da,No,6); 303 PetscValidLogicalCollectiveInt(da,Po,7); 304 dd->xo = xo; 305 dd->yo = yo; 306 dd->zo = zo; 307 dd->Mo = Mo; 308 dd->No = No; 309 dd->Po = Po; 310 311 if (da->coordinateDM) PetscCall(DMDASetOffset(da->coordinateDM,xo,yo,zo,Mo,No,Po)); 312 PetscFunctionReturn(0); 313 } 314 315 /*@ 316 DMDAGetOffset - Gets the index offset of the DA. 317 318 Not collective 319 320 Input Parameter: 321 . da - The DMDA 322 323 Output Parameters: 324 + xo - The offset in the x direction 325 . yo - The offset in the y direction 326 . zo - The offset in the z direction 327 . Mo - The global size in the x direction 328 . 
No - The global size in the y direction 329 - Po - The global size in the z direction 330 331 Level: intermediate 332 333 .seealso: `DMDASetOffset()`, `DMDAVecGetArray()` 334 @*/ 335 PetscErrorCode DMDAGetOffset(DM da,PetscInt *xo,PetscInt *yo,PetscInt *zo,PetscInt *Mo,PetscInt *No,PetscInt *Po) 336 { 337 DM_DA *dd = (DM_DA*)da->data; 338 339 PetscFunctionBegin; 340 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 341 if (xo) *xo = dd->xo; 342 if (yo) *yo = dd->yo; 343 if (zo) *zo = dd->zo; 344 if (Mo) *Mo = dd->Mo; 345 if (No) *No = dd->No; 346 if (Po) *Po = dd->Po; 347 PetscFunctionReturn(0); 348 } 349 350 /*@ 351 DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain DM. 352 353 Not collective 354 355 Input Parameter: 356 . da - The DMDA 357 358 Output Parameters: 359 + xs - The start of the region in x 360 . ys - The start of the region in y 361 . zs - The start of the region in z 362 . xs - The size of the region in x 363 . ys - The size of the region in y 364 - zs - The size of the region in z 365 366 Level: intermediate 367 368 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 369 @*/ 370 PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm) 371 { 372 DM_DA *dd = (DM_DA*)da->data; 373 374 PetscFunctionBegin; 375 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 376 if (xs) *xs = dd->nonxs; 377 if (ys) *ys = dd->nonys; 378 if (zs) *zs = dd->nonzs; 379 if (xm) *xm = dd->nonxm; 380 if (ym) *ym = dd->nonym; 381 if (zm) *zm = dd->nonzm; 382 PetscFunctionReturn(0); 383 } 384 385 /*@ 386 DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain DM. 387 388 Collective on DA 389 390 Input Parameters: 391 + da - The DMDA 392 . xs - The start of the region in x 393 . ys - The start of the region in y 394 . zs - The start of the region in z 395 . xs - The size of the region in x 396 . 
ys - The size of the region in y 397 - zs - The size of the region in z 398 399 Level: intermediate 400 401 .seealso: `DMDAGetOffset()`, `DMDAVecGetArray()` 402 @*/ 403 PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm) 404 { 405 DM_DA *dd = (DM_DA*)da->data; 406 407 PetscFunctionBegin; 408 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 409 PetscValidLogicalCollectiveInt(da,xs,2); 410 PetscValidLogicalCollectiveInt(da,ys,3); 411 PetscValidLogicalCollectiveInt(da,zs,4); 412 PetscValidLogicalCollectiveInt(da,xm,5); 413 PetscValidLogicalCollectiveInt(da,ym,6); 414 PetscValidLogicalCollectiveInt(da,zm,7); 415 dd->nonxs = xs; 416 dd->nonys = ys; 417 dd->nonzs = zs; 418 dd->nonxm = xm; 419 dd->nonym = ym; 420 dd->nonzm = zm; 421 422 PetscFunctionReturn(0); 423 } 424 425 /*@ 426 DMDASetStencilType - Sets the type of the communication stencil 427 428 Logically Collective on da 429 430 Input Parameters: 431 + da - The DMDA 432 - stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 433 434 Level: intermediate 435 436 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 437 @*/ 438 PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype) 439 { 440 DM_DA *dd = (DM_DA*)da->data; 441 442 PetscFunctionBegin; 443 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 444 PetscValidLogicalCollectiveEnum(da,stype,2); 445 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 446 dd->stencil_type = stype; 447 PetscFunctionReturn(0); 448 } 449 450 /*@ 451 DMDAGetStencilType - Gets the type of the communication stencil 452 453 Not collective 454 455 Input Parameter: 456 . da - The DMDA 457 458 Output Parameter: 459 . stype - The stencil type, use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR. 
460 461 Level: intermediate 462 463 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 464 @*/ 465 PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype) 466 { 467 DM_DA *dd = (DM_DA*)da->data; 468 469 PetscFunctionBegin; 470 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 471 PetscValidPointer(stype,2); 472 *stype = dd->stencil_type; 473 PetscFunctionReturn(0); 474 } 475 476 /*@ 477 DMDASetStencilWidth - Sets the width of the communication stencil 478 479 Logically Collective on da 480 481 Input Parameters: 482 + da - The DMDA 483 - width - The stencil width 484 485 Level: intermediate 486 487 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 488 @*/ 489 PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width) 490 { 491 DM_DA *dd = (DM_DA*)da->data; 492 493 PetscFunctionBegin; 494 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 495 PetscValidLogicalCollectiveInt(da,width,2); 496 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 497 dd->s = width; 498 PetscFunctionReturn(0); 499 } 500 501 /*@ 502 DMDAGetStencilWidth - Gets the width of the communication stencil 503 504 Not collective 505 506 Input Parameter: 507 . da - The DMDA 508 509 Output Parameter: 510 . 
width - The stencil width 511 512 Level: intermediate 513 514 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 515 @*/ 516 PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width) 517 { 518 DM_DA *dd = (DM_DA *) da->data; 519 520 PetscFunctionBegin; 521 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 522 PetscValidIntPointer(width,2); 523 *width = dd->s; 524 PetscFunctionReturn(0); 525 } 526 527 static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da,PetscInt M,PetscInt m,const PetscInt lx[]) 528 { 529 PetscInt i,sum; 530 531 PetscFunctionBegin; 532 PetscCheck(M >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Global dimension not set"); 533 for (i=sum=0; i<m; i++) sum += lx[i]; 534 PetscCheck(sum == M,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_INCOMP,"Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT,sum,M); 535 PetscFunctionReturn(0); 536 } 537 538 /*@ 539 DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process 540 541 Logically Collective on da 542 543 Input Parameters: 544 + da - The DMDA 545 . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m 546 . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n 547 - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p. 548 549 Level: intermediate 550 551 Note: these numbers are NOT multiplied by the number of dof per node. 
552 553 .seealso: `DMDACreate()`, `DMDestroy()`, `DMDA` 554 @*/ 555 PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[]) 556 { 557 DM_DA *dd = (DM_DA*)da->data; 558 559 PetscFunctionBegin; 560 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 561 PetscCheck(!da->setupcalled,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"This function must be called before DMSetUp()"); 562 if (lx) { 563 PetscCheck(dd->m >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 564 PetscCall(DMDACheckOwnershipRanges_Private(da,dd->M,dd->m,lx)); 565 if (!dd->lx) { 566 PetscCall(PetscMalloc1(dd->m, &dd->lx)); 567 } 568 PetscCall(PetscArraycpy(dd->lx, lx, dd->m)); 569 } 570 if (ly) { 571 PetscCheck(dd->n >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 572 PetscCall(DMDACheckOwnershipRanges_Private(da,dd->N,dd->n,ly)); 573 if (!dd->ly) { 574 PetscCall(PetscMalloc1(dd->n, &dd->ly)); 575 } 576 PetscCall(PetscArraycpy(dd->ly, ly, dd->n)); 577 } 578 if (lz) { 579 PetscCheck(dd->p >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONGSTATE,"Cannot set ownership ranges before setting number of procs"); 580 PetscCall(DMDACheckOwnershipRanges_Private(da,dd->P,dd->p,lz)); 581 if (!dd->lz) { 582 PetscCall(PetscMalloc1(dd->p, &dd->lz)); 583 } 584 PetscCall(PetscArraycpy(dd->lz, lz, dd->p)); 585 } 586 PetscFunctionReturn(0); 587 } 588 589 /*@ 590 DMDASetInterpolationType - Sets the type of interpolation that will be 591 returned by DMCreateInterpolation() 592 593 Logically Collective on da 594 595 Input Parameters: 596 + da - initial distributed array 597 - ctype - DMDA_Q1 and DMDA_Q0 are currently the only supported forms 598 599 Level: intermediate 600 601 Notes: 602 you should call this on the coarser of the two DMDAs you pass to DMCreateInterpolation() 603 604 .seealso: 
`DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDA`, `DMDAInterpolationType` 605 @*/ 606 PetscErrorCode DMDASetInterpolationType(DM da,DMDAInterpolationType ctype) 607 { 608 DM_DA *dd = (DM_DA*)da->data; 609 610 PetscFunctionBegin; 611 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 612 PetscValidLogicalCollectiveEnum(da,ctype,2); 613 dd->interptype = ctype; 614 PetscFunctionReturn(0); 615 } 616 617 /*@ 618 DMDAGetInterpolationType - Gets the type of interpolation that will be 619 used by DMCreateInterpolation() 620 621 Not Collective 622 623 Input Parameter: 624 . da - distributed array 625 626 Output Parameter: 627 . ctype - interpolation type (DMDA_Q1 and DMDA_Q0 are currently the only supported forms) 628 629 Level: intermediate 630 631 .seealso: `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()` 632 @*/ 633 PetscErrorCode DMDAGetInterpolationType(DM da,DMDAInterpolationType *ctype) 634 { 635 DM_DA *dd = (DM_DA*)da->data; 636 637 PetscFunctionBegin; 638 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 639 PetscValidPointer(ctype,2); 640 *ctype = dd->interptype; 641 PetscFunctionReturn(0); 642 } 643 644 /*@C 645 DMDAGetNeighbors - Gets an array containing the MPI rank of all the current 646 processes neighbors. 647 648 Not Collective 649 650 Input Parameter: 651 . da - the DMDA object 652 653 Output Parameters: 654 . ranks - the neighbors ranks, stored with the x index increasing most rapidly. 655 this process itself is in the list 656 657 Notes: 658 In 2d the array is of length 9, in 3d of length 27 659 Not supported in 1d 660 Do not free the array, it is freed when the DMDA is destroyed. 661 662 Fortran Notes: 663 In fortran you must pass in an array of the appropriate length. 
664 665 Level: intermediate 666 667 @*/ 668 PetscErrorCode DMDAGetNeighbors(DM da,const PetscMPIInt *ranks[]) 669 { 670 DM_DA *dd = (DM_DA*)da->data; 671 672 PetscFunctionBegin; 673 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 674 *ranks = dd->neighbors; 675 PetscFunctionReturn(0); 676 } 677 678 /*@C 679 DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process 680 681 Not Collective 682 683 Input Parameter: 684 . da - the DMDA object 685 686 Output Parameters: 687 + lx - ownership along x direction (optional) 688 . ly - ownership along y direction (optional) 689 - lz - ownership along z direction (optional) 690 691 Level: intermediate 692 693 Note: these correspond to the optional final arguments passed to DMDACreate(), DMDACreate2d(), DMDACreate3d() 694 695 In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and 696 eighth arguments from DMDAGetInfo() 697 698 In C you should not free these arrays, nor change the values in them. They will only have valid values while the 699 DMDA they came from still exists (has not been destroyed). 700 701 These numbers are NOT multiplied by the number of dof per node. 702 703 .seealso: `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()` 704 @*/ 705 PetscErrorCode DMDAGetOwnershipRanges(DM da,const PetscInt *lx[],const PetscInt *ly[],const PetscInt *lz[]) 706 { 707 DM_DA *dd = (DM_DA*)da->data; 708 709 PetscFunctionBegin; 710 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 711 if (lx) *lx = dd->lx; 712 if (ly) *ly = dd->ly; 713 if (lz) *lz = dd->lz; 714 PetscFunctionReturn(0); 715 } 716 717 /*@ 718 DMDASetRefinementFactor - Set the ratios that the DMDA grid is refined 719 720 Logically Collective on da 721 722 Input Parameters: 723 + da - the DMDA object 724 . 
refine_x - ratio of fine grid to coarse in x direction (2 by default) 725 . refine_y - ratio of fine grid to coarse in y direction (2 by default) 726 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 727 728 Options Database: 729 + -da_refine_x refine_x - refinement ratio in x direction 730 . -da_refine_y rafine_y - refinement ratio in y direction 731 . -da_refine_z refine_z - refinement ratio in z direction 732 - -da_refine <n> - refine the DMDA object n times when it is created. 733 734 Level: intermediate 735 736 Notes: 737 Pass PETSC_IGNORE to leave a value unchanged 738 739 .seealso: `DMRefine()`, `DMDAGetRefinementFactor()` 740 @*/ 741 PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z) 742 { 743 DM_DA *dd = (DM_DA*)da->data; 744 745 PetscFunctionBegin; 746 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 747 PetscValidLogicalCollectiveInt(da,refine_x,2); 748 PetscValidLogicalCollectiveInt(da,refine_y,3); 749 PetscValidLogicalCollectiveInt(da,refine_z,4); 750 751 if (refine_x > 0) dd->refine_x = refine_x; 752 if (refine_y > 0) dd->refine_y = refine_y; 753 if (refine_z > 0) dd->refine_z = refine_z; 754 PetscFunctionReturn(0); 755 } 756 757 /*@C 758 DMDAGetRefinementFactor - Gets the ratios that the DMDA grid is refined 759 760 Not Collective 761 762 Input Parameter: 763 . da - the DMDA object 764 765 Output Parameters: 766 + refine_x - ratio of fine grid to coarse in x direction (2 by default) 767 . 
refine_y - ratio of fine grid to coarse in y direction (2 by default) 768 - refine_z - ratio of fine grid to coarse in z direction (2 by default) 769 770 Level: intermediate 771 772 Notes: 773 Pass NULL for values you do not need 774 775 .seealso: `DMRefine()`, `DMDASetRefinementFactor()` 776 @*/ 777 PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z) 778 { 779 DM_DA *dd = (DM_DA*)da->data; 780 781 PetscFunctionBegin; 782 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 783 if (refine_x) *refine_x = dd->refine_x; 784 if (refine_y) *refine_y = dd->refine_y; 785 if (refine_z) *refine_z = dd->refine_z; 786 PetscFunctionReturn(0); 787 } 788 789 /*@C 790 DMDASetGetMatrix - Sets the routine used by the DMDA to allocate a matrix. 791 792 Logically Collective on da 793 794 Input Parameters: 795 + da - the DMDA object 796 - f - the function that allocates the matrix for that specific DMDA 797 798 Level: developer 799 800 Notes: 801 See DMDASetBlockFills() that provides a simple way to provide the nonzero structure for 802 the diagonal and off-diagonal blocks of the matrix 803 804 Not supported from Fortran 805 806 .seealso: `DMCreateMatrix()`, `DMDASetBlockFills()` 807 @*/ 808 PetscErrorCode DMDASetGetMatrix(DM da,PetscErrorCode (*f)(DM, Mat*)) 809 { 810 PetscFunctionBegin; 811 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 812 da->ops->creatematrix = f; 813 PetscFunctionReturn(0); 814 } 815 816 /*@ 817 DMDAMapMatStencilToGlobal - Map a list of MatStencils on a grid to global indices. 818 819 Not Collective 820 821 Input Parameters: 822 + da - the DMDA object 823 . 
m - number of MatStencils 824 - idxm - grid points (and component number when dof > 1) 825 826 Output Parameters: 827 + gidxm - global row indices 828 829 .seealso: `MatStencil` 830 @*/ 831 PetscErrorCode DMDAMapMatStencilToGlobal(DM da,PetscInt m,const MatStencil idxm[],PetscInt gidxm[]) 832 { 833 const DM_DA *dd = (const DM_DA*)da->data; 834 const PetscInt *dxm = (const PetscInt*)idxm; 835 PetscInt i,j,sdim,tmp,dim; 836 PetscInt dims[4],starts[4],dims2[3],starts2[3],dof = dd->w; 837 ISLocalToGlobalMapping ltog; 838 839 PetscFunctionBegin; 840 if (m <= 0) PetscFunctionReturn(0); 841 dim = da->dim; /* DA dim: 1 to 3 */ 842 sdim = dim + (dof > 1? 1 : 0); /* Dimension in MatStencil's (k,j,i,c) view */ 843 844 starts2[0] = dd->Xs/dof + dd->xo; 845 starts2[1] = dd->Ys + dd->yo; 846 starts2[2] = dd->Zs + dd->zo; 847 dims2[0] = (dd->Xe - dd->Xs)/dof; 848 dims2[1] = (dd->Ye - dd->Ys); 849 dims2[2] = (dd->Ze - dd->Zs); 850 851 for (i=0; i<dim; i++) { 852 dims[i] = dims2[dim-i-1]; /* copy the values in backwards */ 853 starts[i] = starts2[dim-i-1]; 854 } 855 starts[dim] = 0; 856 dims[dim] = dof; 857 858 /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */ 859 for (i=0; i<m; i++) { 860 for (j=0; j<3-dim; j++) dxm++; /* dxm[] is in k,j,i,c order; move to the first significant index */ 861 tmp = *dxm++ - starts[0]; 862 for (j=0; j<sdim-1; j++) { 863 if (tmp < 0 || (*dxm - starts[j+1]) < 0) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */ 864 else tmp = tmp*dims[j] + (*dxm - starts[j+1]); 865 dxm++; 866 } 867 if (dof == 1) dxm++; /* If no dof, skip the unused c */ 868 gidxm[i] = tmp; 869 } 870 871 /* Map local indices to global indices */ 872 PetscCall(DMGetLocalToGlobalMapping(da,<og)); 873 PetscCall(ISLocalToGlobalMappingApply(ltog,m,gidxm,gidxm)); 874 PetscFunctionReturn(0); 875 } 876 877 /* 878 Creates "balanced" ownership ranges after refinement, constrained by the need for the 879 fine grid boundaries to fall within 
one stencil width of the coarse partition. 880 881 Uses a greedy algorithm to handle non-ideal layouts, could probably do something better. 882 */ 883 static PetscErrorCode DMDARefineOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lc[],PetscInt lf[]) 884 { 885 PetscInt i,totalc = 0,remaining,startc = 0,startf = 0; 886 887 PetscFunctionBegin; 888 PetscCheck(ratio >= 1,PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %" PetscInt_FMT " must be at least 1",ratio); 889 if (ratio == 1) { 890 PetscCall(PetscArraycpy(lf,lc,m)); 891 PetscFunctionReturn(0); 892 } 893 for (i=0; i<m; i++) totalc += lc[i]; 894 remaining = (!periodic) + ratio * (totalc - (!periodic)); 895 for (i=0; i<m; i++) { 896 PetscInt want = remaining/(m-i) + !!(remaining%(m-i)); 897 if (i == m-1) lf[i] = want; 898 else { 899 const PetscInt nextc = startc + lc[i]; 900 /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one 901 * coarse stencil width of the first coarse node in the next subdomain. */ 902 while ((startf+want)/ratio < nextc - stencil_width) want++; 903 /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one 904 * coarse stencil width of the last coarse node in the current subdomain. 
*/ 905 while ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width) want--; 906 /* Make sure all constraints are satisfied */ 907 if (want < 0 || want > remaining || ((startf+want)/ratio < nextc - stencil_width) 908 || ((startf+want-1+ratio-1)/ratio > nextc-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible refined ownership range"); 909 } 910 lf[i] = want; 911 startc += lc[i]; 912 startf += lf[i]; 913 remaining -= lf[i]; 914 } 915 PetscFunctionReturn(0); 916 } 917 918 /* 919 Creates "balanced" ownership ranges after coarsening, constrained by the need for the 920 fine grid boundaries to fall within one stencil width of the coarse partition. 921 922 Uses a greedy algorithm to handle non-ideal layouts, could probably do something better. 923 */ 924 static PetscErrorCode DMDACoarsenOwnershipRanges(DM da,PetscBool periodic,PetscInt stencil_width,PetscInt ratio,PetscInt m,const PetscInt lf[],PetscInt lc[]) 925 { 926 PetscInt i,totalf,remaining,startc,startf; 927 928 PetscFunctionBegin; 929 PetscCheck(ratio >= 1,PetscObjectComm((PetscObject)da),PETSC_ERR_USER,"Requested refinement ratio %" PetscInt_FMT " must be at least 1",ratio); 930 if (ratio == 1) { 931 PetscCall(PetscArraycpy(lc,lf,m)); 932 PetscFunctionReturn(0); 933 } 934 for (i=0,totalf=0; i<m; i++) totalf += lf[i]; 935 remaining = (!periodic) + (totalf - (!periodic)) / ratio; 936 for (i=0,startc=0,startf=0; i<m; i++) { 937 PetscInt want = remaining/(m-i) + !!(remaining%(m-i)); 938 if (i == m-1) lc[i] = want; 939 else { 940 const PetscInt nextf = startf+lf[i]; 941 /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine 942 * node is within one stencil width. */ 943 while (nextf/ratio < startc+want-stencil_width) want--; 944 /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last 945 * fine node is within one stencil width. 
*/ 946 while ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width) want++; 947 if (want < 0 || want > remaining 948 || (nextf/ratio < startc+want-stencil_width) || ((nextf-1+ratio-1)/ratio > startc+want-1+stencil_width)) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_SIZ,"Could not find a compatible coarsened ownership range"); 949 } 950 lc[i] = want; 951 startc += lc[i]; 952 startf += lf[i]; 953 remaining -= lc[i]; 954 } 955 PetscFunctionReturn(0); 956 } 957 958 PetscErrorCode DMRefine_DA(DM da,MPI_Comm comm,DM *daref) 959 { 960 PetscInt M,N,P,i,dim; 961 DM da2; 962 DM_DA *dd = (DM_DA*)da->data,*dd2; 963 964 PetscFunctionBegin; 965 PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA); 966 PetscValidPointer(daref,3); 967 968 PetscCall(DMGetDimension(da, &dim)); 969 if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 970 M = dd->refine_x*dd->M; 971 } else { 972 M = 1 + dd->refine_x*(dd->M - 1); 973 } 974 if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 975 if (dim > 1) { 976 N = dd->refine_y*dd->N; 977 } else { 978 N = 1; 979 } 980 } else { 981 N = 1 + dd->refine_y*(dd->N - 1); 982 } 983 if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) { 984 if (dim > 2) { 985 P = dd->refine_z*dd->P; 986 } else { 987 P = 1; 988 } 989 } else { 990 P = 1 + dd->refine_z*(dd->P - 1); 991 } 992 PetscCall(DMDACreate(PetscObjectComm((PetscObject)da),&da2)); 993 PetscCall(DMSetOptionsPrefix(da2,((PetscObject)da)->prefix)); 994 PetscCall(DMSetDimension(da2,dim)); 995 PetscCall(DMDASetSizes(da2,M,N,P)); 996 PetscCall(DMDASetNumProcs(da2,dd->m,dd->n,dd->p)); 997 PetscCall(DMDASetBoundaryType(da2,dd->bx,dd->by,dd->bz)); 998 PetscCall(DMDASetDof(da2,dd->w)); 999 PetscCall(DMDASetStencilType(da2,dd->stencil_type)); 1000 PetscCall(DMDASetStencilWidth(da2,dd->s)); 1001 if (dim == 3) { 1002 PetscInt *lx,*ly,*lz; 1003 PetscCall(PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz)); 1004 PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == 
DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_z,dd->p,dd->lz,lz));
    PetscCall(DMDASetOwnershipRanges(da2,lx,ly,lz));
    PetscCall(PetscFree3(lx,ly,lz));
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    PetscCall(PetscMalloc2(dd->m,&lx,dd->n,&ly));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_y,dd->n,dd->ly,ly));
    PetscCall(DMDASetOwnershipRanges(da2,lx,ly,NULL));
    PetscCall(PetscFree2(lx,ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m,&lx));
    PetscCall(DMDARefineOwnershipRanges(da,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->refine_x,dd->m,dd->lx,lx));
    PetscCall(DMDASetOwnershipRanges(da2,lx,NULL,NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA*)da2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones */
  da2->ops->creatematrix = da->ops->creatematrix;
  /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
  da2->ops->getcoloring = da->ops->getcoloring;
  dd2->interptype = dd->interptype;

  /* copy fill information if given (dfill/ofill are CSR-like arrays of length dfill[w]+w+1) */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill,dd->dfill,dd->dfill[dd->w]+dd->w+1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill,dd->ofill,dd->ofill[dd->w]+dd->w+1));
  }
  /* copy the refine information */
  dd2->coarsen_x = dd2->refine_x = dd->refine_x;
  dd2->coarsen_y = dd2->refine_y = dd->refine_y;
  dd2->coarsen_z = dd2->refine_z = dd->refine_z;

  /* If per-level refinement hierarchies were provided, pick the factors for the new
     (one level finer) DM; levelup - leveldown is the current position in the hierarchy */
  if (dd->refine_z_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
    }
    if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(da2,da->vectype));

  /* NOTE(review): lf/lj are shallow-copied pointers shared with the new DM — confirm ownership semantics */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  da2->leveldown = da->leveldown;
  da2->levelup = da->levelup + 1;

  PetscCall(DMSetUp(da2));

  /* interpolate coordinates if they are set on the coarse grid */
  if (da->coordinates) {
    DM cdaf,cdac;
    Vec coordsc,coordsf;
    Mat II;

    PetscCall(DMGetCoordinateDM(da,&cdac));
    PetscCall(DMGetCoordinates(da,&coordsc));
    PetscCall(DMGetCoordinateDM(da2,&cdaf));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(da2,0.0,1.0,0.0,1.0,0.0,1.0));
    PetscCall(DMGetCoordinates(da2,&coordsf));
    PetscCall(DMCreateInterpolation(cdac,cdaf,&II,NULL));
    PetscCall(MatInterpolate(II,coordsc,coordsf));
    PetscCall(MatDestroy(&II));
  }

  /* carry the per-component field names over to the refined DM */
  for (i=0; i<da->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da,i,&fieldname));
    PetscCall(DMDASetFieldName(da2,i,fieldname));
  }

  *daref = da2;
  PetscFunctionReturn(0);
}

/*
   DMCoarsen_DA - Implementation of DMCoarsen() for DMDA: builds a coarsened DMDA with the
   same boundary types, dof, stencil, and process layout as dmf, and injects coordinates.

   Note: the comm argument is unused; the coarse DM is created on dmf's own communicator.
*/
PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm,DM *dmc)
{
  PetscInt M,N,P,i,dim;
  DM dmc2;
  DM_DA *dd = (DM_DA*)dmf->data,*dd2;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(dmf,DM_CLASSID,1,DMDA);
  PetscValidPointer(dmc,3);

  PetscCall(DMGetDimension(dmf, &dim));
  /* Inverse of the refinement size formulas: periodic/Q0 coarsens every cell (M -> M/c);
     otherwise the shared endpoint stays (M -> 1 + (M-1)/c) */
  if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    M = dd->M / dd->coarsen_x;
  } else {
    M = 1 + (dd->M - 1) / dd->coarsen_x;
  }
  if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 1) {
      N = dd->N / dd->coarsen_y;
    } else {
      N = 1;
    }
  } else {
    N = 1 + (dd->N - 1) /
dd->coarsen_y;
  }
  if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
    if (dim > 2) {
      P = dd->P / dd->coarsen_z;
    } else {
      P = 1;
    }
  } else {
    P = 1 + (dd->P - 1) / dd->coarsen_z;
  }
  PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf),&dmc2));
  PetscCall(DMSetOptionsPrefix(dmc2,((PetscObject)dmf)->prefix));
  PetscCall(DMSetDimension(dmc2,dim));
  PetscCall(DMDASetSizes(dmc2,M,N,P));
  PetscCall(DMDASetNumProcs(dmc2,dd->m,dd->n,dd->p));
  PetscCall(DMDASetBoundaryType(dmc2,dd->bx,dd->by,dd->bz));
  PetscCall(DMDASetDof(dmc2,dd->w));
  PetscCall(DMDASetStencilType(dmc2,dd->stencil_type));
  PetscCall(DMDASetStencilWidth(dmc2,dd->s));
  /* Derive per-rank ownership on the coarse grid from the current fine layout */
  if (dim == 3) {
    PetscInt *lx,*ly,*lz;
    PetscCall(PetscMalloc3(dd->m,&lx,dd->n,&ly,dd->p,&lz));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_z,dd->p,dd->lz,lz));
    PetscCall(DMDASetOwnershipRanges(dmc2,lx,ly,lz));
    PetscCall(PetscFree3(lx,ly,lz));
  } else if (dim == 2) {
    PetscInt *lx,*ly;
    PetscCall(PetscMalloc2(dd->m,&lx,dd->n,&ly));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_y,dd->n,dd->ly,ly));
    PetscCall(DMDASetOwnershipRanges(dmc2,lx,ly,NULL));
    PetscCall(PetscFree2(lx,ly));
  } else if (dim == 1) {
    PetscInt *lx;
    PetscCall(PetscMalloc1(dd->m,&lx));
    PetscCall(DMDACoarsenOwnershipRanges(dmf,(PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0),dd->s,dd->coarsen_x,dd->m,dd->lx,lx));
    PetscCall(DMDASetOwnershipRanges(dmc2,lx,NULL,NULL));
    PetscCall(PetscFree(lx));
  }
  dd2 = (DM_DA*)dmc2->data;

  /* allow overloaded (user replaced) operations to be inherited by refinement clones; why are only some inherited and not all? */
  /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
  dmc2->ops->creatematrix = dmf->ops->creatematrix;
  dmc2->ops->getcoloring = dmf->ops->getcoloring;
  dd2->interptype = dd->interptype;

  /* copy fill information if given (dfill/ofill are CSR-like arrays of length dfill[w]+w+1) */
  if (dd->dfill) {
    PetscCall(PetscMalloc1(dd->dfill[dd->w]+dd->w+1,&dd2->dfill));
    PetscCall(PetscArraycpy(dd2->dfill,dd->dfill,dd->dfill[dd->w]+dd->w+1));
  }
  if (dd->ofill) {
    PetscCall(PetscMalloc1(dd->ofill[dd->w]+dd->w+1,&dd2->ofill));
    PetscCall(PetscArraycpy(dd2->ofill,dd->ofill,dd->ofill[dd->w]+dd->w+1));
  }
  /* copy the refine information; the coarse DM's factors come from the fine DM's coarsen factors */
  dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
  dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
  dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;

  /* If per-level refinement hierarchies were provided, pick the factors for the new
     (one level coarser) DM; indices are shifted by -1/-2 relative to the refine path */
  if (dd->refine_z_hier) {
    if (dmf->levelup - dmf->leveldown -1 > -1 && dmf->levelup - dmf->leveldown - 1< dd->refine_z_hier_n) {
      dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1];
    }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) {
      dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2];
    }
    dd2->refine_z_hier_n = dd->refine_z_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_z_hier_n,&dd2->refine_z_hier));
    PetscCall(PetscArraycpy(dd2->refine_z_hier,dd->refine_z_hier,dd2->refine_z_hier_n));
  }
  if (dd->refine_y_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1< dd->refine_y_hier_n) {
      dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1];
    }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) {
      dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2];
    }
    dd2->refine_y_hier_n = dd->refine_y_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_y_hier_n,&dd2->refine_y_hier));
    PetscCall(PetscArraycpy(dd2->refine_y_hier,dd->refine_y_hier,dd2->refine_y_hier_n));
  }
  if (dd->refine_x_hier) {
    if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) {
      dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1];
    }
    if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) {
      dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2];
    }
    dd2->refine_x_hier_n = dd->refine_x_hier_n;
    PetscCall(PetscMalloc1(dd2->refine_x_hier_n,&dd2->refine_x_hier));
    PetscCall(PetscArraycpy(dd2->refine_x_hier,dd->refine_x_hier,dd2->refine_x_hier_n));
  }

  /* copy vector type information */
  PetscCall(DMSetVecType(dmc2,dmf->vectype));

  /* NOTE(review): lf/lj are shallow-copied pointers shared with the new DM — confirm ownership semantics */
  dd2->lf = dd->lf;
  dd2->lj = dd->lj;

  dmc2->leveldown = dmf->leveldown + 1;
  dmc2->levelup = dmf->levelup;

  PetscCall(DMSetUp(dmc2));

  /* inject coordinates if they are set on the fine grid */
  if (dmf->coordinates) {
    DM cdaf,cdac;
    Vec coordsc,coordsf;
    Mat inject;
    VecScatter vscat;

    PetscCall(DMGetCoordinateDM(dmf,&cdaf));
    PetscCall(DMGetCoordinates(dmf,&coordsf));
    PetscCall(DMGetCoordinateDM(dmc2,&cdac));
    /* force creation of the coordinate vector */
    PetscCall(DMDASetUniformCoordinates(dmc2,0.0,1.0,0.0,1.0,0.0,1.0));
    PetscCall(DMGetCoordinates(dmc2,&coordsc));

    /* Injection restricts fine coordinates to the coarse grid via the underlying scatter */
    PetscCall(DMCreateInjection(cdac,cdaf,&inject));
    PetscCall(MatScatterGetVecScatter(inject,&vscat));
    PetscCall(VecScatterBegin(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(vscat,coordsf,coordsc,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(MatDestroy(&inject));
  }

  /* carry the per-component field names over to the coarse DM */
  for (i=0; i<dmf->bs; i++) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(dmf,i,&fieldname));
    PetscCall(DMDASetFieldName(dmc2,i,fieldname));
  }

  *dmc = dmc2;
  PetscFunctionReturn(0);
}

/*
   DMRefineHierarchy_DA - Implementation of DMRefineHierarchy() for DMDA: produces nlevels
   successively refined DMs in daf[], honoring the -da_refine_hierarchy_{x,y,z} options.
*/
PetscErrorCode DMRefineHierarchy_DA(DM da,PetscInt nlevels,DM daf[])
{
  PetscInt i,n,*refx,*refy,*refz;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscCheck(nlevels >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(daf,3);

  /* Get refinement factors, defaults taken from the coarse DMDA */
  PetscCall(PetscMalloc3(nlevels,&refx,nlevels,&refy,nlevels,&refz));
  for (i=0; i<nlevels; i++) {
    PetscCall(DMDAGetRefinementFactor(da,&refx[i],&refy[i],&refz[i]));
  }
  /* Per-level overrides from the options database; n is reset before each query
     because PetscOptionsGetIntArray overwrites it with the number of values read */
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_x",refx,&n,NULL));
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_y",refy,&n,NULL));
  n = nlevels;
  PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options,((PetscObject)da)->prefix,"-da_refine_hierarchy_z",refz,&n,NULL));

  PetscCall(DMDASetRefinementFactor(da,refx[0],refy[0],refz[0]));
  PetscCall(DMRefine(da,PetscObjectComm((PetscObject)da),&daf[0]));
  for (i=1; i<nlevels; i++) {
    PetscCall(DMDASetRefinementFactor(daf[i-1],refx[i],refy[i],refz[i]));
    PetscCall(DMRefine(daf[i-1],PetscObjectComm((PetscObject)da),&daf[i]));
  }
  PetscCall(PetscFree3(refx,refy,refz));
  PetscFunctionReturn(0);
}

/*
   DMCoarsenHierarchy_DA - Implementation of DMCoarsenHierarchy() for DMDA: produces nlevels
   successively coarsened DMs in dac[].
*/
PetscErrorCode DMCoarsenHierarchy_DA(DM da,PetscInt nlevels,DM dac[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscCheck(nlevels >= 0,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dac,3);
  PetscCall(DMCoarsen(da,PetscObjectComm((PetscObject)da),&dac[0]));
  for (i=1; i<nlevels; i++) {
    PetscCall(DMCoarsen(dac[i-1],PetscObjectComm((PetscObject)da),&dac[i]));
  }
  PetscFunctionReturn(0);
}

/*
   DMDASetGLLCoordinates_1d - Fills the 1d coordinate vector of dm with Gauss-Lobatto-Legendre
   node locations on [-1,1], one spectral element per (n-1) grid intervals.

   Input: n = number of GLL nodes per element; nodes = the n GLL node locations on [-1,1].
   Only non-periodic boundaries are supported.
*/
PetscErrorCode DMDASetGLLCoordinates_1d(DM dm,PetscInt n,PetscReal *nodes)
{
  PetscInt i,j,xs,xn,q;
  PetscScalar *xx;
  PetscReal h;
  Vec x;
  DM_DA *da = (DM_DA*)dm->data;

  PetscFunctionBegin;
  if (da->bx != DM_BOUNDARY_PERIODIC) {
    PetscCall(DMDAGetInfo(dm,NULL,&q,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL));
    q = (q-1)/(n-1); /* number of spectral elements */
    h = 2.0/q;       /* width of one element in the [-1,1] reference domain */
    PetscCall(DMDAGetCorners(dm,&xs,NULL,NULL,&xn,NULL,NULL));
    /* convert grid-point start/count to element start/count; assumes the local size is a
       multiple of (n-1) as required by the caller's decomposition */
    xs = xs/(n-1);
    xn = xn/(n-1);
    PetscCall(DMDASetUniformCoordinates(dm,-1.,1.,0.,0.,0.,0.));
    PetscCall(DMGetCoordinates(dm,&x));
    PetscCall(DMDAVecGetArray(dm,x,&xx));

    /* loop over local spectral elements */
    for (j=xs; j<xs+xn; j++) {
      /*
       Except for the first process, each process starts on the second GLL point of the first element on that process
      */
      for (i= (j == xs && xs > 0)?
1 : 0; i<n; i++) {
        /* element j spans [-1 + h*j, -1 + h*(j+1)]; map GLL node from [-1,1] into it */
        xx[j*(n-1) + i] = -1.0 + h*j + h*(nodes[i]+1.0)/2.;
      }
    }
    PetscCall(DMDAVecRestoreArray(dm,x,&xx));
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for periodic");
  PetscFunctionReturn(0);
}

/*@

  DMDASetGLLCoordinates - Sets the global coordinates from -1 to 1 to the GLL points of as many GLL elements that fit the number of grid points

  Collective on da

  Input Parameters:
+ da - the DMDA object
. n - the number of GLL nodes
- nodes - the GLL nodes

  Notes:
  The parallel decomposition of grid points must correspond to the degree of the GLL. That is, the number of grid points
  on each process must be divisible by the number of GLL elements needed per process. This depends on whether the DM is
  periodic or not.

  Level: advanced

.seealso: `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
@*/
PetscErrorCode DMDASetGLLCoordinates(DM da,PetscInt n,PetscReal *nodes)
{
  PetscFunctionBegin;
  if (da->dim == 1) {
    PetscCall(DMDASetGLLCoordinates_1d(da,n,nodes));
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented for 2 or 3d");
  PetscFunctionReturn(0);
}

/*
   DMGetCompatibility_DA - Implementation of DMGetCompatibility() for DMDA: two set-up DMDAs
   are compatible when dimension, stencil width, global sizes, process grid, boundary types,
   and per-rank ownership ranges all match. For non-DMDA dm2 it declines (*set = PETSC_FALSE).
*/
PETSC_INTERN PetscErrorCode DMGetCompatibility_DA(DM da1,DM dm2,PetscBool *compatible,PetscBool *set)
{
  DM_DA *dd1 = (DM_DA*)da1->data,*dd2;
  DM da2;
  DMType dmtype2;
  PetscBool isda,compatibleLocal;
  PetscInt i;

  PetscFunctionBegin;
  PetscCheck(da1->setupcalled,PetscObjectComm((PetscObject)da1),PETSC_ERR_ARG_WRONGSTATE,"DMSetUp() must be called on first DM before DMGetCompatibility()");
  PetscCall(DMGetType(dm2,&dmtype2));
  PetscCall(PetscStrcmp(dmtype2,DMDA,&isda));
  if (isda) {
    da2 = dm2;
    dd2 = (DM_DA*)da2->data;
    PetscCheck(da2->setupcalled,PetscObjectComm((PetscObject)da2),PETSC_ERR_ARG_WRONGSTATE,"DMSetUp() must be called on second DM before DMGetCompatibility()");
    compatibleLocal = (PetscBool)(da1->dim == da2->dim);
    if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
    /* Global size, ranks per dimension, and boundary type, checked only up to da1's dimension */
    if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
    if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
    if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
    if (compatibleLocal) {
      for (i=0; i<dd1->m; ++i) {
        compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size */
      }
    }
    if (compatibleLocal && da1->dim > 1) {
      for (i=0; i<dd1->n; ++i) {
        compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
      }
    }
    if (compatibleLocal && da1->dim > 2) {
      for (i=0; i<dd1->p; ++i) {
        compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
      }
    }
    *compatible = compatibleLocal;
    *set = PETSC_TRUE;
  } else {
    /* Decline to determine compatibility with other DM types */
    *set = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}