1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMViewFromOptions" 9 /* 10 DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed. 11 12 Collective on Vec 13 14 Input Parameters: 15 + dm - the DM 16 . prefix - prefix to use for viewing, or NULL to use prefix of 'rnd' 17 - optionname - option to activate viewing 18 19 Level: intermediate 20 21 .keywords: DM, view, options, database 22 .seealso: VecViewFromOptions(), MatViewFromOptions() 23 */ 24 PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[]) 25 { 26 PetscErrorCode ierr; 27 PetscBool flg; 28 PetscViewer viewer; 29 PetscViewerFormat format; 30 31 PetscFunctionBegin; 32 if (prefix) { 33 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 34 } else { 35 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 36 } 37 if (flg) { 38 ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr); 39 ierr = DMView(dm,viewer);CHKERRQ(ierr); 40 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 41 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 42 } 43 PetscFunctionReturn(0); 44 } 45 46 47 #undef __FUNCT__ 48 #define __FUNCT__ "DMCreate" 49 /*@ 50 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 51 52 If you never call DMSetType() it will generate an 53 error when you try to use the vector. 54 55 Collective on MPI_Comm 56 57 Input Parameter: 58 . comm - The communicator for the DM object 59 60 Output Parameter: 61 . 
dm - The DM object 62 63 Level: beginner 64 65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 66 @*/ 67 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 68 { 69 DM v; 70 PetscErrorCode ierr; 71 72 PetscFunctionBegin; 73 PetscValidPointer(dm,2); 74 *dm = NULL; 75 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 76 ierr = VecInitializePackage();CHKERRQ(ierr); 77 ierr = MatInitializePackage();CHKERRQ(ierr); 78 ierr = DMInitializePackage();CHKERRQ(ierr); 79 #endif 80 81 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 82 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 83 84 85 v->ltogmap = NULL; 86 v->ltogmapb = NULL; 87 v->bs = 1; 88 v->coloringtype = IS_COLORING_GLOBAL; 89 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 90 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 91 v->defaultSection = NULL; 92 v->defaultGlobalSection = NULL; 93 { 94 PetscInt i; 95 for (i = 0; i < 10; ++i) { 96 v->nullspaceConstructors[i] = NULL; 97 } 98 } 99 v->numFields = 0; 100 v->fields = NULL; 101 102 *dm = v; 103 PetscFunctionReturn(0); 104 } 105 106 #undef __FUNCT__ 107 #define __FUNCT__ "DMClone" 108 /*@ 109 DMClone - Creates a DM object with the same topology as the original. 110 111 Collective on MPI_Comm 112 113 Input Parameter: 114 . dm - The original DM object 115 116 Output Parameter: 117 . 
newdm - The new DM object 118 119 Level: beginner 120 121 .keywords: DM, topology, create 122 @*/ 123 PetscErrorCode DMClone(DM dm, DM *newdm) 124 { 125 PetscSF sf; 126 Vec coords; 127 void *ctx; 128 PetscErrorCode ierr; 129 130 PetscFunctionBegin; 131 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 132 PetscValidPointer(newdm,2); 133 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 134 if (dm->ops->clone) { 135 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 136 } 137 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 138 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 139 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 140 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 141 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 142 if (coords) { 143 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 144 } else { 145 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 146 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 147 } 148 PetscFunctionReturn(0); 149 } 150 151 #undef __FUNCT__ 152 #define __FUNCT__ "DMSetVecType" 153 /*@C 154 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 155 156 Logically Collective on DMDA 157 158 Input Parameter: 159 + da - initial distributed array 160 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 161 162 Options Database: 163 . 
-dm_vec_type ctype 164 165 Level: intermediate 166 167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 168 @*/ 169 PetscErrorCode DMSetVecType(DM da,VecType ctype) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(da,DM_CLASSID,1); 175 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 176 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 177 PetscFunctionReturn(0); 178 } 179 180 #undef __FUNCT__ 181 #define __FUNCT__ "VecGetDM" 182 /*@ 183 VecGetDM - Gets the DM defining the data layout of the vector 184 185 Not collective 186 187 Input Parameter: 188 . v - The Vec 189 190 Output Parameter: 191 . dm - The DM 192 193 Level: intermediate 194 195 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 196 @*/ 197 PetscErrorCode VecGetDM(Vec v, DM *dm) 198 { 199 PetscErrorCode ierr; 200 201 PetscFunctionBegin; 202 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 203 PetscValidPointer(dm,2); 204 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 205 PetscFunctionReturn(0); 206 } 207 208 #undef __FUNCT__ 209 #define __FUNCT__ "VecSetDM" 210 /*@ 211 VecSetDM - Sets the DM defining the data layout of the vector 212 213 Not collective 214 215 Input Parameters: 216 + v - The Vec 217 - dm - The DM 218 219 Level: intermediate 220 221 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 222 @*/ 223 PetscErrorCode VecSetDM(Vec v, DM dm) 224 { 225 PetscErrorCode ierr; 226 227 PetscFunctionBegin; 228 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 229 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 230 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 231 PetscFunctionReturn(0); 232 } 233 234 #undef __FUNCT__ 235 #define __FUNCT__ "DMSetMatType" 236 /*@C 237 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 238 239 Logically Collective on 
DM 240 241 Input Parameter: 242 + dm - the DM context 243 . ctype - the matrix type 244 245 Options Database: 246 . -dm_mat_type ctype 247 248 Level: intermediate 249 250 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 251 @*/ 252 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 253 { 254 PetscErrorCode ierr; 255 256 PetscFunctionBegin; 257 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 258 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 259 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 260 PetscFunctionReturn(0); 261 } 262 263 #undef __FUNCT__ 264 #define __FUNCT__ "MatGetDM" 265 /*@ 266 MatGetDM - Gets the DM defining the data layout of the matrix 267 268 Not collective 269 270 Input Parameter: 271 . A - The Mat 272 273 Output Parameter: 274 . dm - The DM 275 276 Level: intermediate 277 278 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 279 @*/ 280 PetscErrorCode MatGetDM(Mat A, DM *dm) 281 { 282 PetscErrorCode ierr; 283 284 PetscFunctionBegin; 285 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 286 PetscValidPointer(dm,2); 287 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 288 PetscFunctionReturn(0); 289 } 290 291 #undef __FUNCT__ 292 #define __FUNCT__ "MatSetDM" 293 /*@ 294 MatSetDM - Sets the DM defining the data layout of the matrix 295 296 Not collective 297 298 Input Parameters: 299 + A - The Mat 300 - dm - The DM 301 302 Level: intermediate 303 304 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 305 @*/ 306 PetscErrorCode MatSetDM(Mat A, DM dm) 307 { 308 PetscErrorCode ierr; 309 310 PetscFunctionBegin; 311 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 312 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 313 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 314 PetscFunctionReturn(0); 315 } 316 317 #undef __FUNCT__ 318 #define __FUNCT__ "DMSetOptionsPrefix" 319 /*@C 320 DMSetOptionsPrefix - 
Sets the prefix used for searching for all 321 DMDA options in the database. 322 323 Logically Collective on DMDA 324 325 Input Parameter: 326 + da - the DMDA context 327 - prefix - the prefix to prepend to all option names 328 329 Notes: 330 A hyphen (-) must NOT be given at the beginning of the prefix name. 331 The first character of all runtime options is AUTOMATICALLY the hyphen. 332 333 Level: advanced 334 335 .keywords: DMDA, set, options, prefix, database 336 337 .seealso: DMSetFromOptions() 338 @*/ 339 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 340 { 341 PetscErrorCode ierr; 342 343 PetscFunctionBegin; 344 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 345 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 346 PetscFunctionReturn(0); 347 } 348 349 #undef __FUNCT__ 350 #define __FUNCT__ "DMDestroy" 351 /*@ 352 DMDestroy - Destroys a vector packer or DMDA. 353 354 Collective on DM 355 356 Input Parameter: 357 . dm - the DM object to destroy 358 359 Level: developer 360 361 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 362 363 @*/ 364 PetscErrorCode DMDestroy(DM *dm) 365 { 366 PetscInt i, cnt = 0, f; 367 DMNamedVecLink nlink,nnext; 368 PetscErrorCode ierr; 369 370 PetscFunctionBegin; 371 if (!*dm) PetscFunctionReturn(0); 372 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 373 374 /* count all the circular references of DM and its contained Vecs */ 375 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 376 if ((*dm)->localin[i]) cnt++; 377 if ((*dm)->globalin[i]) cnt++; 378 } 379 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 380 if ((*dm)->x) { 381 DM obj; 382 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 383 if (obj == *dm) cnt++; 384 } 385 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 386 if ((*dm)->x) { 387 DM obj; 388 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 389 if (obj == *dm) cnt++; 390 } 391 392 if (--((PetscObject)(*dm))->refct - 
cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 393 /* 394 Need this test because the dm references the vectors that 395 reference the dm, so destroying the dm calls destroy on the 396 vectors that cause another destroy on the dm 397 */ 398 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 399 ((PetscObject) (*dm))->refct = 0; 400 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 401 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 402 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 403 } 404 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 405 nnext = nlink->next; 406 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 407 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 408 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 409 ierr = PetscFree(nlink);CHKERRQ(ierr); 410 } 411 (*dm)->namedglobal = NULL; 412 413 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 414 nnext = nlink->next; 415 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 416 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 417 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 418 ierr = PetscFree(nlink);CHKERRQ(ierr); 419 } 420 (*dm)->namedlocal = NULL; 421 422 /* Destroy the list of hooks */ 423 { 424 DMCoarsenHookLink link,next; 425 for (link=(*dm)->coarsenhook; link; link=next) { 426 next = link->next; 427 ierr = PetscFree(link);CHKERRQ(ierr); 428 } 429 (*dm)->coarsenhook = NULL; 430 } 431 { 432 DMRefineHookLink link,next; 433 for (link=(*dm)->refinehook; link; link=next) { 434 next = link->next; 435 ierr = PetscFree(link);CHKERRQ(ierr); 436 } 437 (*dm)->refinehook = NULL; 438 } 439 { 440 DMSubDomainHookLink link,next; 441 for 
(link=(*dm)->subdomainhook; link; link=next) { 442 next = link->next; 443 ierr = PetscFree(link);CHKERRQ(ierr); 444 } 445 (*dm)->subdomainhook = NULL; 446 } 447 { 448 DMGlobalToLocalHookLink link,next; 449 for (link=(*dm)->gtolhook; link; link=next) { 450 next = link->next; 451 ierr = PetscFree(link);CHKERRQ(ierr); 452 } 453 (*dm)->gtolhook = NULL; 454 } 455 /* Destroy the work arrays */ 456 { 457 DMWorkLink link,next; 458 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 459 for (link=(*dm)->workin; link; link=next) { 460 next = link->next; 461 ierr = PetscFree(link->mem);CHKERRQ(ierr); 462 ierr = PetscFree(link);CHKERRQ(ierr); 463 } 464 (*dm)->workin = NULL; 465 } 466 467 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 468 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 469 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 470 471 if ((*dm)->ctx && (*dm)->ctxdestroy) { 472 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 473 } 474 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 475 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 476 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 477 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 478 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 479 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 480 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 481 482 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 483 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 484 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 485 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 486 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 487 488 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 489 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 490 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 491 492 for (f = 0; f < (*dm)->numFields; ++f) { 493 ierr = 
PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 494 } 495 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 496 /* if memory was published with AMS then destroy it */ 497 ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr); 498 499 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 500 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 501 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 502 PetscFunctionReturn(0); 503 } 504 505 #undef __FUNCT__ 506 #define __FUNCT__ "DMSetUp" 507 /*@ 508 DMSetUp - sets up the data structures inside a DM object 509 510 Collective on DM 511 512 Input Parameter: 513 . dm - the DM object to setup 514 515 Level: developer 516 517 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 518 519 @*/ 520 PetscErrorCode DMSetUp(DM dm) 521 { 522 PetscErrorCode ierr; 523 524 PetscFunctionBegin; 525 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 526 if (dm->setupcalled) PetscFunctionReturn(0); 527 if (dm->ops->setup) { 528 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 529 } 530 dm->setupcalled = PETSC_TRUE; 531 PetscFunctionReturn(0); 532 } 533 534 #undef __FUNCT__ 535 #define __FUNCT__ "DMSetFromOptions" 536 /*@ 537 DMSetFromOptions - sets parameters in a DM from the options database 538 539 Collective on DM 540 541 Input Parameter: 542 . dm - the DM object to set options for 543 544 Options Database: 545 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 546 . -dm_vec_type <type> type of vector to create inside DM 547 . 
-dm_mat_type <type> type of matrix to create inside DM 548 - -dm_coloring_type <global or ghosted> 549 550 Level: developer 551 552 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 553 554 @*/ 555 PetscErrorCode DMSetFromOptions(DM dm) 556 { 557 char typeName[256] = MATAIJ; 558 PetscBool flg; 559 PetscErrorCode ierr; 560 561 PetscFunctionBegin; 562 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 563 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 564 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 565 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 566 if (flg) { 567 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 568 } 569 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 570 if (flg) { 571 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 572 } 573 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 574 if (dm->ops->setfromoptions) { 575 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 576 } 577 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 578 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 579 ierr = PetscOptionsEnd();CHKERRQ(ierr); 580 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 581 PetscFunctionReturn(0); 582 } 583 584 #undef __FUNCT__ 585 #define __FUNCT__ "DMView" 586 /*@C 587 DMView - Views a vector packer or DMDA. 
588 589 Collective on DM 590 591 Input Parameter: 592 + dm - the DM object to view 593 - v - the viewer 594 595 Level: developer 596 597 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 598 599 @*/ 600 PetscErrorCode DMView(DM dm,PetscViewer v) 601 { 602 PetscErrorCode ierr; 603 PetscBool isbinary; 604 605 PetscFunctionBegin; 606 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 607 if (!v) { 608 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 609 } 610 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 611 if (isbinary) { 612 PetscInt classid = DM_FILE_CLASSID; 613 char type[256]; 614 615 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 616 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 617 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 618 } 619 if (dm->ops->view) { 620 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 621 } 622 PetscFunctionReturn(0); 623 } 624 625 #undef __FUNCT__ 626 #define __FUNCT__ "DMCreateGlobalVector" 627 /*@ 628 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 629 630 Collective on DM 631 632 Input Parameter: 633 . dm - the DM object 634 635 Output Parameter: 636 . vec - the global vector 637 638 Level: beginner 639 640 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 641 642 @*/ 643 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 644 { 645 PetscErrorCode ierr; 646 647 PetscFunctionBegin; 648 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 649 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 650 PetscFunctionReturn(0); 651 } 652 653 #undef __FUNCT__ 654 #define __FUNCT__ "DMCreateLocalVector" 655 /*@ 656 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 657 658 Not Collective 659 660 Input Parameter: 661 . 
dm - the DM object 662 663 Output Parameter: 664 . vec - the local vector 665 666 Level: beginner 667 668 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 669 670 @*/ 671 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 672 { 673 PetscErrorCode ierr; 674 675 PetscFunctionBegin; 676 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 677 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 678 PetscFunctionReturn(0); 679 } 680 681 #undef __FUNCT__ 682 #define __FUNCT__ "DMGetLocalToGlobalMapping" 683 /*@ 684 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 685 686 Collective on DM 687 688 Input Parameter: 689 . dm - the DM that provides the mapping 690 691 Output Parameter: 692 . ltog - the mapping 693 694 Level: intermediate 695 696 Notes: 697 This mapping can then be used by VecSetLocalToGlobalMapping() or 698 MatSetLocalToGlobalMapping(). 699 700 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 701 @*/ 702 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 703 { 704 PetscErrorCode ierr; 705 706 PetscFunctionBegin; 707 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 708 PetscValidPointer(ltog,2); 709 if (!dm->ltogmap) { 710 PetscSection section, sectionGlobal; 711 712 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 713 if (section) { 714 PetscInt *ltog; 715 PetscInt pStart, pEnd, size, p, l; 716 717 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 718 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 719 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 720 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 721 for (p = pStart, l = 0; p < pEnd; ++p) { 722 PetscInt dof, off, c; 723 724 /* Should probably use constrained dofs */ 725 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 726 ierr = PetscSectionGetOffset(sectionGlobal, p, 
&off);CHKERRQ(ierr); 727 for (c = 0; c < dof; ++c, ++l) { 728 ltog[l] = off+c; 729 } 730 } 731 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 732 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 733 } else { 734 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 735 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 736 } 737 } 738 *ltog = dm->ltogmap; 739 PetscFunctionReturn(0); 740 } 741 742 #undef __FUNCT__ 743 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 744 /*@ 745 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 746 747 Collective on DM 748 749 Input Parameter: 750 . da - the distributed array that provides the mapping 751 752 Output Parameter: 753 . ltog - the block mapping 754 755 Level: intermediate 756 757 Notes: 758 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 759 MatSetLocalToGlobalMappingBlock(). 
760 761 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 762 @*/ 763 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 764 { 765 PetscErrorCode ierr; 766 767 PetscFunctionBegin; 768 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 769 PetscValidPointer(ltog,2); 770 if (!dm->ltogmapb) { 771 PetscInt bs; 772 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 773 if (bs > 1) { 774 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 775 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 776 } else { 777 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 778 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 779 } 780 } 781 *ltog = dm->ltogmapb; 782 PetscFunctionReturn(0); 783 } 784 785 #undef __FUNCT__ 786 #define __FUNCT__ "DMGetBlockSize" 787 /*@ 788 DMGetBlockSize - Gets the inherent block size associated with a DM 789 790 Not Collective 791 792 Input Parameter: 793 . dm - the DM with block structure 794 795 Output Parameter: 796 . 
bs - the block size, 1 implies no exploitable block structure 797 798 Level: intermediate 799 800 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 801 @*/ 802 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 803 { 804 PetscFunctionBegin; 805 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 806 PetscValidPointer(bs,2); 807 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 808 *bs = dm->bs; 809 PetscFunctionReturn(0); 810 } 811 812 #undef __FUNCT__ 813 #define __FUNCT__ "DMCreateInterpolation" 814 /*@ 815 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 816 817 Collective on DM 818 819 Input Parameter: 820 + dm1 - the DM object 821 - dm2 - the second, finer DM object 822 823 Output Parameter: 824 + mat - the interpolation 825 - vec - the scaling (optional) 826 827 Level: developer 828 829 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 830 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 831 832 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 833 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
834 835 836 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 837 838 @*/ 839 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 840 { 841 PetscErrorCode ierr; 842 843 PetscFunctionBegin; 844 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 845 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 846 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 847 PetscFunctionReturn(0); 848 } 849 850 #undef __FUNCT__ 851 #define __FUNCT__ "DMCreateInjection" 852 /*@ 853 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 854 855 Collective on DM 856 857 Input Parameter: 858 + dm1 - the DM object 859 - dm2 - the second, finer DM object 860 861 Output Parameter: 862 . ctx - the injection 863 864 Level: developer 865 866 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 867 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 868 869 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 870 871 @*/ 872 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 873 { 874 PetscErrorCode ierr; 875 876 PetscFunctionBegin; 877 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 878 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 879 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 880 PetscFunctionReturn(0); 881 } 882 883 #undef __FUNCT__ 884 #define __FUNCT__ "DMCreateColoring" 885 /*@C 886 DMCreateColoring - Gets coloring for a DMDA or DMComposite 887 888 Collective on DM 889 890 Input Parameter: 891 + dm - the DM object 892 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 893 - matype - either MATAIJ or MATBAIJ 894 895 Output Parameter: 896 . 
coloring - the coloring 897 898 Level: developer 899 900 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 901 902 @*/ 903 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 904 { 905 PetscErrorCode ierr; 906 907 PetscFunctionBegin; 908 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 909 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 910 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 911 PetscFunctionReturn(0); 912 } 913 914 #undef __FUNCT__ 915 #define __FUNCT__ "DMCreateMatrix" 916 /*@C 917 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 918 919 Collective on DM 920 921 Input Parameter: 922 + dm - the DM object 923 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 924 any type which inherits from one of these (such as MATAIJ) 925 926 Output Parameter: 927 . mat - the empty Jacobian 928 929 Level: beginner 930 931 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 932 do not need to do it yourself. 933 934 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 935 the nonzero pattern call DMDASetMatPreallocateOnly() 936 937 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 938 internally by PETSc. 939 940 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 941 the indices for the global numbering for DMDAs which is complicated. 
942 943 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 944 945 @*/ 946 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 947 { 948 PetscErrorCode ierr; 949 950 PetscFunctionBegin; 951 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 952 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 953 ierr = MatInitializePackage();CHKERRQ(ierr); 954 #endif 955 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 956 PetscValidPointer(mat,3); 957 if (dm->mattype) { 958 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 959 } else { 960 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 961 } 962 PetscFunctionReturn(0); 963 } 964 965 #undef __FUNCT__ 966 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 967 /*@ 968 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 969 preallocated but the nonzero structure and zero values will not be set. 970 971 Logically Collective on DMDA 972 973 Input Parameter: 974 + dm - the DM 975 - only - PETSC_TRUE if only want preallocation 976 977 Level: developer 978 .seealso DMCreateMatrix() 979 @*/ 980 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 981 { 982 PetscFunctionBegin; 983 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 984 dm->prealloc_only = only; 985 PetscFunctionReturn(0); 986 } 987 988 #undef __FUNCT__ 989 #define __FUNCT__ "DMGetWorkArray" 990 /*@C 991 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 992 993 Not Collective 994 995 Input Parameters: 996 + dm - the DM object 997 . count - The minium size 998 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 999 1000 Output Parameter: 1001 . 
array - the work array 1002 1003 Level: developer 1004 1005 .seealso DMDestroy(), DMCreate() 1006 @*/ 1007 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1008 { 1009 PetscErrorCode ierr; 1010 DMWorkLink link; 1011 size_t size; 1012 1013 PetscFunctionBegin; 1014 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1015 PetscValidPointer(mem,4); 1016 if (dm->workin) { 1017 link = dm->workin; 1018 dm->workin = dm->workin->next; 1019 } else { 1020 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 1021 } 1022 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1023 if (size*count > link->bytes) { 1024 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1025 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1026 link->bytes = size*count; 1027 } 1028 link->next = dm->workout; 1029 dm->workout = link; 1030 *(void**)mem = link->mem; 1031 PetscFunctionReturn(0); 1032 } 1033 1034 #undef __FUNCT__ 1035 #define __FUNCT__ "DMRestoreWorkArray" 1036 /*@C 1037 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1038 1039 Not Collective 1040 1041 Input Parameters: 1042 + dm - the DM object 1043 . count - The minium size 1044 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1045 1046 Output Parameter: 1047 . 
array - the work array 1048 1049 Level: developer 1050 1051 .seealso DMDestroy(), DMCreate() 1052 @*/ 1053 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1054 { 1055 DMWorkLink *p,link; 1056 1057 PetscFunctionBegin; 1058 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1059 PetscValidPointer(mem,4); 1060 for (p=&dm->workout; (link=*p); p=&link->next) { 1061 if (link->mem == *(void**)mem) { 1062 *p = link->next; 1063 link->next = dm->workin; 1064 dm->workin = link; 1065 *(void**)mem = NULL; 1066 PetscFunctionReturn(0); 1067 } 1068 } 1069 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1070 PetscFunctionReturn(0); 1071 } 1072 1073 #undef __FUNCT__ 1074 #define __FUNCT__ "DMSetNullSpaceConstructor" 1075 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1076 { 1077 PetscFunctionBegin; 1078 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1079 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1080 dm->nullspaceConstructors[field] = nullsp; 1081 PetscFunctionReturn(0); 1082 } 1083 1084 #undef __FUNCT__ 1085 #define __FUNCT__ "DMCreateFieldIS" 1086 /*@C 1087 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1088 1089 Not collective 1090 1091 Input Parameter: 1092 . dm - the DM object 1093 1094 Output Parameters: 1095 + numFields - The number of fields (or NULL if not requested) 1096 . fieldNames - The name for each field (or NULL if not requested) 1097 - fields - The global indices for each field (or NULL if not requested) 1098 1099 Level: intermediate 1100 1101 Notes: 1102 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1103 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1104 PetscFree(). 1105 1106 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1107 @*/ 1108 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1109 { 1110 PetscSection section, sectionGlobal; 1111 PetscErrorCode ierr; 1112 1113 PetscFunctionBegin; 1114 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1115 if (numFields) { 1116 PetscValidPointer(numFields,2); 1117 *numFields = 0; 1118 } 1119 if (fieldNames) { 1120 PetscValidPointer(fieldNames,3); 1121 *fieldNames = NULL; 1122 } 1123 if (fields) { 1124 PetscValidPointer(fields,4); 1125 *fields = NULL; 1126 } 1127 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1128 if (section) { 1129 PetscInt *fieldSizes, **fieldIndices; 1130 PetscInt nF, f, pStart, pEnd, p; 1131 1132 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1133 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1134 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1135 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1136 for (f = 0; f < nF; ++f) { 1137 fieldSizes[f] = 0; 1138 } 1139 for (p = pStart; p < pEnd; ++p) { 1140 PetscInt gdof; 1141 1142 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1143 if (gdof > 0) { 1144 for (f = 0; f < nF; ++f) { 1145 PetscInt fdof, fcdof; 1146 1147 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1148 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1149 fieldSizes[f] += fdof-fcdof; 1150 } 1151 } 1152 } 1153 for (f = 0; f < nF; ++f) { 1154 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1155 fieldSizes[f] = 0; 1156 } 1157 for (p = pStart; p < pEnd; ++p) { 1158 PetscInt gdof, goff; 
1159 1160 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1161 if (gdof > 0) { 1162 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1163 for (f = 0; f < nF; ++f) { 1164 PetscInt fdof, fcdof, fc; 1165 1166 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1167 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1168 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1169 fieldIndices[f][fieldSizes[f]] = goff++; 1170 } 1171 } 1172 } 1173 } 1174 if (numFields) *numFields = nF; 1175 if (fieldNames) { 1176 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1177 for (f = 0; f < nF; ++f) { 1178 const char *fieldName; 1179 1180 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1181 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1182 } 1183 } 1184 if (fields) { 1185 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1186 for (f = 0; f < nF; ++f) { 1187 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1188 } 1189 } 1190 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1191 } else if (dm->ops->createfieldis) { 1192 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1193 } 1194 PetscFunctionReturn(0); 1195 } 1196 1197 1198 #undef __FUNCT__ 1199 #define __FUNCT__ "DMCreateFieldDecomposition" 1200 /*@C 1201 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1202 corresponding to different fields: each IS contains the global indices of the dofs of the 1203 corresponding field. The optional list of DMs define the DM for each subproblem. 1204 Generalizes DMCreateFieldIS(). 1205 1206 Not collective 1207 1208 Input Parameter: 1209 . 
dm - the DM object 1210 1211 Output Parameters: 1212 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1213 . namelist - The name for each field (or NULL if not requested) 1214 . islist - The global indices for each field (or NULL if not requested) 1215 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1216 1217 Level: intermediate 1218 1219 Notes: 1220 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1221 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1222 and all of the arrays should be freed with PetscFree(). 1223 1224 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1225 @*/ 1226 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1227 { 1228 PetscErrorCode ierr; 1229 1230 PetscFunctionBegin; 1231 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1232 if (len) { 1233 PetscValidPointer(len,2); 1234 *len = 0; 1235 } 1236 if (namelist) { 1237 PetscValidPointer(namelist,3); 1238 *namelist = 0; 1239 } 1240 if (islist) { 1241 PetscValidPointer(islist,4); 1242 *islist = 0; 1243 } 1244 if (dmlist) { 1245 PetscValidPointer(dmlist,5); 1246 *dmlist = 0; 1247 } 1248 /* 1249 Is it a good idea to apply the following check across all impls? 1250 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1251 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1252 */ 1253 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1254 if (!dm->ops->createfielddecomposition) { 1255 PetscSection section; 1256 PetscInt numFields, f; 1257 1258 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1259 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1260 if (section && numFields && dm->ops->createsubdm) { 1261 *len = numFields; 1262 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1263 for (f = 0; f < numFields; ++f) { 1264 const char *fieldName; 1265 1266 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1267 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1268 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1269 } 1270 } else { 1271 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1272 /* By default there are no DMs associated with subproblems. */ 1273 if (dmlist) *dmlist = NULL; 1274 } 1275 } else { 1276 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1277 } 1278 PetscFunctionReturn(0); 1279 } 1280 1281 #undef __FUNCT__ 1282 #define __FUNCT__ "DMCreateSubDM" 1283 /*@C 1284 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1285 The fields are defined by DMCreateFieldIS(). 1286 1287 Not collective 1288 1289 Input Parameters: 1290 + dm - the DM object 1291 . numFields - number of fields in this subproblem 1292 - len - The number of subproblems in the decomposition (or NULL if not requested) 1293 1294 Output Parameters: 1295 . 
is - The global indices for the subproblem
- subdm - The DM for the subproblem

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  /* Pure dispatch: there is no type-independent fallback implementation */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains. The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
.
outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Zero every requested output so callers see well-defined values even when no decomposition exists */
  if (len)         {PetscValidPointer(len,2);         *len         = 0;}
  if (namelist)    {PetscValidPointer(namelist,3);    *namelist    = NULL;}
  if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;}
  if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;}
  if (dmlist)      {PetscValidPointer(dmlist,6);      *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecompositionScatters"
/*@C
  DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector

  Not collective

  Input Parameters:
+ dm - the DM object
. n - the number of subdomain scatters
- subdms - the local subdomains

  Output Parameters:
+ n - the number of scatters returned
. iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
. oscat - scatter from global vector to overlapping global vector entries on subdomain
- gscat - scatter from global vector to local vector on subdomain (fills in ghosts)

  Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
  of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets
  of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
  solution and residual data.
1406 1407 Level: developer 1408 1409 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1410 @*/ 1411 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1412 { 1413 PetscErrorCode ierr; 1414 1415 PetscFunctionBegin; 1416 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1417 PetscValidPointer(subdms,3); 1418 if (dm->ops->createddscatters) { 1419 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1420 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1421 PetscFunctionReturn(0); 1422 } 1423 1424 #undef __FUNCT__ 1425 #define __FUNCT__ "DMRefine" 1426 /*@ 1427 DMRefine - Refines a DM object 1428 1429 Collective on DM 1430 1431 Input Parameter: 1432 + dm - the DM object 1433 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1434 1435 Output Parameter: 1436 . 
dmf - the refined DM, or NULL

  Note: If no refinement was done, the return value is NULL

  Level: developer

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
@*/
PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
  if (*dmf) {
    /* The fine DM inherits the matrix creation routine, user context,
       matrix type and level counters from the coarse DM */
    (*dmf)->ops->creatematrix = dm->ops->creatematrix;

    ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);

    (*dmf)->ctx       = dm->ctx;
    (*dmf)->leveldown = dm->leveldown;
    (*dmf)->levelup   = dm->levelup + 1;

    ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
    /* Run user hooks registered with DMRefineHookAdd(), in registration order */
    for (link=dm->refinehook; link; link=link->next) {
      if (link->refinehook) {
        ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@C
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
.  refinehook - function to run when setting up a coarser level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.
fine - fine level DM to interpolate problem to 1489 - ctx - optional user-defined function context 1490 1491 Calling sequence for interphook: 1492 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1493 1494 + coarse - coarse level DM 1495 . interp - matrix interpolating a coarse-level solution to the finer grid 1496 . fine - fine level DM to update 1497 - ctx - optional user-defined function context 1498 1499 Level: advanced 1500 1501 Notes: 1502 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1503 1504 If this function is called multiple times, the hooks will be run in the order they are added. 1505 1506 This function is currently not available from Fortran. 1507 1508 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1509 @*/ 1510 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1511 { 1512 PetscErrorCode ierr; 1513 DMRefineHookLink link,*p; 1514 1515 PetscFunctionBegin; 1516 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1517 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1518 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1519 link->refinehook = refinehook; 1520 link->interphook = interphook; 1521 link->ctx = ctx; 1522 link->next = NULL; 1523 *p = link; 1524 PetscFunctionReturn(0); 1525 } 1526 1527 #undef __FUNCT__ 1528 #define __FUNCT__ "DMInterpolate" 1529 /*@ 1530 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1531 1532 Collective if any hooks are 1533 1534 Input Arguments: 1535 + coarse - coarser DM to use as a base 1536 . 
interp - interpolation matrix, apply using MatInterpolate()
-  fine - finer DM to update

   Level: developer

.seealso: DMRefineHookAdd(), MatInterpolate()
@*/
PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  /* Run the interphook callbacks registered on the fine DM, in registration order */
  for (link=fine->refinehook; link; link=link->next) {
    if (link->interphook) {
      ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
   DMGetRefineLevel - Gets the number of refinements that have generated this DM.

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  level - number of refinements

   Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->levelup;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.
l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
-  ctx - optional user-defined function context

   Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
   DMGlobalToLocalBegin - Begins updating local vectors from global vector

   Neighbor-wise Collective on DM

   Input Parameters:
+  dm - the DM object
.  g - the global vector
.
mode - INSERT_VALUES or ADD_VALUES
-  l - the local vector


   Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user beginhooks registered with DMGlobalToLocalHookAdd(), in registration order */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The PetscSF path only supports insertion, not accumulation */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; *_ALL_VALUES modes map to their plain counterparts */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
   DMGlobalToLocalEnd - Ends updating local vectors from global vector

   Neighbor-wise Collective on DM

   Input Parameters:
+  dm - the DM object
.  g - the global vector
.
mode - INSERT_VALUES or ADD_VALUES
-  l - the local vector


   Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The PetscSF path only supports insertion, not accumulation */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user endhooks registered with DMGlobalToLocalHookAdd() after communication completes */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
   DMLocalToGlobalBegin - updates global vectors from local vectors

   Neighbor-wise Collective on DM

   Input Parameters:
+  dm - the DM object
.  l - the local vector
.  mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
          base point.
-  g - the global vector

   Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation.
If you would like to simply add the non-ghosted values in the local
   array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
   global array to the final global array with VecAXPY().

   Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the PETSc insert mode onto the matching MPI reduction operation */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; *_ALL_VALUES modes map to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
   DMLocalToGlobalEnd - updates global vectors from local vectors

   Neighbor-wise Collective on DM

   Input Parameters:
+  dm - the DM object
.  l - the local vector
.
mode - INSERT_VALUES or ADD_VALUES
-  g - the global vector


   Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the PETSc insert mode onto the matching MPI reduction operation */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
   DMCoarsen - Coarsens a DM object

   Collective on DM

   Input Parameter:
+  dm - the DM object
-  comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

   Output Parameter:
.
dmc - the coarsened DM

   Level: developer

.seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
  /* The coarse DM inherits the matrix creation routine, user context,
     matrix type and level counters from the fine DM */
  (*dmc)->ops->creatematrix = dm->ops->creatematrix;
  ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
  (*dmc)->ctx       = dm->ctx;
  (*dmc)->levelup   = dm->levelup;
  (*dmc)->leveldown = dm->leveldown + 1;
  ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
  /* Run user hooks registered with DMCoarsenHookAdd(), in registration order */
  for (link=dm->coarsenhook; link; link=link->next) {
    if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHookAdd"
/*@C
   DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.
mrestrict - matrix restricting a fine-level solution to the coarse grid 1899 . rscale - scaling vector for restriction 1900 . inject - matrix restricting by injection 1901 . coarse - coarse level DM to update 1902 - ctx - optional user-defined function context 1903 1904 Level: advanced 1905 1906 Notes: 1907 This function is only needed if auxiliary data needs to be set up on coarse grids. 1908 1909 If this function is called multiple times, the hooks will be run in the order they are added. 1910 1911 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1912 extract the finest level information from its context (instead of from the SNES). 1913 1914 This function is currently not available from Fortran. 1915 1916 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1917 @*/ 1918 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1919 { 1920 PetscErrorCode ierr; 1921 DMCoarsenHookLink link,*p; 1922 1923 PetscFunctionBegin; 1924 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1925 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1926 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1927 link->coarsenhook = coarsenhook; 1928 link->restricthook = restricthook; 1929 link->ctx = ctx; 1930 link->next = NULL; 1931 *p = link; 1932 PetscFunctionReturn(0); 1933 } 1934 1935 #undef __FUNCT__ 1936 #define __FUNCT__ "DMRestrict" 1937 /*@ 1938 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1939 1940 Collective if any hooks are 1941 1942 Input Arguments: 1943 + fine - finer DM to use as a base 1944 . restrct - restriction matrix, apply using MatRestrict() 1945 . 
inject - injection matrix, also use MatRestrict() 1946 - coarse - coarer DM to update 1947 1948 Level: developer 1949 1950 .seealso: DMCoarsenHookAdd(), MatRestrict() 1951 @*/ 1952 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1953 { 1954 PetscErrorCode ierr; 1955 DMCoarsenHookLink link; 1956 1957 PetscFunctionBegin; 1958 for (link=fine->coarsenhook; link; link=link->next) { 1959 if (link->restricthook) { 1960 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 1961 } 1962 } 1963 PetscFunctionReturn(0); 1964 } 1965 1966 #undef __FUNCT__ 1967 #define __FUNCT__ "DMSubDomainHookAdd" 1968 /*@C 1969 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 1970 1971 Logically Collective 1972 1973 Input Arguments: 1974 + global - global DM 1975 . ddhook - function to run to pass data to the decomposition DM upon its creation 1976 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 1977 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1978 1979 1980 Calling sequence for ddhook: 1981 $ ddhook(DM global,DM block,void *ctx) 1982 1983 + global - global DM 1984 . block - block DM 1985 - ctx - optional user-defined function context 1986 1987 Calling sequence for restricthook: 1988 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 1989 1990 + global - global DM 1991 . out - scatter to the outer (with ghost and overlap points) block vector 1992 . in - scatter to block vector values only owned locally 1993 . block - block DM 1994 - ctx - optional user-defined function context 1995 1996 Level: advanced 1997 1998 Notes: 1999 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2000 2001 If this function is called multiple times, the hooks will be run in the order they are added. 

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx  = ctx;
  link->next = NULL;
  /* Append at the tail so hooks run in the order they were registered */
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+ global - global DM to use as a base
. oscatter - scatter from domain global vector filling subdomain global vector with overlap
. 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2038 - coarse - coarer DM to update 2039 2040 Level: developer 2041 2042 .seealso: DMCoarsenHookAdd(), MatRestrict() 2043 @*/ 2044 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2045 { 2046 PetscErrorCode ierr; 2047 DMSubDomainHookLink link; 2048 2049 PetscFunctionBegin; 2050 for (link=global->subdomainhook; link; link=link->next) { 2051 if (link->restricthook) { 2052 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2053 } 2054 } 2055 PetscFunctionReturn(0); 2056 } 2057 2058 #undef __FUNCT__ 2059 #define __FUNCT__ "DMGetCoarsenLevel" 2060 /*@ 2061 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2062 2063 Not Collective 2064 2065 Input Parameter: 2066 . dm - the DM object 2067 2068 Output Parameter: 2069 . level - number of coarsenings 2070 2071 Level: developer 2072 2073 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2074 2075 @*/ 2076 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2077 { 2078 PetscFunctionBegin; 2079 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2080 *level = dm->leveldown; 2081 PetscFunctionReturn(0); 2082 } 2083 2084 2085 2086 #undef __FUNCT__ 2087 #define __FUNCT__ "DMRefineHierarchy" 2088 /*@C 2089 DMRefineHierarchy - Refines a DM object, all levels at once 2090 2091 Collective on DM 2092 2093 Input Parameter: 2094 + dm - the DM object 2095 - nlevels - the number of levels of refinement 2096 2097 Output Parameter: 2098 . 
dmf - the refined DM hierarchy 2099 2100 Level: developer 2101 2102 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2103 2104 @*/ 2105 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2106 { 2107 PetscErrorCode ierr; 2108 2109 PetscFunctionBegin; 2110 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2111 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2112 if (nlevels == 0) PetscFunctionReturn(0); 2113 if (dm->ops->refinehierarchy) { 2114 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2115 } else if (dm->ops->refine) { 2116 PetscInt i; 2117 2118 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2119 for (i=1; i<nlevels; i++) { 2120 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2121 } 2122 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2123 PetscFunctionReturn(0); 2124 } 2125 2126 #undef __FUNCT__ 2127 #define __FUNCT__ "DMCoarsenHierarchy" 2128 /*@C 2129 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2130 2131 Collective on DM 2132 2133 Input Parameter: 2134 + dm - the DM object 2135 - nlevels - the number of levels of coarsening 2136 2137 Output Parameter: 2138 . 
dmc - the coarsened DM hierarchy

   Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    PetscInt i;

    /* Fall back to repeated single-level coarsening */
    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+ dmc - the coarse grid DM
- dmf - the fine grid DM

   Output Parameters:
. 
rest - the restriction matrix (transpose of the projection matrix) 2181 2182 Level: intermediate 2183 2184 .keywords: interpolation, restriction, multigrid 2185 2186 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2187 @*/ 2188 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2189 { 2190 PetscErrorCode ierr; 2191 2192 PetscFunctionBegin; 2193 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2194 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2195 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2196 PetscFunctionReturn(0); 2197 } 2198 2199 #undef __FUNCT__ 2200 #define __FUNCT__ "DMSetApplicationContextDestroy" 2201 /*@C 2202 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2203 2204 Not Collective 2205 2206 Input Parameters: 2207 + dm - the DM object 2208 - destroy - the destroy function 2209 2210 Level: intermediate 2211 2212 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2213 2214 @*/ 2215 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2216 { 2217 PetscFunctionBegin; 2218 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2219 dm->ctxdestroy = destroy; 2220 PetscFunctionReturn(0); 2221 } 2222 2223 #undef __FUNCT__ 2224 #define __FUNCT__ "DMSetApplicationContext" 2225 /*@ 2226 DMSetApplicationContext - Set a user context into a DM object 2227 2228 Not Collective 2229 2230 Input Parameters: 2231 + dm - the DM object 2232 - ctx - the user context 2233 2234 Level: intermediate 2235 2236 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2237 2238 @*/ 2239 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2240 { 2241 PetscFunctionBegin; 2242 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2243 dm->ctx = ctx; 2244 PetscFunctionReturn(0); 2245 } 2246 
2247 #undef __FUNCT__ 2248 #define __FUNCT__ "DMGetApplicationContext" 2249 /*@ 2250 DMGetApplicationContext - Gets a user context from a DM object 2251 2252 Not Collective 2253 2254 Input Parameter: 2255 . dm - the DM object 2256 2257 Output Parameter: 2258 . ctx - the user context 2259 2260 Level: intermediate 2261 2262 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2263 2264 @*/ 2265 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2266 { 2267 PetscFunctionBegin; 2268 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2269 *(void**)ctx = dm->ctx; 2270 PetscFunctionReturn(0); 2271 } 2272 2273 #undef __FUNCT__ 2274 #define __FUNCT__ "DMSetVariableBounds" 2275 /*@C 2276 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2277 2278 Logically Collective on DM 2279 2280 Input Parameter: 2281 + dm - the DM object 2282 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2283 2284 Level: intermediate 2285 2286 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2287 DMSetJacobian() 2288 2289 @*/ 2290 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2291 { 2292 PetscFunctionBegin; 2293 dm->ops->computevariablebounds = f; 2294 PetscFunctionReturn(0); 2295 } 2296 2297 #undef __FUNCT__ 2298 #define __FUNCT__ "DMHasVariableBounds" 2299 /*@ 2300 DMHasVariableBounds - does the DM object have a variable bounds function? 2301 2302 Not Collective 2303 2304 Input Parameter: 2305 . dm - the DM object to destroy 2306 2307 Output Parameter: 2308 . 
flg - PETSC_TRUE if the variable bounds function exists 2309 2310 Level: developer 2311 2312 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2313 2314 @*/ 2315 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2316 { 2317 PetscFunctionBegin; 2318 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2319 PetscFunctionReturn(0); 2320 } 2321 2322 #undef __FUNCT__ 2323 #define __FUNCT__ "DMComputeVariableBounds" 2324 /*@C 2325 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2326 2327 Logically Collective on DM 2328 2329 Input Parameters: 2330 + dm - the DM object to destroy 2331 - x - current solution at which the bounds are computed 2332 2333 Output parameters: 2334 + xl - lower bound 2335 - xu - upper bound 2336 2337 Level: intermediate 2338 2339 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2340 2341 @*/ 2342 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2343 { 2344 PetscErrorCode ierr; 2345 2346 PetscFunctionBegin; 2347 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2348 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2349 if (dm->ops->computevariablebounds) { 2350 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2351 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2352 PetscFunctionReturn(0); 2353 } 2354 2355 #undef __FUNCT__ 2356 #define __FUNCT__ "DMHasColoring" 2357 /*@ 2358 DMHasColoring - does the DM object have a method of providing a coloring? 2359 2360 Not Collective 2361 2362 Input Parameter: 2363 . dm - the DM object 2364 2365 Output Parameter: 2366 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 

   Level: developer

.seealso DMHasFunction(), DMCreateColoring()

@*/
PetscErrorCode DMHasColoring(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVec"
/*@C
   DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.

   Collective on DM

   Input Parameter:
+ dm - the DM object
- x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMSetVec(DM dm,Vec x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x) {
    /* Lazily create the cached vector, then copy so the caller's vector is not retained */
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  } else if (dm->x) {
    /* Passing NULL clears any previously cached vector */
    ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* Registry of available DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFunctionList DMList              = NULL;
PetscBool         DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMCOMPOSITE, or DMSLICED).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already the requested type */

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Destroy the previous implementation's private data before switching type */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  /* Returns a borrowed pointer to the type name; the caller must not free it */
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.
2497 2498 Collective on DM 2499 2500 Input Parameters: 2501 + dm - the DM 2502 - newtype - new DM type (use "same" for the same type) 2503 2504 Output Parameter: 2505 . M - pointer to new DM 2506 2507 Notes: 2508 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2509 the MPI communicator of the generated DM is always the same as the communicator 2510 of the input DM. 2511 2512 Level: intermediate 2513 2514 .seealso: DMCreate() 2515 @*/ 2516 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2517 { 2518 DM B; 2519 char convname[256]; 2520 PetscBool sametype, issame; 2521 PetscErrorCode ierr; 2522 2523 PetscFunctionBegin; 2524 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2525 PetscValidType(dm,1); 2526 PetscValidPointer(M,3); 2527 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2528 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2529 { 2530 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2531 2532 /* 2533 Order of precedence: 2534 1) See if a specialized converter is known to the current DM. 2535 2) See if a specialized converter is known to the desired DM class. 2536 3) See if a good general converter is registered for the desired class 2537 4) See if a good general converter is known for the current matrix. 2538 5) Use a really basic converter. 2539 */ 2540 2541 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2542 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2543 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2544 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2545 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2546 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2547 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2548 if (conv) goto foundconv; 2549 2550 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2551 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2552 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2553 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2554 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2555 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2556 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2557 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2558 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2559 if (conv) { 2560 ierr = DMDestroy(&B);CHKERRQ(ierr); 2561 goto foundconv; 2562 } 2563 2564 #if 0 2565 /* 3) See if a good general converter is registered for the desired class */ 2566 conv = B->ops->convertfrom; 2567 ierr = DMDestroy(&B);CHKERRQ(ierr); 2568 if (conv) goto foundconv; 2569 2570 /* 4) See if a good general converter is known for the current matrix */ 2571 if (dm->ops->convert) { 2572 conv = dm->ops->convert; 2573 } 2574 if (conv) goto foundconv; 2575 #endif 2576 2577 /* 5) Use a really basic converter. 
*/ 2578 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2579 2580 foundconv: 2581 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2582 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2583 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2584 } 2585 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2586 PetscFunctionReturn(0); 2587 } 2588 2589 /*--------------------------------------------------------------------------------------------------------------------*/ 2590 2591 #undef __FUNCT__ 2592 #define __FUNCT__ "DMRegister" 2593 /*@C 2594 DMRegister - Adds a new DM component implementation 2595 2596 Not Collective 2597 2598 Input Parameters: 2599 + name - The name of a new user-defined creation routine 2600 - create_func - The creation routine itself 2601 2602 Notes: 2603 DMRegister() may be called multiple times to add several user-defined DMs 2604 2605 2606 Sample usage: 2607 .vb 2608 DMRegister("my_da", MyDMCreate); 2609 .ve 2610 2611 Then, your DM type can be chosen with the procedural interface via 2612 .vb 2613 DMCreate(MPI_Comm, DM *); 2614 DMSetType(DM,"my_da"); 2615 .ve 2616 or at runtime via the option 2617 .vb 2618 -da_type my_da 2619 .ve 2620 2621 Level: advanced 2622 2623 .keywords: DM, register 2624 .seealso: DMRegisterAll(), DMRegisterDestroy() 2625 2626 @*/ 2627 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2628 { 2629 PetscErrorCode ierr; 2630 2631 PetscFunctionBegin; 2632 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2633 PetscFunctionReturn(0); 2634 } 2635 2636 #undef __FUNCT__ 2637 #define __FUNCT__ "DMLoad" 2638 /*@C 2639 DMLoad - Loads a DM that has been stored in binary with DMView(). 

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
          some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

  Level: intermediate

  Notes:
  The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
    has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The file records a classid followed by the type name; dispatch to the implementation's load */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support **********************************/

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellVector"
/* Debugging helper: prints the element vector x of length len for cell c to stdout (real parts only) */
PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < len; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellMatrix"
/* Debugging helper: prints the rows x cols element matrix A (row-major) for cell c to stdout (real parts only) */
PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
{
  PetscInt       f, g;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < rows; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr);
    for (g = 0; g < cols; ++g) {
      ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference the new section before destroying the old one, in case they are the same object */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    /* Propagate per-field names from the section onto the DM's field objects */
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection().
   */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazily build the global section (and matching PetscLayout) from the local section and point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2832 2833 Input Parameters: 2834 + dm - The DM 2835 - section - The PetscSection, or NULL 2836 2837 Level: intermediate 2838 2839 Note: Any existing Section will be destroyed 2840 2841 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2842 @*/ 2843 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2844 { 2845 PetscErrorCode ierr; 2846 2847 PetscFunctionBegin; 2848 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2849 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2850 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2851 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2852 dm->defaultGlobalSection = section; 2853 PetscFunctionReturn(0); 2854 } 2855 2856 #undef __FUNCT__ 2857 #define __FUNCT__ "DMGetDefaultSF" 2858 /*@ 2859 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2860 it is created from the default PetscSection layouts in the DM. 2861 2862 Input Parameter: 2863 . dm - The DM 2864 2865 Output Parameter: 2866 . sf - The PetscSF 2867 2868 Level: intermediate 2869 2870 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A negative root count means the SF graph has not been built yet */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No local section to build from: report no SF */
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): unlike DMSetPointSF(), no PetscObjectReference() is taken here, so the DM
     appears to assume ownership of sf -- confirm callers expect this transfer of ownership */
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. 
localSection - PetscSection describing the local data layout 2935 - globalSection - PetscSection describing the global data layout 2936 2937 Level: intermediate 2938 2939 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 2940 @*/ 2941 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 2942 { 2943 MPI_Comm comm; 2944 PetscLayout layout; 2945 const PetscInt *ranges; 2946 PetscInt *local; 2947 PetscSFNode *remote; 2948 PetscInt pStart, pEnd, p, nroots, nleaves, l; 2949 PetscMPIInt size, rank; 2950 PetscErrorCode ierr; 2951 2952 PetscFunctionBegin; 2953 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 2954 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2955 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2956 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2957 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 2958 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 2959 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 2960 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 2961 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 2962 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 2963 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 2964 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 2965 PetscInt gdof, gcdof; 2966 2967 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2968 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2969 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 2970 } 2971 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 2972 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 2973 for (p = pStart, l = 0; p < pEnd; ++p) { 2974 const PetscInt *cind; 2975 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 2976 2977 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 2978 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 2979 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 2980 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 2981 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2982 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2983 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 2984 if (!gdof) continue; /* Censored point */ 2985 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 2986 if (gsize != dof-cdof) { 2987 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 2988 cdof = 0; /* Ignore constraints */ 2989 } 2990 for (d = 0, c = 0; d < dof; ++d) { 2991 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 2992 local[l+d-c] = off+d; 2993 } 2994 if (gdof < 0) { 2995 for (d = 0; d < gsize; ++d, ++l) { 2996 PetscInt offset = -(goff+1) + d, r; 2997 2998 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 2999 if (r < 0) r = -(r+2); 3000 remote[l].rank = r; 3001 remote[l].index = offset - ranges[r]; 3002 } 3003 } else { 3004 for (d = 0; d < gsize; ++d, ++l) { 3005 remote[l].rank = rank; 3006 remote[l].index = goff+d - ranges[rank]; 3007 } 3008 } 3009 } 3010 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3011 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3012 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3013 PetscFunctionReturn(0); 3014 } 3015 3016 #undef __FUNCT__ 3017 #define __FUNCT__ "DMGetPointSF" 3018 /*@ 3019 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3020 3021 Input Parameter: 3022 . dm - The DM 3023 3024 Output Parameter: 3025 . sf - The PetscSF 3026 3027 Level: intermediate 3028 3029 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3030 3031 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3032 @*/ 3033 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3034 { 3035 PetscFunctionBegin; 3036 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3037 PetscValidPointer(sf, 2); 3038 *sf = dm->sf; 3039 PetscFunctionReturn(0); 3040 } 3041 3042 #undef __FUNCT__ 3043 #define __FUNCT__ "DMSetPointSF" 3044 /*@ 3045 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3046 3047 Input Parameters: 3048 + dm - The DM 3049 - sf - The PetscSF 3050 3051 Level: intermediate 3052 3053 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3054 @*/ 3055 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3056 { 3057 PetscErrorCode ierr; 3058 3059 PetscFunctionBegin; 3060 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3061 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3062 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3063 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3064 dm->sf = sf; 3065 PetscFunctionReturn(0); 3066 } 3067 3068 #undef __FUNCT__ 3069 #define __FUNCT__ "DMGetNumFields" 3070 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3071 { 3072 PetscFunctionBegin; 3073 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3074 PetscValidPointer(numFields, 2); 3075 *numFields = dm->numFields; 3076 PetscFunctionReturn(0); 3077 } 3078 3079 #undef __FUNCT__ 3080 #define __FUNCT__ "DMSetNumFields" 3081 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3082 { 3083 PetscInt f; 3084 PetscErrorCode ierr; 3085 3086 PetscFunctionBegin; 3087 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3088 for (f = 0; f < dm->numFields; ++f) { 3089 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3090 } 3091 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3092 dm->numFields = numFields; 3093 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3094 for (f = 0; f < dm->numFields; ++f) { 3095 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3096 } 3097 PetscFunctionReturn(0); 3098 } 3099 3100 #undef __FUNCT__ 3101 #define __FUNCT__ "DMGetField" 3102 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3103 { 3104 PetscFunctionBegin; 3105 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3106 PetscValidPointer(field, 2); 3107 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. 
Call DMSetNumFields()"); 3108 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3109 *field = dm->fields[f]; 3110 PetscFunctionReturn(0); 3111 } 3112 3113 #undef __FUNCT__ 3114 #define __FUNCT__ "DMRestrictHook_Coordinates" 3115 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3116 { 3117 DM dm_coord,dmc_coord; 3118 PetscErrorCode ierr; 3119 Vec coords,ccoords; 3120 VecScatter scat; 3121 PetscFunctionBegin; 3122 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3123 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3124 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3125 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3126 if (coords && !ccoords) { 3127 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3128 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3129 ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3130 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3131 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3132 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3133 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3134 } 3135 PetscFunctionReturn(0); 3136 } 3137 3138 #undef __FUNCT__ 3139 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3140 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3141 { 3142 DM dm_coord,subdm_coord; 3143 PetscErrorCode ierr; 3144 Vec coords,ccoords,clcoords; 3145 VecScatter *scat_i,*scat_g; 3146 PetscFunctionBegin; 3147 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3148 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3149 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3150 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3151 if (coords && !ccoords) { 3152 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3153 ierr = 
DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3154 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3155 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3156 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3157 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3158 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3159 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3160 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3161 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3162 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3163 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3164 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3165 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3166 ierr = PetscFree(scat_g);CHKERRQ(ierr); 3167 } 3168 PetscFunctionReturn(0); 3169 } 3170 3171 #undef __FUNCT__ 3172 #define __FUNCT__ "DMSetCoordinates" 3173 /*@ 3174 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3175 3176 Collective on DM 3177 3178 Input Parameters: 3179 + dm - the DM 3180 - c - coordinate vector 3181 3182 Note: 3183 The coordinates do include those for ghost points, which are in the local vector 3184 3185 Level: intermediate 3186 3187 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3188 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3189 @*/ 3190 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3191 { 3192 PetscErrorCode ierr; 3193 3194 PetscFunctionBegin; 3195 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3196 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3197 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3198 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3199 dm->coordinates = c; 3200 ierr = 
VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3201 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3202 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3203 PetscFunctionReturn(0); 3204 } 3205 3206 #undef __FUNCT__ 3207 #define __FUNCT__ "DMSetCoordinatesLocal" 3208 /*@ 3209 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3210 3211 Collective on DM 3212 3213 Input Parameters: 3214 + dm - the DM 3215 - c - coordinate vector 3216 3217 Note: 3218 The coordinates of ghost points can be set using DMSetCoordinates() 3219 followed by DMGetCoordinatesLocal(). This is intended to enable the 3220 setting of ghost coordinates outside of the domain. 3221 3222 Level: intermediate 3223 3224 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3225 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3226 @*/ 3227 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3228 { 3229 PetscErrorCode ierr; 3230 3231 PetscFunctionBegin; 3232 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3233 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3234 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3235 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3236 3237 dm->coordinatesLocal = c; 3238 3239 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3240 PetscFunctionReturn(0); 3241 } 3242 3243 #undef __FUNCT__ 3244 #define __FUNCT__ "DMGetCoordinates" 3245 /*@ 3246 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3247 3248 Not Collective 3249 3250 Input Parameter: 3251 . dm - the DM 3252 3253 Output Parameter: 3254 . c - global coordinate vector 3255 3256 Note: 3257 This is a borrowed reference, so the user should NOT destroy this vector 3258 3259 Each process has only the local coordinates (does NOT have the ghost coordinates). 
3260 3261 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3262 and (x_0,y_0,z_0,x_1,y_1,z_1...) 3263 3264 Level: intermediate 3265 3266 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3267 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3268 @*/ 3269 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3270 { 3271 PetscErrorCode ierr; 3272 3273 PetscFunctionBegin; 3274 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3275 PetscValidPointer(c,2); 3276 if (!dm->coordinates && dm->coordinatesLocal) { 3277 DM cdm = NULL; 3278 3279 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3280 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3281 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3282 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3283 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3284 } 3285 *c = dm->coordinates; 3286 PetscFunctionReturn(0); 3287 } 3288 3289 #undef __FUNCT__ 3290 #define __FUNCT__ "DMGetCoordinatesLocal" 3291 /*@ 3292 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3293 3294 Collective on DM 3295 3296 Input Parameter: 3297 . dm - the DM 3298 3299 Output Parameter: 3300 . c - coordinate vector 3301 3302 Note: 3303 This is a borrowed reference, so the user should NOT destroy this vector 3304 3305 Each process has the local and ghost coordinates 3306 3307 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3308 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3309 3310 Level: intermediate 3311 3312 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3313 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3314 @*/ 3315 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3316 { 3317 PetscErrorCode ierr; 3318 3319 PetscFunctionBegin; 3320 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3321 PetscValidPointer(c,2); 3322 if (!dm->coordinatesLocal && dm->coordinates) { 3323 DM cdm = NULL; 3324 3325 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3326 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3327 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3328 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3329 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3330 } 3331 *c = dm->coordinatesLocal; 3332 PetscFunctionReturn(0); 3333 } 3334 3335 #undef __FUNCT__ 3336 #define __FUNCT__ "DMGetCoordinateDM" 3337 /*@ 3338 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3339 3340 Collective on DM 3341 3342 Input Parameter: 3343 . dm - the DM 3344 3345 Output Parameter: 3346 . 
cdm - coordinate DM 3347 3348 Level: intermediate 3349 3350 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3351 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3352 @*/ 3353 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3354 { 3355 PetscErrorCode ierr; 3356 3357 PetscFunctionBegin; 3358 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3359 PetscValidPointer(cdm,2); 3360 if (!dm->coordinateDM) { 3361 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3362 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3363 } 3364 *cdm = dm->coordinateDM; 3365 PetscFunctionReturn(0); 3366 } 3367 3368 #undef __FUNCT__ 3369 #define __FUNCT__ "DMLocatePoints" 3370 /*@ 3371 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3372 3373 Not collective 3374 3375 Input Parameters: 3376 + dm - The DM 3377 - v - The Vec of points 3378 3379 Output Parameter: 3380 . cells - The local cell numbers for cells which contain the points 3381 3382 Level: developer 3383 3384 .keywords: point location, mesh 3385 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3386 @*/ 3387 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3388 { 3389 PetscErrorCode ierr; 3390 3391 PetscFunctionBegin; 3392 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3393 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3394 PetscValidPointer(cells,3); 3395 if (dm->ops->locatepoints) { 3396 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3397 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3398 PetscFunctionReturn(0); 3399 } 3400