#include <petscsnes.h>
#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;

#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  /* Zero the output first so callers never see a stale pointer on error */
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the packages DM depends on must be initialized explicitly */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start with an empty vtable; DMSetType() fills it in */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);

  v->ltogmap              = PETSC_NULL;
  v->ltogmapb             = PETSC_NULL;
  v->bs                   = 1;
  v->coloringtype         = IS_COLORING_GLOBAL;
  v->lf                   = PETSC_NULL;
  v->lj                   = PETSC_NULL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 appears to be the fixed capacity of nullspaceConstructors[];
       it must agree with the bound checked in DMSetNullSpaceConstructor() — confirm in dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective
on DMDA 74 75 Input Parameter: 76 + da - initial distributed array 77 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 78 79 Options Database: 80 . -dm_vec_type ctype 81 82 Level: intermediate 83 84 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 85 @*/ 86 PetscErrorCode DMSetVecType(DM da,VecType ctype) 87 { 88 PetscErrorCode ierr; 89 90 PetscFunctionBegin; 91 PetscValidHeaderSpecific(da,DM_CLASSID,1); 92 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 93 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 94 PetscFunctionReturn(0); 95 } 96 97 #undef __FUNCT__ 98 #define __FUNCT__ "VecGetDM" 99 /*@ 100 VecSetDM - Gets the DM defining the data layout of the vector 101 102 Not collective 103 104 Input Parameter: 105 . v - The Vec 106 107 Output Parameter: 108 . dm - The DM 109 110 Level: intermediate 111 112 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 113 @*/ 114 PetscErrorCode VecGetDM(Vec v, DM *dm) 115 { 116 PetscErrorCode ierr; 117 118 PetscFunctionBegin; 119 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 120 PetscValidPointer(dm,2); 121 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr); 122 PetscFunctionReturn(0); 123 } 124 125 #undef __FUNCT__ 126 #define __FUNCT__ "VecSetDM" 127 /*@ 128 VecSetDM - Sets the DM defining the data layout of the vector 129 130 Not collective 131 132 Input Parameters: 133 + v - The Vec 134 - dm - The DM 135 136 Level: intermediate 137 138 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 139 @*/ 140 PetscErrorCode VecSetDM(Vec v, DM dm) 141 { 142 PetscErrorCode ierr; 143 144 PetscFunctionBegin; 145 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 146 PetscValidHeaderSpecific(dm,DM_CLASSID,2); 147 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 148 PetscFunctionReturn(0); 149 } 150 151 #undef __FUNCT__ 152 #define 
__FUNCT__ "DMSetMatType" 153 /*@C 154 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 155 156 Logically Collective on DM 157 158 Input Parameter: 159 + dm - the DM context 160 . ctype - the matrix type 161 162 Options Database: 163 . -dm_mat_type ctype 164 165 Level: intermediate 166 167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 168 @*/ 169 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 170 { 171 PetscErrorCode ierr; 172 PetscFunctionBegin; 173 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 174 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 175 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 176 PetscFunctionReturn(0); 177 } 178 179 #undef __FUNCT__ 180 #define __FUNCT__ "DMSetOptionsPrefix" 181 /*@C 182 DMSetOptionsPrefix - Sets the prefix used for searching for all 183 DMDA options in the database. 184 185 Logically Collective on DMDA 186 187 Input Parameter: 188 + da - the DMDA context 189 - prefix - the prefix to prepend to all option names 190 191 Notes: 192 A hyphen (-) must NOT be given at the beginning of the prefix name. 193 The first character of all runtime options is AUTOMATICALLY the hyphen. 194 195 Level: advanced 196 197 .keywords: DMDA, set, options, prefix, database 198 199 .seealso: DMSetFromOptions() 200 @*/ 201 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 202 { 203 PetscErrorCode ierr; 204 205 PetscFunctionBegin; 206 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 207 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 208 PetscFunctionReturn(0); 209 } 210 211 #undef __FUNCT__ 212 #define __FUNCT__ "DMDestroy" 213 /*@ 214 DMDestroy - Destroys a vector packer or DMDA. 215 216 Collective on DM 217 218 Input Parameter: 219 . 
dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    /* (*dm)->x may hold a reference back to this DM via object composition */
    PetscObject obj;
    ierr = PetscObjectQuery((PetscObject)(*dm)->x,"DM",&obj);CHKERRQ(ierr);
    if (obj == (PetscObject)*dm) cnt++;
  }

  /* If outside references remain beyond the circular ones counted above, just drop ours */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    /* A checked-out local vector means the caller has a dangling borrow — refuse to destroy */
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* User context is destroyed only if the user registered a destructor for it */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* Type-specific teardown runs last so it can still see the generic fields */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Setup is idempotent: a second call is a no-op */
  if (dm->setupcalled) PetscFunctionReturn(0);
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.
-dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
  PetscErrorCode ierr;
  char           typeName[256] = MATAIJ;  /* scratch buffer for option values; MATAIJ is only the -dm_mat_type fallback */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
  /* Give the concrete DM type a chance to process its own options */
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Viewing happens after PetscOptionsEnd() so the DM is fully configured first */
  if (flg1) {
    ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  if (flg2) {
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (flg3) {
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (flg4) {
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.
449 450 Collective on DM 451 452 Input Parameter: 453 + dm - the DM object to view 454 - v - the viewer 455 456 Level: developer 457 458 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 459 460 @*/ 461 PetscErrorCode DMView(DM dm,PetscViewer v) 462 { 463 PetscErrorCode ierr; 464 465 PetscFunctionBegin; 466 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 467 if (!v) { 468 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 469 } 470 if (dm->ops->view) { 471 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 472 } 473 PetscFunctionReturn(0); 474 } 475 476 #undef __FUNCT__ 477 #define __FUNCT__ "DMCreateGlobalVector" 478 /*@ 479 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 480 481 Collective on DM 482 483 Input Parameter: 484 . dm - the DM object 485 486 Output Parameter: 487 . vec - the global vector 488 489 Level: beginner 490 491 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 492 493 @*/ 494 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 495 { 496 PetscErrorCode ierr; 497 498 PetscFunctionBegin; 499 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 500 if (dm->defaultSection) { 501 PetscSection gSection; 502 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 503 504 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 505 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 506 for(p = pStart; p < pEnd; ++p) { 507 PetscInt dof, cdof; 508 509 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 510 ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr); 511 if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof; 512 if ((dof > 0) && (dof-cdof != blockSize)) { 513 blockSize = 1; 514 break; 515 } 516 } 517 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 518 if (localSize%blockSize) 
SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize); 519 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 520 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 521 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 522 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 523 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 524 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 525 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 526 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 527 /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 528 } else { 529 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 530 } 531 PetscFunctionReturn(0); 532 } 533 534 #undef __FUNCT__ 535 #define __FUNCT__ "DMCreateLocalVector" 536 /*@ 537 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 538 539 Not Collective 540 541 Input Parameter: 542 . dm - the DM object 543 544 Output Parameter: 545 . 
vec - the local vector 546 547 Level: beginner 548 549 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 550 551 @*/ 552 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 553 { 554 PetscErrorCode ierr; 555 556 PetscFunctionBegin; 557 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 558 if (dm->defaultSection) { 559 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 560 561 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 562 for(p = pStart; p < pEnd; ++p) { 563 PetscInt dof; 564 565 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 566 if ((blockSize < 0) && (dof > 0)) blockSize = dof; 567 if ((dof > 0) && (dof != blockSize)) { 568 blockSize = 1; 569 break; 570 } 571 } 572 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 573 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 574 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 575 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 576 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 577 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 578 } else { 579 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 580 } 581 PetscFunctionReturn(0); 582 } 583 584 #undef __FUNCT__ 585 #define __FUNCT__ "DMGetLocalToGlobalMapping" 586 /*@ 587 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 588 589 Collective on DM 590 591 Input Parameter: 592 . dm - the DM that provides the mapping 593 594 Output Parameter: 595 . ltog - the mapping 596 597 Level: intermediate 598 599 Notes: 600 This mapping can then be used by VecSetLocalToGlobalMapping() or 601 MatSetLocalToGlobalMapping(). 
602 603 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 604 @*/ 605 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 606 { 607 PetscErrorCode ierr; 608 609 PetscFunctionBegin; 610 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 611 PetscValidPointer(ltog,2); 612 if (!dm->ltogmap) { 613 PetscSection section, sectionGlobal; 614 615 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 616 if (section) { 617 PetscInt *ltog; 618 PetscInt pStart, pEnd, size, p, l; 619 620 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 621 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 622 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 623 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 624 for (p = pStart, l = 0; p < pEnd; ++p) { 625 PetscInt dof, off, c; 626 627 /* Should probably use constrained dofs */ 628 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 629 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 630 for (c = 0; c < dof; ++c, ++l) { 631 ltog[l] = off+c; 632 } 633 } 634 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 635 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 636 } else { 637 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 638 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 639 } 640 } 641 *ltog = dm->ltogmap; 642 PetscFunctionReturn(0); 643 } 644 645 #undef __FUNCT__ 646 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 647 /*@ 648 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 649 650 Collective on DM 651 652 Input Parameter: 653 . da - the distributed array that provides the mapping 654 655 Output Parameter: 656 . 
ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Lazily build and cache the blocked mapping on first request */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* With block size 1 the blocked mapping is identical to the point mapping;
         share it and take an extra reference */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs < 1 means the block size has not been determined yet */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+   mat - the interpolation
-   vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
737 738 739 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 740 741 @*/ 742 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 743 { 744 PetscErrorCode ierr; 745 746 PetscFunctionBegin; 747 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 748 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 749 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 750 PetscFunctionReturn(0); 751 } 752 753 #undef __FUNCT__ 754 #define __FUNCT__ "DMCreateInjection" 755 /*@ 756 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 757 758 Collective on DM 759 760 Input Parameter: 761 + dm1 - the DM object 762 - dm2 - the second, finer DM object 763 764 Output Parameter: 765 . ctx - the injection 766 767 Level: developer 768 769 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 770 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 771 772 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 773 774 @*/ 775 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 776 { 777 PetscErrorCode ierr; 778 779 PetscFunctionBegin; 780 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 781 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 782 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 783 PetscFunctionReturn(0); 784 } 785 786 #undef __FUNCT__ 787 #define __FUNCT__ "DMCreateColoring" 788 /*@C 789 DMCreateColoring - Gets coloring for a DMDA or DMComposite 790 791 Collective on DM 792 793 Input Parameter: 794 + dm - the DM object 795 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 796 - matype - either MATAIJ or MATBAIJ 797 798 Output Parameter: 799 . 
coloring - the coloring 800 801 Level: developer 802 803 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 804 805 @*/ 806 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 807 { 808 PetscErrorCode ierr; 809 810 PetscFunctionBegin; 811 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 812 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 813 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 814 PetscFunctionReturn(0); 815 } 816 817 #undef __FUNCT__ 818 #define __FUNCT__ "DMCreateMatrix" 819 /*@C 820 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 821 822 Collective on DM 823 824 Input Parameter: 825 + dm - the DM object 826 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 827 any type which inherits from one of these (such as MATAIJ) 828 829 Output Parameter: 830 . mat - the empty Jacobian 831 832 Level: beginner 833 834 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 835 do not need to do it yourself. 836 837 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 838 the nonzero pattern call DMDASetMatPreallocateOnly() 839 840 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 841 internally by PETSc. 842 843 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 844 the indices for the global numbering for DMDAs which is complicated. 
845 846 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 847 848 @*/ 849 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 850 { 851 PetscErrorCode ierr; 852 853 PetscFunctionBegin; 854 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 855 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 856 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 857 #endif 858 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 859 PetscValidPointer(mat,3); 860 if (dm->mattype) { 861 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 862 } else { 863 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 864 } 865 PetscFunctionReturn(0); 866 } 867 868 #undef __FUNCT__ 869 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 870 /*@ 871 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 872 preallocated but the nonzero structure and zero values will not be set. 873 874 Logically Collective on DMDA 875 876 Input Parameter: 877 + dm - the DM 878 - only - PETSC_TRUE if only want preallocation 879 880 Level: developer 881 .seealso DMCreateMatrix() 882 @*/ 883 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 884 { 885 PetscFunctionBegin; 886 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 887 dm->prealloc_only = only; 888 PetscFunctionReturn(0); 889 } 890 891 #undef __FUNCT__ 892 #define __FUNCT__ "DMGetWorkArray" 893 /*@C 894 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 895 896 Not Collective 897 898 Input Parameters: 899 + dm - the DM object 900 . count - The minium size 901 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 902 903 Output Parameter: 904 . 
array - the work array 905 906 Level: developer 907 908 .seealso DMDestroy(), DMCreate() 909 @*/ 910 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 911 { 912 PetscErrorCode ierr; 913 DMWorkLink link; 914 size_t size; 915 916 PetscFunctionBegin; 917 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 918 PetscValidPointer(mem,4); 919 if (dm->workin) { 920 link = dm->workin; 921 dm->workin = dm->workin->next; 922 } else { 923 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 924 } 925 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 926 if (size*count > link->bytes) { 927 ierr = PetscFree(link->mem);CHKERRQ(ierr); 928 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 929 link->bytes = size*count; 930 } 931 link->next = dm->workout; 932 dm->workout = link; 933 *(void**)mem = link->mem; 934 PetscFunctionReturn(0); 935 } 936 937 #undef __FUNCT__ 938 #define __FUNCT__ "DMRestoreWorkArray" 939 /*@C 940 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 941 942 Not Collective 943 944 Input Parameters: 945 + dm - the DM object 946 . count - The minium size 947 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 948 949 Output Parameter: 950 . 
array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the link holding this buffer on the checked-out list and move it
     back to the free ("in") list; the caller's pointer is nulled out */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The buffer was not on the checked-out list: caller error */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetNullSpaceConstructor"
/* Registers a callback that builds the near-null space for the given field.
   NOTE(review): the bound 10 appears to match the fixed size of
   dm->nullspaceConstructors[] initialized in DMCreate() — confirm in dmimpl.h */
PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
  dm->nullspaceConstructors[field] = nullsp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().
1008 1009 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1010 @*/ 1011 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1012 { 1013 PetscSection section, sectionGlobal; 1014 PetscErrorCode ierr; 1015 1016 PetscFunctionBegin; 1017 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1018 if (numFields) { 1019 PetscValidPointer(numFields,2); 1020 *numFields = 0; 1021 } 1022 if (fieldNames) { 1023 PetscValidPointer(fieldNames,3); 1024 *fieldNames = PETSC_NULL; 1025 } 1026 if (fields) { 1027 PetscValidPointer(fields,4); 1028 *fields = PETSC_NULL; 1029 } 1030 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1031 if (section) { 1032 PetscInt *fieldSizes, **fieldIndices; 1033 PetscInt nF, f, pStart, pEnd, p; 1034 1035 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1036 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1037 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 1038 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1039 for (f = 0; f < nF; ++f) { 1040 fieldSizes[f] = 0; 1041 } 1042 for (p = pStart; p < pEnd; ++p) { 1043 PetscInt gdof; 1044 1045 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1046 if (gdof > 0) { 1047 for (f = 0; f < nF; ++f) { 1048 PetscInt fdof, fcdof; 1049 1050 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1051 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1052 fieldSizes[f] += fdof-fcdof; 1053 } 1054 } 1055 } 1056 for (f = 0; f < nF; ++f) { 1057 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1058 fieldSizes[f] = 0; 1059 } 1060 for (p = pStart; p < pEnd; ++p) { 1061 PetscInt gdof, goff; 1062 1063 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1064 if (gdof > 0) { 1065 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1066 for 
(f = 0; f < nF; ++f) { 1067 PetscInt fdof, fcdof, fc; 1068 1069 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1070 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1071 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1072 fieldIndices[f][fieldSizes[f]] = goff++; 1073 } 1074 } 1075 } 1076 } 1077 if (numFields) {*numFields = nF;} 1078 if (fieldNames) { 1079 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 1080 for (f = 0; f < nF; ++f) { 1081 const char *fieldName; 1082 1083 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1084 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 1085 } 1086 } 1087 if (fields) { 1088 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1089 for (f = 0; f < nF; ++f) { 1090 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1091 } 1092 } 1093 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1094 } else { 1095 if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 1096 } 1097 PetscFunctionReturn(0); 1098 } 1099 1100 #undef __FUNCT__ 1101 #define __FUNCT__ "DMCreateFieldDecompositionDM" 1102 /*@C 1103 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 1104 1105 Not Collective 1106 1107 Input Parameters: 1108 + dm - the DM object 1109 - name - the name of the field decomposition 1110 1111 Output Parameter: 1112 . 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 1113 1114 Level: advanced 1115 1116 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1117 @*/ 1118 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 1119 { 1120 PetscErrorCode ierr; 1121 1122 PetscFunctionBegin; 1123 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1124 PetscValidCharPointer(name,2); 1125 PetscValidPointer(ddm,3); 1126 *ddm = PETSC_NULL; 1127 if (dm->ops->createfielddecompositiondm) { 1128 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1129 } 1130 PetscFunctionReturn(0); 1131 } 1132 1133 #undef __FUNCT__ 1134 #define __FUNCT__ "DMCreateFieldDecomposition" 1135 /*@C 1136 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1137 corresponding to different fields: each IS contains the global indices of the dofs of the 1138 corresponding field. The optional list of DMs define the DM for each subproblem. 1139 Generalizes DMCreateFieldIS(). 1140 1141 Not collective 1142 1143 Input Parameter: 1144 . dm - the DM object 1145 1146 Output Parameters: 1147 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1148 . namelist - The name for each field (or PETSC_NULL if not requested) 1149 . islist - The global indices for each field (or PETSC_NULL if not requested) 1150 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1151 1152 Level: intermediate 1153 1154 Notes: 1155 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1156 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1157 and all of the arrays should be freed with PetscFree(). 
1158 1159 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1160 @*/ 1161 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1162 { 1163 PetscErrorCode ierr; 1164 1165 PetscFunctionBegin; 1166 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1167 if (len) {PetscValidPointer(len,2); *len = 0;} 1168 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1169 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1170 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1171 if (!dm->ops->createfielddecomposition) { 1172 PetscSection section; 1173 PetscInt numFields, f; 1174 1175 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1176 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1177 if (section && numFields && dm->ops->createsubdm) { 1178 *len = numFields; 1179 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1180 for (f = 0; f < numFields; ++f) { 1181 const char *fieldName; 1182 1183 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1184 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1185 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1186 } 1187 } else { 1188 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1189 /* By default there are no DMs associated with subproblems. */ 1190 if (dmlist) *dmlist = PETSC_NULL; 1191 } 1192 } 1193 else { 1194 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1195 } 1196 PetscFunctionReturn(0); 1197 } 1198 1199 #undef __FUNCT__ 1200 #define __FUNCT__ "DMCreateSubDM" 1201 /*@C 1202 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1203 The fields are defined by DMCreateFieldIS(). 
1204 1205 Not collective 1206 1207 Input Parameters: 1208 + dm - the DM object 1209 . numFields - number of fields in this subproblem 1210 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1211 1212 Output Parameters: 1213 . is - The global indices for the subproblem 1214 - dm - The DM for the subproblem 1215 1216 Level: intermediate 1217 1218 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1219 @*/ 1220 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1221 { 1222 PetscErrorCode ierr; 1223 1224 PetscFunctionBegin; 1225 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1226 PetscValidPointer(fields,3); 1227 if (is) {PetscValidPointer(is,4);} 1228 if (subdm) {PetscValidPointer(subdm,5);} 1229 if (dm->ops->createsubdm) { 1230 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1231 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1232 PetscFunctionReturn(0); 1233 } 1234 1235 #undef __FUNCT__ 1236 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1237 /*@C 1238 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1239 1240 Not Collective 1241 1242 Input Parameters: 1243 + dm - the DM object 1244 - name - the name of the subdomain decomposition 1245 1246 Output Parameter: 1247 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1248 1249 Level: advanced 1250 1251 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1252 @*/ 1253 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1254 { 1255 PetscErrorCode ierr; 1256 1257 PetscFunctionBegin; 1258 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1259 PetscValidCharPointer(name,2); 1260 PetscValidPointer(ddm,3); 1261 *ddm = PETSC_NULL; 1262 if (dm->ops->createdomaindecompositiondm) { 1263 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1264 } 1265 PetscFunctionReturn(0); 1266 } 1267 1268 #undef __FUNCT__ 1269 #define __FUNCT__ "DMCreateDomainDecomposition" 1270 /*@C 1271 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1272 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1273 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1274 define a nonoverlapping covering, while outer subdomains can overlap. 1275 The optional list of DMs define the DM for each subproblem. 1276 1277 Not collective 1278 1279 Input Parameter: 1280 . dm - the DM object 1281 1282 Output Parameters: 1283 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1284 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1285 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1286 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1287 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1288 1289 Level: intermediate 1290 1291 Notes: 1292 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1293 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1294 and all of the arrays should be freed with PetscFree(). 1295 1296 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1297 @*/ 1298 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1299 { 1300 PetscErrorCode ierr; 1301 1302 PetscFunctionBegin; 1303 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1304 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1305 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1306 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1307 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1308 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1309 if (dm->ops->createdomaindecomposition) { 1310 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1311 } 1312 PetscFunctionReturn(0); 1313 } 1314 1315 #undef __FUNCT__ 1316 #define __FUNCT__ "DMRefine" 1317 /*@ 1318 DMRefine - Refines a DM object 1319 1320 Collective on DM 1321 1322 Input Parameter: 1323 + dm - the DM object 1324 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1325 1326 Output Parameter: 1327 . 
dmf - the refined DM, or PETSC_NULL 1328 1329 Note: If no refinement was done, the return value is PETSC_NULL 1330 1331 Level: developer 1332 1333 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1334 @*/ 1335 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1336 { 1337 PetscErrorCode ierr; 1338 DMRefineHookLink link; 1339 1340 PetscFunctionBegin; 1341 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1342 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1343 if (*dmf) { 1344 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1345 (*dmf)->ops->initialguess = dm->ops->initialguess; 1346 (*dmf)->ops->function = dm->ops->function; 1347 (*dmf)->ops->functionj = dm->ops->functionj; 1348 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1349 (*dmf)->ops->jacobian = dm->ops->jacobian; 1350 } 1351 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1352 (*dmf)->ctx = dm->ctx; 1353 (*dmf)->leveldown = dm->leveldown; 1354 (*dmf)->levelup = dm->levelup + 1; 1355 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1356 for (link=dm->refinehook; link; link=link->next) { 1357 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1358 } 1359 } 1360 PetscFunctionReturn(0); 1361 } 1362 1363 #undef __FUNCT__ 1364 #define __FUNCT__ "DMRefineHookAdd" 1365 /*@ 1366 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1367 1368 Logically Collective 1369 1370 Input Arguments: 1371 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1372 . refinehook - function to run when setting up a coarser level 1373 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1374 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1375 1376 Calling sequence of refinehook: 1377 $ refinehook(DM coarse,DM fine,void *ctx); 1378 1379 + coarse - coarse level DM 1380 . fine - fine level DM to interpolate problem to 1381 - ctx - optional user-defined function context 1382 1383 Calling sequence for interphook: 1384 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1385 1386 + coarse - coarse level DM 1387 . interp - matrix interpolating a coarse-level solution to the finer grid 1388 . fine - fine level DM to update 1389 - ctx - optional user-defined function context 1390 1391 Level: advanced 1392 1393 Notes: 1394 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1395 1396 If this function is called multiple times, the hooks will be run in the order they are added. 1397 1398 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1399 @*/ 1400 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1401 { 1402 PetscErrorCode ierr; 1403 DMRefineHookLink link,*p; 1404 1405 PetscFunctionBegin; 1406 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1407 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1408 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1409 link->refinehook = refinehook; 1410 link->interphook = interphook; 1411 link->ctx = ctx; 1412 link->next = PETSC_NULL; 1413 *p = link; 1414 PetscFunctionReturn(0); 1415 } 1416 1417 #undef __FUNCT__ 1418 #define __FUNCT__ "DMInterpolate" 1419 /*@ 1420 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1421 1422 Collective if any 
hooks are 1423 1424 Input Arguments: 1425 + coarse - coarser DM to use as a base 1426 . restrct - interpolation matrix, apply using MatInterpolate() 1427 - fine - finer DM to update 1428 1429 Level: developer 1430 1431 .seealso: DMRefineHookAdd(), MatInterpolate() 1432 @*/ 1433 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1434 { 1435 PetscErrorCode ierr; 1436 DMRefineHookLink link; 1437 1438 PetscFunctionBegin; 1439 for (link=fine->refinehook; link; link=link->next) { 1440 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1441 } 1442 PetscFunctionReturn(0); 1443 } 1444 1445 #undef __FUNCT__ 1446 #define __FUNCT__ "DMGetRefineLevel" 1447 /*@ 1448 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1449 1450 Not Collective 1451 1452 Input Parameter: 1453 . dm - the DM object 1454 1455 Output Parameter: 1456 . level - number of refinements 1457 1458 Level: developer 1459 1460 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1461 1462 @*/ 1463 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1464 { 1465 PetscFunctionBegin; 1466 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1467 *level = dm->levelup; 1468 PetscFunctionReturn(0); 1469 } 1470 1471 #undef __FUNCT__ 1472 #define __FUNCT__ "DMGlobalToLocalBegin" 1473 /*@ 1474 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1475 1476 Neighbor-wise Collective on DM 1477 1478 Input Parameters: 1479 + dm - the DM object 1480 . g - the global vector 1481 . 
mode - INSERT_VALUES or ADD_VALUES 1482 - l - the local vector 1483 1484 1485 Level: beginner 1486 1487 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1488 1489 @*/ 1490 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1491 { 1492 PetscSF sf; 1493 PetscErrorCode ierr; 1494 1495 PetscFunctionBegin; 1496 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1497 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1498 if (sf) { 1499 PetscScalar *lArray, *gArray; 1500 1501 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1502 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1503 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1504 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1505 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1506 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1507 } else { 1508 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1509 } 1510 PetscFunctionReturn(0); 1511 } 1512 1513 #undef __FUNCT__ 1514 #define __FUNCT__ "DMGlobalToLocalEnd" 1515 /*@ 1516 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1517 1518 Neighbor-wise Collective on DM 1519 1520 Input Parameters: 1521 + dm - the DM object 1522 . g - the global vector 1523 . 
mode - INSERT_VALUES or ADD_VALUES 1524 - l - the local vector 1525 1526 1527 Level: beginner 1528 1529 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1530 1531 @*/ 1532 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1533 { 1534 PetscSF sf; 1535 PetscErrorCode ierr; 1536 PetscScalar *lArray, *gArray; 1537 1538 PetscFunctionBegin; 1539 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1540 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1541 if (sf) { 1542 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1543 1544 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1545 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1546 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1547 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1548 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1549 } else { 1550 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1551 } 1552 PetscFunctionReturn(0); 1553 } 1554 1555 #undef __FUNCT__ 1556 #define __FUNCT__ "DMLocalToGlobalBegin" 1557 /*@ 1558 DMLocalToGlobalBegin - updates global vectors from local vectors 1559 1560 Neighbor-wise Collective on DM 1561 1562 Input Parameters: 1563 + dm - the DM object 1564 . l - the local vector 1565 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1566 base point. 1567 - - the global vector 1568 1569 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
    If you would like to simply add the non-ghosted values in the local
    array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
    global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op op;
    PetscScalar *lArray, *gArray;

    /* Map the InsertMode onto the MPI reduction used by the star-forest reduce.
       NOTE: MPI_REPLACE requires an MPI-2 implementation; without it INSERT_* errors out
       (the SETERRQ returns, so there is no actual fallthrough into the ADD cases). */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation's scatter; *_ALL_VALUES modes map to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op op;
    PetscScalar *lArray, *gArray;

    /* Must select the same MPI_Op as the matching DMLocalToGlobalBegin() call */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobianDefault"
/*@
    DMComputeJacobianDefault - computes the Jacobian by finite-difference coloring (MatFDColoringApply() with the
    coloring stored on the DM), used when no Jacobian computer is provided

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   x - location to compute Jacobian at; may be ignored for linear problems
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - matrix structure flag; always set to SAME_NONZERO_PATTERN

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *stflag = SAME_NONZERO_PATTERN;
  /* Fill B by finite differencing with the coloring stored in dm->fd */
  ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr);
  if (A != B) {
    /* A was not touched by the coloring; flush its assembly state */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
    DMCoarsen - Coarsens a DM object

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

    Output Parameter:
.
dmc - the coarsened DM 1718 1719 Level: developer 1720 1721 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1722 1723 @*/ 1724 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1725 { 1726 PetscErrorCode ierr; 1727 DMCoarsenHookLink link; 1728 1729 PetscFunctionBegin; 1730 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1731 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1732 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1733 (*dmc)->ops->initialguess = dm->ops->initialguess; 1734 (*dmc)->ops->function = dm->ops->function; 1735 (*dmc)->ops->functionj = dm->ops->functionj; 1736 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1737 (*dmc)->ops->jacobian = dm->ops->jacobian; 1738 } 1739 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1740 (*dmc)->ctx = dm->ctx; 1741 (*dmc)->levelup = dm->levelup; 1742 (*dmc)->leveldown = dm->leveldown + 1; 1743 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1744 for (link=dm->coarsenhook; link; link=link->next) { 1745 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1746 } 1747 PetscFunctionReturn(0); 1748 } 1749 1750 #undef __FUNCT__ 1751 #define __FUNCT__ "DMCoarsenHookAdd" 1752 /*@ 1753 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1754 1755 Logically Collective 1756 1757 Input Arguments: 1758 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1759 . coarsenhook - function to run when setting up a coarser level 1760 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1761 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1762 1763 Calling sequence of coarsenhook: 1764 $ coarsenhook(DM fine,DM coarse,void *ctx); 1765 1766 + fine - fine level DM 1767 . 
coarse - coarse level DM to restrict problem to 1768 - ctx - optional user-defined function context 1769 1770 Calling sequence for restricthook: 1771 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1772 1773 + fine - fine level DM 1774 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1775 . rscale - scaling vector for restriction 1776 . inject - matrix restricting by injection 1777 . coarse - coarse level DM to update 1778 - ctx - optional user-defined function context 1779 1780 Level: advanced 1781 1782 Notes: 1783 This function is only needed if auxiliary data needs to be set up on coarse grids. 1784 1785 If this function is called multiple times, the hooks will be run in the order they are added. 1786 1787 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1788 extract the finest level information from its context (instead of from the SNES). 1789 1790 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1791 @*/ 1792 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1793 { 1794 PetscErrorCode ierr; 1795 DMCoarsenHookLink link,*p; 1796 1797 PetscFunctionBegin; 1798 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1799 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1800 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1801 link->coarsenhook = coarsenhook; 1802 link->restricthook = restricthook; 1803 link->ctx = ctx; 1804 link->next = PETSC_NULL; 1805 *p = link; 1806 PetscFunctionReturn(0); 1807 } 1808 1809 #undef __FUNCT__ 1810 #define __FUNCT__ "DMRestrict" 1811 /*@ 1812 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1813 1814 Collective 
if any hooks are 1815 1816 Input Arguments: 1817 + fine - finer DM to use as a base 1818 . restrct - restriction matrix, apply using MatRestrict() 1819 . inject - injection matrix, also use MatRestrict() 1820 - coarse - coarer DM to update 1821 1822 Level: developer 1823 1824 .seealso: DMCoarsenHookAdd(), MatRestrict() 1825 @*/ 1826 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1827 { 1828 PetscErrorCode ierr; 1829 DMCoarsenHookLink link; 1830 1831 PetscFunctionBegin; 1832 for (link=fine->coarsenhook; link; link=link->next) { 1833 if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);} 1834 } 1835 PetscFunctionReturn(0); 1836 } 1837 1838 #undef __FUNCT__ 1839 #define __FUNCT__ "DMBlockRestrictHookAdd" 1840 /*@ 1841 DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1842 1843 Logically Collective 1844 1845 Input Arguments: 1846 + global - global DM 1847 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 1848 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1849 1850 Calling sequence for restricthook: 1851 $ restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1852 1853 + global - global DM 1854 . out - scatter to the outer (with ghost and overlap points) block vector 1855 . in - scatter to block vector values only owned locally 1856 . block - block DM (may just be a shell if the global DM is passed in correctly) 1857 - ctx - optional user-defined function context 1858 1859 Level: advanced 1860 1861 Notes: 1862 This function is only needed if auxiliary data needs to be set up on coarse grids. 1863 1864 If this function is called multiple times, the hooks will be run in the order they are added. 
1865 1866 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1867 extract the finest level information from its context (instead of from the SNES). 1868 1869 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1870 @*/ 1871 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 1872 { 1873 PetscErrorCode ierr; 1874 DMBlockRestrictHookLink link,*p; 1875 1876 PetscFunctionBegin; 1877 PetscValidHeaderSpecific(global,DM_CLASSID,1); 1878 for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1879 ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr); 1880 link->restricthook = restricthook; 1881 link->ctx = ctx; 1882 link->next = PETSC_NULL; 1883 *p = link; 1884 PetscFunctionReturn(0); 1885 } 1886 1887 #undef __FUNCT__ 1888 #define __FUNCT__ "DMBlockRestrict" 1889 /*@ 1890 DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd() 1891 1892 Collective if any hooks are 1893 1894 Input Arguments: 1895 + fine - finer DM to use as a base 1896 . restrct - restriction matrix, apply using MatRestrict() 1897 . 
inject - injection matrix, also use MatRestrict() 1898 - coarse - coarer DM to update 1899 1900 Level: developer 1901 1902 .seealso: DMCoarsenHookAdd(), MatRestrict() 1903 @*/ 1904 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block) 1905 { 1906 PetscErrorCode ierr; 1907 DMBlockRestrictHookLink link; 1908 1909 PetscFunctionBegin; 1910 for (link=global->blockrestricthook; link; link=link->next) { 1911 if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);} 1912 } 1913 PetscFunctionReturn(0); 1914 } 1915 1916 #undef __FUNCT__ 1917 #define __FUNCT__ "DMGetCoarsenLevel" 1918 /*@ 1919 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1920 1921 Not Collective 1922 1923 Input Parameter: 1924 . dm - the DM object 1925 1926 Output Parameter: 1927 . level - number of coarsenings 1928 1929 Level: developer 1930 1931 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1932 1933 @*/ 1934 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 1935 { 1936 PetscFunctionBegin; 1937 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1938 *level = dm->leveldown; 1939 PetscFunctionReturn(0); 1940 } 1941 1942 1943 1944 #undef __FUNCT__ 1945 #define __FUNCT__ "DMRefineHierarchy" 1946 /*@C 1947 DMRefineHierarchy - Refines a DM object, all levels at once 1948 1949 Collective on DM 1950 1951 Input Parameter: 1952 + dm - the DM object 1953 - nlevels - the number of levels of refinement 1954 1955 Output Parameter: 1956 . 
dmf - the refined DM hierarchy 1957 1958 Level: developer 1959 1960 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1961 1962 @*/ 1963 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 1964 { 1965 PetscErrorCode ierr; 1966 1967 PetscFunctionBegin; 1968 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1969 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1970 if (nlevels == 0) PetscFunctionReturn(0); 1971 if (dm->ops->refinehierarchy) { 1972 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 1973 } else if (dm->ops->refine) { 1974 PetscInt i; 1975 1976 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 1977 for (i=1; i<nlevels; i++) { 1978 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 1979 } 1980 } else { 1981 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 1982 } 1983 PetscFunctionReturn(0); 1984 } 1985 1986 #undef __FUNCT__ 1987 #define __FUNCT__ "DMCoarsenHierarchy" 1988 /*@C 1989 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 1990 1991 Collective on DM 1992 1993 Input Parameter: 1994 + dm - the DM object 1995 - nlevels - the number of levels of coarsening 1996 1997 Output Parameter: 1998 . 
dmc - the coarsened DM hierarchy

    Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0); /* nothing to do; dmc may legitimately be unused */
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    /* Implementation supplies an all-at-once coarsening */
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* Fall back to coarsening one level at a time */
    PetscInt i;

    ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
    }
  } else {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.
rest - the restriction matrix (transpose of the projection matrix) 2043 2044 Level: intermediate 2045 2046 .keywords: interpolation, restriction, multigrid 2047 2048 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2049 @*/ 2050 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2051 { 2052 PetscErrorCode ierr; 2053 2054 PetscFunctionBegin; 2055 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2056 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2057 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2058 PetscFunctionReturn(0); 2059 } 2060 2061 #undef __FUNCT__ 2062 #define __FUNCT__ "DMSetApplicationContextDestroy" 2063 /*@C 2064 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2065 2066 Not Collective 2067 2068 Input Parameters: 2069 + dm - the DM object 2070 - destroy - the destroy function 2071 2072 Level: intermediate 2073 2074 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2075 2076 @*/ 2077 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2078 { 2079 PetscFunctionBegin; 2080 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2081 dm->ctxdestroy = destroy; 2082 PetscFunctionReturn(0); 2083 } 2084 2085 #undef __FUNCT__ 2086 #define __FUNCT__ "DMSetApplicationContext" 2087 /*@ 2088 DMSetApplicationContext - Set a user context into a DM object 2089 2090 Not Collective 2091 2092 Input Parameters: 2093 + dm - the DM object 2094 - ctx - the user context 2095 2096 Level: intermediate 2097 2098 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2099 2100 @*/ 2101 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2102 { 2103 PetscFunctionBegin; 2104 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2105 dm->ctx = ctx; 2106 PetscFunctionReturn(0); 2107 } 2108 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context; pass the ADDRESS of your context pointer (it is
          written through as a void**)

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMSetApplicationContext()

@*/
PetscErrorCode DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really a void** despite the void* prototype: store the saved context through it */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetInitialGuess"
/*@C
    DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute the initial guess

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->initialguess = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFunction"
/*@C
    DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute (use PETSC_NULL to cancel a previous function that was set)

    Level: intermediate

    Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite
differences if no Jacobian
    computation routine is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->function = f;
  if (f) {
    /* also record f as the function used for finite-difference Jacobians;
       note functionj is deliberately NOT cleared when f is PETSC_NULL (see Notes) */
    dm->ops->functionj = f;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
    DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute the matrix entries

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVariableBounds"
/*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2219 2220 Logically Collective on DM 2221 2222 Input Parameter: 2223 + dm - the DM object 2224 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2225 2226 Level: intermediate 2227 2228 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2229 DMSetJacobian() 2230 2231 @*/ 2232 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2233 { 2234 PetscFunctionBegin; 2235 dm->ops->computevariablebounds = f; 2236 PetscFunctionReturn(0); 2237 } 2238 2239 #undef __FUNCT__ 2240 #define __FUNCT__ "DMHasVariableBounds" 2241 /*@ 2242 DMHasVariableBounds - does the DM object have a variable bounds function? 2243 2244 Not Collective 2245 2246 Input Parameter: 2247 . dm - the DM object to destroy 2248 2249 Output Parameter: 2250 . flg - PETSC_TRUE if the variable bounds function exists 2251 2252 Level: developer 2253 2254 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2255 2256 @*/ 2257 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2258 { 2259 PetscFunctionBegin; 2260 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2261 PetscFunctionReturn(0); 2262 } 2263 2264 #undef __FUNCT__ 2265 #define __FUNCT__ "DMComputeVariableBounds" 2266 /*@C 2267 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2268 2269 Logically Collective on DM 2270 2271 Input Parameters: 2272 + dm - the DM object to destroy 2273 - x - current solution at which the bounds are computed 2274 2275 Output parameters: 2276 + xl - lower bound 2277 - xu - upper bound 2278 2279 Level: intermediate 2280 2281 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2282 DMSetFunction(), DMSetVariableBounds() 2283 2284 @*/ 2285 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2286 { 2287 PetscErrorCode ierr; 2288 PetscFunctionBegin; 2289 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2290 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2291 if (dm->ops->computevariablebounds) { 2292 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2293 } 2294 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2295 PetscFunctionReturn(0); 2296 } 2297 2298 #undef __FUNCT__ 2299 #define __FUNCT__ "DMComputeInitialGuess" 2300 /*@ 2301 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2302 2303 Collective on DM 2304 2305 Input Parameter: 2306 + dm - the DM object to destroy 2307 - x - the vector to hold the initial guess values 2308 2309 Level: developer 2310 2311 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2312 2313 @*/ 2314 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2315 { 2316 PetscErrorCode ierr; 2317 PetscFunctionBegin; 2318 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2319 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2320 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2321 PetscFunctionReturn(0); 2322 } 2323 2324 #undef __FUNCT__ 2325 #define __FUNCT__ "DMHasInitialGuess" 2326 /*@ 2327 
    DMHasInitialGuess - does the DM object have an initial guess function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasFunction"
/*@
    DMHasFunction - does the DM object have a function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMHasFunction(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasJacobian"
/*@
    DMHasJacobian - does the DM object have a matrix function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.
flg - PETSC_TRUE if function exists 2388 2389 Level: developer 2390 2391 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2392 2393 @*/ 2394 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2395 { 2396 PetscFunctionBegin; 2397 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2398 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2399 PetscFunctionReturn(0); 2400 } 2401 2402 #undef __FUNCT__ 2403 #define __FUNCT__ "DMHasColoring" 2404 /*@ 2405 DMHasColoring - does the DM object have a method of providing a coloring? 2406 2407 Not Collective 2408 2409 Input Parameter: 2410 . dm - the DM object 2411 2412 Output Parameter: 2413 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2414 2415 Level: developer 2416 2417 .seealso DMHasFunction(), DMCreateColoring() 2418 2419 @*/ 2420 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2421 { 2422 PetscFunctionBegin; 2423 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2424 PetscFunctionReturn(0); 2425 } 2426 2427 #undef __FUNCT__ 2428 #define __FUNCT__ "DMSetVec" 2429 /*@C 2430 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2431 2432 Collective on DM 2433 2434 Input Parameter: 2435 + dm - the DM object 2436 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2437 2438 Level: developer 2439 2440 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2441 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2442 2443 @*/ 2444 PetscErrorCode DMSetVec(DM dm,Vec x) 2445 { 2446 PetscErrorCode ierr; 2447 PetscFunctionBegin; 2448 if (x) { 2449 if (!dm->x) { 2450 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2451 } 2452 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2453 } 2454 else if (dm->x) { 2455 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2456 } 2457 PetscFunctionReturn(0); 2458 } 2459 2460 2461 #undef __FUNCT__ 2462 #define __FUNCT__ "DMComputeFunction" 2463 /*@ 2464 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2465 2466 Collective on DM 2467 2468 Input Parameter: 2469 + dm - the DM object to destroy 2470 . x - the location where the function is evaluationed, may be ignored for linear problems 2471 - b - the vector to hold the right hand side entries 2472 2473 Level: developer 2474 2475 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2476 DMSetJacobian() 2477 2478 @*/ 2479 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2480 { 2481 PetscErrorCode ierr; 2482 PetscFunctionBegin; 2483 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2484 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2485 PetscStackPush("DM user function"); 2486 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2487 PetscStackPop; 2488 PetscFunctionReturn(0); 2489 } 2490 2491 2492 2493 #undef __FUNCT__ 2494 #define __FUNCT__ "DMComputeJacobian" 2495 /*@ 2496 DMComputeJacobian - compute the matrix entries for the solver 2497 2498 Collective on DM 2499 2500 Input Parameter: 2501 + dm - the DM object 2502 . 
x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* No user Jacobian: build a finite-difference Jacobian from the function set
       with DMSetFunction() (stored in functionj), using a matrix coloring. The
       MatFDColoring is created once, cached on the DM, and reused via
       DMComputeJacobianDefault on subsequent calls. */
    ISColoring    coloring;
    MatFDColoring fd;
    MatType       mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed -- NOTE(review): presumably balances a reference
       taken by one of the calls above; verify against DMDestroy before changing */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x; /* fall back to the vector cached by DMSetVec()/previous calls */
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    /* the user callback is only required to assemble B; finish assembling A here */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


/* Registry of available DM implementations, filled by DMRegister()/DMRegisterAll() */
PetscFList DMList              = PETSC_NULL;
PetscBool  DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMSLICED, or DMCOMPOSITE).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already of the requested type: nothing to do */

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* tear down any previous implementation before running the new type's constructor */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name; /* borrowed reference; caller must not free */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but not currently consulted below;
     presumably intended for a "same type" fast path -- confirm before removing */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    /* a temporary object of the target type is created only to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): on this error path B (created in step 2) is not destroyed -- leaks on failure */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - See DMRegisterDynamic()

  Level: advanced
@*/
PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char           fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* registry entries are stored as "path:name" so the symbol can be dynamically loaded */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* allow the registry to be rebuilt by a later DMRegisterAll() */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
#include <mex.h>

/* Names of the MATLAB callback routines, stored as the DM's application context */
typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;

#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
                              DMSetFunctionMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;
  int             nlhs = 1,nrhs = 4;
  mxArray         *plhs[1],*prhs[4];
  long long int   lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* smuggle the PETSc object pointers to MATLAB: copy each pointer's bits into a
     64-bit integer, then pass it as a double scalar (decoded on the MATLAB side) */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)ly);
  prhs[3] = mxCreateString(sctx->funcname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* the MATLAB routine returns its error code as a double scalar */
  ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetFunctionMatlab"
/*
   DMSetFunctionMatlab - Sets the function evaluation routine

*/
PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed: neither the context nor funcname is ever freed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
                              DMSetJacobianMatlab().
2855 2856 For linear problems x is null 2857 2858 .seealso: DMSetFunction(), DMGetFunction() 2859 */ 2860 PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str) 2861 { 2862 PetscErrorCode ierr; 2863 DMMatlabContext *sctx; 2864 int nlhs = 2,nrhs = 5; 2865 mxArray *plhs[2],*prhs[5]; 2866 long long int lx = 0,lA = 0,lB = 0,ls = 0; 2867 2868 PetscFunctionBegin; 2869 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2870 PetscValidHeaderSpecific(A,MAT_CLASSID,3); 2871 2872 /* call MATLAB function in ctx with arguments x, A, and B */ 2873 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2874 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2875 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2876 ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr); 2877 ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr); 2878 prhs[0] = mxCreateDoubleScalar((double)ls); 2879 prhs[1] = mxCreateDoubleScalar((double)lx); 2880 prhs[2] = mxCreateDoubleScalar((double)lA); 2881 prhs[3] = mxCreateDoubleScalar((double)lB); 2882 prhs[4] = mxCreateString(sctx->jacname); 2883 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr); 2884 *str = (MatStructure) mxGetScalar(plhs[0]); 2885 ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr); 2886 mxDestroyArray(prhs[0]); 2887 mxDestroyArray(prhs[1]); 2888 mxDestroyArray(prhs[2]); 2889 mxDestroyArray(prhs[3]); 2890 mxDestroyArray(prhs[4]); 2891 mxDestroyArray(plhs[0]); 2892 mxDestroyArray(plhs[1]); 2893 PetscFunctionReturn(0); 2894 } 2895 2896 2897 #undef __FUNCT__ 2898 #define __FUNCT__ "DMSetJacobianMatlab" 2899 /* 2900 DMSetJacobianMatlab - Sets the Jacobian function evaluation routine 2901 2902 */ 2903 PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func) 2904 { 2905 PetscErrorCode ierr; 2906 DMMatlabContext *sctx; 2907 2908 PetscFunctionBegin; 2909 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2910 /* currently sctx is memory bleed */ 2911 ierr = 
DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2912 if (!sctx) { 2913 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2914 } 2915 ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr); 2916 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2917 ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr); 2918 PetscFunctionReturn(0); 2919 } 2920 #endif 2921 2922 #undef __FUNCT__ 2923 #define __FUNCT__ "DMLoad" 2924 /*@C 2925 DMLoad - Loads a DM that has been stored in binary or HDF5 format 2926 with DMView(). 2927 2928 Collective on PetscViewer 2929 2930 Input Parameters: 2931 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2932 some related function before a call to DMLoad(). 2933 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2934 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2935 2936 Level: intermediate 2937 2938 Notes: 2939 Defaults to the DM DA. 2940 2941 Notes for advanced users: 2942 Most users should not need to know the details of the binary storage 2943 format, since DMLoad() and DMView() completely hide these details. 2944 But for anyone who's interested, the standard binary matrix storage 2945 format is 2946 .vb 2947 has not yet been determined 2948 .ve 2949 2950 In addition, PETSc automatically does the byte swapping for 2951 machines that store the bytes reversed, e.g. DEC alpha, freebsd, 2952 linux, Windows and the paragon; thus if you write your own binary 2953 read/write routines you have to swap the bytes; see PetscBinaryRead() 2954 and PetscBinaryWrite() to see how this may be done. 
2955 2956 Concepts: vector^loading from file 2957 2958 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2959 @*/ 2960 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2961 { 2962 PetscErrorCode ierr; 2963 2964 PetscFunctionBegin; 2965 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2966 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2967 2968 if (!((PetscObject)newdm)->type_name) { 2969 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 2970 } 2971 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2972 PetscFunctionReturn(0); 2973 } 2974 2975 /******************************** FEM Support **********************************/ 2976 2977 #undef __FUNCT__ 2978 #define __FUNCT__ "DMPrintCellVector" 2979 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 2980 PetscInt f; 2981 PetscErrorCode ierr; 2982 2983 PetscFunctionBegin; 2984 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2985 for (f = 0; f < len; ++f) { 2986 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2987 } 2988 PetscFunctionReturn(0); 2989 } 2990 2991 #undef __FUNCT__ 2992 #define __FUNCT__ "DMPrintCellMatrix" 2993 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 2994 PetscInt f, g; 2995 PetscErrorCode ierr; 2996 2997 PetscFunctionBegin; 2998 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2999 for (f = 0; f < rows; ++f) { 3000 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 3001 for (g = 0; g < cols; ++g) { 3002 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 3003 } 3004 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 3005 } 3006 PetscFunctionReturn(0); 3007 } 3008 3009 #undef __FUNCT__ 3010 #define __FUNCT__ "DMGetLocalFunction" 3011 /*@C 3012 DMGetLocalFunction - Get the local residual 
function from this DM 3013 3014 Not collective 3015 3016 Input Parameter: 3017 . dm - The DM 3018 3019 Output Parameter: 3020 . lf - The local residual function 3021 3022 Calling sequence of lf: 3023 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3024 3025 + snes - the SNES context 3026 . x - local vector with the state at which to evaluate residual 3027 . f - local vector to put residual in 3028 - ctx - optional user-defined function context 3029 3030 Level: intermediate 3031 3032 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3033 @*/ 3034 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 3035 { 3036 PetscFunctionBegin; 3037 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3038 if (lf) *lf = dm->lf; 3039 PetscFunctionReturn(0); 3040 } 3041 3042 #undef __FUNCT__ 3043 #define __FUNCT__ "DMSetLocalFunction" 3044 /*@C 3045 DMSetLocalFunction - Set the local residual function from this DM 3046 3047 Not collective 3048 3049 Input Parameters: 3050 + dm - The DM 3051 - lf - The local residual function 3052 3053 Calling sequence of lf: 3054 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3055 3056 + snes - the SNES context 3057 . x - local vector with the state at which to evaluate residual 3058 . f - local vector to put residual in 3059 - ctx - optional user-defined function context 3060 3061 Level: intermediate 3062 3063 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3064 @*/ 3065 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *)) 3066 { 3067 PetscFunctionBegin; 3068 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3069 dm->lf = lf; 3070 PetscFunctionReturn(0); 3071 } 3072 3073 #undef __FUNCT__ 3074 #define __FUNCT__ "DMGetLocalJacobian" 3075 /*@C 3076 DMGetLocalJacobian - Get the local Jacobian function from this DM 3077 3078 Not collective 3079 3080 Input Parameter: 3081 . dm - The DM 3082 3083 Output Parameter: 3084 . 
lj - The local Jacobian function 3085 3086 Calling sequence of lj: 3087 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3088 3089 + snes - the SNES context 3090 . x - local vector with the state at which to evaluate residual 3091 . J - matrix to put Jacobian in 3092 . M - matrix to use for defining Jacobian preconditioner 3093 - ctx - optional user-defined function context 3094 3095 Level: intermediate 3096 3097 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3098 @*/ 3099 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *)) 3100 { 3101 PetscFunctionBegin; 3102 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3103 if (lj) *lj = dm->lj; 3104 PetscFunctionReturn(0); 3105 } 3106 3107 #undef __FUNCT__ 3108 #define __FUNCT__ "DMSetLocalJacobian" 3109 /*@C 3110 DMSetLocalJacobian - Set the local Jacobian function from this DM 3111 3112 Not collective 3113 3114 Input Parameters: 3115 + dm - The DM 3116 - lj - The local Jacobian function 3117 3118 Calling sequence of lj: 3119 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3120 3121 + snes - the SNES context 3122 . x - local vector with the state at which to evaluate residual 3123 . J - matrix to put Jacobian in 3124 . M - matrix to use for defining Jacobian preconditioner 3125 - ctx - optional user-defined function context 3126 3127 Level: intermediate 3128 3129 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3130 @*/ 3131 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *)) 3132 { 3133 PetscFunctionBegin; 3134 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3135 dm->lj = lj; 3136 PetscFunctionReturn(0); 3137 } 3138 3139 #undef __FUNCT__ 3140 #define __FUNCT__ "DMGetDefaultSection" 3141 /*@ 3142 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 3143 3144 Input Parameter: 3145 . dm - The DM 3146 3147 Output Parameter: 3148 . 
section - The PetscSection 3149 3150 Level: intermediate 3151 3152 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3153 3154 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3155 @*/ 3156 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 3157 PetscFunctionBegin; 3158 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3159 PetscValidPointer(section, 2); 3160 *section = dm->defaultSection; 3161 PetscFunctionReturn(0); 3162 } 3163 3164 #undef __FUNCT__ 3165 #define __FUNCT__ "DMSetDefaultSection" 3166 /*@ 3167 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 3168 3169 Input Parameters: 3170 + dm - The DM 3171 - section - The PetscSection 3172 3173 Level: intermediate 3174 3175 Note: Any existing Section will be destroyed 3176 3177 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3178 @*/ 3179 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 3180 PetscInt numFields; 3181 PetscInt f; 3182 PetscErrorCode ierr; 3183 3184 PetscFunctionBegin; 3185 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3186 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 3187 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3188 dm->defaultSection = section; 3189 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 3190 if (numFields) { 3191 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 3192 for (f = 0; f < numFields; ++f) { 3193 const char *name; 3194 3195 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 3196 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 3197 } 3198 } 3199 PetscFunctionReturn(0); 3200 } 3201 3202 #undef __FUNCT__ 3203 #define __FUNCT__ "DMGetDefaultGlobalSection" 3204 /*@ 3205 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 3206 3207 Input Parameter: 3208 . dm - The DM 3209 3210 Output Parameter: 3211 . 
section - The PetscSection 3212 3213 Level: intermediate 3214 3215 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3216 3217 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 3218 @*/ 3219 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 3220 PetscErrorCode ierr; 3221 3222 PetscFunctionBegin; 3223 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3224 PetscValidPointer(section, 2); 3225 if (!dm->defaultGlobalSection) { 3226 if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 3227 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 3228 } 3229 *section = dm->defaultGlobalSection; 3230 PetscFunctionReturn(0); 3231 } 3232 3233 #undef __FUNCT__ 3234 #define __FUNCT__ "DMSetDefaultGlobalSection" 3235 /*@ 3236 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 3237 3238 Input Parameters: 3239 + dm - The DM 3240 - section - The PetscSection 3241 3242 Level: intermediate 3243 3244 Note: Any existing Section will be destroyed 3245 3246 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 3247 @*/ 3248 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) { 3249 PetscErrorCode ierr; 3250 3251 PetscFunctionBegin; 3252 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3253 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3254 dm->defaultGlobalSection = section; 3255 PetscFunctionReturn(0); 3256 } 3257 3258 #undef __FUNCT__ 3259 #define __FUNCT__ "DMGetDefaultSF" 3260 /*@ 3261 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3262 it is created from the default PetscSection layouts in the DM. 3263 3264 Input Parameter: 3265 . dm - The DM 3266 3267 Output Parameter: 3268 . 
sf - The PetscSF 3269 3270 Level: intermediate 3271 3272 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3273 3274 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3275 @*/ 3276 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3277 PetscInt nroots; 3278 PetscErrorCode ierr; 3279 3280 PetscFunctionBegin; 3281 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3282 PetscValidPointer(sf, 2); 3283 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3284 if (nroots < 0) { 3285 PetscSection section, gSection; 3286 3287 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3288 if (section) { 3289 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3290 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3291 } else { 3292 *sf = PETSC_NULL; 3293 PetscFunctionReturn(0); 3294 } 3295 } 3296 *sf = dm->defaultSF; 3297 PetscFunctionReturn(0); 3298 } 3299 3300 #undef __FUNCT__ 3301 #define __FUNCT__ "DMSetDefaultSF" 3302 /*@ 3303 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3304 3305 Input Parameters: 3306 + dm - The DM 3307 - sf - The PetscSF 3308 3309 Level: intermediate 3310 3311 Note: Any previous SF is destroyed 3312 3313 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3314 @*/ 3315 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3316 PetscErrorCode ierr; 3317 3318 PetscFunctionBegin; 3319 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3320 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3321 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3322 dm->defaultSF = sf; 3323 PetscFunctionReturn(0); 3324 } 3325 3326 #undef __FUNCT__ 3327 #define __FUNCT__ "DMCreateDefaultSF" 3328 /*@C 3329 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3330 describing the data layout. 3331 3332 Input Parameters: 3333 + dm - The DM 3334 . 
localSection - PetscSection describing the local data layout 3335 - globalSection - PetscSection describing the global data layout 3336 3337 Level: intermediate 3338 3339 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3340 @*/ 3341 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3342 { 3343 MPI_Comm comm = ((PetscObject) dm)->comm; 3344 PetscLayout layout; 3345 const PetscInt *ranges; 3346 PetscInt *local; 3347 PetscSFNode *remote; 3348 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3349 PetscMPIInt size, rank; 3350 PetscErrorCode ierr; 3351 3352 PetscFunctionBegin; 3353 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3354 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3355 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3356 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3357 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3358 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3359 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3360 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3361 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3362 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3363 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 3364 PetscInt gdof, gcdof; 3365 3366 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3367 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3368 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3369 } 3370 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3371 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3372 for (p = pStart, l = 0; p < pEnd; ++p) { 3373 const PetscInt *cind; 3374 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3375 3376 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3377 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3378 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3379 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3380 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3381 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3382 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3383 if (!gdof) continue; /* Censored point */ 3384 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3385 if (gsize != dof-cdof) { 3386 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", size, p, dof-cdof, dof); 3387 cdof = 0; /* Ignore constraints */ 3388 } 3389 for (d = 0, c = 0; d < dof; ++d) { 3390 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3391 local[l+d-c] = off+d; 3392 } 3393 if (gdof < 0) { 3394 for(d = 0; d < gsize; ++d, ++l) { 3395 PetscInt offset = -(goff+1) + d, r; 3396 3397 for (r = 0; r < size; ++r) { 3398 if ((offset >= ranges[r]) && (offset < ranges[r+1])) break; 3399 } 3400 remote[l].rank = r; 3401 remote[l].index = offset - ranges[r]; 3402 } 3403 } else { 3404 for(d = 0; d < gsize; ++d, ++l) { 3405 remote[l].rank = rank; 3406 remote[l].index = goff+d - ranges[rank]; 3407 } 3408 } 3409 } 3410 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3411 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3412 ierr = PetscSFSetGraph(dm->defaultSF, nroots, 
nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3413 PetscFunctionReturn(0); 3414 } 3415 3416 #undef __FUNCT__ 3417 #define __FUNCT__ "DMGetPointSF" 3418 /*@ 3419 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3420 3421 Input Parameter: 3422 . dm - The DM 3423 3424 Output Parameter: 3425 . sf - The PetscSF 3426 3427 Level: intermediate 3428 3429 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3430 3431 .seealso: DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3432 @*/ 3433 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) { 3434 PetscFunctionBegin; 3435 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3436 PetscValidPointer(sf, 2); 3437 *sf = dm->sf; 3438 PetscFunctionReturn(0); 3439 } 3440 3441 #undef __FUNCT__ 3442 #define __FUNCT__ "DMGetNumFields" 3443 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3444 { 3445 PetscFunctionBegin; 3446 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3447 PetscValidPointer(numFields, 2); 3448 *numFields = dm->numFields; 3449 PetscFunctionReturn(0); 3450 } 3451 3452 #undef __FUNCT__ 3453 #define __FUNCT__ "DMSetNumFields" 3454 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3455 { 3456 PetscInt f; 3457 PetscErrorCode ierr; 3458 3459 PetscFunctionBegin; 3460 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3461 for (f = 0; f < dm->numFields; ++f) { 3462 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3463 } 3464 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3465 dm->numFields = numFields; 3466 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3467 for (f = 0; f < dm->numFields; ++f) { 3468 ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3469 } 3470 PetscFunctionReturn(0); 3471 } 3472 3473 #undef __FUNCT__ 3474 #define __FUNCT__ "DMGetField" 3475 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3476 { 
3477 PetscFunctionBegin; 3478 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3479 PetscValidPointer(field, 2); 3480 if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3481 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3482 *field = dm->fields[f]; 3483 PetscFunctionReturn(0); 3484 } 3485 3486 #undef __FUNCT__ 3487 #define __FUNCT__ "DMSetCoordinates" 3488 /*@ 3489 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3490 3491 Collective on DM 3492 3493 Input Parameters: 3494 + dm - the DM 3495 - c - coordinate vector 3496 3497 Note: 3498 The coordinates do include those for ghost points, which are in the local vector 3499 3500 Level: intermediate 3501 3502 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3503 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3504 @*/ 3505 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3506 { 3507 PetscErrorCode ierr; 3508 3509 PetscFunctionBegin; 3510 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3511 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3512 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3513 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3514 dm->coordinates = c; 3515 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3516 PetscFunctionReturn(0); 3517 } 3518 3519 #undef __FUNCT__ 3520 #define __FUNCT__ "DMSetCoordinatesLocal" 3521 /*@ 3522 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3523 3524 Collective on DM 3525 3526 Input Parameters: 3527 + dm - the DM 3528 - c - coordinate vector 3529 3530 Note: 3531 The coordinates of ghost points can be set using DMSetCoordinates() 3532 followed by DMGetCoordinatesLocal(). 
This is intended to enable the 3533 setting of ghost coordinates outside of the domain. 3534 3535 Level: intermediate 3536 3537 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3538 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3539 @*/ 3540 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3541 { 3542 PetscErrorCode ierr; 3543 3544 PetscFunctionBegin; 3545 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3546 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3547 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3548 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3549 dm->coordinatesLocal = c; 3550 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3551 PetscFunctionReturn(0); 3552 } 3553 3554 #undef __FUNCT__ 3555 #define __FUNCT__ "DMGetCoordinates" 3556 /*@ 3557 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3558 3559 Not Collective 3560 3561 Input Parameter: 3562 . dm - the DM 3563 3564 Output Parameter: 3565 . c - global coordinate vector 3566 3567 Note: 3568 This is a borrowed reference, so the user should NOT destroy this vector 3569 3570 Each process has only the local coordinates (does NOT have the ghost coordinates). 3571 3572 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3573 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3574 3575 Level: intermediate 3576 3577 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3578 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3579 @*/ 3580 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3581 { 3582 PetscErrorCode ierr; 3583 3584 PetscFunctionBegin; 3585 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3586 PetscValidPointer(c,2); 3587 if (!dm->coordinates && dm->coordinatesLocal) { 3588 DM cdm; 3589 3590 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3591 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3592 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3593 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3594 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3595 } 3596 *c = dm->coordinates; 3597 PetscFunctionReturn(0); 3598 } 3599 3600 #undef __FUNCT__ 3601 #define __FUNCT__ "DMGetCoordinatesLocal" 3602 /*@ 3603 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3604 3605 Collective on DM 3606 3607 Input Parameter: 3608 . dm - the DM 3609 3610 Output Parameter: 3611 . c - coordinate vector 3612 3613 Note: 3614 This is a borrowed reference, so the user should NOT destroy this vector 3615 3616 Each process has the local and ghost coordinates 3617 3618 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3619 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3620 3621 Level: intermediate 3622 3623 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3624 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3625 @*/ 3626 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3627 { 3628 PetscErrorCode ierr; 3629 3630 PetscFunctionBegin; 3631 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3632 PetscValidPointer(c,2); 3633 if (!dm->coordinatesLocal && dm->coordinates) { 3634 DM cdm; 3635 3636 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3637 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3638 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3639 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3640 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3641 } 3642 *c = dm->coordinatesLocal; 3643 PetscFunctionReturn(0); 3644 } 3645 3646 #undef __FUNCT__ 3647 #define __FUNCT__ "DMGetCoordinateDM" 3648 /*@ 3649 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3650 3651 Collective on DM 3652 3653 Input Parameter: 3654 . dm - the DM 3655 3656 Output Parameter: 3657 . 
cdm - coordinate DM 3658 3659 Level: intermediate 3660 3661 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3662 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3663 @*/ 3664 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3665 { 3666 PetscErrorCode ierr; 3667 3668 PetscFunctionBegin; 3669 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3670 PetscValidPointer(cdm,2); 3671 if (!dm->coordinateDM) { 3672 if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3673 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3674 } 3675 *cdm = dm->coordinateDM; 3676 PetscFunctionReturn(0); 3677 } 3678