1 #define PETSCDM_DLL 2 3 #include "petscdm.h" /*I "petscdm.h" I*/ 4 #include "private/dmimpl.h" 5 #include "petscmat.h" 6 7 /* 8 rstart is where an array/subvector starts in the global parallel vector, so arrays 9 rstarts are meaningless (and set to the previous one) except on the processor where the array lives 10 */ 11 12 typedef enum {DMCOMPOSITE_ARRAY, DMCOMPOSITE_DM} DMCompositeLinkType; 13 14 struct DMCompositeLink { 15 DMCompositeLinkType type; 16 struct DMCompositeLink *next; 17 PetscInt n,rstart; /* rstart is relative to this process */ 18 PetscInt grstart; /* grstart is relative to all processes */ 19 20 /* only used for DMCOMPOSITE_DM */ 21 PetscInt *grstarts; /* global row for first unknown of this DM on each process */ 22 DM dm; 23 24 /* only used for DMCOMPOSITE_ARRAY */ 25 PetscMPIInt rank; /* process where array unknowns live */ 26 }; 27 28 typedef struct { 29 PetscInt n,N,rstart; /* rstart is relative to all processors, n unknowns owned by this process, N is total unknowns */ 30 PetscInt nghost; /* number of all local entries include DMDA ghost points and any shared redundant arrays */ 31 PetscInt nDM,nredundant,nmine; /* how many DM's and seperate redundant arrays used to build DM(nmine is ones on this process) */ 32 PetscBool setup; /* after this is set, cannot add new links to the DM*/ 33 struct DMCompositeLink *next; 34 35 PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt); 36 } DM_Composite; 37 38 #undef __FUNCT__ 39 #define __FUNCT__ "DMCompositeSetCoupling" 40 /*@C 41 DMCompositeSetCoupling - Sets user provided routines that compute the coupling between the 42 seperate components (DMDA's and arrays) in a DMto build the correct matrix nonzero structure. 
43 44 45 Logically Collective on MPI_Comm 46 47 Input Parameter: 48 + dm - the composite object 49 - formcouplelocations - routine to set the nonzero locations in the matrix 50 51 Level: advanced 52 53 Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into 54 this routine 55 56 @*/ 57 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetCoupling(DM dm,PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt)) 58 { 59 DM_Composite *com = (DM_Composite*)dm->data; 60 61 PetscFunctionBegin; 62 com->FormCoupleLocations = FormCoupleLocations; 63 PetscFunctionReturn(0); 64 } 65 66 #undef __FUNCT__ 67 #define __FUNCT__ "DMCompositeSetContext" 68 /*@ 69 DMCompositeSetContext - Allows user to stash data they may need within the form coupling routine they 70 set with DMCompositeSetCoupling() 71 72 73 Not Collective 74 75 Input Parameter: 76 + dm - the composite object 77 - ctx - the user supplied context 78 79 Level: advanced 80 81 Notes: Use DMCompositeGetContext() to retrieve the context when needed. 82 83 @*/ 84 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetContext(DM dm,void *ctx) 85 { 86 PetscFunctionBegin; 87 dm->ctx = ctx; 88 PetscFunctionReturn(0); 89 } 90 91 #undef __FUNCT__ 92 #define __FUNCT__ "DMCompositeGetContext" 93 /*@ 94 DMCompositeGetContext - Access the context set with DMCompositeSetContext() 95 96 97 Not Collective 98 99 Input Parameter: 100 . dm - the composite object 101 102 Output Parameter: 103 . ctx - the user supplied context 104 105 Level: advanced 106 107 Notes: Use DMCompositeGetContext() to retrieve the context when needed. 
@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetContext(DM dm,void **ctx)
{
  PetscFunctionBegin;
  *ctx = dm->ctx;
  PetscFunctionReturn(0);
}



extern PetscErrorCode DMDestroy_Private(DM,PetscBool *);

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Composite"
/* Destroy the DMComposite: walk the link list, destroying each packed DM and freeing per-link storage */
PetscErrorCode PETSCDM_DLLEXPORT DMDestroy_Composite(DM dm)
{
  PetscErrorCode ierr;
  struct DMCompositeLink *next, *prev;
  PetscBool done;
  DM_Composite *com = (DM_Composite *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMDestroy_Private((DM)dm,&done);CHKERRQ(ierr);
  if (!done) PetscFunctionReturn(0);  /* reference count not yet zero; nothing to free */

  next = com->next;
  while (next) {
    prev = next;
    next = next->next;
    if (prev->type == DMCOMPOSITE_DM) {
      ierr = DMDestroy(prev->dm);CHKERRQ(ierr);
    }
    /* grstarts is only allocated for DM links (PetscNew zeroed it, so the test is safe for arrays) */
    if (prev->grstarts) {
      ierr = PetscFree(prev->grstarts);CHKERRQ(ierr);
    }
    ierr = PetscFree(prev);CHKERRQ(ierr);
  }
  ierr = PetscFree(com);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_Composite"
/* ASCII view: print one line per link, recursing into DMView() for packed DMs */
PetscErrorCode PETSCDM_DLLEXPORT DMView_Composite(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool iascii;
  DM_Composite *com = (DM_Composite *)dm->data;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)v,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    struct DMCompositeLink *lnk = com->next;
    PetscInt i;

    ierr = PetscViewerASCIIPrintf(v,"DM (%s)\n",((PetscObject)dm)->prefix?((PetscObject)dm)->prefix:"no prefix");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v," contains %d DMs and %d redundant arrays\n",com->nDM,com->nredundant);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
    for (i=0; lnk; lnk=lnk->next,i++) {
      if (lnk->dm) {
        ierr = PetscViewerASCIIPrintf(v,"Link %d: DM of type %s\n",i,((PetscObject)lnk->dm)->type_name);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
        ierr = DMView(lnk->dm,v);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIIPrintf(v,"Link %d: Redundant array of size %d owned by rank %d\n",i,lnk->n,lnk->rank);CHKERRQ(ierr);
      }
    }
    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Composite"
/* Compute the global layout of the packed vector and the start offset of every link.
   Collective: the MPI_Bcast/MPI_Allgather below must run on every rank in link order. */
PetscErrorCode PETSCDM_DLLEXPORT DMSetUp_Composite(DM dm)
{
  PetscErrorCode ierr;
  PetscInt nprev = 0;
  PetscMPIInt rank,size;
  DM_Composite *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next = com->next;
  PetscLayout map;

  PetscFunctionBegin;
  if (com->setup) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup");
  ierr = PetscLayoutCreate(((PetscObject)dm)->comm,&map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(map,com->n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(map,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(map,&com->N);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(map,&com->rstart,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(map);CHKERRQ(ierr);

  /* now set the rstart for each linked array/vector */
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)dm)->comm,&size);CHKERRQ(ierr);
  while (next) {
    next->rstart = nprev;
    /* a redundant array only occupies local space on its owning rank */
    if ((rank == next->rank) || next->type != DMCOMPOSITE_ARRAY) nprev += next->n;
    next->grstart = com->rstart + next->rstart;
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* only the owning rank's grstart is meaningful; broadcast it to everyone */
      ierr = MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    } else {
      ierr = PetscMalloc(size*sizeof(PetscInt),&next->grstarts);CHKERRQ(ierr);
      ierr = MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    }
    next = next->next;
  }
  com->setup = PETSC_TRUE;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_Array"
/* Return a pointer into gvec's local storage for this redundant array (NULL on non-owning ranks).
   NOTE(review): the pointer is handed out after VecRestoreArray(), relying on the array not
   being copied out — confirm this matches the Vec implementation's aliasing guarantees. */
PetscErrorCode DMCompositeGetAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscErrorCode ierr;
  PetscScalar *varray;
  PetscMPIInt rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (array) {
    if (rank == mine->rank) {
      ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
      *array = varray + mine->rstart;
      ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
    } else {
      *array = 0;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_DM"
/* Expose this link's slice of gvec as a global vector of the sub-DM (no copy: VecPlaceArray) */
PetscErrorCode DMCompositeGetAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;
  PetscScalar *array;

  PetscFunctionBegin;
  if (global) {
    ierr = DMGetGlobalVector(mine->dm,global);CHKERRQ(ierr);
    ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(*global,array+mine->rstart);CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_Array"
/* Nothing to undo: GetAccess_Array only handed out a raw pointer */
PetscErrorCode DMCompositeRestoreAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_DM"
/* Undo GetAccess_DM: detach the placed array and return the work vector to the sub-DM */
PetscErrorCode DMCompositeRestoreAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (global) {
    ierr = VecResetArray(*global);CHKERRQ(ierr);
    ierr = DMRestoreGlobalVector(mine->dm,global);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_Array"
/* Copy the redundant array out of gvec on the owning rank, then broadcast it to all ranks */
PetscErrorCode DMCompositeScatter_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar *varray;
  PetscMPIInt rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
    ierr = PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
  }
  ierr = MPI_Bcast(array,mine->n,MPIU_SCALAR,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_DM"
/* Scatter this link's slice of gvec into the sub-DM's local (ghosted) vector */
PetscErrorCode DMCompositeScatter_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar *array;
  Vec global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_Array"
/* Gather a redundant array back into gvec on the owning rank.
   INSERT_VALUES copies locally; ADD_VALUES sums the contributions of all ranks with MPI_Reduce. */
PetscErrorCode DMCompositeGather_Array(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,const PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar *varray;
  PetscMPIInt rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
    if (varray+mine->rstart == array) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()");
  }
  switch (imode) {
  case INSERT_VALUES:
    if (rank == mine->rank) {
      ierr = PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    }
    break;
  case ADD_VALUES: {
    PetscInt i;
    const PetscScalar *source;
    PetscScalar *buffer,*dest;
    if (rank == mine->rank) {
      /* root: pre-add own contribution, then reduce the other ranks' arrays onto it */
      dest = &varray[mine->rstart];
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = dest;
      source = MPI_IN_PLACE;
#else
      ierr = PetscMalloc(mine->n*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      for (i=0; i<mine->n; i++) buffer[i] = varray[mine->rstart+i] + array[i];
    } else {
      source = array;
      dest = PETSC_NULL;
    }
    ierr = MPI_Reduce((void*)source,dest,mine->n,MPIU_SCALAR,MPI_SUM,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == mine->rank) {ierr = PetscFree(source);CHKERRQ(ierr);}
#endif
  } break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"imode");
  }
  if (rank == mine->rank) {ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_DM"
/* Gather the sub-DM's local vector into this link's slice of gvec */
PetscErrorCode DMCompositeGather_DM(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar *array;
  Vec global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(mine->dm,local,imode,global);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(mine->dm,local,imode,global);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------------*/

#include <stdarg.h>

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetNumberDM"
/*@C
    DMCompositeGetNumberDM - Gets the number of DM objects in the DMComposite
       representation.

    Not Collective

    Input Parameter:
.    dm - the packer object

    Output Parameter:
.     nDM - the number of DMs

    Level: beginner

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetNumberDM(DM dm,PetscInt *nDM)
{
  DM_Composite *com = (DM_Composite*)dm->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *nDM = com->nDM;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess"
/*@C
    DMCompositeGetAccess - Allows one to access the individual packed vectors in their global
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors)

    Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them

    Level: advanced

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetAccess(DM dm,Vec gvec,...)
{
  va_list Argp;
  PetscErrorCode ierr;
  struct DMCompositeLink *next;
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time; the varargs MUST match the
     order in which arrays/DMs were added to the composite */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr = DMCompositeGetAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec = va_arg(Argp, Vec*);
      ierr = DMCompositeGetAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess"
/*@C
    DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess()
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors)

    Level: advanced

.seealso  DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeScatter(),
         DMCompositeRestoreAccess(), DMCompositeGetAccess()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreAccess(DM dm,Vec gvec,...)
{
  va_list Argp;
  PetscErrorCode ierr;
  struct DMCompositeLink *next;
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time, in the same order as DMCompositeGetAccess() */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr = DMCompositeRestoreAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec = va_arg(Argp, Vec*);
      ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter"
/*@C
    DMCompositeScatter - Scatters from a global packed vector into its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential objects (arrays or vectors)

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeScatter(DM dm,Vec gvec,...)
{
  va_list Argp;
  PetscErrorCode ierr;
  struct DMCompositeLink *next;
  PetscInt cnt = 3;             /* argument position of the first vararg, for error messages */
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      ierr = DMCompositeScatter_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeScatter_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    cnt++;
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather"
/*@C
    DMCompositeGather - Gathers into a global packed vector from its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential objects (arrays or vectors)

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGather(DM dm,Vec gvec,InsertMode imode,...)
{
  va_list Argp;
  PetscErrorCode ierr;
  struct DMCompositeLink *next;
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time */
  va_start(Argp,imode);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      ierr = DMCompositeGather_Array(dm,next,gvec,imode,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      PetscValidHeaderSpecific(vec,VEC_CLASSID,3);
      ierr = DMCompositeGather_DM(dm,next,gvec,imode,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddArray"
/*@C
    DMCompositeAddArray - adds a "redundant" array to a DMComposite. The array values will
       be stored in part of the array on process orank.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    orank - the process on which the array entries officially live, this number must be
             the same on all processes.
-    n - the length of the array

    Level: advanced

.seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddArray(DM dm,PetscMPIInt orank,PetscInt n)
{
  struct DMCompositeLink *mine,*next;
  PetscErrorCode ierr;
  PetscMPIInt rank;
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  next = com->next;
  if (com->setup) {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite");
  }
#if defined(PETSC_USE_DEBUG)
  /* debug-only collective check that every rank passed the same orank */
  {
    PetscMPIInt orankmax;
    ierr = MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)dm)->comm);CHKERRQ(ierr);
    if (orank != orankmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax);
  }
#endif

  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  /* create new link */
  ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  mine->n    = n;
  mine->rank = orank;
  mine->dm   = PETSC_NULL;
  mine->type = DMCOMPOSITE_ARRAY;
  mine->next = PETSC_NULL;
  /* the array occupies local space (and counts toward nmine) only on the owning rank */
  if (rank == mine->rank) {com->n += n;com->nmine++;}

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nredundant++;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddDM"
/*@C
    DMCompositeAddDM - adds a DM vector to a DMComposite

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
-    dm - the DM object, if the DM is a da you will need to caste it with a (DM)

    Level: advanced

.seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddDM(DM dmc,DM dm)
{
  PetscErrorCode ierr;
  PetscInt n;
  struct DMCompositeLink *mine,*next;
  Vec global;
  DM_Composite *com = (DM_Composite*)dmc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  next = com->next;
  if (com->setup) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DM once you have used the DMComposite");

  /* create new link; take a reference on the added DM and query its local size via a work vector */
  ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(dm,&global);CHKERRQ(ierr);
  ierr = VecGetLocalSize(global,&n);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(dm,&global);CHKERRQ(ierr);
  mine->n    = n;
  mine->dm   = dm;
  mine->type = DMCOMPOSITE_DM;
  mine->next = PETSC_NULL;
  com->n    += n;

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nDM++;
  com->nmine++;
  PetscFunctionReturn(0);
}

extern PetscErrorCode PETSCDM_DLLEXPORT VecView_MPI(Vec,PetscViewer);
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecView_DMComposite"
/* View a vector created by DMCreateGlobalVector_Composite: draw viewers show each packed DM
   piece separately (redundant arrays are currently skipped); other viewers fall back to VecView_MPI */
PetscErrorCode PETSCDM_DLLEXPORT VecView_DMComposite(Vec gvec,PetscViewer viewer)
{
  DM dm;
  PetscErrorCode ierr;
  struct DMCompositeLink *next;
  PetscBool isdraw;
  DM_Composite *com;

  PetscFunctionBegin;
  ierr = PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&dm);CHKERRQ(ierr);
  if (!dm) SETERRQ(((PetscObject)gvec)->comm,PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite");
  com = (DM_Composite*)dm->data;
  next = com->next;

  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (!isdraw) {
    /* do I really want to call this? */
    ierr = VecView_MPI(gvec,viewer);CHKERRQ(ierr);
  } else {
    PetscInt cnt = 0;

    /* loop over packed objects, handling one at a time */
    while (next) {
      if (next->type == DMCOMPOSITE_ARRAY) {
        PetscScalar *array;
        ierr = DMCompositeGetAccess_Array(dm,next,gvec,&array);CHKERRQ(ierr);

        /*skip it for now */
      } else if (next->type == DMCOMPOSITE_DM) {
        Vec vec;
        PetscInt bs;

        ierr = DMCompositeGetAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
        ierr = VecView(vec,viewer);CHKERRQ(ierr);
        ierr = VecGetBlockSize(vec,&bs);CHKERRQ(ierr);
        ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
        /* advance the draw-window base so the next DM's fields land in fresh windows */
        ierr = PetscViewerDrawBaseAdd(viewer,bs);CHKERRQ(ierr);
        cnt += bs;
      } else {
        SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
      }
      next = next->next;
    }
    /* restore the draw-window base to where it started */
    ierr = PetscViewerDrawBaseAdd(viewer,-cnt);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END


#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Composite"
/* Create the packed global vector (local size com->n) and attach the composite so VecView works */
PetscErrorCode PETSCDM_DLLEXPORT DMCreateGlobalVector_Composite(DM dm,Vec *gvec)
{
  PetscErrorCode ierr;
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }
  ierr = VecCreateMPI(((PetscObject)dm)->comm,com->n,com->N,gvec);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
  ierr = VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Composite"
/* Create the packed local vector of size com->nghost (all local entries incl. ghost points).
   NOTE(review): VecCreateSeq is given the DM's communicator, which may be parallel — a
   sequential Vec is normally created on PETSC_COMM_SELF; confirm this is intentional. */
PetscErrorCode PETSCDM_DLLEXPORT DMCreateLocalVector_Composite(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }
  ierr = VecCreateSeq(((PetscObject)dm)->comm,com->nghost,lvec);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetISLocalToGlobalMappings"
/*@C
    DMCompositeGetISLocalToGlobalMappings - gets an ISLocalToGlobalMapping for each DM/array in the DMComposite, maps to the composite global space

    Collective on DMComposite

    Input Parameter:
.    dm - the packer object

    Output Parameters:
.    ltogs - the individual mappings for each packed vector/array. Note that this includes
           all the ghost points that individual ghosted DMDA's may have. Also each process has an
           mapping for EACH redundant array (not just the local redundant arrays).

    Level: advanced

    Notes:
       Each entry of ltogs should be destroyed with ISLocalToGlobalMappingDestroy(), the ltogs array should be freed with PetscFree().

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetISLocalToGlobalMappings(DM dm,ISLocalToGlobalMapping **ltogs)
{
  PetscErrorCode ierr;
  PetscInt i,*idx,n,cnt;
  struct DMCompositeLink *next;
  PetscMPIInt rank;
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* one mapping per link: every DM plus EVERY redundant array (local or not) */
  ierr = PetscMalloc((com->nDM+com->nredundant)*sizeof(ISLocalToGlobalMapping),ltogs);CHKERRQ(ierr);
  next = com->next;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time */
  cnt = 0;
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      /* only the owning rank knows the global offsets; broadcast them so every rank gets a mapping */
      if (rank == next->rank) {
        for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
      }
      ierr = MPI_Bcast(idx,next->n,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      ISLocalToGlobalMapping ltog;
      PetscMPIInt size;
      const PetscInt *suboff;
      Vec global;

      /* Get sub-DM global indices for each local dof */
      ierr = DMDAGetISLocalToGlobalMapping(next->dm,&ltog);CHKERRQ(ierr); /* This function should become generic to DM */
      ierr = ISLocalToGlobalMappingGetSize(ltog,&n);CHKERRQ(ierr);
      ierr = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      for (i=0; i<n; i++) idx[i] = i;
      ierr = ISLocalToGlobalMappingApply(ltog,n,idx,idx);CHKERRQ(ierr); /* This would be nicer with an ISLocalToGlobalMappingGetIndices() */

      /* Get the offsets for the sub-DM global vector */
      ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr);
      ierr = VecGetOwnershipRanges(global,&suboff);CHKERRQ(ierr);
      ierr = MPI_Comm_size(((PetscObject)global)->comm,&size);CHKERRQ(ierr);

      /* Shift the sub-DM definition of the global space to the composite global space */
      for (i=0; i<n; i++) {
        PetscInt subi = idx[i],lo = 0,hi = size,t;
        /* Binary search to find which rank owns subi */
        while (hi-lo > 1) {
          t = lo + (hi-lo)/2;
          if (suboff[t] > subi) hi = t;
          else lo = t;
        }
        idx[i] = subi - suboff[lo] + next->grstarts[lo];
      }
      ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr);
    } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    next = next->next;
    cnt++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalISs"
/*@C
    DMCompositeGetLocalISs - Gets index sets for each DM/array component of a composite local vector

    Not Collective

    Input Arguments:
.   dm - composite DM

    Output Arguments:
.   is - array of serial index sets for each each component of the DMComposite

    Level: intermediate

    Notes:
    At present, a composite local vector does not normally exist.  This function is used to provide index sets for
    MatGetLocalSubMatrix().  In the future, the scatters for each entry in the DMComposite may be be merged into a single
    scatter to a composite local vector.

    To get the composite global indices at all local points (including ghosts), use DMCompositeGetISLocalToGlobalMappings().

    To get index sets for pieces of the composite global vector, use DMCompositeGetGlobalISs().

    Each returned IS should be destroyed with ISDestroy(), the array should be freed with PetscFree().
967 968 .seealso: DMCompositeGetGlobalISs(), DMCompositeGetISLocalToGlobalMappings(), MatGetLocalSubMatrix(), MatCreateLocalRef() 969 @*/ 970 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalISs(DM dm,IS **is) 971 { 972 PetscErrorCode ierr; 973 DM_Composite *com = (DM_Composite*)dm->data; 974 struct DMCompositeLink *link; 975 PetscInt cnt,start; 976 977 PetscFunctionBegin; 978 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 979 PetscValidPointer(is,2); 980 ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr); 981 for (cnt=0,start=0,link=com->next; link; start+=link->n,cnt++,link=link->next) { 982 ierr = ISCreateStride(PETSC_COMM_SELF,link->n,start,1,&(*is)[cnt]);CHKERRQ(ierr); 983 } 984 PetscFunctionReturn(0); 985 } 986 987 #undef __FUNCT__ 988 #define __FUNCT__ "DMCompositeGetGlobalISs" 989 /*@C 990 DMCompositeGetGlobalISs - Gets the index sets for each composed object 991 992 Collective on DMComposite 993 994 Input Parameter: 995 . dm - the packer object 996 997 Output Parameters: 998 . is - the array of index sets 999 1000 Level: advanced 1001 1002 Notes: 1003 The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree() 1004 1005 The number of IS on each process will/may be different when redundant arrays are used 1006 1007 These could be used to extract a subset of vector entries for a "multi-physics" preconditioner 1008 1009 Use DMCompositeGetLocalISs() for index sets in the packed local numbering, and 1010 DMCompositeGetISLocalToGlobalMappings() for to map local sub-DM (including ghost) indices to packed global 1011 indices. 
1012 1013 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1014 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(), 1015 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries() 1016 1017 @*/ 1018 1019 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetGlobalISs(DM dm,IS *is[]) 1020 { 1021 PetscErrorCode ierr; 1022 PetscInt cnt = 0,*idx,i; 1023 struct DMCompositeLink *next; 1024 PetscMPIInt rank; 1025 DM_Composite *com = (DM_Composite*)dm->data; 1026 1027 PetscFunctionBegin; 1028 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1029 ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr); 1030 next = com->next; 1031 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1032 1033 /* loop over packed objects, handling one at at time */ 1034 while (next) { 1035 1036 if (next->type == DMCOMPOSITE_ARRAY) { 1037 1038 if (rank == next->rank) { 1039 ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1040 for (i=0; i<next->n; i++) idx[i] = next->grstart + i; 1041 ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr); 1042 cnt++; 1043 } 1044 1045 } else if (next->type == DMCOMPOSITE_DM) { 1046 ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1047 for (i=0; i<next->n; i++) idx[i] = next->grstart + i; 1048 ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr); 1049 cnt++; 1050 } else { 1051 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1052 } 1053 next = next->next; 1054 } 1055 PetscFunctionReturn(0); 1056 } 1057 1058 /* -------------------------------------------------------------------------------------*/ 1059 #undef __FUNCT__ 1060 #define __FUNCT__ "DMCompositeGetLocalVectors_Array" 1061 PetscErrorCode DMCompositeGetLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array) 1062 { 
1063 PetscErrorCode ierr; 1064 PetscFunctionBegin; 1065 if (array) { 1066 ierr = PetscMalloc(mine->n*sizeof(PetscScalar),array);CHKERRQ(ierr); 1067 } 1068 PetscFunctionReturn(0); 1069 } 1070 1071 #undef __FUNCT__ 1072 #define __FUNCT__ "DMCompositeGetLocalVectors_DM" 1073 PetscErrorCode DMCompositeGetLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local) 1074 { 1075 PetscErrorCode ierr; 1076 PetscFunctionBegin; 1077 if (local) { 1078 ierr = DMGetLocalVector(mine->dm,local);CHKERRQ(ierr); 1079 } 1080 PetscFunctionReturn(0); 1081 } 1082 1083 #undef __FUNCT__ 1084 #define __FUNCT__ "DMCompositeRestoreLocalVectors_Array" 1085 PetscErrorCode DMCompositeRestoreLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array) 1086 { 1087 PetscErrorCode ierr; 1088 PetscFunctionBegin; 1089 if (array) { 1090 ierr = PetscFree(*array);CHKERRQ(ierr); 1091 } 1092 PetscFunctionReturn(0); 1093 } 1094 1095 #undef __FUNCT__ 1096 #define __FUNCT__ "DMCompositeRestoreLocalVectors_DM" 1097 PetscErrorCode DMCompositeRestoreLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local) 1098 { 1099 PetscErrorCode ierr; 1100 PetscFunctionBegin; 1101 if (local) { 1102 ierr = DMRestoreLocalVector(mine->dm,local);CHKERRQ(ierr); 1103 } 1104 PetscFunctionReturn(0); 1105 } 1106 1107 #undef __FUNCT__ 1108 #define __FUNCT__ "DMCompositeGetLocalVectors" 1109 /*@C 1110 DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite. 1111 Use DMCompositeRestoreLocalVectors() to return them. 1112 1113 Not Collective 1114 1115 Input Parameter: 1116 . dm - the packer object 1117 1118 Output Parameter: 1119 . ... 
- the individual sequential objects (arrays or vectors) 1120 1121 Level: advanced 1122 1123 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1124 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1125 DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries() 1126 1127 @*/ 1128 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalVectors(DM dm,...) 1129 { 1130 va_list Argp; 1131 PetscErrorCode ierr; 1132 struct DMCompositeLink *next; 1133 DM_Composite *com = (DM_Composite*)dm->data; 1134 1135 PetscFunctionBegin; 1136 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1137 next = com->next; 1138 /* loop over packed objects, handling one at at time */ 1139 va_start(Argp,dm); 1140 while (next) { 1141 if (next->type == DMCOMPOSITE_ARRAY) { 1142 PetscScalar **array; 1143 array = va_arg(Argp, PetscScalar**); 1144 ierr = DMCompositeGetLocalVectors_Array(dm,next,array);CHKERRQ(ierr); 1145 } else if (next->type == DMCOMPOSITE_DM) { 1146 Vec *vec; 1147 vec = va_arg(Argp, Vec*); 1148 ierr = DMCompositeGetLocalVectors_DM(dm,next,vec);CHKERRQ(ierr); 1149 } else { 1150 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1151 } 1152 next = next->next; 1153 } 1154 va_end(Argp); 1155 PetscFunctionReturn(0); 1156 } 1157 1158 #undef __FUNCT__ 1159 #define __FUNCT__ "DMCompositeRestoreLocalVectors" 1160 /*@C 1161 DMCompositeRestoreLocalVectors - Restores local vectors and arrays for each part of a DMComposite. 1162 1163 Not Collective 1164 1165 Input Parameter: 1166 . dm - the packer object 1167 1168 Output Parameter: 1169 . ... 
- the individual sequential objects (arrays or vectors) 1170 1171 Level: advanced 1172 1173 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1174 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1175 DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries() 1176 1177 @*/ 1178 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreLocalVectors(DM dm,...) 1179 { 1180 va_list Argp; 1181 PetscErrorCode ierr; 1182 struct DMCompositeLink *next; 1183 DM_Composite *com = (DM_Composite*)dm->data; 1184 1185 PetscFunctionBegin; 1186 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1187 next = com->next; 1188 /* loop over packed objects, handling one at at time */ 1189 va_start(Argp,dm); 1190 while (next) { 1191 if (next->type == DMCOMPOSITE_ARRAY) { 1192 PetscScalar **array; 1193 array = va_arg(Argp, PetscScalar**); 1194 ierr = DMCompositeRestoreLocalVectors_Array(dm,next,array);CHKERRQ(ierr); 1195 } else if (next->type == DMCOMPOSITE_DM) { 1196 Vec *vec; 1197 vec = va_arg(Argp, Vec*); 1198 ierr = DMCompositeRestoreLocalVectors_DM(dm,next,vec);CHKERRQ(ierr); 1199 } else { 1200 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1201 } 1202 next = next->next; 1203 } 1204 va_end(Argp); 1205 PetscFunctionReturn(0); 1206 } 1207 1208 /* -------------------------------------------------------------------------------------*/ 1209 #undef __FUNCT__ 1210 #define __FUNCT__ "DMCompositeGetEntries_Array" 1211 PetscErrorCode DMCompositeGetEntries_Array(DM dm,struct DMCompositeLink *mine,PetscInt *n) 1212 { 1213 PetscFunctionBegin; 1214 if (n) *n = mine->n; 1215 PetscFunctionReturn(0); 1216 } 1217 1218 #undef __FUNCT__ 1219 #define __FUNCT__ "DMCompositeGetEntries_DM" 1220 PetscErrorCode DMCompositeGetEntries_DM(DM dmi,struct DMCompositeLink *mine,DM *dm) 1221 { 1222 PetscFunctionBegin; 1223 if (dm) *dm = mine->dm; 1224 PetscFunctionReturn(0); 1225 } 
1226 1227 #undef __FUNCT__ 1228 #define __FUNCT__ "DMCompositeGetEntries" 1229 /*@C 1230 DMCompositeGetEntries - Gets the DM, redundant size, etc for each entry in a DMComposite. 1231 1232 Not Collective 1233 1234 Input Parameter: 1235 . dm - the packer object 1236 1237 Output Parameter: 1238 . ... - the individual entries, DMs or integer sizes) 1239 1240 Level: advanced 1241 1242 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1243 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1244 DMCompositeRestoreLocalVectors(), DMCompositeGetLocalVectors(), DMCompositeScatter(), 1245 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors() 1246 1247 @*/ 1248 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetEntries(DM dm,...) 1249 { 1250 va_list Argp; 1251 PetscErrorCode ierr; 1252 struct DMCompositeLink *next; 1253 DM_Composite *com = (DM_Composite*)dm->data; 1254 1255 PetscFunctionBegin; 1256 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1257 next = com->next; 1258 /* loop over packed objects, handling one at at time */ 1259 va_start(Argp,dm); 1260 while (next) { 1261 if (next->type == DMCOMPOSITE_ARRAY) { 1262 PetscInt *n; 1263 n = va_arg(Argp, PetscInt*); 1264 ierr = DMCompositeGetEntries_Array(dm,next,n);CHKERRQ(ierr); 1265 } else if (next->type == DMCOMPOSITE_DM) { 1266 DM *dmn; 1267 dmn = va_arg(Argp, DM*); 1268 ierr = DMCompositeGetEntries_DM(dm,next,dmn);CHKERRQ(ierr); 1269 } else { 1270 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1271 } 1272 next = next->next; 1273 } 1274 va_end(Argp); 1275 PetscFunctionReturn(0); 1276 } 1277 1278 #undef __FUNCT__ 1279 #define __FUNCT__ "DMRefine_Composite" 1280 PetscErrorCode PETSCDM_DLLEXPORT DMRefine_Composite(DM dmi,MPI_Comm comm,DM *fine) 1281 { 1282 PetscErrorCode ierr; 1283 struct DMCompositeLink *next; 1284 DM_Composite *com = (DM_Composite*)dmi->data; 1285 DM dm; 1286 1287 
PetscFunctionBegin; 1288 PetscValidHeaderSpecific(dmi,DM_CLASSID,1); 1289 next = com->next; 1290 ierr = DMCompositeCreate(comm,fine);CHKERRQ(ierr); 1291 1292 /* loop over packed objects, handling one at at time */ 1293 while (next) { 1294 if (next->type == DMCOMPOSITE_ARRAY) { 1295 ierr = DMCompositeAddArray(*fine,next->rank,next->n);CHKERRQ(ierr); 1296 } else if (next->type == DMCOMPOSITE_DM) { 1297 ierr = DMRefine(next->dm,comm,&dm);CHKERRQ(ierr); 1298 ierr = DMCompositeAddDM(*fine,dm);CHKERRQ(ierr); 1299 ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr); 1300 } else { 1301 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1302 } 1303 next = next->next; 1304 } 1305 PetscFunctionReturn(0); 1306 } 1307 1308 #include "petscmat.h" 1309 1310 struct MatPackLink { 1311 Mat A; 1312 struct MatPackLink *next; 1313 }; 1314 1315 struct MatPack { 1316 DM right,left; 1317 struct MatPackLink *next; 1318 }; 1319 1320 #undef __FUNCT__ 1321 #define __FUNCT__ "MatMultBoth_Shell_Pack" 1322 PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscBool add) 1323 { 1324 struct MatPack *mpack; 1325 struct DMCompositeLink *xnext,*ynext; 1326 struct MatPackLink *anext; 1327 PetscScalar *xarray,*yarray; 1328 PetscErrorCode ierr; 1329 PetscInt i; 1330 Vec xglobal,yglobal; 1331 PetscMPIInt rank; 1332 DM_Composite *comright; 1333 DM_Composite *comleft; 1334 1335 PetscFunctionBegin; 1336 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1337 ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr); 1338 comright = (DM_Composite*)mpack->right->data; 1339 comleft = (DM_Composite*)mpack->left->data; 1340 xnext = comright->next; 1341 ynext = comleft->next; 1342 anext = mpack->next; 1343 1344 while (xnext) { 1345 if (xnext->type == DMCOMPOSITE_ARRAY) { 1346 if (rank == xnext->rank) { 1347 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1348 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1349 if (add) { 1350 for (i=0; 
i<xnext->n; i++) { 1351 yarray[ynext->rstart+i] += xarray[xnext->rstart+i]; 1352 } 1353 } else { 1354 ierr = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr); 1355 } 1356 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1357 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1358 } 1359 } else if (xnext->type == DMCOMPOSITE_DM) { 1360 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1361 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1362 ierr = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1363 ierr = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1364 ierr = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr); 1365 ierr = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr); 1366 if (add) { 1367 ierr = MatMultAdd(anext->A,xglobal,yglobal,yglobal);CHKERRQ(ierr); 1368 } else { 1369 ierr = MatMult(anext->A,xglobal,yglobal);CHKERRQ(ierr); 1370 } 1371 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1372 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1373 ierr = VecResetArray(xglobal);CHKERRQ(ierr); 1374 ierr = VecResetArray(yglobal);CHKERRQ(ierr); 1375 ierr = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1376 ierr = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1377 anext = anext->next; 1378 } else { 1379 SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1380 } 1381 xnext = xnext->next; 1382 ynext = ynext->next; 1383 } 1384 PetscFunctionReturn(0); 1385 } 1386 1387 #undef __FUNCT__ 1388 #define __FUNCT__ "MatMultAdd_Shell_Pack" 1389 PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z) 1390 { 1391 PetscErrorCode ierr; 1392 PetscFunctionBegin; 1393 if (z != y) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Handles y == z only"); 1394 ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);CHKERRQ(ierr); 1395 PetscFunctionReturn(0); 1396 } 1397 1398 #undef __FUNCT__ 1399 #define __FUNCT__ "MatMult_Shell_Pack" 1400 PetscErrorCode 
MatMult_Shell_Pack(Mat A,Vec x,Vec y) 1401 { 1402 PetscErrorCode ierr; 1403 PetscFunctionBegin; 1404 ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);CHKERRQ(ierr); 1405 PetscFunctionReturn(0); 1406 } 1407 1408 #undef __FUNCT__ 1409 #define __FUNCT__ "MatMultTranspose_Shell_Pack" 1410 PetscErrorCode MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y) 1411 { 1412 struct MatPack *mpack; 1413 struct DMCompositeLink *xnext,*ynext; 1414 struct MatPackLink *anext; 1415 PetscScalar *xarray,*yarray; 1416 PetscErrorCode ierr; 1417 Vec xglobal,yglobal; 1418 PetscMPIInt rank; 1419 DM_Composite *comright; 1420 DM_Composite *comleft; 1421 1422 PetscFunctionBegin; 1423 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1424 ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr); 1425 comright = (DM_Composite*)mpack->right->data; 1426 comleft = (DM_Composite*)mpack->left->data; 1427 ynext = comright->next; 1428 xnext = comleft->next; 1429 anext = mpack->next; 1430 1431 while (xnext) { 1432 if (xnext->type == DMCOMPOSITE_ARRAY) { 1433 if (rank == ynext->rank) { 1434 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1435 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1436 ierr = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr); 1437 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1438 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1439 } 1440 } else if (xnext->type == DMCOMPOSITE_DM) { 1441 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1442 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1443 ierr = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1444 ierr = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1445 ierr = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr); 1446 ierr = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr); 1447 ierr = MatMultTranspose(anext->A,xglobal,yglobal);CHKERRQ(ierr); 1448 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1449 ierr = 
VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1450 ierr = VecResetArray(xglobal);CHKERRQ(ierr); 1451 ierr = VecResetArray(yglobal);CHKERRQ(ierr); 1452 ierr = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1453 ierr = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1454 anext = anext->next; 1455 } else { 1456 SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1457 } 1458 xnext = xnext->next; 1459 ynext = ynext->next; 1460 } 1461 PetscFunctionReturn(0); 1462 } 1463 1464 #undef __FUNCT__ 1465 #define __FUNCT__ "MatDestroy_Shell_Pack" 1466 PetscErrorCode MatDestroy_Shell_Pack(Mat A) 1467 { 1468 struct MatPack *mpack; 1469 struct MatPackLink *anext,*oldanext; 1470 PetscErrorCode ierr; 1471 1472 PetscFunctionBegin; 1473 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1474 anext = mpack->next; 1475 1476 while (anext) { 1477 ierr = MatDestroy(anext->A);CHKERRQ(ierr); 1478 oldanext = anext; 1479 anext = anext->next; 1480 ierr = PetscFree(oldanext);CHKERRQ(ierr); 1481 } 1482 ierr = PetscFree(mpack);CHKERRQ(ierr); 1483 ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr); 1484 PetscFunctionReturn(0); 1485 } 1486 1487 #undef __FUNCT__ 1488 #define __FUNCT__ "DMGetInterpolation_Composite" 1489 PetscErrorCode PETSCDM_DLLEXPORT DMGetInterpolation_Composite(DM coarse,DM fine,Mat *A,Vec *v) 1490 { 1491 PetscErrorCode ierr; 1492 PetscInt m,n,M,N; 1493 struct DMCompositeLink *nextc; 1494 struct DMCompositeLink *nextf; 1495 struct MatPackLink *nextmat,*pnextmat = 0; 1496 struct MatPack *mpack; 1497 Vec gcoarse,gfine; 1498 DM_Composite *comcoarse = (DM_Composite*)coarse->data; 1499 DM_Composite *comfine = (DM_Composite*)fine->data; 1500 1501 PetscFunctionBegin; 1502 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1503 PetscValidHeaderSpecific(fine,DM_CLASSID,2); 1504 nextc = comcoarse->next; 1505 nextf = comfine->next; 1506 /* use global vectors only for determining matrix layout */ 1507 ierr = 
DMCreateGlobalVector(coarse,&gcoarse);CHKERRQ(ierr); 1508 ierr = DMCreateGlobalVector(fine,&gfine);CHKERRQ(ierr); 1509 ierr = VecGetLocalSize(gcoarse,&n);CHKERRQ(ierr); 1510 ierr = VecGetLocalSize(gfine,&m);CHKERRQ(ierr); 1511 ierr = VecGetSize(gcoarse,&N);CHKERRQ(ierr); 1512 ierr = VecGetSize(gfine,&M);CHKERRQ(ierr); 1513 ierr = VecDestroy(gcoarse);CHKERRQ(ierr); 1514 ierr = VecDestroy(gfine);CHKERRQ(ierr); 1515 1516 ierr = PetscNew(struct MatPack,&mpack);CHKERRQ(ierr); 1517 mpack->right = coarse; 1518 mpack->left = fine; 1519 ierr = MatCreate(((PetscObject)fine)->comm,A);CHKERRQ(ierr); 1520 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 1521 ierr = MatSetType(*A,MATSHELL);CHKERRQ(ierr); 1522 ierr = MatShellSetContext(*A,mpack);CHKERRQ(ierr); 1523 ierr = MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);CHKERRQ(ierr); 1524 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);CHKERRQ(ierr); 1525 ierr = MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);CHKERRQ(ierr); 1526 ierr = MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);CHKERRQ(ierr); 1527 1528 /* loop over packed objects, handling one at at time */ 1529 while (nextc) { 1530 if (nextc->type != nextf->type) SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_ARG_INCOMP,"Two DM have different layout"); 1531 1532 if (nextc->type == DMCOMPOSITE_ARRAY) { 1533 ; 1534 } else if (nextc->type == DMCOMPOSITE_DM) { 1535 ierr = PetscNew(struct MatPackLink,&nextmat);CHKERRQ(ierr); 1536 nextmat->next = 0; 1537 if (pnextmat) { 1538 pnextmat->next = nextmat; 1539 pnextmat = nextmat; 1540 } else { 1541 pnextmat = nextmat; 1542 mpack->next = nextmat; 1543 } 1544 ierr = DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);CHKERRQ(ierr); 1545 } else { 1546 SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1547 } 1548 nextc = nextc->next; 1549 nextf = nextf->next; 1550 } 1551 
PetscFunctionReturn(0); 1552 } 1553 1554 #undef __FUNCT__ 1555 #define __FUNCT__ "DMGetMatrix_Composite" 1556 PetscErrorCode PETSCDM_DLLEXPORT DMGetMatrix_Composite(DM dm, const MatType mtype,Mat *J) 1557 { 1558 PetscErrorCode ierr; 1559 DM_Composite *com = (DM_Composite*)dm->data; 1560 struct DMCompositeLink *next = com->next; 1561 PetscInt m,*dnz,*onz,i,j,mA; 1562 Mat Atmp; 1563 PetscMPIInt rank; 1564 PetscScalar zero = 0.0; 1565 PetscBool dense = PETSC_FALSE; 1566 1567 PetscFunctionBegin; 1568 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1569 1570 /* use global vector to determine layout needed for matrix */ 1571 m = com->n; 1572 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1573 ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr); 1574 ierr = MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 1575 ierr = MatSetType(*J,MATAIJ);CHKERRQ(ierr); 1576 1577 /* 1578 Extremely inefficient but will compute entire Jacobian for testing 1579 */ 1580 ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr); 1581 if (dense) { 1582 PetscInt rstart,rend,*indices; 1583 PetscScalar *values; 1584 1585 mA = com->N; 1586 ierr = MatMPIAIJSetPreallocation(*J,mA,PETSC_NULL,mA-m,PETSC_NULL);CHKERRQ(ierr); 1587 ierr = MatSeqAIJSetPreallocation(*J,mA,PETSC_NULL);CHKERRQ(ierr); 1588 1589 ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr); 1590 ierr = PetscMalloc2(mA,PetscScalar,&values,mA,PetscInt,&indices);CHKERRQ(ierr); 1591 ierr = PetscMemzero(values,mA*sizeof(PetscScalar));CHKERRQ(ierr); 1592 for (i=0; i<mA; i++) indices[i] = i; 1593 for (i=rstart; i<rend; i++) { 1594 ierr = MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);CHKERRQ(ierr); 1595 } 1596 ierr = PetscFree2(values,indices);CHKERRQ(ierr); 1597 ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1598 ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1599 PetscFunctionReturn(0); 1600 } 1601 1602 ierr = 
MatPreallocateInitialize(((PetscObject)dm)->comm,m,m,dnz,onz);CHKERRQ(ierr); 1603 /* loop over packed objects, handling one at at time */ 1604 next = com->next; 1605 while (next) { 1606 if (next->type == DMCOMPOSITE_ARRAY) { 1607 if (rank == next->rank) { /* zero the "little" block */ 1608 for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) { 1609 for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) { 1610 ierr = MatPreallocateSet(j,1,&i,dnz,onz);CHKERRQ(ierr); 1611 } 1612 } 1613 } 1614 } else if (next->type == DMCOMPOSITE_DM) { 1615 PetscInt nc,rstart,*ccols,maxnc; 1616 const PetscInt *cols,*rstarts; 1617 PetscMPIInt proc; 1618 1619 ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr); 1620 ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr); 1621 ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr); 1622 ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr); 1623 1624 maxnc = 0; 1625 for (i=0; i<mA; i++) { 1626 ierr = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 1627 ierr = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 1628 maxnc = PetscMax(nc,maxnc); 1629 } 1630 ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr); 1631 for (i=0; i<mA; i++) { 1632 ierr = MatGetRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr); 1633 /* remap the columns taking into how much they are shifted on each process */ 1634 for (j=0; j<nc; j++) { 1635 proc = 0; 1636 while (cols[j] >= rstarts[proc+1]) proc++; 1637 ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc]; 1638 } 1639 ierr = MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);CHKERRQ(ierr); 1640 ierr = MatRestoreRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr); 1641 } 1642 ierr = PetscFree(ccols);CHKERRQ(ierr); 1643 ierr = MatDestroy(Atmp);CHKERRQ(ierr); 1644 } else { 1645 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1646 } 1647 next = 
next->next; 1648 } 1649 if (com->FormCoupleLocations) { 1650 ierr = (*com->FormCoupleLocations)(dm,PETSC_NULL,dnz,onz,__rstart,__nrows,__start,__end);CHKERRQ(ierr); 1651 } 1652 ierr = MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);CHKERRQ(ierr); 1653 ierr = MatSeqAIJSetPreallocation(*J,0,dnz);CHKERRQ(ierr); 1654 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 1655 1656 next = com->next; 1657 while (next) { 1658 if (next->type == DMCOMPOSITE_ARRAY) { 1659 if (rank == next->rank) { 1660 for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) { 1661 for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) { 1662 ierr = MatSetValues(*J,1,&j,1,&i,&zero,INSERT_VALUES);CHKERRQ(ierr); 1663 } 1664 } 1665 } 1666 } else if (next->type == DMCOMPOSITE_DM) { 1667 PetscInt nc,rstart,row,maxnc,*ccols; 1668 const PetscInt *cols,*rstarts; 1669 const PetscScalar *values; 1670 PetscMPIInt proc; 1671 1672 ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr); 1673 ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr); 1674 ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr); 1675 ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr); 1676 maxnc = 0; 1677 for (i=0; i<mA; i++) { 1678 ierr = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 1679 ierr = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 1680 maxnc = PetscMax(nc,maxnc); 1681 } 1682 ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr); 1683 for (i=0; i<mA; i++) { 1684 ierr = MatGetRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr); 1685 for (j=0; j<nc; j++) { 1686 proc = 0; 1687 while (cols[j] >= rstarts[proc+1]) proc++; 1688 ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc]; 1689 } 1690 row = com->rstart+next->rstart+i; 1691 ierr = MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);CHKERRQ(ierr); 1692 ierr = MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr); 
1693 } 1694 ierr = PetscFree(ccols);CHKERRQ(ierr); 1695 ierr = MatDestroy(Atmp);CHKERRQ(ierr); 1696 } else { 1697 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1698 } 1699 next = next->next; 1700 } 1701 if (com->FormCoupleLocations) { 1702 PetscInt __rstart; 1703 ierr = MatGetOwnershipRange(*J,&__rstart,PETSC_NULL);CHKERRQ(ierr); 1704 ierr = (*com->FormCoupleLocations)(dm,*J,PETSC_NULL,PETSC_NULL,__rstart,0,0,0);CHKERRQ(ierr); 1705 } 1706 ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1707 ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1708 PetscFunctionReturn(0); 1709 } 1710 1711 #undef __FUNCT__ 1712 #define __FUNCT__ "DMGetColoring_Composite" 1713 PetscErrorCode PETSCDM_DLLEXPORT DMGetColoring_Composite(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring) 1714 { 1715 PetscErrorCode ierr; 1716 PetscInt n,i,cnt; 1717 ISColoringValue *colors; 1718 PetscBool dense = PETSC_FALSE; 1719 ISColoringValue maxcol = 0; 1720 DM_Composite *com = (DM_Composite*)dm->data; 1721 1722 PetscFunctionBegin; 1723 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1724 if (ctype == IS_COLORING_GHOSTED) { 1725 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global" ); 1726 } else if (ctype == IS_COLORING_GLOBAL) { 1727 n = com->n; 1728 } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType"); 1729 ierr = PetscMalloc(n*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); /* freed in ISColoringDestroy() */ 1730 1731 ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr); 1732 if (dense) { 1733 for (i=0; i<n; i++) { 1734 colors[i] = (ISColoringValue)(com->rstart + i); 1735 } 1736 maxcol = com->N; 1737 } else { 1738 struct DMCompositeLink *next = com->next; 1739 PetscMPIInt rank; 1740 1741 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1742 cnt = 0; 1743 while (next) { 1744 if 
(next->type == DMCOMPOSITE_ARRAY) { 1745 if (rank == next->rank) { /* each column gets is own color */ 1746 for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) { 1747 colors[cnt++] = maxcol++; 1748 } 1749 } 1750 ierr = MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 1751 } else if (next->type == DMCOMPOSITE_DM) { 1752 ISColoring lcoloring; 1753 1754 ierr = DMGetColoring(next->dm,IS_COLORING_GLOBAL,mtype,&lcoloring);CHKERRQ(ierr); 1755 for (i=0; i<lcoloring->N; i++) { 1756 colors[cnt++] = maxcol + lcoloring->colors[i]; 1757 } 1758 maxcol += lcoloring->n; 1759 ierr = ISColoringDestroy(lcoloring);CHKERRQ(ierr); 1760 } else { 1761 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1762 } 1763 next = next->next; 1764 } 1765 } 1766 ierr = ISColoringCreate(((PetscObject)dm)->comm,maxcol,n,colors,coloring);CHKERRQ(ierr); 1767 PetscFunctionReturn(0); 1768 } 1769 1770 #undef __FUNCT__ 1771 #define __FUNCT__ "DMGlobalToLocalBegin_Composite" 1772 PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalBegin_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec) 1773 { 1774 PetscErrorCode ierr; 1775 struct DMCompositeLink *next; 1776 PetscInt cnt = 3; 1777 PetscMPIInt rank; 1778 PetscScalar *garray,*larray; 1779 DM_Composite *com = (DM_Composite*)dm->data; 1780 1781 PetscFunctionBegin; 1782 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1783 PetscValidHeaderSpecific(gvec,VEC_CLASSID,2); 1784 next = com->next; 1785 if (!com->setup) { 1786 ierr = DMSetUp(dm);CHKERRQ(ierr); 1787 } 1788 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1789 ierr = VecGetArray(gvec,&garray);CHKERRQ(ierr); 1790 ierr = VecGetArray(lvec,&larray);CHKERRQ(ierr); 1791 1792 /* loop over packed objects, handling one at at time */ 1793 while (next) { 1794 if (next->type == DMCOMPOSITE_ARRAY) { 1795 if (rank == next->rank) { 1796 ierr = PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));CHKERRQ(ierr); 1797 
garray += next->n; 1798 } 1799 /* does not handle ADD_VALUES */ 1800 ierr = MPI_Bcast(larray,next->n,MPIU_SCALAR,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 1801 larray += next->n; 1802 } else if (next->type == DMCOMPOSITE_DM) { 1803 Vec local,global; 1804 PetscInt N; 1805 1806 ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr); 1807 ierr = VecGetLocalSize(global,&N);CHKERRQ(ierr); 1808 ierr = VecPlaceArray(global,garray);CHKERRQ(ierr); 1809 ierr = DMGetLocalVector(next->dm,&local);CHKERRQ(ierr); 1810 ierr = VecPlaceArray(local,larray);CHKERRQ(ierr); 1811 ierr = DMGlobalToLocalBegin(next->dm,global,mode,local);CHKERRQ(ierr); 1812 ierr = DMGlobalToLocalEnd(next->dm,global,mode,local);CHKERRQ(ierr); 1813 ierr = VecResetArray(global);CHKERRQ(ierr); 1814 ierr = VecResetArray(local);CHKERRQ(ierr); 1815 ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr); 1816 ierr = DMRestoreGlobalVector(next->dm,&local);CHKERRQ(ierr); 1817 larray += next->n; 1818 } else { 1819 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1820 } 1821 cnt++; 1822 next = next->next; 1823 } 1824 1825 ierr = VecRestoreArray(gvec,PETSC_NULL);CHKERRQ(ierr); 1826 ierr = VecRestoreArray(lvec,PETSC_NULL);CHKERRQ(ierr); 1827 PetscFunctionReturn(0); 1828 } 1829 1830 #undef __FUNCT__ 1831 #define __FUNCT__ "DMGlobalToLocalEnd_Composite" 1832 PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalEnd_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec) 1833 { 1834 PetscFunctionBegin; 1835 PetscFunctionReturn(0); 1836 } 1837 1838 EXTERN_C_BEGIN 1839 #undef __FUNCT__ 1840 #define __FUNCT__ "DMCreate_Composite" 1841 PetscErrorCode PETSCDM_DLLEXPORT DMCreate_Composite(DM p) 1842 { 1843 PetscErrorCode ierr; 1844 DM_Composite *com; 1845 1846 PetscFunctionBegin; 1847 ierr = PetscNewLog(p,DM_Composite,&com);CHKERRQ(ierr); 1848 p->data = com; 1849 ierr = PetscObjectChangeTypeName((PetscObject)p,"DMComposite");CHKERRQ(ierr); 1850 com->n = 0; 1851 com->next = PETSC_NULL; 
1852 com->nredundant = 0; 1853 com->nDM = 0; 1854 1855 p->ops->createglobalvector = DMCreateGlobalVector_Composite; 1856 p->ops->createlocalvector = DMCreateLocalVector_Composite; 1857 p->ops->refine = DMRefine_Composite; 1858 p->ops->getinterpolation = DMGetInterpolation_Composite; 1859 p->ops->getmatrix = DMGetMatrix_Composite; 1860 p->ops->getcoloring = DMGetColoring_Composite; 1861 p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Composite; 1862 p->ops->globaltolocalend = DMGlobalToLocalEnd_Composite; 1863 p->ops->destroy = DMDestroy_Composite; 1864 p->ops->view = DMView_Composite; 1865 p->ops->setup = DMSetUp_Composite; 1866 PetscFunctionReturn(0); 1867 } 1868 EXTERN_C_END 1869 1870 #undef __FUNCT__ 1871 #define __FUNCT__ "DMCompositeCreate" 1872 /*@C 1873 DMCompositeCreate - Creates a vector packer, used to generate "composite" 1874 vectors made up of several subvectors. 1875 1876 Collective on MPI_Comm 1877 1878 Input Parameter: 1879 . comm - the processors that will share the global vector 1880 1881 Output Parameters: 1882 . packer - the packer object 1883 1884 Level: advanced 1885 1886 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(), 1887 DMCompositeGather(), DMCreateGlobalVector(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess() 1888 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries() 1889 1890 @*/ 1891 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeCreate(MPI_Comm comm,DM *packer) 1892 { 1893 PetscErrorCode ierr; 1894 1895 PetscFunctionBegin; 1896 PetscValidPointer(packer,2); 1897 ierr = DMCreate(comm,packer);CHKERRQ(ierr); 1898 ierr = DMSetType(*packer,DMCOMPOSITE);CHKERRQ(ierr); 1899 PetscFunctionReturn(0); 1900 } 1901