/*
   This file contains routines for basic map object implementation.
*/

#include <petscis.h> /*I "petscis.h" I*/
#include <petscsf.h>
#include <petsc/private/isimpl.h>

/*@
  PetscLayoutCreate - Allocates PetscLayout space and sets the PetscLayout contents to the default.

  Collective

  Input Parameter:
. comm - the MPI communicator

  Output Parameter:
. map - the new PetscLayout

  Level: advanced

  Notes:
  Typical calling sequence
.vb
  PetscLayoutCreate(MPI_Comm,PetscLayout *);
  PetscLayoutSetBlockSize(PetscLayout,bs);
  PetscLayoutSetSize(PetscLayout,N); // or PetscLayoutSetLocalSize(PetscLayout,n);
  PetscLayoutSetUp(PetscLayout);
.ve
  Optionally use any of the following:

+ PetscLayoutGetSize(PetscLayout,PetscInt *);
. PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
. PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
. PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
- PetscLayoutDestroy(PetscLayout*);

  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; they are often not needed in
  user codes unless you really gain something from their use.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscNew(map);CHKERRQ(ierr);

  (*map)->comm   = comm;
  (*map)->bs     = -1;
  (*map)->n      = -1;
  (*map)->N      = -1;
  (*map)->range  = NULL;
  (*map)->rstart = 0;
  (*map)->rend   = 0;
  PetscFunctionReturn(0);
}
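
/*
  Illustrative sketch (not part of the library source): the typical PetscLayout life
  cycle documented above, written out as runnable calls. The global size 100 and
  block size 1 are arbitrary example values, and ierr is assumed to be a
  PetscErrorCode in scope.

    PetscLayout map;
    PetscInt    n,rstart,rend;

    ierr = PetscLayoutCreate(PETSC_COMM_WORLD,&map);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
    ierr = PetscLayoutSetSize(map,100);CHKERRQ(ierr);            // let PETSc pick the local sizes
    ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
    ierr = PetscLayoutGetLocalSize(map,&n);CHKERRQ(ierr);
    ierr = PetscLayoutGetRange(map,&rstart,&rend);CHKERRQ(ierr); // n == rend - rstart
    ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);
*/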
/*@
  PetscLayoutDestroy - Frees a map object and frees its range if it exists.

  Collective

  Input Parameter:
. map - the PetscLayout

  Level: developer

  Note:
  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is
  recommended they not be used in user codes unless you really gain something from their use.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*map) PetscFunctionReturn(0);
  if (!(*map)->refcnt--) {
    ierr = PetscFree((*map)->range);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
    ierr = PetscFree((*map));CHKERRQ(ierr);
  }
  *map = NULL;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscLayoutSetUp_SizesFromRanges_Private(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  map->n      = map->rend - map->rstart;
  map->N      = map->range[size];
#if defined(PETSC_USE_DEBUG)
  /* just check that n, N and bs are consistent */
  {
    PetscInt tmp;
    ierr = MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
  }
  if (map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }
#endif
  PetscFunctionReturn(0);
}
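
/*
  Illustrative note (not part of the library source): the invariants the helper
  above derives from a user-provided range array on a communicator with `size`
  ranks are

    range[0]    == 0
    range[size] == N                          (global size)
    rstart      == range[rank]
    rend        == range[rank+1]
    n           == range[rank+1] - range[rank] (local size)

  For example, N = 10 split over 3 ranks as {4,3,3} corresponds to
  range = {0,4,7,10}; rank 1 then owns indices [4,7).
*/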
/*@
  PetscLayoutSetUp - given a map where you have set either the global or local
                     size, sets up the map so that it may be used.

  Collective

  Input Parameter:
. map - pointer to the map

  Level: developer

  Notes:
  Typical calling sequence
$ PetscLayoutCreate(MPI_Comm,PetscLayout *);
$ PetscLayoutSetBlockSize(PetscLayout,1);
$ PetscLayoutSetSize(PetscLayout,N); or PetscLayoutSetLocalSize(PetscLayout,n); or both
$ PetscLayoutSetUp(PetscLayout);
$ PetscLayoutGetSize(PetscLayout,PetscInt *);

  If range exists, and local size is not set, everything gets computed from the range.

  If the local size and global size are already set and range exists then this does nothing.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
@*/
PetscErrorCode PetscLayoutSetUp(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if ((map->n >= 0) && (map->N >= 0) && (map->range)) PetscFunctionReturn(0);
  if (map->range && map->n < 0) {
    ierr = PetscLayoutSetUp_SizesFromRanges_Private(map);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if (map->n > 0 && map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->N > 0 && map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }

  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
  if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
  if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
  ierr = PetscSplitOwnership(map->comm,&map->n,&map->N);CHKERRQ(ierr);
  map->n = map->n*PetscAbs(map->bs);
  map->N = map->N*PetscAbs(map->bs);
  if (!map->range) {
    ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
  }
  ierr = MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);CHKERRQ(ierr);

  /* range[1] already holds rank 0's local size; turn the gathered counts into a prefix sum */
  map->range[0] = 0;
  for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  PetscFunctionReturn(0);
}

/*@
  PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If the output PetscLayout already exists it is destroyed first.

  Collective on PetscLayout

  Input Parameter:
. in - input PetscLayout to be duplicated

  Output Parameter:
. out - the copy

  Level: developer

  Notes:
  PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
@*/
PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;
  MPI_Comm       comm = in->comm;

  PetscFunctionBegin;
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm,out);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));CHKERRQ(ierr);
  ierr = PetscMalloc1(size+1,&(*out)->range);CHKERRQ(ierr);
  ierr = PetscArraycpy((*out)->range,in->range,size+1);CHKERRQ(ierr);

  (*out)->refcnt = 0;
  PetscFunctionReturn(0);
}
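
/*
  Illustrative sketch (not part of the library source): PetscLayoutDuplicate() makes
  an independent deep copy, while PetscLayoutReference() (below) shares the same
  layout and bumps its reference count; both destroy any layout already stored at
  the output location. `map` is assumed to be an existing, set-up PetscLayout.

    PetscLayout copy = NULL,shared = NULL;

    ierr = PetscLayoutDuplicate(map,&copy);CHKERRQ(ierr);   // independent copy, refcnt 0
    ierr = PetscLayoutReference(map,&shared);CHKERRQ(ierr); // shared == map, refcnt incremented
    ierr = PetscLayoutDestroy(&copy);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&shared);CHKERRQ(ierr);       // only decrements map's refcnt
*/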
/*@
  PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()

  Collective on PetscLayout

  Input Parameter:
. in - input PetscLayout to be copied

  Output Parameter:
. out - the reference location

  Level: developer

  Notes:
  PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

  If the out location already contains a PetscLayout it is destroyed

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  in->refcnt++;
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  *out = in;
  PetscFunctionReturn(0);
}

/*@
  PetscLayoutSetISLocalToGlobalMapping - sets an ISLocalToGlobalMapping into a PetscLayout

  Collective on PetscLayout

  Input Parameters:
+ in   - input PetscLayout
- ltog - the local to global mapping

  Level: developer

  Notes:
  PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

  If the PetscLayout already contains an ISLocalToGlobalMapping it is destroyed and replaced

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
{
  PetscErrorCode ierr;
  PetscInt       bs;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&bs);CHKERRQ(ierr);
  if (in->bs > 0 && (bs != 1) && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D (or the latter must be 1)",in->bs,bs);
  ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&in->mapping);CHKERRQ(ierr);
  in->mapping = ltog;
  PetscFunctionReturn(0);
}

/*@
  PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.

  Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- n   - the local size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
  map->n = n;
  PetscFunctionReturn(0);
}

/*@C
  PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. n - the local size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

  Fortran Notes:
  Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetLocalSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()

@*/
PetscErrorCode PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  *n = map->n;
  PetscFunctionReturn(0);
}
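
/*
  Illustrative sketch (not part of the library source): only one of the two sizes
  needs to be specified; PetscLayoutSetUp() negotiates the other through
  PetscSplitOwnership(). The variable `nlocal` is an assumed per-rank count, and
  the PETSC_DECIDE call is optional since -1 is already the default.

    PetscLayout map;
    PetscInt    N;

    ierr = PetscLayoutCreate(PETSC_COMM_WORLD,&map);CHKERRQ(ierr);
    ierr = PetscLayoutSetLocalSize(map,nlocal);CHKERRQ(ierr);  // each rank contributes nlocal entries
    ierr = PetscLayoutSetSize(map,PETSC_DECIDE);CHKERRQ(ierr); // global size computed as the sum
    ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
    ierr = PetscLayoutGetSize(map,&N);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);
*/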
/*@
  PetscLayoutSetSize - Sets the global size for a PetscLayout object.

  Logically Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- n   - the global size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  map->N = n;
  PetscFunctionReturn(0);
}

/*@
  PetscLayoutGetSize - Gets the global size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. n - the global size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  *n = map->N;
  PetscFunctionReturn(0);
}

/*@
  PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.

  Logically Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- bs  - the block size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
{
  PetscFunctionBegin;
  if (bs < 0) PetscFunctionReturn(0);
  if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
  if (map->mapping) {
    PetscInt       obs;
    PetscErrorCode ierr;

    ierr = ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);CHKERRQ(ierr);
    if (obs > 1) {
      ierr = ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);CHKERRQ(ierr);
    }
  }
  map->bs = bs;
  PetscFunctionReturn(0);
}

/*@
  PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. bs - the block size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
@*/
PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
{
  PetscFunctionBegin;
  *bs = PetscAbs(map->bs);
  PetscFunctionReturn(0);
}
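
/*
  Illustrative sketch (not part of the library source): once a block size is set,
  both the local and global sizes must be divisible by it, and PetscLayoutSetUp()
  distributes whole blocks. The values 2 and 100 are arbitrary example values.

    PetscLayout map;
    PetscInt    bs;

    ierr = PetscLayoutCreate(PETSC_COMM_WORLD,&map);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(map,2);CHKERRQ(ierr);   // must divide the global size below
    ierr = PetscLayoutSetSize(map,100);CHKERRQ(ierr);      // 50 blocks of size 2
    ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);            // every local size is a multiple of 2
    ierr = PetscLayoutGetBlockSize(map,&bs);CHKERRQ(ierr); // bs == 2
    ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);
*/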
/*@
  PetscLayoutGetRange - gets the range of values owned by this process

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameters:
+ rstart - first index owned by this process
- rend   - one more than the last index owned by this process

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
{
  PetscFunctionBegin;
  if (rstart) *rstart = map->rstart;
  if (rend)   *rend   = map->rend;
  PetscFunctionReturn(0);
}

/*@C
  PetscLayoutGetRanges - gets the range of values owned by all processes

  Not Collective

  Input Parameter:
. map - pointer to the map

  Output Parameter:
. range - start of each processor's range of indices (the final entry is one more than the
          last index on the last process)

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

  Fortran Notes:
  Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
{
  PetscFunctionBegin;
  *range = map->range;
  PetscFunctionReturn(0);
}

/*@C
  PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout

  Collective

  Input Arguments:
+ sf        - star forest
. layout    - PetscLayout defining the global space
. nleaves   - number of leaf vertices on the current process, each of these references a root on any process
. ilocal    - locations of leaves in leafdata buffers, pass NULL for contiguous storage
. localmode - copy mode for ilocal
- iremote   - remote locations of root vertices for each leaf on the current process

  Level: intermediate

  Developer Notes:
  Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
  encode contiguous storage. In that case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
  needed.

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots;
  PetscSFNode    *remote;

  PetscFunctionBegin;
  ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    PetscInt owner = -1;
    ierr = PetscLayoutFindOwner(layout,iremote[i],&owner);CHKERRQ(ierr);
    remote[i].rank  = owner;
    remote[i].index = iremote[i] - layout->range[owner];
  }
  ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
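
/*
  Illustrative sketch (not part of the library source): building a star forest whose
  roots are distributed by `layout` from an array of global root indices. Here
  `layout` is assumed to be an existing, set-up PetscLayout and the leaf count and
  indices are arbitrary example values in [0, N).

    PetscSF        sf;
    const PetscInt globals[3] = {0,5,3}; // global indices of the roots each leaf references
    PetscInt       nleaves    = 3;

    ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(sf,layout,nleaves,NULL,PETSC_COPY_VALUES,globals);CHKERRQ(ierr);
    // ... PetscSFBcastBegin()/End() etc. ...
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
*/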
/*@
  PetscLayoutCompare - Compares two layouts

  Not Collective

  Input Parameters:
+ mapa - pointer to the first map
- mapb - pointer to the second map

  Output Parameter:
. congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise

  Level: beginner

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
{
  PetscErrorCode ierr;
  PetscMPIInt    sizea,sizeb;

  PetscFunctionBegin;
  *congruent = PETSC_FALSE;
  ierr = MPI_Comm_size(mapa->comm,&sizea);CHKERRQ(ierr);
  ierr = MPI_Comm_size(mapb->comm,&sizeb);CHKERRQ(ierr);
  if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
    ierr = PetscArraycmp(mapa->range,mapb->range,sizea+1,congruent);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank  = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    for (r = 0; r < N; ++r) if (idxs[r] >= 0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum    = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >= 0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on)     *on     = len;
  if (oidxs)  *oidxs  = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
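
/*
  Illustrative sketch (not part of the library source): PetscLayoutMapLocal() takes
  N global indices `idxs` (possibly referring to entries owned by other processes)
  and returns, on each rank, the locally owned indices that were selected anywhere
  (`oidxs`, in local numbering, `on` of them), plus their positions in a compressed
  global numbering of all selected entries (`ogidxs`). The names `map`, `N` and
  `idxs` are assumptions for the example; the caller frees the output arrays.

    PetscInt on,*oidxs = NULL,*ogidxs = NULL;

    ierr = PetscLayoutMapLocal(map,N,idxs,&on,&oidxs,&ogidxs);CHKERRQ(ierr);
    // ... use the on locally owned indices ...
    ierr = PetscFree(oidxs);CHKERRQ(ierr);
    ierr = PetscFree(ogidxs);CHKERRQ(ierr);
*/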