#include <petsc-private/isimpl.h>     /*I "petscis.h"  I*/
#include <petscsf.h>
#include <petscviewer.h>

PetscClassId IS_LTOGM_CLASSID;

#undef __FUNCT__
#define __FUNCT__ "ISG2LMapApply"
PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
{
  PetscErrorCode ierr;
  PetscInt       i,start,end;

  PetscFunctionBegin;
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
  }
  start = mapping->globalstart;
  end   = mapping->globalend;
  for (i=0; i<n; i++) {
    if (in[i] < 0)          out[i] = in[i];
    else if (in[i] < start) out[i] = -1;
    else if (in[i] > end)   out[i] = -1;
    else                    out[i] = mapping->globals[in[i] - start];
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
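
/*
   Editor's usage sketch (not part of the original source): creating a small
   mapping and viewing it. The index values and variable names below are
   illustrative assumptions, not taken from this file.

     ISLocalToGlobalMapping ltog;
     PetscInt               gidx[4] = {3,7,4,9};   // global number of each local point
     PetscErrorCode         ierr;

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,4,gidx,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingView(ltog,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
*/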

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
/*@
    ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not collective

    Input Parameter:
.   is - index set containing the global numbers for each local number

    Output Parameter:
.   mapping - new mapping data structure

    Notes: the block size of the IS determines the block size of the mapping
    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  const PetscInt *indices;
  MPI_Comm       comm;
  PetscBool      isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
  ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
  /* if (!isblock) { */
  ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  /* } else {
    ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,bs,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
  } */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  } else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i;
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
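
/*
   Editor's usage sketch (not part of the original source): building a mapping
   from an index set that lists the global number of each local entry. Names
   and values are illustrative assumptions.

     IS                     is;
     ISLocalToGlobalMapping ltog;
     PetscInt               gidx[4] = {0,1,5,6};
     PetscErrorCode         ierr;

     ierr = ISCreateGeneral(PETSC_COMM_WORLD,4,gidx,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingCreateIS(is,&ltog);CHKERRQ(ierr);
     ierr = ISDestroy(&is);CHKERRQ(ierr);   // the mapping keeps its own copy of the indices
*/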

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
/*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping

    Not Collective

    Input Parameter:
.   mapping - mapping data structure

    Output Parameter:
.   bs - the blocksize

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(bs,2);
  *bs = mapping->bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,4);
  PetscValidPointer(mapping,6);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
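
/*
   Editor's worked example (not part of the original source): with bs = 2 and
   block indices {1,3}, each block index idx stands for the point indices
   bs*idx+j, j=0,...,bs-1, so local points 0..3 map to global points 2,3,6,7.

     ISLocalToGlobalMapping ltog;
     PetscInt               blockidx[2] = {1,3};
     PetscErrorCode         ierr;

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,blockidx,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
*/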

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective

    Input Parameter:
.   mapping - mapping data structure

    Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
/*@
    ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
    a new index set using the global numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
-   is - index set in local numbering

    Output Parameter:
.   newis - index set in global numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
@*/
PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,2);
  PetscValidPointer(newis,3);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
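
/*
   Editor's usage sketch (not part of the original source): renumbering a local
   IS into the global numbering. The mapping ltog and the IS contents are
   illustrative assumptions.

     IS             locis,globis;
     PetscInt       lidx[3] = {0,2,1};
     PetscErrorCode ierr;

     ierr = ISCreateGeneral(PETSC_COMM_SELF,3,lidx,PETSC_COPY_VALUES,&locis);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingApplyIS(ltog,locis,&globis);CHKERRQ(ierr);  // globis holds the mapped indices
     ierr = ISDestroy(&locis);CHKERRQ(ierr);
     ierr = ISDestroy(&globis);CHKERRQ(ierr);
*/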

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingApply"
/*@
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Notes:
   The in and out array parameters may be identical.

   Level: advanced

.seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(), AOCreateBasic(), AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

   Concepts: mapping^local to global
@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt       i,bs = mapping->bs,Nmax = bs*mapping->n;
  const PetscInt *idx = mapping->indices;

  PetscFunctionBegin;
  if (bs == 1) {
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
/*@
   ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local numbering and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Notes:
   The in and out array parameters may be identical.

   Level: advanced

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(), AOCreateBasic(), AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

   Concepts: mapping^local to global
@*/
PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt       i,Nmax = mapping->n;
  const PetscInt *idx = mapping->indices;

  PetscFunctionBegin;
  for (i=0; i<N; i++) {
    if (in[i] < 0) {
      out[i] = in[i];
      continue;
    }
    if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
    out[i] = idx[in[i]];
  }
  PetscFunctionReturn(0);
}
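
/*
   Editor's worked example (not part of the original source): contrast of the
   two apply variants for a mapping ltog with bs = 2 and block indices {5,8}.
   ISLocalToGlobalMappingApply() works on point indices, so local point 2 maps
   to idx[2/2]*2 + (2%2) = 8*2+0 = 16; ISLocalToGlobalMappingApplyBlock() works
   on block indices, so local block 1 maps to idx[1] = 8.

     PetscInt       in = 2,out,bin = 1,bout;
     PetscErrorCode ierr;

     ierr = ISLocalToGlobalMappingApply(ltog,1,&in,&out);CHKERRQ(ierr);        // out  == 16
     ierr = ISLocalToGlobalMappingApplyBlock(ltog,1,&bin,&bout);CHKERRQ(ierr); // bout == 8
*/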

/* -----------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  ierr = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
    sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                           PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0)          idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
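
/*
   Editor's usage sketch (not part of the original source): the two-pass pattern
   for IS_GTOLM_DROP described in the manual page above; first call with
   idxout == NULL to get the required length, then allocate and fill. The names
   n and global are assumptions standing for the caller's data.

     PetscInt       nout,*local;
     PetscErrorCode ierr;

     ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,n,global,&nout,NULL);CHKERRQ(ierr);
     ierr = PetscMalloc1(nout,&local);CHKERRQ(ierr);
     ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,n,global,&nout,local);CHKERRQ(ierr);
     // ... use local[0..nout-1] ...
     ierr = PetscFree(local);CHKERRQ(ierr);
*/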

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
    each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr      = PetscFree(owner);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
  ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number of subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);

  *nproc = nt;
  ierr   = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
  ierr   = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
  ierr   = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
  for (i=0; i<nt+1; i++) (*indices)[i] = NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }
  PetscFunctionReturn(0);
}
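
/*
   Editor's usage sketch (not part of the original source): Get/Restore pairing
   for the neighbor information. The arrays are allocated by
   ISLocalToGlobalMappingGetInfo() and must be returned with
   ISLocalToGlobalMappingRestoreInfo() (defined below); ltog is assumed to be
   an existing mapping.

     PetscInt       nproc,*procs,*numprocs,**idxs;
     PetscErrorCode ierr;

     ierr = ISLocalToGlobalMappingGetInfo(ltog,&nproc,&procs,&numprocs,&idxs);CHKERRQ(ierr);
     // ... inspect the shared local indices for each neighboring process ...
     ierr = ISLocalToGlobalMappingRestoreInfo(ltog,&nproc,&procs,&numprocs,&idxs);CHKERRQ(ierr);
*/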

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PetscFree(*procs);CHKERRQ(ierr);
  ierr = PetscFree(*numprocs);CHKERRQ(ierr);
  if (*indices) {
    ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
    for (i=1; i<*nproc; i++) {
      ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(*indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

   Not Collective

   Input Arguments:
.  ltog - local to global mapping

   Output Arguments:
.  array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()

   Level: advanced

   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  if (ltog->bs == 1) {
    *array = ltog->indices;
  } else {
    PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
    const PetscInt *ii;
    PetscErrorCode ierr;

    ierr   = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
    *array = jj;
    k      = 0;
    ii     = ltog->indices;
    for (i=0; i<n; i++)
      for (j=0; j<bs; j++)
        jj[k++] = bs*ii[i] + j;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
/*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

   Not Collective

   Input Arguments:
+  ltog - local to global mapping
-  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

  if (ltog->bs > 1) {
    PetscErrorCode ierr;
    ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
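
/*
   Editor's usage sketch (not part of the original source): for bs > 1 the array
   returned by ISLocalToGlobalMappingGetIndices() is freshly allocated, so the
   restore call below is required to avoid a memory leak; ltog is assumed to be
   an existing mapping.

     const PetscInt *idx;
     PetscInt       len;
     PetscErrorCode ierr;

     ierr = ISLocalToGlobalMappingGetSize(ltog,&len);CHKERRQ(ierr);   // length of idx
     ierr = ISLocalToGlobalMappingGetIndices(ltog,&idx);CHKERRQ(ierr);
     // ... read idx[0..len-1] ...
     ierr = ISLocalToGlobalMappingRestoreIndices(ltog,&idx);CHKERRQ(ierr);
*/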

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
/*@C
   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Arguments:
.  ltog - local to global mapping

   Output Arguments:
.  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  *array = ltog->indices;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

   Not Collective

   Input Arguments:
+  ltog - local to global mapping
-  array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetBlockIndices()
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
/*@C
   ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

   Not Collective

   Input Arguments:
+  comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
.  n - number of mappings to concatenate
-  ltogs - local to global mappings

   Output Arguments:
.  ltogcat - new mapping

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
{
  PetscInt       i,cnt,m,*idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
  if (n > 0) PetscValidPointer(ltogs,3);
  for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
  PetscValidPointer(ltogcat,4);
  for (cnt=0,i=0; i<n; i++) {
    ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
    cnt += m;
  }
  ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
  for (cnt=0,i=0; i<n; i++) {
    const PetscInt *subidx;
    ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
    ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
    cnt += m;
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
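
/*
   Editor's usage sketch (not part of the original source): concatenating two
   mappings; the result lists the point indices of ltogs[0] followed by those
   of ltogs[1], with block size 1. Creation of the input mappings is elided.

     ISLocalToGlobalMapping ltogs[2],cat;
     PetscErrorCode         ierr;

     // ... create ltogs[0] and ltogs[1] ...
     ierr = ISLocalToGlobalMappingConcatenate(PETSC_COMM_SELF,2,ltogs,&cat);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingDestroy(&cat);CHKERRQ(ierr);
*/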