1 #include <petsc-private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 2 3 #undef __FUNCT__ 4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone" 5 /*@ 6 DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first 7 8 Input Parameters: 9 + dm - The DM object 10 - useCone - Flag to use the cone first 11 12 Level: intermediate 13 14 Notes: 15 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 16 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 17 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 18 19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator() 20 @*/ 21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone) 22 { 23 DM_Plex *mesh = (DM_Plex *) dm->data; 24 25 PetscFunctionBegin; 26 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 27 mesh->useCone = useCone; 28 PetscFunctionReturn(0); 29 } 30 31 #undef __FUNCT__ 32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone" 33 /*@ 34 DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first 35 36 Input Parameter: 37 . dm - The DM object 38 39 Output Parameter: 40 . useCone - Flag to use the cone first 41 42 Level: intermediate 43 44 Notes: 45 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 46 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 47 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 48 49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator() 50 @*/ 51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone) 52 { 53 DM_Plex *mesh = (DM_Plex *) dm->data; 54 55 PetscFunctionBegin; 56 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 57 PetscValidIntPointer(useCone, 2); 58 *useCone = mesh->useCone; 59 PetscFunctionReturn(0); 60 } 61 62 #undef __FUNCT__ 63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure" 64 /*@ 65 DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure 66 67 Input Parameters: 68 + dm - The DM object 69 - useClosure - Flag to use the closure 70 71 Level: intermediate 72 73 Notes: 74 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 75 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 76 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 77 78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator() 79 @*/ 80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure) 81 { 82 DM_Plex *mesh = (DM_Plex *) dm->data; 83 84 PetscFunctionBegin; 85 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 86 mesh->useClosure = useClosure; 87 PetscFunctionReturn(0); 88 } 89 90 #undef __FUNCT__ 91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure" 92 /*@ 93 DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure 94 95 Input Parameter: 96 . dm - The DM object 97 98 Output Parameter: 99 . useClosure - Flag to use the closure 100 101 Level: intermediate 102 103 Notes: 104 $ FEM: Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE 105 $ FVM: Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE, useClosure = PETSC_FALSE 106 $ FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE, useClosure = PETSC_TRUE 107 108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator() 109 @*/ 110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure) 111 { 112 DM_Plex *mesh = (DM_Plex *) dm->data; 113 114 PetscFunctionBegin; 115 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 116 PetscValidIntPointer(useClosure, 2); 117 *useClosure = mesh->useClosure; 118 PetscFunctionReturn(0); 119 } 120 121 #undef __FUNCT__ 122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors" 123 /*@ 124 DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints. 125 126 Input Parameters: 127 + dm - The DM object 128 - useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place. 129 130 Level: intermediate 131 132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors() 133 @*/ 134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors) 135 { 136 DM_Plex *mesh = (DM_Plex *) dm->data; 137 138 PetscFunctionBegin; 139 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 140 mesh->useAnchors = useAnchors; 141 PetscFunctionReturn(0); 142 } 143 144 #undef __FUNCT__ 145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors" 146 /*@ 147 DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints. 148 149 Input Parameter: 150 . dm - The DM object 151 152 Output Parameter: 153 . useAnchors - Flag to use the closure. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place. 154 155 Level: intermediate 156 157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors() 158 @*/ 159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors) 160 { 161 DM_Plex *mesh = (DM_Plex *) dm->data; 162 163 PetscFunctionBegin; 164 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 165 PetscValidIntPointer(useAnchors, 2); 166 *useAnchors = mesh->useAnchors; 167 PetscFunctionReturn(0); 168 } 169 170 #undef __FUNCT__ 171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal" 172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 173 { 174 const PetscInt *cone = NULL; 175 PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c; 176 PetscErrorCode ierr; 177 178 PetscFunctionBeginHot; 179 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr); 180 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr); 181 for (c = 0; c <= coneSize; ++c) { 182 const PetscInt point = !c ? p : cone[c-1]; 183 const PetscInt *support = NULL; 184 PetscInt supportSize, s, q; 185 186 ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr); 187 ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr); 188 for (s = 0; s < supportSize; ++s) { 189 for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) { 190 if (support[s] == adj[q]) break; 191 } 192 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 193 } 194 } 195 *adjSize = numAdj; 196 PetscFunctionReturn(0); 197 } 198 199 #undef __FUNCT__ 200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal" 201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[]) 202 { 203 const PetscInt *support = NULL; 204 PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s; 205 PetscErrorCode ierr; 206 207 PetscFunctionBeginHot; 208 ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr); 209 ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr); 210 for (s = 0; s <= supportSize; ++s) { 211 const PetscInt point = !s ? p : support[s-1]; 212 const PetscInt *cone = NULL; 213 PetscInt coneSize, c, q; 214 215 ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr); 216 ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr); 217 for (c = 0; c < coneSize; ++c) { 218 for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) { 219 if (cone[c] == adj[q]) break; 220 } 221 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 222 } 223 } 224 *adjSize = numAdj; 225 PetscFunctionReturn(0); 226 } 227 228 #undef __FUNCT__ 229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal" 230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[]) 231 { 232 PetscInt *star = NULL; 233 PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s; 234 PetscErrorCode ierr; 235 236 PetscFunctionBeginHot; 237 ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 238 for (s = 0; s < starSize*2; s += 2) { 239 const PetscInt *closure = NULL; 240 PetscInt closureSize, c, q; 241 242 ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 243 for (c = 0; c < closureSize*2; c += 2) { 244 for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) { 245 if (closure[c] == adj[q]) break; 246 } 247 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 248 } 249 ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr); 250 } 251 ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr); 252 *adjSize = numAdj; 253 PetscFunctionReturn(0); 254 } 255 256 #undef __FUNCT__ 257 #define __FUNCT__ "DMPlexGetAdjacency_Internal" 258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[]) 259 { 260 static PetscInt asiz = 0; 261 PetscInt maxAnchors = 1; 262 PetscInt aStart = -1, aEnd = -1; 263 PetscInt maxAdjSize; 264 PetscSection aSec = NULL; 265 IS aIS = NULL; 266 const PetscInt *anchors; 267 PetscErrorCode ierr; 268 269 PetscFunctionBeginHot; 270 if (useAnchors) { 271 ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr); 272 if (aSec) { 273 ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr); 274 maxAnchors = PetscMax(1,maxAnchors); 275 ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr); 276 ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr); 277 } 278 } 279 if (!*adj) { 280 PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd; 281 282 ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr); 283 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 284 ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr); 285 coneSeries = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1; 286 supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1; 287 asiz = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries); 288 asiz *= maxAnchors; 289 asiz = PetscMin(asiz,pEnd-pStart); 290 ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr); 291 } 292 if (*adjSize < 0) *adjSize = asiz; 293 maxAdjSize = *adjSize; 294 if (useTransitiveClosure) { 295 ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr); 296 } else if (useCone) { 297 ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr); 298 } else { 299 ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr); 300 } 301 if (useAnchors && aSec) { 302 PetscInt origSize = *adjSize; 303 PetscInt numAdj = origSize; 304 PetscInt i = 0, j; 305 PetscInt *orig = *adj; 306 307 while (i < origSize) { 308 PetscInt p = orig[i]; 309 PetscInt aDof = 0; 310 311 if (p >= aStart && p < aEnd) { 312 ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr); 313 } 314 if (aDof) { 315 PetscInt aOff; 316 PetscInt s, q; 317 318 for (j = i + 1; j < numAdj; j++) { 319 orig[j - 1] = orig[j]; 320 } 321 origSize--; 322 numAdj--; 323 ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr); 324 for (s = 0; s < aDof; ++s) { 325 for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) { 326 if (anchors[aOff+s] == orig[q]) break; 327 } 328 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize); 329 } 330 } 331 else { 332 i++; 333 } 334 } 335 *adjSize = numAdj; 336 ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr); 337 } 338 PetscFunctionReturn(0); 339 } 340 341 #undef __FUNCT__ 342 #define __FUNCT__ "DMPlexGetAdjacency" 343 /*@ 344 DMPlexGetAdjacency - Return all points adjacent to the given point 345 346 Input Parameters: 347 + dm - The DM object 348 . p - The point 349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE 350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize 351 352 Output Parameters: 353 + adjSize - The number of adjacent points 354 - adj - The adjacent points 355 356 Level: advanced 357 358 Notes: The user must PetscFree the adj array if it was not passed in. 359 360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator() 361 @*/ 362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[]) 363 { 364 DM_Plex *mesh = (DM_Plex *) dm->data; 365 PetscErrorCode ierr; 366 367 PetscFunctionBeginHot; 368 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 369 PetscValidPointer(adjSize,3); 370 PetscValidPointer(adj,4); 371 ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr); 372 PetscFunctionReturn(0); 373 } 374 #undef __FUNCT__ 375 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF" 376 /*@ 377 DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity 378 379 Collective on DM 380 381 Input Parameters: 382 + dm - The DM 383 - sfPoint - The PetscSF which encodes point connectivity 384 385 Output Parameters: 386 + processRanks - A list of process neighbors, or NULL 387 - sfProcess - An SF encoding the two-sided process connectivity, or NULL 388 389 Level: developer 390 391 .seealso: PetscSFCreate(), DMPlexCreateProcessSF() 392 @*/ 393 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess) 394 { 395 const PetscSFNode *remotePoints; 396 PetscInt *localPointsNew; 397 PetscSFNode *remotePointsNew; 398 const PetscInt *nranks; 399 PetscInt *ranksNew; 400 PetscBT neighbors; 401 PetscInt pStart, pEnd, p, numLeaves, l, numNeighbors, n; 402 PetscMPIInt numProcs, proc, rank; 403 PetscErrorCode ierr; 404 405 PetscFunctionBegin; 406 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 407 PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2); 408 if (processRanks) {PetscValidPointer(processRanks, 3);} 409 if (sfProcess) {PetscValidPointer(sfProcess, 4);} 410 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr); 411 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 412 ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr); 413 ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr); 414 ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr); 415 /* Compute root-to-leaf process connectivity */ 416 ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr); 417 ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr); 418 for (p = pStart; p < pEnd; ++p) { 419 PetscInt ndof, noff, n; 420 421 ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr); 422 ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr); 423 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);} 424 } 425 ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr); 426 /* Compute leaf-to-neighbor process connectivity */ 427 ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr); 428 ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr); 429 for (p = pStart; p < pEnd; ++p) { 430 PetscInt ndof, noff, n; 431 432 ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr); 433 ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr); 434 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);} 435 } 436 ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr); 437 /* Compute leaf-to-root process connectivity */ 438 for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);} 439 /* Calculate edges */ 440 PetscBTClear(neighbors, rank); 441 for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;} 442 ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr); 443 ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr); 444 ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr); 445 for(proc = 0, n = 0; proc < numProcs; ++proc) { 446 if (PetscBTLookup(neighbors, proc)) { 447 ranksNew[n] = proc; 448 localPointsNew[n] = proc; 449 remotePointsNew[n].index = rank; 450 remotePointsNew[n].rank = proc; 451 ++n; 452 } 453 } 454 ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr); 455 if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);} 456 else {ierr = PetscFree(ranksNew);CHKERRQ(ierr);} 457 if (sfProcess) { 458 ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr); 459 ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr); 460 ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr); 461 ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr); 462 } 463 PetscFunctionReturn(0); 464 } 465 466 #undef __FUNCT__ 467 #define __FUNCT__ "DMPlexDistributeOwnership" 468 /*@ 469 DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF. 470 471 Collective on DM 472 473 Input Parameter: 474 . dm - The DM 475 476 Output Parameters: 477 + rootSection - The number of leaves for a given root point 478 . rootrank - The rank of each edge into the root point 479 . leafSection - The number of processes sharing a given leaf point 480 - leafrank - The rank of each process sharing a leaf point 481 482 Level: developer 483 484 .seealso: DMPlexCreateOverlap() 485 @*/ 486 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank) 487 { 488 MPI_Comm comm; 489 PetscSF sfPoint; 490 const PetscInt *rootdegree; 491 PetscInt *myrank, *remoterank; 492 PetscInt pStart, pEnd, p, nedges; 493 PetscMPIInt rank; 494 PetscErrorCode ierr; 495 496 PetscFunctionBegin; 497 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 498 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 499 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 500 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 501 /* Compute number of leaves for each root */ 502 ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr); 503 ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr); 504 ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr); 505 ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr); 506 for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);} 507 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr); 508 /* Gather rank of each leaf to root */ 509 ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr); 510 ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr); 511 ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr); 512 for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank; 513 ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr); 514 ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr); 515 ierr = PetscFree(myrank);CHKERRQ(ierr); 516 ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr); 517 /* Distribute remote ranks to leaves */ 518 ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr); 519 ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr); 520 PetscFunctionReturn(0); 521 } 522 523 #undef __FUNCT__ 524 #define __FUNCT__ "DMPlexCreateOverlap" 525 /*@C 526 DMPlexCreateOverlap - Compute owner information for shared points. This basically gets two-sided for an SF. 527 528 Collective on DM 529 530 Input Parameters: 531 + dm - The DM 532 . levels - Number of overlap levels 533 . rootSection - The number of leaves for a given root point 534 . rootrank - The rank of each edge into the root point 535 . leafSection - The number of processes sharing a given leaf point 536 - leafrank - The rank of each process sharing a leaf point 537 538 Output Parameters: 539 + overlapSF - The star forest mapping remote overlap points into a contiguous leaf space 540 541 Level: developer 542 543 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute() 544 @*/ 545 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF) 546 { 547 MPI_Comm comm; 548 DMLabel ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */ 549 PetscSF sfPoint, sfProc; 550 DMLabel ovLeafLabel; 551 const PetscSFNode *remote; 552 const PetscInt *local; 553 const PetscInt *nrank, *rrank; 554 PetscInt *adj = NULL; 555 PetscInt pStart, pEnd, p, sStart, sEnd, nleaves, l; 556 PetscMPIInt rank, numProcs; 557 PetscBool useCone, useClosure, flg; 558 PetscErrorCode ierr; 559 560 PetscFunctionBegin; 561 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr); 562 ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr); 563 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 564 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr); 565 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 566 ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr); 567 ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr); 568 ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr); 569 /* Handle leaves: shared with the root point */ 570 for (l = 0; l < nleaves; ++l) { 571 PetscInt adjSize = PETSC_DETERMINE, a; 572 573 ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr); 574 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);} 575 } 576 ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr); 577 ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr); 578 /* Handle roots */ 579 for (p = pStart; p < pEnd; ++p) { 580 PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a; 581 582 if ((p >= sStart) && (p < sEnd)) { 583 /* Some leaves share a root with other leaves on different processes */ 584 ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr); 585 if (neighbors) { 586 ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr); 587 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 588 for (n = 0; n < neighbors; ++n) { 589 const PetscInt remoteRank = nrank[noff+n]; 590 591 if (remoteRank == rank) continue; 592 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);} 593 } 594 } 595 } 596 /* Roots are shared with leaves */ 597 ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr); 598 if (!neighbors) continue; 599 ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr); 600 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr); 601 for (n = 0; n < neighbors; ++n) { 602 const PetscInt remoteRank = rrank[noff+n]; 603 604 if (remoteRank == rank) continue; 605 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);} 606 } 607 } 608 ierr = PetscFree(adj);CHKERRQ(ierr); 609 ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr); 610 ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr); 611 /* Add additional overlap levels */ 612 for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);} 613 /* We require the closure in the overlap */ 614 ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr); 615 ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr); 616 if (useCone || !useClosure) { 617 ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr); 618 } 619 ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr); 620 if (flg) { 621 ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr); 622 ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 623 } 624 /* Make process SF and invert sender to receiver label */ 625 ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr); 626 ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr); 627 ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, ovLeafLabel);CHKERRQ(ierr); 628 /* Don't import points from yourself */ 629 ierr = DMLabelClearStratum(ovLeafLabel, rank);CHKERRQ(ierr); 630 /* Don't import points already in the pointSF */ 631 for (l = 0; l < nleaves; ++l) { 632 ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr); 633 } 634 /* Convert receiver label to SF */ 635 ierr = DMPlexPartitionLabelCreateSF(dm, ovLeafLabel, overlapSF);CHKERRQ(ierr); 636 ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr); 637 ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr); 638 if (flg) { 639 ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr); 640 ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 641 } 642 /* Clean up */ 643 ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr); 644 ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr); 645 ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr); 646 PetscFunctionReturn(0); 647 } 648 649 #undef __FUNCT__ 650 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF" 651 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF) 652 { 653 MPI_Comm comm; 654 PetscMPIInt rank, numProcs; 655 PetscInt d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints; 656 PetscInt *pointDepths, *remoteDepths, *ilocal; 657 PetscInt *depthRecv, *depthShift, *depthIdx; 658 PetscSFNode *iremote; 659 PetscSF pointSF; 660 const PetscInt *sharedLocal; 661 const PetscSFNode *overlapRemote, *sharedRemote; 662 PetscErrorCode ierr; 663 664 PetscFunctionBegin; 665 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 666 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 667 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 668 ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr); 669 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 670 671 /* Before building the migration SF we need to know the new stratum offsets */ 672 ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr); 673 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr); 674 for (d=0; d<dim+1; d++) { 675 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 676 for (p=pStart; p<pEnd; p++) pointDepths[p] = d; 677 } 678 for (p=0; p<nleaves; p++) remoteDepths[p] = -1; 679 ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 680 ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr); 681 682 /* Count recevied points in each stratum and compute the internal strata shift */ 683 ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr); 684 for (d=0; d<dim+1; d++) depthRecv[d]=0; 685 for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++; 686 depthShift[dim] = 0; 687 for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim]; 688 for (d=1; d<dim; d++) depthShift[d] += depthRecv[0]; 689 for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1]; 690 for (d=0; d<dim+1; d++) { 691 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 692 depthIdx[d] = pStart + depthShift[d]; 693 } 694 695 /* Form the overlap SF build an SF that describes the full overlap migration SF */ 696 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 697 newLeaves = pEnd - pStart + nleaves; 698 ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr); 699 ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr); 700 /* First map local points to themselves */ 701 for (d=0; d<dim+1; d++) { 702 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 703 for (p=pStart; p<pEnd; p++) { 704 point = p + depthShift[d]; 705 ilocal[point] = point; 706 iremote[point].index = p; 707 iremote[point].rank = rank; 708 depthIdx[d]++; 709 } 710 } 711 712 /* Add in the remote roots for currently shared points */ 713 ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr); 714 ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr); 715 for (d=0; d<dim+1; d++) { 716 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr); 717 for (p=0; p<numSharedPoints; p++) { 718 if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) { 719 point = sharedLocal[p] + depthShift[d]; 720 iremote[point].index = sharedRemote[p].index; 721 iremote[point].rank = sharedRemote[p].rank; 722 } 723 } 724 } 725 726 /* Now add the incoming overlap points */ 727 for (p=0; p<nleaves; p++) { 728 point = depthIdx[remoteDepths[p]]; 729 ilocal[point] = point; 730 iremote[point].index = overlapRemote[p].index; 731 iremote[point].rank = overlapRemote[p].rank; 732 depthIdx[remoteDepths[p]]++; 733 } 734 ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr); 735 736 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr); 737 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr); 738 ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr); 739 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 740 ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr); 741 742 ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr); 743 PetscFunctionReturn(0); 744 } 745 746 #undef __FUNCT__ 747 #define __FUNCT__ "DMPlexDistributeField" 748 /*@ 749 DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 750 751 Collective on DM 752 753 Input Parameters: 754 + dm - The DMPlex object 755 . pointSF - The PetscSF describing the communication pattern 756 . originalSection - The PetscSection for existing data layout 757 - originalVec - The existing data 758 759 Output Parameters: 760 + newSection - The PetscSF describing the new data layout 761 - newVec - The new data 762 763 Level: developer 764 765 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData() 766 @*/ 767 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec) 768 { 769 PetscSF fieldSF; 770 PetscInt *remoteOffsets, fieldSize; 771 PetscScalar *originalValues, *newValues; 772 PetscErrorCode ierr; 773 774 PetscFunctionBegin; 775 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 776 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 777 778 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 779 ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr); 780 ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr); 781 782 ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr); 783 ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr); 784 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 785 ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr); 786 ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr); 787 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 788 ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr); 789 ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr); 790 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 791 PetscFunctionReturn(0); 792 } 793 794 #undef __FUNCT__ 795 #define __FUNCT__ "DMPlexDistributeFieldIS" 796 /*@ 797 DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 798 799 Collective on DM 800 801 Input Parameters: 802 + dm - The DMPlex object 803 . pointSF - The PetscSF describing the communication pattern 804 . originalSection - The PetscSection for existing data layout 805 - originalIS - The existing data 806 807 Output Parameters: 808 + newSection - The PetscSF describing the new data layout 809 - newIS - The new data 810 811 Level: developer 812 813 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData() 814 @*/ 815 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS) 816 { 817 PetscSF fieldSF; 818 PetscInt *newValues, *remoteOffsets, fieldSize; 819 const PetscInt *originalValues; 820 PetscErrorCode ierr; 821 822 PetscFunctionBegin; 823 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 824 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 825 826 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 827 ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr); 828 829 ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr); 830 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 831 ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr); 832 ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr); 833 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 834 ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr); 835 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr); 836 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr); 837 PetscFunctionReturn(0); 838 } 839 840 #undef __FUNCT__ 841 #define __FUNCT__ "DMPlexDistributeData" 842 /*@ 843 DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution 844 845 Collective on DM 846 847 Input Parameters: 848 + dm - The DMPlex object 849 . pointSF - The PetscSF describing the communication pattern 850 . originalSection - The PetscSection for existing data layout 851 . datatype - The type of data 852 - originalData - The existing data 853 854 Output Parameters: 855 + newSection - The PetscSection describing the new data layout 856 - newData - The new data 857 858 Level: developer 859 860 .seealso: DMPlexDistribute(), DMPlexDistributeField() 861 @*/ 862 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData) 863 { 864 PetscSF fieldSF; 865 PetscInt *remoteOffsets, fieldSize; 866 PetscMPIInt dataSize; 867 PetscErrorCode ierr; 868 869 PetscFunctionBegin; 870 ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 871 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr); 872 873 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr); 874 ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr); 875 ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr); 876 877 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr); 878 ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 879 ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr); 880 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr); 881 ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr); 882 PetscFunctionReturn(0); 883 } 884 885 #undef __FUNCT__ 886 #define __FUNCT__ "DMPlexDistributeCones" 887 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel) 888 { 889 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 890 MPI_Comm comm; 891 PetscSF coneSF; 892 PetscSection originalConeSection, newConeSection; 893 PetscInt *remoteOffsets, *cones, *newCones, newConesSize; 894 PetscBool flg; 895 PetscErrorCode ierr; 896 897 PetscFunctionBegin; 898 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 899 PetscValidPointer(dmParallel,4); 900 ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 901 902 /* Distribute cone section */ 903 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 904 ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr); 905 ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr); 906 ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr); 907 ierr = DMSetUp(dmParallel);CHKERRQ(ierr); 908 { 909 PetscInt pStart, pEnd, p; 910 911 ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr); 912 for (p = pStart; p < pEnd; ++p) { 913 PetscInt coneSize; 914 ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr); 915 pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize); 916 } 917 } 918 /* Communicate and renumber cones */ 919 ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr); 920 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr); 921 ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr); 922 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 923 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 924 ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr); 925 ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr); 926 #if PETSC_USE_DEBUG 927 { 928 PetscInt p; 929 PetscBool valid = PETSC_TRUE; 930 for (p = 0; p < newConesSize; ++p) { 931 if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);} 932 } 933 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map"); 934 } 935 #endif 936 ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr); 937 if (flg) { 938 ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr); 939 ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 940 ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr); 941 ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 942 ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr); 943 } 944 ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr); 945 ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr); 946 ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 947 ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr); 948 ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr); 949 ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr); 950 /* Create supports and stratify sieve */ 951 { 952 PetscInt pStart, pEnd; 953 954 ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr); 955 ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr); 956 } 957 ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr); 958 ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr); 959 PetscFunctionReturn(0); 960 } 961 962 #undef __FUNCT__ 963 #define __FUNCT__ "DMPlexDistributeCoordinates" 964 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel) 965 { 966 MPI_Comm comm; 967 PetscSection originalCoordSection, newCoordSection; 968 Vec originalCoordinates, newCoordinates; 969 PetscInt bs; 970 const char *name; 971 const PetscReal *maxCell, *L; 972 PetscErrorCode ierr; 973 974 PetscFunctionBegin; 975 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 976 PetscValidPointer(dmParallel, 3); 977 978 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 979 ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr); 980 ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr); 981 ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr); 982 if (originalCoordinates) { 983 ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr); 984 ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr); 985 ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr); 986 987 ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr); 988 ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr); 989 ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr); 990 ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr); 991 ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr); 992 } 993 ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr); 994 if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);} 995 PetscFunctionReturn(0); 996 } 997 998 #undef __FUNCT__ 999 #define __FUNCT__ "DMPlexDistributeLabels" 1000 /* Here we are assuming that process 0 always has everything */ 1001 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel) 1002 { 1003 MPI_Comm comm; 1004 PetscMPIInt rank; 1005 PetscInt numLabels, numLocalLabels, l; 1006 PetscBool hasLabels = PETSC_FALSE; 1007 PetscErrorCode ierr; 1008 1009 PetscFunctionBegin; 1010 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1011 PetscValidHeaderSpecific(dm, DM_CLASSID, 3); 1012 ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1013 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1014 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1015 1016 /* Everyone must have either the same number of labels, or none */ 1017 ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr); 1018 numLabels = numLocalLabels; 1019 ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr); 1020 if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE; 1021 for (l = numLabels-1; l >= 0; --l) { 1022 DMLabel label = NULL, labelNew = NULL; 1023 PetscBool isdepth; 1024 1025 if (hasLabels) { 1026 ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr); 1027 /* Skip "depth" because it is recreated */ 1028 ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr); 1029 } 1030 ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr); 1031 if (isdepth) continue; 1032 ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr); 1033 ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr); 1034 } 1035 ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr); 1036 PetscFunctionReturn(0); 1037 } 1038 1039 #undef __FUNCT__ 1040 #define __FUNCT__ "DMPlexDistributeSetupHybrid" 1041 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel) 1042 { 1043 DM_Plex *mesh = (DM_Plex*) dm->data; 1044 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1045 MPI_Comm comm; 1046 const PetscInt *gpoints; 1047 PetscInt dim, depth, n, d; 1048 PetscErrorCode ierr; 1049 1050 PetscFunctionBegin; 1051 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1052 PetscValidPointer(dmParallel, 4); 1053 1054 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1055 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1056 1057 /* Setup hybrid structure */ 1058 for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];} 1059 ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr); 1060 ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr); 1061 ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr); 1062 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr); 1063 for (d = 0; d <= dim; ++d) { 1064 PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p; 1065 1066 if (pmax < 0) continue; 1067 ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr); 1068 ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr); 1069 ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr); 1070 for (p = 0; p < n; ++p) { 1071 const PetscInt point = gpoints[p]; 1072 1073 if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax; 1074 } 1075 if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax; 1076 else pmesh->hybridPointMax[d] = -1; 1077 } 1078 ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr); 1079 PetscFunctionReturn(0); 1080 } 1081 1082 #undef __FUNCT__ 1083 #define __FUNCT__ "DMPlexDistributeSetupTree" 1084 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel) 1085 { 1086 DM_Plex *mesh = (DM_Plex*) dm->data; 1087 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1088 MPI_Comm comm; 1089 DM refTree; 1090 PetscSection origParentSection, newParentSection; 1091 PetscInt *origParents, *origChildIDs; 1092 PetscBool flg; 1093 PetscErrorCode ierr; 1094 1095 PetscFunctionBegin; 1096 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1097 PetscValidHeaderSpecific(dm, DM_CLASSID, 4); 1098 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1099 1100 /* Set up tree */ 1101 ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr); 1102 ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr); 1103 ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr); 1104 if (origParentSection) { 1105 PetscInt pStart, pEnd; 1106 PetscInt *newParents, *newChildIDs; 1107 PetscInt *remoteOffsetsParents, newParentSize; 1108 PetscSF parentSF; 1109 1110 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1111 ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr); 1112 ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr); 1113 ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr); 1114 ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr); 1115 ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr); 1116 ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr); 1117 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr); 1118 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr); 1119 ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1120 ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr); 1121 ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr); 1122 ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr); 1123 if (flg) { 1124 ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr); 1125 ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1126 ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr); 1127 ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1128 ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr); 1129 } 1130 ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr); 1131 ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr); 1132 ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr); 1133 ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr); 1134 } 1135 pmesh->useAnchors = mesh->useAnchors; 1136 PetscFunctionReturn(0); 1137 } 1138 1139 #undef __FUNCT__ 1140 #define __FUNCT__ "DMPlexDistributeSF" 1141 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel) 1142 { 1143 DM_Plex *mesh = (DM_Plex*) dm->data; 1144 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data; 1145 PetscMPIInt rank, numProcs; 1146 MPI_Comm comm; 1147 PetscErrorCode ierr; 1148 1149 PetscFunctionBegin; 1150 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1151 PetscValidPointer(dmParallel,7); 1152 1153 /* Create point SF for parallel mesh */ 1154 ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr); 1155 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr); 1156 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1157 ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr); 1158 { 1159 const PetscInt *leaves; 1160 PetscSFNode *remotePoints, *rowners, *lowners; 1161 PetscInt numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints; 1162 PetscInt pStart, pEnd; 1163 1164 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr); 1165 ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr); 1166 ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr); 1167 for (p=0; p<numRoots; p++) { 1168 rowners[p].rank = -1; 1169 rowners[p].index = -1; 1170 } 1171 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1172 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1173 for (p = 0; p < numLeaves; ++p) { 1174 if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */ 1175 lowners[p].rank = rank; 1176 lowners[p].index = leaves ? leaves[p] : p; 1177 } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */ 1178 lowners[p].rank = -2; 1179 lowners[p].index = -2; 1180 } 1181 } 1182 for (p=0; p<numRoots; p++) { /* Root must not participate in the rediction, flag so that MAXLOC does not use */ 1183 rowners[p].rank = -3; 1184 rowners[p].index = -3; 1185 } 1186 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr); 1187 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr); 1188 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1189 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); 1190 for (p = 0; p < numLeaves; ++p) { 1191 if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed"); 1192 if (lowners[p].rank != rank) ++numGhostPoints; 1193 } 1194 ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr); 1195 ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr); 1196 for (p = 0, gp = 0; p < numLeaves; ++p) { 1197 if (lowners[p].rank != rank) { 1198 ghostPoints[gp] = leaves ? leaves[p] : p; 1199 remotePoints[gp].rank = lowners[p].rank; 1200 remotePoints[gp].index = lowners[p].index; 1201 ++gp; 1202 } 1203 } 1204 ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr); 1205 ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr); 1206 ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr); 1207 } 1208 pmesh->useCone = mesh->useCone; 1209 pmesh->useClosure = mesh->useClosure; 1210 ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr); 1211 PetscFunctionReturn(0); 1212 } 1213 1214 #undef __FUNCT__ 1215 #define __FUNCT__ "DMPlexMigrate" 1216 /*@C 1217 DMPlexMigrate - Migrates internal DM data over the supplied star forest 1218 1219 Input Parameter: 1220 + dm - The source DMPlex object 1221 . sf - The start forest communication context describing the migration pattern 1222 . origNumbering - Numbering IS to revert to global point numbering before migration 1223 1224 Output Parameter: 1225 + pointNumbering - Numbering IS to create local point numbering after migration 1226 - targetDM - The target DMPlex object 1227 1228 Note: If the original numbering is not supplied, a zero-to-all-ranks migration pattern 1229 is assumed and the point numbering is derived from the SF. 1230 1231 Level: developer 1232 1233 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap() 1234 @*/ 1235 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, ISLocalToGlobalMapping origNumbering, ISLocalToGlobalMapping *pointNumbering, DM targetDM) 1236 { 1237 PetscInt dim; 1238 ISLocalToGlobalMapping ltogMigration; 1239 PetscErrorCode ierr; 1240 1241 PetscFunctionBegin; 1242 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1243 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr); 1244 ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr); 1245 1246 if (origNumbering) { 1247 PetscInt conesSize, nleaves; 1248 PetscInt *cones, *numbering_new; 1249 PetscSection coneSection; 1250 const PetscSFNode *iremote; 1251 const PetscInt *ilocal, *numbering_orig; 1252 /* Convert cones to global numbering before migrating them */ 1253 ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr); 1254 ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr); 1255 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr); 1256 ierr = ISLocalToGlobalMappingApplyBlock(origNumbering, conesSize, cones, cones);CHKERRQ(ierr); 1257 /* Derive the new local-to-global mapping from the old one */ 1258 ierr = PetscSFGetGraph(sf, NULL, &nleaves, &ilocal, &iremote);CHKERRQ(ierr); 1259 ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr); 1260 ierr = ISLocalToGlobalMappingGetBlockIndices(origNumbering, &numbering_orig);CHKERRQ(ierr); 1261 ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr); 1262 ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr); 1263 ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject) dm), 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, <ogMigration);CHKERRQ(ierr); 1264 } else { 1265 /* Assume zero-to-all-ranks pattern and derive LToG from SF */ 1266 ierr = ISLocalToGlobalMappingCreateSF(sf, 0, <ogMigration);CHKERRQ(ierr); 1267 } 1268 /* Migrate DM data to target DM */ 1269 ierr = DMPlexDistributeCones(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr); 1270 ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr); 1271 ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr); 1272 ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr); 1273 ierr = DMPlexDistributeSetupTree(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr); 1274 ierr = DMPlexCopyBoundary(dm, targetDM);CHKERRQ(ierr); 1275 1276 if (pointNumbering) {*pointNumbering = ltogMigration;} 1277 else {ierr = ISLocalToGlobalMappingDestroy(<ogMigration);} 1278 PetscFunctionReturn(0); 1279 } 1280 1281 #undef __FUNCT__ 1282 #define __FUNCT__ "DMPlexDistribute" 1283 /*@C 1284 DMPlexDistribute - Distributes the mesh and any associated sections. 1285 1286 Not Collective 1287 1288 Input Parameter: 1289 + dm - The original DMPlex object 1290 - overlap - The overlap of partitions, 0 is the default 1291 1292 Output Parameter: 1293 + sf - The PetscSF used for point distribution 1294 - parallelMesh - The distributed DMPlex object, or NULL 1295 1296 Note: If the mesh was not distributed, the return value is NULL. 1297 1298 The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and 1299 DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function 1300 representation on the mesh. 1301 1302 Level: intermediate 1303 1304 .keywords: mesh, elements 1305 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 1306 @*/ 1307 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel) 1308 { 1309 MPI_Comm comm; 1310 PetscPartitioner partitioner; 1311 IS cellPart; 1312 PetscSection cellPartSection; 1313 DMLabel lblPartition, lblMigration; 1314 PetscSF sfProcess, sfMigration; 1315 ISLocalToGlobalMapping renumbering; 1316 PetscBool flg; 1317 PetscMPIInt rank, numProcs, p; 1318 PetscErrorCode ierr; 1319 1320 PetscFunctionBegin; 1321 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1322 if (sf) PetscValidPointer(sf,4); 1323 PetscValidPointer(dmParallel,5); 1324 1325 ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1326 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1327 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1328 ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr); 1329 1330 *dmParallel = NULL; 1331 if (numProcs == 1) PetscFunctionReturn(0); 1332 1333 /* Create cell partition */ 1334 ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1335 ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr); 1336 ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr); 1337 ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr); 1338 { 1339 /* Convert partition to DMLabel */ 1340 PetscInt proc, pStart, pEnd, npoints, poffset; 1341 const PetscInt *points; 1342 ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr); 1343 ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr); 1344 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr); 1345 for (proc = pStart; proc < pEnd; proc++) { 1346 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr); 1347 ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr); 1348 for (p = poffset; p < poffset+npoints; p++) { 1349 ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr); 1350 } 1351 } 1352 ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr); 1353 } 1354 ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr); 1355 { 1356 /* Build a global process SF */ 1357 PetscSFNode *remoteProc; 1358 ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr); 1359 for (p = 0; p < numProcs; ++p) { 1360 remoteProc[p].rank = p; 1361 remoteProc[p].index = rank; 1362 } 1363 ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr); 1364 ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr); 1365 ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr); 1366 } 1367 ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr); 1368 ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr); 1369 ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration); 1370 ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1371 ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr); 1372 if (flg) { 1373 ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr); 1374 ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1375 ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 1376 } 1377 1378 /* Create non-overlapping parallel DM and migrate internal data */ 1379 ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr); 1380 ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr); 1381 ierr = DMPlexMigrate(dm, sfMigration, NULL, &renumbering, *dmParallel);CHKERRQ(ierr); 1382 1383 /* Build the point SF without overlap */ 1384 ierr = DMPlexDistributeSF(dm, sfMigration, *dmParallel);CHKERRQ(ierr); 1385 if (flg) { 1386 ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr); 1387 ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr); 1388 ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr); 1389 ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr); 1390 } 1391 1392 if (overlap > 0) { 1393 DM dmOverlap; 1394 PetscInt nroots, nleaves; 1395 PetscSFNode *newRemote; 1396 const PetscSFNode *oldRemote; 1397 PetscSF sfOverlap, sfOverlapPoint; 1398 ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr); 1399 /* Add the partition overlap to the distributed DM */ 1400 ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &sfOverlap, &dmOverlap);CHKERRQ(ierr); 1401 ierr = DMDestroy(dmParallel);CHKERRQ(ierr); 1402 *dmParallel = dmOverlap; 1403 if (flg) { 1404 ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr); 1405 ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr); 1406 } 1407 1408 /* Re-map the migration SF to establish the full migration pattern */ 1409 ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr); 1410 ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr); 1411 ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr); 1412 ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1413 ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr); 1414 ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr); 1415 ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1416 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr); 1417 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr); 1418 sfMigration = sfOverlapPoint; 1419 ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr); 1420 } 1421 /* Cleanup Partition */ 1422 ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr); 1423 ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr); 1424 ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr); 1425 ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr); 1426 ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr); 1427 ierr = ISDestroy(&cellPart);CHKERRQ(ierr); 1428 if (sf) {*sf = sfMigration;} 1429 else {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);} 1430 ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr); 1431 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); 1432 PetscFunctionReturn(0); 1433 } 1434 1435 #undef __FUNCT__ 1436 #define __FUNCT__ "DMPlexDistributeOverlap" 1437 /*@C 1438 DMPlexDistribute - Add partition overlap to a distributed non-overlapping DM. 1439 1440 Not Collective 1441 1442 Input Parameter: 1443 + dm - The non-overlapping distrbuted DMPlex object 1444 - overlap - The overlap of partitions, 0 is the default 1445 1446 Output Parameter: 1447 + sf - The PetscSF used for point distribution 1448 - dmOverlap - The overlapping distributed DMPlex object, or NULL 1449 1450 Note: If the mesh was not distributed, the return value is NULL. 1451 1452 The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and 1453 DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function 1454 representation on the mesh. 1455 1456 Level: intermediate 1457 1458 .keywords: mesh, elements 1459 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure() 1460 @*/ 1461 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap) 1462 { 1463 MPI_Comm comm; 1464 PetscMPIInt rank; 1465 PetscSection rootSection, leafSection; 1466 IS rootrank, leafrank; 1467 PetscSF overlapSF, migrationSF, pointSF, newPointSF; 1468 PetscSFNode *ghostRemote; 1469 const PetscSFNode *overlapRemote; 1470 const PetscInt *overlapLocal; 1471 PetscInt p, pStart, pEnd, idx; 1472 PetscInt numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves; 1473 PetscInt *ghostLocal, *pointIDs, *recvPointIDs; 1474 PetscErrorCode ierr; 1475 1476 PetscFunctionBegin; 1477 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1478 if (sf) PetscValidPointer(sf, 3); 1479 PetscValidPointer(dmOverlap, 4); 1480 1481 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 1482 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1483 1484 /* Compute point overlap with neighbouring processes on the distributed DM */ 1485 ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1486 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr); 1487 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr); 1488 ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr); 1489 ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr); 1490 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr); 1491 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr); 1492 ierr = ISDestroy(&rootrank);CHKERRQ(ierr); 1493 ierr = ISDestroy(&leafrank);CHKERRQ(ierr); 1494 ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr); 1495 1496 /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */ 1497 ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr); 1498 1499 /* Build the overlapping DM */ 1500 ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr); 1501 ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr); 1502 ierr = DMPlexMigrate(dm, migrationSF, renumbering, NULL, *dmOverlap);CHKERRQ(ierr); 1503 1504 /* Build the new point SF by propagating the depthShift generate remote root indices */ 1505 ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr); 1506 ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr); 1507 ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr); 1508 numGhostPoints = numSharedPoints + numOverlapPoints; 1509 ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr); 1510 ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr); 1511 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr); 1512 ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr); 1513 ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr); 1514 for (p=0; p<overlapLeaves; p++) { 1515 if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p]; 1516 } 1517 ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr); 1518 ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr); 1519 for (idx=0, p=0; p<overlapLeaves; p++) { 1520 if (overlapRemote[p].rank != rank) { 1521 ghostLocal[idx] = overlapLocal[p]; 1522 ghostRemote[idx].index = recvPointIDs[p]; 1523 ghostRemote[idx].rank = overlapRemote[p].rank; 1524 idx++; 1525 } 1526 } 1527 ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr); 1528 ierr = PetscSFCreate(comm, &newPointSF);;CHKERRQ(ierr); 1529 ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr); 1530 ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr); 1531 ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr); 1532 /* Cleanup overlap partition */ 1533 ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr); 1534 ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr); 1535 if (sf) *sf = migrationSF; 1536 else {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);} 1537 PetscFunctionReturn(0); 1538 } 1539