xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 825f8a23bfb072dd3baafb9731c6829445566127)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency() may need to arrange that a large enough array is available to hold the adjacency computed by the callback.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
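
/* A minimal sketch of a user adjacency callback with the signature expected by DMPlexSetAdjacencyUser().
   It is excluded from compilation and purely illustrative: the names ExampleAdjacencySelfOnly and
   ExampleUseUserAdjacency are hypothetical.  Mirroring the convention of the internal helpers below, on
   entry *adjSize holds the capacity of adj[] and on exit it must hold the number of points written. */
#if 0
static PetscErrorCode ExampleAdjacencySelfOnly(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
{
  PetscFunctionBegin;
  if (*adjSize < 1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Adjacency array is too small");
  adj[0]   = p;   /* declare each point adjacent only to itself */
  *adjSize = 1;
  PetscFunctionReturn(0);
}

static PetscErrorCode ExampleUseUserAdjacency(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexSetAdjacencyUser(dm, ExampleAdjacencySelfOnly, NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif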
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63 
64   Level: intermediate
65 
66 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67 @*/
68 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69 {
70   DM_Plex *mesh = (DM_Plex *) dm->data;
71 
72   PetscFunctionBegin;
73   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74   mesh->useAnchors = useAnchors;
75   PetscFunctionReturn(0);
76 }
77 
78 /*@
79   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80 
81   Input Parameter:
82 . dm      - The DM object
83 
84   Output Parameter:
85 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86 
87   Level: intermediate
88 
89 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90 @*/
91 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92 {
93   DM_Plex *mesh = (DM_Plex *) dm->data;
94 
95   PetscFunctionBegin;
96   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97   PetscValidIntPointer(useAnchors, 2);
98   *useAnchors = mesh->useAnchors;
99   PetscFunctionReturn(0);
100 }
101 
102 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103 {
104   const PetscInt *cone = NULL;
105   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106   PetscErrorCode  ierr;
107 
108   PetscFunctionBeginHot;
109   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111   for (c = 0; c <= coneSize; ++c) {
112     const PetscInt  point   = !c ? p : cone[c-1];
113     const PetscInt *support = NULL;
114     PetscInt        supportSize, s, q;
115 
116     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118     for (s = 0; s < supportSize; ++s) {
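      /* The loop condition below either finds support[s] already stored in adj[] or, once q reaches numAdj,
         appends it via the comma expression and exits; the same duplicate-free insertion idiom recurs in the
         helpers below and in DMPlexGetAdjacency_Internal(). */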
119       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120         if (support[s] == adj[q]) break;
121       }
122       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123     }
124   }
125   *adjSize = numAdj;
126   PetscFunctionReturn(0);
127 }
128 
129 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130 {
131   const PetscInt *support = NULL;
132   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
133   PetscErrorCode  ierr;
134 
135   PetscFunctionBeginHot;
136   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138   for (s = 0; s <= supportSize; ++s) {
139     const PetscInt  point = !s ? p : support[s-1];
140     const PetscInt *cone  = NULL;
141     PetscInt        coneSize, c, q;
142 
143     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145     for (c = 0; c < coneSize; ++c) {
146       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147         if (cone[c] == adj[q]) break;
148       }
149       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150     }
151   }
152   *adjSize = numAdj;
153   PetscFunctionReturn(0);
154 }
155 
156 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157 {
158   PetscInt      *star = NULL;
159   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160   PetscErrorCode ierr;
161 
162   PetscFunctionBeginHot;
163   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164   for (s = 0; s < starSize*2; s += 2) {
165     const PetscInt *closure = NULL;
166     PetscInt        closureSize, c, q;
167 
168     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169     for (c = 0; c < closureSize*2; c += 2) {
170       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171         if (closure[c] == adj[q]) break;
172       }
173       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174     }
175     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176   }
177   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178   *adjSize = numAdj;
179   PetscFunctionReturn(0);
180 }
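
/* How DMPlexGetAdjacency_Internal() below maps its (useCone, useTransitiveClosure) flags onto the helpers above:

     useCone      useTransitiveClosure   adjacent points of p   helper
     PETSC_TRUE   PETSC_TRUE             star(closure(p))       DMPlexGetAdjacency_Transitive_Internal
     PETSC_FALSE  PETSC_TRUE             closure(star(p))       DMPlexGetAdjacency_Transitive_Internal
     PETSC_TRUE   PETSC_FALSE            support(cone(p))       DMPlexGetAdjacency_Cone_Internal
     PETSC_FALSE  PETSC_FALSE            cone(support(p))       DMPlexGetAdjacency_Support_Internal

   The (PETSC_FALSE, PETSC_TRUE) and (PETSC_TRUE, PETSC_FALSE) combinations are the ones conventionally used
   for FE and FV discretizations respectively, see DMSetBasicAdjacency(); a callback registered with
   DMPlexSetAdjacencyUser() overrides all of them. */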
181 
182 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
183 {
184   static PetscInt asiz = 0;
185   PetscInt maxAnchors = 1;
186   PetscInt aStart = -1, aEnd = -1;
187   PetscInt maxAdjSize;
188   PetscSection aSec = NULL;
189   IS aIS = NULL;
190   const PetscInt *anchors;
191   DM_Plex *mesh = (DM_Plex *)dm->data;
192   PetscErrorCode  ierr;
193 
194   PetscFunctionBeginHot;
195   if (useAnchors) {
196     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
197     if (aSec) {
198       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
199       maxAnchors = PetscMax(1,maxAnchors);
200       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
201       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
202     }
203   }
204   if (!*adj) {
205     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
206 
207     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
208     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
209     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
210     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
211     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
212     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
213     asiz *= maxAnchors;
214     asiz  = PetscMin(asiz,pEnd-pStart);
215     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
216   }
217   if (*adjSize < 0) *adjSize = asiz;
218   maxAdjSize = *adjSize;
219   if (mesh->useradjacency) {
220     ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
221   } else if (useTransitiveClosure) {
222     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
223   } else if (useCone) {
224     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
225   } else {
226     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
227   }
228   if (useAnchors && aSec) {
229     PetscInt origSize = *adjSize;
230     PetscInt numAdj = origSize;
231     PetscInt i = 0, j;
232     PetscInt *orig = *adj;
233 
234     while (i < origSize) {
235       PetscInt p = orig[i];
236       PetscInt aDof = 0;
237 
238       if (p >= aStart && p < aEnd) {
239         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
240       }
241       if (aDof) {
242         PetscInt aOff;
243         PetscInt s, q;
244 
245         for (j = i + 1; j < numAdj; j++) {
246           orig[j - 1] = orig[j];
247         }
248         origSize--;
249         numAdj--;
250         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
251         for (s = 0; s < aDof; ++s) {
252           for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
253             if (anchors[aOff+s] == orig[q]) break;
254           }
255           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
256         }
257       }
258       else {
259         i++;
260       }
261     }
262     *adjSize = numAdj;
263     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
264   }
265   PetscFunctionReturn(0);
266 }
267 
268 /*@
269   DMPlexGetAdjacency - Return all points adjacent to the given point
270 
271   Input Parameters:
272 + dm - The DM object
273 . p  - The point
274 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
275 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
276 
277   Output Parameters:
278 + adjSize - The number of adjacent points
279 - adj - The adjacent points
280 
281   Level: advanced
282 
283   Notes:
284     The user must PetscFree the adj array if it was not passed in.
285 
286 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
287 @*/
288 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
289 {
290   PetscBool      useCone, useClosure, useAnchors;
291   PetscErrorCode ierr;
292 
293   PetscFunctionBeginHot;
294   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
295   PetscValidPointer(adjSize,3);
296   PetscValidPointer(adj,4);
297   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
298   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
299   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
300   PetscFunctionReturn(0);
301 }
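
/* A usage sketch for DMPlexGetAdjacency(), excluded from compilation; the function name is hypothetical.
   Passing adj == NULL and adjSize == PETSC_DETERMINE lets the routine size and allocate the array, which the
   caller must then PetscFree(). */
#if 0
static PetscErrorCode ExampleViewAdjacency(DM dm, PetscInt p)
{
  PetscInt       adjSize = PETSC_DETERMINE, a;
  PetscInt      *adj     = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
  for (a = 0; a < adjSize; ++a) {ierr = PetscPrintf(PETSC_COMM_SELF, "point %D is adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);}
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif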
302 
303 /*@
304   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
305 
306   Collective on DM
307 
308   Input Parameters:
309 + dm      - The DM
310 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point, as produced by DMPlexDistributeOwnership()
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
311 
312   Output Parameters:
313 + processRanks - A list of process neighbors, or NULL
314 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
315 
316   Level: developer
317 
318 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
319 @*/
320 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
321 {
322   const PetscSFNode *remotePoints;
323   PetscInt          *localPointsNew;
324   PetscSFNode       *remotePointsNew;
325   const PetscInt    *nranks;
326   PetscInt          *ranksNew;
327   PetscBT            neighbors;
328   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
329   PetscMPIInt        size, proc, rank;
330   PetscErrorCode     ierr;
331 
332   PetscFunctionBegin;
333   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
334   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
335   if (processRanks) {PetscValidPointer(processRanks, 3);}
336   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
337   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
338   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
339   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
340   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
341   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
342   /* Compute root-to-leaf process connectivity */
343   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
344   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
345   for (p = pStart; p < pEnd; ++p) {
346     PetscInt ndof, noff, n;
347 
348     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
349     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
350     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
351   }
352   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
353   /* Compute leaf-to-neighbor process connectivity */
354   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
355   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
356   for (p = pStart; p < pEnd; ++p) {
357     PetscInt ndof, noff, n;
358 
359     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
360     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
361     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
362   }
363   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
364   /* Compute leaf-to-root process connectivity */
365   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
366   /* Calculate edges */
367   PetscBTClear(neighbors, rank);
368   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
369   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
370   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
371   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
372   for (proc = 0, n = 0; proc < size; ++proc) {
373     if (PetscBTLookup(neighbors, proc)) {
374       ranksNew[n]              = proc;
375       localPointsNew[n]        = proc;
376       remotePointsNew[n].index = rank;
377       remotePointsNew[n].rank  = proc;
378       ++n;
379     }
380   }
381   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
382   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
383   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
384   if (sfProcess) {
385     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
386     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
387     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
388     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
389   }
390   PetscFunctionReturn(0);
391 }
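
/* A sketch (excluded from compilation, hypothetical name) showing how the rank Sections and ISs produced by
   DMPlexDistributeOwnership() below are fed to DMPlexCreateTwoSidedProcessSF(). */
#if 0
static PetscErrorCode ExampleProcessSF(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank)
{
  PetscSF        sfPoint, sfProcess;
  IS             processRanks;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, &processRanks, &sfProcess);CHKERRQ(ierr);
  ierr = ISDestroy(&processRanks);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif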
392 
393 /*@
394   DMPlexDistributeOwnership - Compute owner information for shared points. This is essentially the two-sided version of the information contained in the one-sided point SF.
395 
396   Collective on DM
397 
398   Input Parameter:
399 . dm - The DM
400 
401   Output Parameters:
402 + rootSection - The number of leaves for a given root point
403 . rootrank    - The rank of each edge into the root point
404 . leafSection - The number of processes sharing a given leaf point
405 - leafrank    - The rank of each process sharing a leaf point
406 
407   Level: developer
408 
409 .seealso: DMPlexCreateOverlap()
410 @*/
411 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
412 {
413   MPI_Comm        comm;
414   PetscSF         sfPoint;
415   const PetscInt *rootdegree;
416   PetscInt       *myrank, *remoterank;
417   PetscInt        pStart, pEnd, p, nedges;
418   PetscMPIInt     rank;
419   PetscErrorCode  ierr;
420 
421   PetscFunctionBegin;
422   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
423   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
424   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
425   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
426   /* Compute number of leaves for each root */
427   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
428   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
429   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
430   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
431   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
432   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
433   /* Gather rank of each leaf to root */
434   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
435   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
436   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
437   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
438   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
439   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
440   ierr = PetscFree(myrank);CHKERRQ(ierr);
441   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
442   /* Distribute remote ranks to leaves */
443   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
444   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
445   PetscFunctionReturn(0);
446 }
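
/* A sketch of the intended call sequence, excluded from compilation; the function name is hypothetical.
   The Sections are created by the caller, filled by DMPlexDistributeOwnership(), and then consumed by
   DMPlexCreateOverlap() below. */
#if 0
static PetscErrorCode ExampleOverlapLabel(DM dm, PetscInt levels, DMLabel *ovLabel)
{
  PetscSection   rootSection, leafSection;
  IS             rootrank, leafrank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, levels, rootSection, rootrank, leafSection, leafrank, ovLabel);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif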
447 
448 /*@C
449   DMPlexCreateOverlap - Compute a label marking the remote points that form the requested number of overlap levels, recorded as point/rank pairings
450 
451   Collective on DM
452 
453   Input Parameters:
454 + dm          - The DM
455 . levels      - Number of overlap levels
456 . rootSection - The number of leaves for a given root point
457 . rootrank    - The rank of each edge into the root point
458 . leafSection - The number of processes sharing a given leaf point
459 - leafrank    - The rank of each process sharing a leaf point
460 
461   Output Parameter:
462 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
463 
464   Level: developer
465 
466 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
467 @*/
468 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
469 {
470   MPI_Comm           comm;
471   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
472   PetscSF            sfPoint, sfProc;
473   const PetscSFNode *remote;
474   const PetscInt    *local;
475   const PetscInt    *nrank, *rrank;
476   PetscInt          *adj = NULL;
477   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
478   PetscMPIInt        rank, size;
479   PetscBool          useCone, useClosure, flg;
480   PetscErrorCode     ierr;
481 
482   PetscFunctionBegin;
483   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
484   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
485   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
486   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
487   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
488   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
489   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
490   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
491   /* Handle leaves: shared with the root point */
492   for (l = 0; l < nleaves; ++l) {
493     PetscInt adjSize = PETSC_DETERMINE, a;
494 
495     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
496     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
497   }
498   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
499   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
500   /* Handle roots */
501   for (p = pStart; p < pEnd; ++p) {
502     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
503 
504     if ((p >= sStart) && (p < sEnd)) {
505       /* Some leaves share a root with other leaves on different processes */
506       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
507       if (neighbors) {
508         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
509         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
510         for (n = 0; n < neighbors; ++n) {
511           const PetscInt remoteRank = nrank[noff+n];
512 
513           if (remoteRank == rank) continue;
514           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
515         }
516       }
517     }
518     /* Roots are shared with leaves */
519     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
520     if (!neighbors) continue;
521     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
522     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
523     for (n = 0; n < neighbors; ++n) {
524       const PetscInt remoteRank = rrank[noff+n];
525 
526       if (remoteRank == rank) continue;
527       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
528     }
529   }
530   ierr = PetscFree(adj);CHKERRQ(ierr);
531   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
532   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
533   /* Add additional overlap levels */
534   for (l = 1; l < levels; l++) {
535     /* Propagate point donations over SF to capture remote connections */
536     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
537     /* Add next level of point donations to the label */
538     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
539   }
540   /* We require the closure in the overlap */
541   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
542   if (useCone || !useClosure) {
543     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
544   }
545   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
546   if (flg) {
547     PetscViewer viewer;
548     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr);
549     ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
550   }
551   /* Make global process SF and invert sender to receiver label */
552   {
553     /* Build a global process SF */
554     PetscSFNode *remoteProc;
555     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
556     for (p = 0; p < size; ++p) {
557       remoteProc[p].rank  = p;
558       remoteProc[p].index = rank;
559     }
560     ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
561     ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
562     ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
563   }
564   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
565   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
566   /* Add owned points, except for shared local points */
567   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
568   for (l = 0; l < nleaves; ++l) {
569     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
570     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
571   }
572   /* Clean up */
573   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
574   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
575   PetscFunctionReturn(0);
576 }
577 
578 /*@C
579   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution that realizes the overlap described by the input SF
580 
581   Collective on DM
582 
583   Input Parameters:
584 + dm          - The DM
585 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
586 
587   Output Parameter:
588 . migrationSF - An SF that maps original points in old locations to points in new locations
589 
590   Level: developer
591 
592 .seealso: DMPlexCreateOverlap(), DMPlexDistribute()
593 @*/
594 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
595 {
596   MPI_Comm           comm;
597   PetscMPIInt        rank, size;
598   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
599   PetscInt          *pointDepths, *remoteDepths, *ilocal;
600   PetscInt          *depthRecv, *depthShift, *depthIdx;
601   PetscSFNode       *iremote;
602   PetscSF            pointSF;
603   const PetscInt    *sharedLocal;
604   const PetscSFNode *overlapRemote, *sharedRemote;
605   PetscErrorCode     ierr;
606 
607   PetscFunctionBegin;
608   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
609   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
610   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
611   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
612   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
613 
614   /* Before building the migration SF we need to know the new stratum offsets */
615   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
616   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
617   for (d=0; d<dim+1; d++) {
618     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
619     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
620   }
621   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
622   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
623   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
624 
625   /* Count received points in each stratum and compute the internal strata shift */
626   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
627   for (d=0; d<dim+1; d++) depthRecv[d]=0;
628   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
629   depthShift[dim] = 0;
630   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
631   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
632   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
633   for (d=0; d<dim+1; d++) {
634     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
635     depthIdx[d] = pStart + depthShift[d];
636   }
637 
638   /* From the overlap SF, build an SF that describes the full overlap migration */
639   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
640   newLeaves = pEnd - pStart + nleaves;
641   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
642   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
643   /* First map local points to themselves */
644   for (d=0; d<dim+1; d++) {
645     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
646     for (p=pStart; p<pEnd; p++) {
647       point = p + depthShift[d];
648       ilocal[point] = point;
649       iremote[point].index = p;
650       iremote[point].rank = rank;
651       depthIdx[d]++;
652     }
653   }
654 
655   /* Add in the remote roots for currently shared points */
656   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
657   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
658   for (d=0; d<dim+1; d++) {
659     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
660     for (p=0; p<numSharedPoints; p++) {
661       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
662         point = sharedLocal[p] + depthShift[d];
663         iremote[point].index = sharedRemote[p].index;
664         iremote[point].rank = sharedRemote[p].rank;
665       }
666     }
667   }
668 
669   /* Now add the incoming overlap points */
670   for (p=0; p<nleaves; p++) {
671     point = depthIdx[remoteDepths[p]];
672     ilocal[point] = point;
673     iremote[point].index = overlapRemote[p].index;
674     iremote[point].rank = overlapRemote[p].rank;
675     depthIdx[remoteDepths[p]]++;
676   }
677   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
678 
679   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
680   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
681   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
682   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
683   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
684 
685   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
686   PetscFunctionReturn(0);
687 }
688 
689 /*@
690   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.
691 
692   Input Parameters:
693 + dm          - The DM
694 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
695 
696   Output Parameter:
697 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
698 
699   Level: developer
700 
701 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
702 @*/
703 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
704 {
705   MPI_Comm           comm;
706   PetscMPIInt        rank, size;
707   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
708   PetscInt          *pointDepths, *remoteDepths, *ilocal;
709   PetscInt          *depthRecv, *depthShift, *depthIdx;
710   PetscInt           hybEnd[4];
711   const PetscSFNode *iremote;
712   PetscErrorCode     ierr;
713 
714   PetscFunctionBegin;
715   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
716   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
717   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
718   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
719   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
720   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
721   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", ldepth, depth);
722 
723   /* Before building the migration SF we need to know the new stratum offsets */
724   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
725   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
726   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
727   for (d = 0; d < depth+1; ++d) {
728     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
729     for (p = pStart; p < pEnd; ++p) {
730       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
731         pointDepths[p] = 2 * d;
732       } else {
733         pointDepths[p] = 2 * d + 1;
734       }
735     }
736   }
737   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
738   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
739   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
740   /* Count received points in each stratum and compute the internal strata shift */
741   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
742   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
743   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
744   depthShift[2*depth+1] = 0;
745   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
746   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
747   depthShift[0] += depthRecv[1];
748   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
749   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
750   for (d = 2 * depth-1; d > 2; --d) {
751     PetscInt e;
752 
753     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
754   }
755   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
756   /* Derive a new local permutation based on stratified indices */
757   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
758   for (p = 0; p < nleaves; ++p) {
759     const PetscInt dep = remoteDepths[p];
760 
761     ilocal[p] = depthShift[dep] + depthIdx[dep];
762     depthIdx[dep]++;
763   }
764   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
765   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
766   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
767   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
768   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
769   PetscFunctionReturn(0);
770 }
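
/* A sketch (excluded from compilation, names hypothetical) of the typical use: a migration SF, e.g. one
   built by DMPlexPartitionLabelCreateSF() from a partition label, is replaced by its stratified version
   before being used to migrate the mesh. */
#if 0
static PetscErrorCode ExampleStratifyMigration(DM dm, DMLabel lblPartition, PetscSF *sfMigration)
{
  PetscSF        sfStratified;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexPartitionLabelCreateSF(dm, lblPartition, sfMigration);CHKERRQ(ierr);
  ierr = DMPlexStratifyMigrationSF(dm, *sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(sfMigration);CHKERRQ(ierr);
  *sfMigration = sfStratified;
  PetscFunctionReturn(0);
}
#endif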
771 
772 /*@
773   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
774 
775   Collective on DM
776 
777   Input Parameters:
778 + dm - The DMPlex object
779 . pointSF - The PetscSF describing the communication pattern
780 . originalSection - The PetscSection for existing data layout
781 - originalVec - The existing data
782 
783   Output Parameters:
784 + newSection - The PetscSection describing the new data layout
785 - newVec - The new data
786 
787   Level: developer
788 
789 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
790 @*/
791 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
792 {
793   PetscSF        fieldSF;
794   PetscInt      *remoteOffsets, fieldSize;
795   PetscScalar   *originalValues, *newValues;
796   PetscErrorCode ierr;
797 
798   PetscFunctionBegin;
799   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
800   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
801 
802   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
803   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
804   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
805 
806   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
807   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
808   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
809   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
810   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
811   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
812   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
813   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
814   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
815   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
816   PetscFunctionReturn(0);
817 }
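
/* A sketch (excluded from compilation, names hypothetical) modelled on DMPlexDistributeCoordinates() further
   down in this file: the caller supplies an already created newSection and newVec, and the routine fills
   both with the redistributed coordinate data. */
#if 0
static PetscErrorCode ExampleDistributeCoordinateField(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscSection   originalSection, newSection;
  Vec            originalVec, newVec;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetCoordinateSection(dm, &originalSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalVec);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
  ierr = DMPlexDistributeField(dm, migrationSF, originalSection, originalVec, newSection, newVec);CHKERRQ(ierr);
  ierr = DMSetCoordinatesLocal(dmParallel, newVec);CHKERRQ(ierr);
  ierr = VecDestroy(&newVec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif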
818 
819 /*@
820   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
821 
822   Collective on DM
823 
824   Input Parameters:
825 + dm - The DMPlex object
826 . pointSF - The PetscSF describing the communication pattern
827 . originalSection - The PetscSection for existing data layout
828 - originalIS - The existing data
829 
830   Output Parameters:
831 + newSection - The PetscSection describing the new data layout
832 - newIS - The new data
833 
834   Level: developer
835 
836 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
837 @*/
838 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
839 {
840   PetscSF         fieldSF;
841   PetscInt       *newValues, *remoteOffsets, fieldSize;
842   const PetscInt *originalValues;
843   PetscErrorCode  ierr;
844 
845   PetscFunctionBegin;
846   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
847   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
848 
849   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
850   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
851 
852   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
853   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
854   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
855   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
856   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
857   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
858   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
859   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
860   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
861   PetscFunctionReturn(0);
862 }
863 
864 /*@
865   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
866 
867   Collective on DM
868 
869   Input Parameters:
870 + dm - The DMPlex object
871 . pointSF - The PetscSF describing the communication pattern
872 . originalSection - The PetscSection for existing data layout
873 . datatype - The type of data
874 - originalData - The existing data
875 
876   Output Parameters:
877 + newSection - The PetscSection describing the new data layout
878 - newData - The new data
879 
880   Level: developer
881 
882 .seealso: DMPlexDistribute(), DMPlexDistributeField()
883 @*/
884 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
885 {
886   PetscSF        fieldSF;
887   PetscInt      *remoteOffsets, fieldSize;
888   PetscMPIInt    dataSize;
889   PetscErrorCode ierr;
890 
891   PetscFunctionBegin;
892   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
893   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
894 
895   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
896   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
897   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
898 
899   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
900   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
901   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
902   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
903   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
904   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
905   PetscFunctionReturn(0);
906 }
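
/* A sketch (excluded from compilation, names hypothetical) of distributing one chunk of PetscInt data per
   point with DMPlexDistributeData(): newSection is created by the caller, while newData is allocated inside
   the routine and must be freed by the caller. */
#if 0
static PetscErrorCode ExampleDistributeIntData(DM dm, PetscSF migrationSF, PetscSection originalSection, PetscInt *originalData)
{
  PetscSection   newSection;
  PetscInt      *newData = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, migrationSF, originalSection, MPIU_INT, originalData, newSection, (void **) &newData);CHKERRQ(ierr);
  /* ... use newData, laid out according to newSection ... */
  ierr = PetscFree(newData);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif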
907 
908 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
909 {
910   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
911   MPI_Comm               comm;
912   PetscSF                coneSF;
913   PetscSection           originalConeSection, newConeSection;
914   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
915   PetscBool              flg;
916   PetscErrorCode         ierr;
917 
918   PetscFunctionBegin;
919   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
920   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
921 
922   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
923   /* Distribute cone section */
924   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
925   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
926   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
927   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
928   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
929   {
930     PetscInt pStart, pEnd, p;
931 
932     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
933     for (p = pStart; p < pEnd; ++p) {
934       PetscInt coneSize;
935       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
936       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
937     }
938   }
939   /* Communicate and renumber cones */
940   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
941   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
942   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
943   if (original) {
944     PetscInt numCones;
945 
946     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
947     ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
948     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
949   } else {
950     globCones = cones;
951   }
952   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
953   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
954   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
955   if (original) {
956     ierr = PetscFree(globCones);CHKERRQ(ierr);
957   }
958   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
959   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
960 #if defined(PETSC_USE_DEBUG)
961   {
962     PetscInt  p;
963     PetscBool valid = PETSC_TRUE;
964     for (p = 0; p < newConesSize; ++p) {
965       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
966     }
967     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
968   }
969 #endif
970   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
971   if (flg) {
972     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
973     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
974     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
975     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
976     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
977   }
978   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
979   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
980   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
981   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
982   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
983   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
984   /* Create supports and stratify DMPlex */
985   {
986     PetscInt pStart, pEnd;
987 
988     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
989     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
990   }
991   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
992   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
993   {
994     PetscBool useCone, useClosure, useAnchors;
995 
996     ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
997     ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
998     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
999     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1000   }
1001   PetscFunctionReturn(0);
1002 }
1003 
1004 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1005 {
1006   MPI_Comm         comm;
1007   PetscSection     originalCoordSection, newCoordSection;
1008   Vec              originalCoordinates, newCoordinates;
1009   PetscInt         bs;
1010   PetscBool        isper;
1011   const char      *name;
1012   const PetscReal *maxCell, *L;
1013   const DMBoundaryType *bd;
1014   PetscErrorCode   ierr;
1015 
1016   PetscFunctionBegin;
1017   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1018   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1019 
1020   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1021   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1022   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1023   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1024   if (originalCoordinates) {
1025     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1026     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1027     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1028 
1029     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1030     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1031     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1032     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1033     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1034   }
1035   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1036   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1037   PetscFunctionReturn(0);
1038 }
1039 
1040 /* Here we are assuming that process 0 always has everything */
1041 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1042 {
1043   DM_Plex         *mesh = (DM_Plex*) dm->data;
1044   MPI_Comm         comm;
1045   DMLabel          depthLabel;
1046   PetscMPIInt      rank;
1047   PetscInt         depth, d, numLabels, numLocalLabels, l;
1048   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1049   PetscObjectState depthState = -1;
1050   PetscErrorCode   ierr;
1051 
1052   PetscFunctionBegin;
1053   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1054   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1055 
1056   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1057   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1058   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1059 
1060   /* If the user has changed the depth label, communicate it instead */
1061   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1062   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1063   if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
1064   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1065   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1066   if (sendDepth) {
1067     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1068     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1069   }
1070   /* Everyone must have either the same number of labels, or none */
1071   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1072   numLabels = numLocalLabels;
1073   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1074   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1075   for (l = numLabels-1; l >= 0; --l) {
1076     DMLabel     label = NULL, labelNew = NULL;
1077     PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
1078     const char *name = NULL;
1079 
1080     if (hasLabels) {
1081       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1082       /* Skip "depth" because it is recreated */
1083       ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
1084       ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
1085     }
1086     ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1087     if (isDepth && !sendDepth) continue;
1088     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1089     if (isDepth) {
1090       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1091       PetscInt gdepth;
1092 
1093       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1094       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", depth, gdepth);
1095       for (d = 0; d <= gdepth; ++d) {
1096         PetscBool has;
1097 
1098         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1099         if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1100       }
1101     }
1102     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1103     /* Put the output flag in the new label */
1104     if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
1105     ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
1106     ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
1107     ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
1108   }
1109   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1110   PetscFunctionReturn(0);
1111 }
1112 
1113 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1114 {
1115   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1116   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1117   PetscBool      *isHybrid, *isHybridParallel;
1118   PetscInt        dim, depth, d;
1119   PetscInt        pStart, pEnd, pStartP, pEndP;
1120   PetscErrorCode  ierr;
1121 
1122   PetscFunctionBegin;
1123   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1124   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1125 
1126   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1127   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1128   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1129   ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1130   ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
1131   for (d = 0; d <= depth; d++) {
1132     PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1133 
1134     if (hybridMax >= 0) {
1135       PetscInt sStart, sEnd, p;
1136 
1137       ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1138       for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1139     }
1140   }
1141   ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1142   ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1143   for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1144   for (d = 0; d <= depth; d++) {
1145     PetscInt sStart, sEnd, p, dd;
1146 
1147     ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1148     dd = (depth == 1 && d == 1) ? dim : d;
1149     for (p = sStart; p < sEnd; p++) {
1150       if (isHybridParallel[p-pStartP]) {
1151         pmesh->hybridPointMax[dd] = p;
1152         break;
1153       }
1154     }
1155   }
1156   ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1157   PetscFunctionReturn(0);
1158 }
1159 
1160 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1161 {
1162   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1163   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1164   MPI_Comm        comm;
1165   DM              refTree;
1166   PetscSection    origParentSection, newParentSection;
1167   PetscInt        *origParents, *origChildIDs;
1168   PetscBool       flg;
1169   PetscErrorCode  ierr;
1170 
1171   PetscFunctionBegin;
1172   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1173   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1174   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1175 
1176   /* Set up tree */
1177   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1178   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1179   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1180   if (origParentSection) {
1181     PetscInt        pStart, pEnd;
1182     PetscInt        *newParents, *newChildIDs, *globParents;
1183     PetscInt        *remoteOffsetsParents, newParentSize;
1184     PetscSF         parentSF;
1185 
1186     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1187     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1188     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1189     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1190     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1191     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1192     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1193     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1194     if (original) {
1195       PetscInt numParents;
1196 
1197       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1198       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1199       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1200     }
1201     else {
1202       globParents = origParents;
1203     }
1204     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1205     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1206     if (original) {
1207       ierr = PetscFree(globParents);CHKERRQ(ierr);
1208     }
1209     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1210     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1211     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1212 #if defined(PETSC_USE_DEBUG)
1213     {
1214       PetscInt  p;
1215       PetscBool valid = PETSC_TRUE;
1216       for (p = 0; p < newParentSize; ++p) {
1217         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1218       }
1219       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1220     }
1221 #endif
1222     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1223     if (flg) {
1224       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1225       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1226       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1227       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1228       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1229     }
1230     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1231     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1232     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1233     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1234   }
1235   pmesh->useAnchors = mesh->useAnchors;
1236   PetscFunctionReturn(0);
1237 }
1238 
1239 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1240 {
1241   PetscMPIInt            rank, size;
1242   MPI_Comm               comm;
1243   PetscErrorCode         ierr;
1244 
1245   PetscFunctionBegin;
1246   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1247   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1248 
1249   /* Create point SF for parallel mesh */
1250   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1251   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1252   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1253   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1254   {
1255     const PetscInt *leaves;
1256     PetscSFNode    *remotePoints, *rowners, *lowners;
1257     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1258     PetscInt        pStart, pEnd;
1259 
1260     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1261     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1262     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1263     for (p=0; p<numRoots; p++) {
1264       rowners[p].rank  = -1;
1265       rowners[p].index = -1;
1266     }
1267     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1268     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1269     for (p = 0; p < numLeaves; ++p) {
1270       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1271         lowners[p].rank  = rank;
1272         lowners[p].index = leaves ? leaves[p] : p;
1273       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1274         lowners[p].rank  = -2;
1275         lowners[p].index = -2;
1276       }
1277     }
1278     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction, so flag them so that MAXLOC does not use them */
1279       rowners[p].rank  = -3;
1280       rowners[p].index = -3;
1281     }
1282     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1283     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1284     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1285     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1286     for (p = 0; p < numLeaves; ++p) {
1287       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1288       if (lowners[p].rank != rank) ++numGhostPoints;
1289     }
1290     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1291     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1292     for (p = 0, gp = 0; p < numLeaves; ++p) {
1293       if (lowners[p].rank != rank) {
1294         ghostPoints[gp]        = leaves ? leaves[p] : p;
1295         remotePoints[gp].rank  = lowners[p].rank;
1296         remotePoints[gp].index = lowners[p].index;
1297         ++gp;
1298       }
1299     }
1300     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1301     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1302     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1303   }
1304   {
1305     PetscBool useCone, useClosure, useAnchors;
1306 
1307     ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
1308     ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
1309     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1310     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1311   }
1312   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1313   PetscFunctionReturn(0);
1314 }
1315 
1316 /*@
1317   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1318 
1319   Input Parameters:
1320 + dm - The DMPlex object
1321 - flg - Balance the partition?
1322 
1323   Level: intermediate
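
  Example Usage:
  A minimal sketch (dm and dmDist are placeholder variables for an existing DMPlex and the distributed result) of requesting a balanced ownership vote before distributing:
.vb
  ierr = DMPlexSetPartitionBalance(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
.ve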
1324 
1325 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1326 @*/
1327 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1328 {
1329   DM_Plex *mesh = (DM_Plex *)dm->data;
1330 
1331   PetscFunctionBegin;
1332   mesh->partitionBalance = flg;
1333   PetscFunctionReturn(0);
1334 }
1335 
1336 /*@
1337   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1338 
1339   Input Parameter:
1340 . dm - The DMPlex object
1341 
1342   Output Parameter:
1343 . flg - Balance the partition?
1344 
1345   Level: intermediate
1346 
1347 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1348 @*/
1349 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1350 {
1351   DM_Plex *mesh = (DM_Plex *)dm->data;
1352 
1353   PetscFunctionBegin;
1354   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1355   PetscValidIntPointer(flg, 2);
1356   *flg = mesh->partitionBalance;
1357   PetscFunctionReturn(0);
1358 }
1359 
1360 /*@C
1361   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1362 
1363   Input Parameters:
1364 + dm          - The source DMPlex object
1365 . migrationSF - The star forest that describes the parallel point remapping
1366 - ownership   - Flag causing a vote to determine point ownership
1367 
1368   Output Parameter:
1369 . pointSF     - The star forest describing the point overlap in the remapped DM
1370 
1371   Level: developer
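
  Example Usage:
  A minimal sketch mirroring how the distribution routines in this file use it, where dmParallel and migrationSF stand for an already migrated DM and the SF used to migrate it:
.vb
  PetscSF pointSF;

  ierr = DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
  ierr = DMSetPointSF(dmParallel, pointSF);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
.ve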
1372 
1373 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1374 @*/
1375 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1376 {
1377   PetscMPIInt        rank, size;
1378   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1379   PetscInt          *pointLocal;
1380   const PetscInt    *leaves;
1381   const PetscSFNode *roots;
1382   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1383   Vec                shifts;
1384   const PetscInt     numShifts = 13759;
1385   const PetscScalar *shift = NULL;
1386   const PetscBool    shiftDebug = PETSC_FALSE;
1387   PetscBool          balance;
1388   PetscErrorCode     ierr;
1389 
1390   PetscFunctionBegin;
1391   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1392   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1393   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1394 
1395   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1396   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1397   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1398   if (ownership) {
1399     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1400     if (balance) {
1401       PetscRandom r;
1402 
1403       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1404       ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
1405       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1406       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1407       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1408       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1409       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1410       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1411     }
1412 
1413     /* Point ownership vote: Process with highest (possibly shifted) rank owns shared points */
1414     for (p = 0; p < nleaves; ++p) {
1415       if (shiftDebug) {
1416         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
1417       }
1418       /* Either put in a bid or we know we own it */
1419       leafNodes[p].rank  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
1420       leafNodes[p].index = p;
1421     }
1422     for (p = 0; p < nroots; p++) {
1423       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1424       rootNodes[p].rank  = -3;
1425       rootNodes[p].index = -3;
1426     }
1427     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1428     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1429     if (balance) {
1430       /* We've voted, now we need to get the rank.  When we're balancing the partition, the "rank" in rootNodes is not
1431        * the rank but rather (rank + random)%size.  So we do another reduction, voting the same way, but sending the
1432        * rank instead of the index. */
1433       PetscSFNode *rootRanks = NULL;
1434       ierr = PetscMalloc1(nroots, &rootRanks);CHKERRQ(ierr);
1435       for (p = 0; p < nroots; p++) {
1436         rootRanks[p].rank = -3;
1437         rootRanks[p].index = -3;
1438       }
1439       for (p = 0; p < nleaves; p++) leafNodes[p].index = rank;
1440       ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
1441       ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
1442       for (p = 0; p < nroots; p++) rootNodes[p].rank = rootRanks[p].index;
1443       ierr = PetscFree(rootRanks);CHKERRQ(ierr);
1444     }
1445   } else {
1446     for (p = 0; p < nroots; p++) {
1447       rootNodes[p].index = -1;
1448       rootNodes[p].rank = rank;
1449     }
1450     for (p = 0; p < nleaves; p++) {
1451       /* Write new local id into old location */
1452       if (roots[p].rank == rank) {
1453         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1454       }
1455     }
1456   }
1457   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1458   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1459 
1460   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1461     if (leafNodes[p].rank != rank) npointLeaves++;
1462   }
1463   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1464   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1465   for (idx = 0, p = 0; p < nleaves; p++) {
1466     if (leafNodes[p].rank != rank) {
1467       pointLocal[idx] = p;
1468       pointRemote[idx] = leafNodes[p];
1469       idx++;
1470     }
1471   }
1472   if (shift) {
1473     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1474     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1475   }
1476   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1477   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1478   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1479   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1480   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1481   PetscFunctionReturn(0);
1482 }
1483 
1484 /*@C
1485   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1486 
1487   Collective on DM and PetscSF
1488 
1489   Input Parameter:
1490 + dm       - The source DMPlex object
1491 . sf       - The star forest communication context describing the migration pattern
1492 
1493   Output Parameter:
1494 - targetDM - The target DMPlex object
1495 
1496   Level: intermediate
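
  Example Usage:
  A minimal sketch following the pattern used by DMPlexDistribute(), where comm, dm, and migrationSF are placeholders for the communicator, the source mesh, and a point migration SF:
.vb
  DM dmTarget;

  ierr = DMPlexCreate(comm, &dmTarget);CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, migrationSF, dmTarget);CHKERRQ(ierr);
.ve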
1497 
1498 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1499 @*/
1500 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1501 {
1502   MPI_Comm               comm;
1503   PetscInt               dim, cdim, nroots;
1504   PetscSF                sfPoint;
1505   ISLocalToGlobalMapping ltogMigration;
1506   ISLocalToGlobalMapping ltogOriginal = NULL;
1507   PetscBool              flg;
1508   PetscErrorCode         ierr;
1509 
1510   PetscFunctionBegin;
1511   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1512   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1513   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1514   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1515   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1516   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1517   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1518 
1519   /* Check for a one-to-all distribution pattern */
1520   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1521   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1522   if (nroots >= 0) {
1523     IS        isOriginal;
1524     PetscInt  n, size, nleaves;
1525     PetscInt  *numbering_orig, *numbering_new;
1526 
1527     /* Get the original point numbering */
1528     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1529     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1530     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1531     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1532     /* Convert to positive global numbers */
1533     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1534     /* Derive the new local-to-global mapping from the old one */
1535     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1536     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1537     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1538     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1539     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1540     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1541     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1542   } else {
1543     /* One-to-all distribution pattern: We can derive LToG from SF */
1544     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1545   }
1546   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1547   if (flg) {
1548     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1549     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1550   }
1551   /* Migrate DM data to target DM */
1552   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1553   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1554   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1555   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1556   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1557   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1558   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1559   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1560   PetscFunctionReturn(0);
1561 }
1562 
1563 PETSC_INTERN PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
1564 
1565 /*@C
1566   DMPlexDistribute - Distributes the mesh and any associated sections.
1567 
1568   Collective on DM
1569 
1570   Input Parameters:
1571 + dm  - The original DMPlex object
1572 - overlap - The overlap of partitions, 0 is the default
1573 
1574   Output Parameters:
1575 + sf - The PetscSF used for point distribution, or NULL if not needed
1576 - dmParallel - The distributed DMPlex object
1577 
1578   Note: If the mesh was not distributed, the output dmParallel will be NULL.
1579 
1580   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1581   representation on the mesh.
1582 
1583   Level: intermediate
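
  Example Usage:
  A minimal sketch of the common redistribution idiom (dm and dmDist are placeholder variables):
.vb
  DM dmDist = NULL;

  ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
  if (dmDist) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }
.ve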
1584 
1585 .keywords: mesh, elements
1586 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
1587 @*/
1588 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1589 {
1590   MPI_Comm               comm;
1591   PetscPartitioner       partitioner;
1592   IS                     cellPart;
1593   PetscSection           cellPartSection;
1594   DM                     dmCoord;
1595   DMLabel                lblPartition, lblMigration;
1596   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1597   PetscBool              flg, balance;
1598   PetscMPIInt            rank, size, p;
1599   PetscErrorCode         ierr;
1600 
1601   PetscFunctionBegin;
1602   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1603   PetscValidLogicalCollectiveInt(dm, overlap, 2);
1604   if (sf) PetscValidPointer(sf,3);
1605   PetscValidPointer(dmParallel,4);
1606 
1607   if (sf) *sf = NULL;
1608   *dmParallel = NULL;
1609   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1610   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1611   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1612   if (size == 1) PetscFunctionReturn(0);
1613 
1614   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1615   /* Create cell partition */
1616   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1617   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1618   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1619   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1620   {
1621     /* Convert partition to DMLabel */
1622     IS         is;
1623     PetscHSetI ht;
1624     PetscInt pStart, pEnd, proc, npoints, poff = 0, nranks, *iranks;
1625     const PetscInt *points;
1626 
1627     ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
1628     /* Preallocate strata */
1629     ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1630     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1631     for (proc = pStart; proc < pEnd; proc++) {
1632       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1633       if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr);}
1634     }
1635     ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
1636     ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
1637     ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
1638     ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1639     ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
1640     ierr = PetscFree(iranks);CHKERRQ(ierr);
1641     /* Inline DMPlexPartitionLabelClosure() */
1642     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1643     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1644     for (proc = pStart; proc < pEnd; proc++) {
1645       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1646       if (!npoints) continue;
1647       ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
1648       ierr = DMPlexPartitionLabelClosure_Private(dm, lblPartition, proc, npoints, points+poff, &is);CHKERRQ(ierr);
1649       ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
1650       ierr = ISDestroy(&is);CHKERRQ(ierr);
1651     }
1652     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1653   }
1654   {
1655     /* Build a global process SF */
1656     PetscSFNode *remoteProc;
1657     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1658     for (p = 0; p < size; ++p) {
1659       remoteProc[p].rank  = p;
1660       remoteProc[p].index = rank;
1661     }
1662     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1663     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1664     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1665   }
1666   ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
1667   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1668   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1669   /* Stratify the SF in case we are migrating an already parallel plex */
1670   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1671   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1672   sfMigration = sfStratified;
1673   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1674   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1675   if (flg) {
1676     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1677     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1678   }
1679 
1680   /* Create non-overlapping parallel DM and migrate internal data */
1681   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1682   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1683   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1684 
1685   /* Build the point SF without overlap */
1686   ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1687   ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
1688   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1689   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1690   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1691   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1692   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1693 
1694   if (overlap > 0) {
1695     DM                 dmOverlap;
1696     PetscInt           nroots, nleaves;
1697     PetscSFNode       *newRemote;
1698     const PetscSFNode *oldRemote;
1699     PetscSF            sfOverlap, sfOverlapPoint;
1700     /* Add the partition overlap to the distributed DM */
1701     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1702     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1703     *dmParallel = dmOverlap;
1704     if (flg) {
1705       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1706       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1707     }
1708 
1709     /* Re-map the migration SF to establish the full migration pattern */
1710     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1711     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1712     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1713     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1714     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1715     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1716     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1717     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1718     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1719     sfMigration = sfOverlapPoint;
1720   }
1721   /* Cleanup Partition */
1722   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1723   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1724   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1725   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1726   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1727   /* Copy BC */
1728   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1729   /* Create sfNatural */
1730   if (dm->useNatural) {
1731     PetscSection section;
1732 
1733     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1734     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1735     ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1736   }
1737   /* Cleanup */
1738   if (sf) {*sf = sfMigration;}
1739   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1740   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1741   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1742   PetscFunctionReturn(0);
1743 }
1744 
1745 /*@C
1746   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1747 
1748   Collective on DM
1749 
1750   Input Parameters:
1751 + dm  - The non-overlapping distributed DMPlex object
1752 - overlap - The overlap of partitions
1753 
1754   Output Parameters:
1755 + sf - The PetscSF used for point distribution
1756 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1757 
1758   Note: If the mesh was not distributed (the communicator has a single rank), the output dmOverlap will be NULL.
1759 
1760   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1761   representation on the mesh.
1762 
1763   Level: intermediate
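
  Example Usage:
  A minimal sketch of adding one level of overlap to an already distributed mesh (dm and dmOverlap are placeholder variables):
.vb
  DM dmOverlap = NULL;

  ierr = DMPlexDistributeOverlap(dm, 1, NULL, &dmOverlap);CHKERRQ(ierr);
  if (dmOverlap) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmOverlap;
  }
.ve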
1764 
1765 .keywords: mesh, elements
1766 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
1767 @*/
1768 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1769 {
1770   MPI_Comm               comm;
1771   PetscMPIInt            size, rank;
1772   PetscSection           rootSection, leafSection;
1773   IS                     rootrank, leafrank;
1774   DM                     dmCoord;
1775   DMLabel                lblOverlap;
1776   PetscSF                sfOverlap, sfStratified, sfPoint;
1777   PetscErrorCode         ierr;
1778 
1779   PetscFunctionBegin;
1780   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1781   if (sf) PetscValidPointer(sf, 3);
1782   PetscValidPointer(dmOverlap, 4);
1783 
1784   if (sf) *sf = NULL;
1785   *dmOverlap  = NULL;
1786   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1787   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1788   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1789   if (size == 1) PetscFunctionReturn(0);
1790 
1791   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1792   /* Compute point overlap with neighbouring processes on the distributed DM */
1793   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1794   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1795   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1796   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1797   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1798   /* Convert overlap label to stratified migration SF */
1799   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1800   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1801   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1802   sfOverlap = sfStratified;
1803   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1804   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1805 
1806   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1807   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1808   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1809   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1810   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1811 
1812   /* Build the overlapping DM */
1813   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1814   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1815   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1816   /* Build the new point SF */
1817   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1818   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1819   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1820   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1821   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1822   /* Cleanup overlap partition */
1823   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1824   if (sf) *sf = sfOverlap;
1825   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1826   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1827   PetscFunctionReturn(0);
1828 }
1829 
1830 /*@C
1831   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1832   root process of the original's communicator.
1833 
1834   Collective on DM
1835 
1836   Input Parameter:
1837 . dm - the original DMPlex object
1838 
1839   Output Parameters:
1840 + sf - the PetscSF used for point distribution (optional)
1841 - gatherMesh - the gathered DM object, or NULL
1842 
1843   Level: intermediate
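
  Example Usage:
  A minimal sketch (dm and gatherMesh are placeholder variables); on a single-rank communicator gatherMesh is left NULL:
.vb
  DM gatherMesh = NULL;

  ierr = DMPlexGetGatherDM(dm, NULL, &gatherMesh);CHKERRQ(ierr);
  if (gatherMesh) {
    /* rank 0 now holds all points of the mesh */
    ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);
  }
.ve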
1844 
1845 .keywords: mesh
1846 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1847 @*/
1848 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1849 {
1850   MPI_Comm       comm;
1851   PetscMPIInt    size;
1852   PetscPartitioner oldPart, gatherPart;
1853   PetscErrorCode ierr;
1854 
1855   PetscFunctionBegin;
1856   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1857   PetscValidPointer(gatherMesh,3);
1858   *gatherMesh = NULL;
1859   if (sf) *sf = NULL;
1860   comm = PetscObjectComm((PetscObject)dm);
1861   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1862   if (size == 1) PetscFunctionReturn(0);
1863   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1864   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1865   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1866   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1867   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1868   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1869 
1870   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1871   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1872   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1873   PetscFunctionReturn(0);
1874 }
1875 
1876 /*@C
1877   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1878 
1879   Collective on DM
1880 
1881   Input Parameter:
1882 . dm - the original DMPlex object
1883 
1884   Output Parameters:
1885 + sf - the PetscSF used for point distribution (optional)
1886 - redundantMesh - the redundant DM object, or NULL
1887 
1888   Level: intermediate
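
  Example Usage:
  A minimal sketch (dm and dmRedundant are placeholder variables); on a single-rank communicator the original dm is returned with an extra reference:
.vb
  DM dmRedundant = NULL;

  ierr = DMPlexGetRedundantDM(dm, NULL, &dmRedundant);CHKERRQ(ierr);
  if (dmRedundant) {
    /* every rank now holds a full copy of the mesh */
    ierr = DMDestroy(&dmRedundant);CHKERRQ(ierr);
  }
.ve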
1889 
1890 .keywords: mesh
1891 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1892 @*/
1893 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1894 {
1895   MPI_Comm       comm;
1896   PetscMPIInt    size, rank;
1897   PetscInt       pStart, pEnd, p;
1898   PetscInt       numPoints = -1;
1899   PetscSF        migrationSF, sfPoint, gatherSF;
1900   DM             gatherDM, dmCoord;
1901   PetscSFNode    *points;
1902   PetscErrorCode ierr;
1903 
1904   PetscFunctionBegin;
1905   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1906   PetscValidPointer(redundantMesh,3);
1907   *redundantMesh = NULL;
1908   comm = PetscObjectComm((PetscObject)dm);
1909   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1910   if (size == 1) {
1911     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1912     *redundantMesh = dm;
1913     if (sf) *sf = NULL;
1914     PetscFunctionReturn(0);
1915   }
1916   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1917   if (!gatherDM) PetscFunctionReturn(0);
1918   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1919   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1920   numPoints = pEnd - pStart;
1921   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1922   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1923   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1924   for (p = 0; p < numPoints; p++) {
1925     points[p].index = p;
1926     points[p].rank  = 0;
1927   }
1928   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1929   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1930   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1931   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1932   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1933   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1934   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1935   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1936   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1937   if (sf) {
1938     PetscSF tsf;
1939 
1940     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1941     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1942     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1943   }
1944   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1945   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1946   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1947   PetscFunctionReturn(0);
1948 }
1949