xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision c965bd60c99fe70cccd739ad17f51549fdc2a3e5)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 #if defined(PETSC_HAVE_PARMETIS)
4 #include <parmetis.h>
5 #endif
6 
7 /*@C
8   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
9 
10   Input Parameters:
11 + dm      - The DM object
12 . user    - The user callback, may be NULL (to clear the callback)
13 - ctx     - context for callback evaluation, may be NULL
14 
15   Level: advanced
16 
17   Notes:
18      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
19 
20      Any setting here overrides other configuration of DMPlex adjacency determination.
21 
22 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
23 @*/
24 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
25 {
26   DM_Plex *mesh = (DM_Plex *)dm->data;
27 
28   PetscFunctionBegin;
29   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
30   mesh->useradjacency = user;
31   mesh->useradjacencyctx = ctx;
32   PetscFunctionReturn(0);
33 }
34 
35 /*@C
36   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
37 
38   Input Parameter:
39 . dm      - The DM object
40 
41   Output Parameters:
42 - user    - The user callback
43 - ctx     - context for callback evaluation
44 
45   Level: advanced
46 
47 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
48 @*/
49 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
50 {
51   DM_Plex *mesh = (DM_Plex *)dm->data;
52 
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
55   if (user) *user = mesh->useradjacency;
56   if (ctx) *ctx = mesh->useradjacencyctx;
57   PetscFunctionReturn(0);
58 }
59 
60 /*@
61   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
62 
63   Input Parameters:
64 + dm      - The DM object
65 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
66 
67   Level: intermediate
68 
69 .seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
70 @*/
71 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
72 {
73   DM_Plex *mesh = (DM_Plex *) dm->data;
74 
75   PetscFunctionBegin;
76   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
77   mesh->useAnchors = useAnchors;
78   PetscFunctionReturn(0);
79 }
80 
81 /*@
82   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
83 
84   Input Parameter:
85 . dm      - The DM object
86 
87   Output Parameter:
88 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
89 
90   Level: intermediate
91 
92 .seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
93 @*/
94 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
95 {
96   DM_Plex *mesh = (DM_Plex *) dm->data;
97 
98   PetscFunctionBegin;
99   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
100   PetscValidIntPointer(useAnchors, 2);
101   *useAnchors = mesh->useAnchors;
102   PetscFunctionReturn(0);
103 }
104 
105 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
106 {
107   const PetscInt *cone = NULL;
108   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
109   PetscErrorCode  ierr;
110 
111   PetscFunctionBeginHot;
112   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
113   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
114   for (c = 0; c <= coneSize; ++c) {
115     const PetscInt  point   = !c ? p : cone[c-1];
116     const PetscInt *support = NULL;
117     PetscInt        supportSize, s, q;
118 
119     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
120     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
121     for (s = 0; s < supportSize; ++s) {
122       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
123         if (support[s] == adj[q]) break;
124       }
125       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
126     }
127   }
128   *adjSize = numAdj;
129   PetscFunctionReturn(0);
130 }
131 
132 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
133 {
134   const PetscInt *support = NULL;
135   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
136   PetscErrorCode  ierr;
137 
138   PetscFunctionBeginHot;
139   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
140   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
141   for (s = 0; s <= supportSize; ++s) {
142     const PetscInt  point = !s ? p : support[s-1];
143     const PetscInt *cone  = NULL;
144     PetscInt        coneSize, c, q;
145 
146     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
147     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
148     for (c = 0; c < coneSize; ++c) {
149       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
150         if (cone[c] == adj[q]) break;
151       }
152       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
153     }
154   }
155   *adjSize = numAdj;
156   PetscFunctionReturn(0);
157 }
158 
159 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
160 {
161   PetscInt      *star = NULL;
162   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
163   PetscErrorCode ierr;
164 
165   PetscFunctionBeginHot;
166   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
167   for (s = 0; s < starSize*2; s += 2) {
168     const PetscInt *closure = NULL;
169     PetscInt        closureSize, c, q;
170 
171     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
172     for (c = 0; c < closureSize*2; c += 2) {
173       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
174         if (closure[c] == adj[q]) break;
175       }
176       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
177     }
178     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
179   }
180   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
181   *adjSize = numAdj;
182   PetscFunctionReturn(0);
183 }
184 
/* DMPlexGetAdjacency_Internal - Compute the points adjacent to p, dispatching on the
   adjacency definition (user callback, transitive closure, cone, or support), and,
   when useAnchors is set and the DM has anchors, replacing each constrained point in
   the result with its anchor points.

   If *adj is NULL on entry, a worst-case-sized array is allocated here (the caller
   must PetscFree it).  Otherwise adj/adjSize describe a caller-provided buffer.

   NOTE(review): asiz is function-static, so when the caller passes a preallocated
   *adj together with *adjSize < 0, the size used is whatever a PREVIOUS call
   computed (0 on the very first call) -- confirm callers always pass a valid
   *adjSize with a preallocated array.  The static also makes this routine
   non-reentrant / thread-unsafe. */
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;       /* worst-case adjacency size from the last allocation */
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  DM_Plex *mesh = (DM_Plex *)dm->data;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    /* Geometric-series bounds on closure/star sizes: 1 + max + max^2 + ... + max^depth */
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);  /* adjacency can never exceed the chart size */
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  /* Dispatch on the adjacency definition; a user callback overrides everything */
  if (mesh->useradjacency) {
    ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
  } else if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Splice anchors in place: remove each constrained point from the list and
       append its anchor points, de-duplicating against entries already present */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point by shifting the tail left; do not
           advance i, since orig[i] now holds an unexamined point */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append each anchor unless already present; the comma expression in the
           loop condition performs the append once q reaches numAdj */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
270 
271 /*@
272   DMPlexGetAdjacency - Return all points adjacent to the given point
273 
274   Input Parameters:
275 + dm - The DM object
276 . p  - The point
277 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
278 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
279 
280   Output Parameters:
281 + adjSize - The number of adjacent points
282 - adj - The adjacent points
283 
284   Level: advanced
285 
286   Notes:
287     The user must PetscFree the adj array if it was not passed in.
288 
289 .seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
290 @*/
291 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
292 {
293   PetscBool      useCone, useClosure, useAnchors;
294   PetscErrorCode ierr;
295 
296   PetscFunctionBeginHot;
297   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
298   PetscValidPointer(adjSize,3);
299   PetscValidPointer(adj,4);
300   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
301   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
302   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
303   PetscFunctionReturn(0);
304 }
305 
306 /*@
307   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
308 
309   Collective on DM
310 
311   Input Parameters:
312 + dm      - The DM
313 - sfPoint - The PetscSF which encodes point connectivity
314 
315   Output Parameters:
316 + processRanks - A list of process neighbors, or NULL
317 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
318 
319   Level: developer
320 
321 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
322 @*/
323 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
324 {
325   const PetscSFNode *remotePoints;
326   PetscInt          *localPointsNew;
327   PetscSFNode       *remotePointsNew;
328   const PetscInt    *nranks;
329   PetscInt          *ranksNew;
330   PetscBT            neighbors;
331   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
332   PetscMPIInt        size, proc, rank;
333   PetscErrorCode     ierr;
334 
335   PetscFunctionBegin;
336   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
337   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
338   if (processRanks) {PetscValidPointer(processRanks, 3);}
339   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
340   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
341   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
342   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
343   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
344   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
345   /* Compute root-to-leaf process connectivity */
346   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
347   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
348   for (p = pStart; p < pEnd; ++p) {
349     PetscInt ndof, noff, n;
350 
351     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
352     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
353     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
354   }
355   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
356   /* Compute leaf-to-neighbor process connectivity */
357   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
358   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
359   for (p = pStart; p < pEnd; ++p) {
360     PetscInt ndof, noff, n;
361 
362     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
363     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
364     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
365   }
366   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
367   /* Compute leaf-to-root process connectivity */
368   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
369   /* Calculate edges */
370   PetscBTClear(neighbors, rank);
371   for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
372   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
373   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
374   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
375   for(proc = 0, n = 0; proc < size; ++proc) {
376     if (PetscBTLookup(neighbors, proc)) {
377       ranksNew[n]              = proc;
378       localPointsNew[n]        = proc;
379       remotePointsNew[n].index = rank;
380       remotePointsNew[n].rank  = proc;
381       ++n;
382     }
383   }
384   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
385   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
386   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
387   if (sfProcess) {
388     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
389     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
390     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
391     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
392   }
393   PetscFunctionReturn(0);
394 }
395 
396 /*@
397   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
398 
399   Collective on DM
400 
401   Input Parameter:
402 . dm - The DM
403 
404   Output Parameters:
405 + rootSection - The number of leaves for a given root point
406 . rootrank    - The rank of each edge into the root point
407 . leafSection - The number of processes sharing a given leaf point
408 - leafrank    - The rank of each process sharing a leaf point
409 
410   Level: developer
411 
412 .seealso: DMPlexCreateOverlap()
413 @*/
414 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
415 {
416   MPI_Comm        comm;
417   PetscSF         sfPoint;
418   const PetscInt *rootdegree;
419   PetscInt       *myrank, *remoterank;
420   PetscInt        pStart, pEnd, p, nedges;
421   PetscMPIInt     rank;
422   PetscErrorCode  ierr;
423 
424   PetscFunctionBegin;
425   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
426   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
427   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
428   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
429   /* Compute number of leaves for each root */
430   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
431   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
432   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
433   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
434   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
435   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
436   /* Gather rank of each leaf to root */
437   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
438   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
439   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
440   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
441   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
442   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
443   ierr = PetscFree(myrank);CHKERRQ(ierr);
444   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
445   /* Distribute remote ranks to leaves */
446   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
447   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
448   PetscFunctionReturn(0);
449 }
450 
451 /*@C
452   DMPlexCreateOverlap - Compute owner information for shared points. This basically gets two-sided for an SF.
453 
454   Collective on DM
455 
456   Input Parameters:
457 + dm          - The DM
458 . levels      - Number of overlap levels
459 . rootSection - The number of leaves for a given root point
460 . rootrank    - The rank of each edge into the root point
461 . leafSection - The number of processes sharing a given leaf point
462 - leafrank    - The rank of each process sharing a leaf point
463 
464   Output Parameters:
465 + ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
466 
467   Level: developer
468 
469 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
470 @*/
471 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
472 {
473   MPI_Comm           comm;
474   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
475   PetscSF            sfPoint, sfProc;
476   const PetscSFNode *remote;
477   const PetscInt    *local;
478   const PetscInt    *nrank, *rrank;
479   PetscInt          *adj = NULL;
480   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
481   PetscMPIInt        rank, size;
482   PetscBool          useCone, useClosure, flg;
483   PetscErrorCode     ierr;
484 
485   PetscFunctionBegin;
486   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
487   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
488   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
489   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
490   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
491   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
492   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
493   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
494   /* Handle leaves: shared with the root point */
495   for (l = 0; l < nleaves; ++l) {
496     PetscInt adjSize = PETSC_DETERMINE, a;
497 
498     ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
499     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
500   }
501   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
502   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
503   /* Handle roots */
504   for (p = pStart; p < pEnd; ++p) {
505     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
506 
507     if ((p >= sStart) && (p < sEnd)) {
508       /* Some leaves share a root with other leaves on different processes */
509       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
510       if (neighbors) {
511         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
512         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
513         for (n = 0; n < neighbors; ++n) {
514           const PetscInt remoteRank = nrank[noff+n];
515 
516           if (remoteRank == rank) continue;
517           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
518         }
519       }
520     }
521     /* Roots are shared with leaves */
522     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
523     if (!neighbors) continue;
524     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
525     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
526     for (n = 0; n < neighbors; ++n) {
527       const PetscInt remoteRank = rrank[noff+n];
528 
529       if (remoteRank == rank) continue;
530       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
531     }
532   }
533   ierr = PetscFree(adj);CHKERRQ(ierr);
534   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
535   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
536   /* Add additional overlap levels */
537   for (l = 1; l < levels; l++) {
538     /* Propagate point donations over SF to capture remote connections */
539     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
540     /* Add next level of point donations to the label */
541     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
542   }
543   /* We require the closure in the overlap */
544   ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
545   if (useCone || !useClosure) {
546     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
547   }
548   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
549   if (flg) {
550     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
551   }
552   /* Make global process SF and invert sender to receiver label */
553   {
554     /* Build a global process SF */
555     PetscSFNode *remoteProc;
556     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
557     for (p = 0; p < size; ++p) {
558       remoteProc[p].rank  = p;
559       remoteProc[p].index = rank;
560     }
561     ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
562     ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
563     ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
564   }
565   ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
566   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
567   /* Add owned points, except for shared local points */
568   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
569   for (l = 0; l < nleaves; ++l) {
570     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
571     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
572   }
573   /* Clean up */
574   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
575   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
576   PetscFunctionReturn(0);
577 }
578 
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF

  Collective on DM

  Input Parameters:
+ dm          - The DM
- overlapSF   - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  /* Tag every local point with its depth stratum ... */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  /* ... and learn the stratum of each incoming overlap point from its owner.
     NOTE(review): remoteDepths entries are initialized to -1; presumably every
     leaf is covered by the broadcast, otherwise the -1 would index out of
     bounds below -- confirm overlapSF has no disconnected leaves. */
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* NOTE(review): the shift arithmetic below appears to assume the DMPlex point
     numbering cells (depth dim), then vertices (depth 0), then intermediate
     strata -- confirm against DMPlexStratify's ordering convention */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Form the overlap SF build an SF that describes the full overlap migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        /* Overwrite the self-mapping with the true remote owner */
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points, appended at the end of their stratum */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  /* ilocal/iremote ownership passes to the SF via PETSC_OWN_POINTER */
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
689 
690 /*@
691   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
692 
693   Input Parameter:
694 + dm          - The DM
695 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
696 
697   Output Parameter:
698 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
699 
700   Level: developer
701 
702 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
703 @*/
704 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
705 {
706   MPI_Comm           comm;
707   PetscMPIInt        rank, size;
708   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
709   PetscInt          *pointDepths, *remoteDepths, *ilocal;
710   PetscInt          *depthRecv, *depthShift, *depthIdx;
711   PetscInt           hybEnd[4];
712   const PetscSFNode *iremote;
713   PetscErrorCode     ierr;
714 
715   PetscFunctionBegin;
716   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
717   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
718   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
719   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
720   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
721   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
722   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
723 
724   /* Before building the migration SF we need to know the new stratum offsets */
725   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
726   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
727   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
728   for (d = 0; d < depth+1; ++d) {
729     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
730     for (p = pStart; p < pEnd; ++p) {
731       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
732         pointDepths[p] = 2 * d;
733       } else {
734         pointDepths[p] = 2 * d + 1;
735       }
736     }
737   }
738   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
739   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
740   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
741   /* Count recevied points in each stratum and compute the internal strata shift */
742   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
743   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
744   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
745   depthShift[2*depth+1] = 0;
746   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
747   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
748   depthShift[0] += depthRecv[1];
749   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
750   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
751   for (d = 2 * depth-1; d > 2; --d) {
752     PetscInt e;
753 
754     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
755   }
756   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
757   /* Derive a new local permutation based on stratified indices */
758   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
759   for (p = 0; p < nleaves; ++p) {
760     const PetscInt dep = remoteDepths[p];
761 
762     ilocal[p] = depthShift[dep] + depthIdx[dep];
763     depthIdx[dep]++;
764   }
765   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
766   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
767   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
768   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
769   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
770   PetscFunctionReturn(0);
771 }
772 
773 /*@
774   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
775 
776   Collective on DM
777 
778   Input Parameters:
779 + dm - The DMPlex object
780 . pointSF - The PetscSF describing the communication pattern
781 . originalSection - The PetscSection for existing data layout
782 - originalVec - The existing data
783 
784   Output Parameters:
785 + newSection - The PetscSF describing the new data layout
786 - newVec - The new data
787 
788   Level: developer
789 
790 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
791 @*/
792 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
793 {
794   PetscSF        fieldSF;
795   PetscInt      *remoteOffsets, fieldSize;
796   PetscScalar   *originalValues, *newValues;
797   PetscErrorCode ierr;
798 
799   PetscFunctionBegin;
800   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
801   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
802 
803   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
804   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
805   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
806 
807   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
808   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
809   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
810   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
811   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
812   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
813   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
814   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
815   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
816   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
817   PetscFunctionReturn(0);
818 }
819 
820 /*@
821   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
822 
823   Collective on DM
824 
825   Input Parameters:
826 + dm - The DMPlex object
827 . pointSF - The PetscSF describing the communication pattern
828 . originalSection - The PetscSection for existing data layout
829 - originalIS - The existing data
830 
831   Output Parameters:
832 + newSection - The PetscSF describing the new data layout
833 - newIS - The new data
834 
835   Level: developer
836 
837 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
838 @*/
839 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
840 {
841   PetscSF         fieldSF;
842   PetscInt       *newValues, *remoteOffsets, fieldSize;
843   const PetscInt *originalValues;
844   PetscErrorCode  ierr;
845 
846   PetscFunctionBegin;
847   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
848   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
849 
850   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
851   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
852 
853   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
854   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
855   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
856   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
857   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
858   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
859   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
860   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
861   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
862   PetscFunctionReturn(0);
863 }
864 
865 /*@
866   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
867 
868   Collective on DM
869 
870   Input Parameters:
871 + dm - The DMPlex object
872 . pointSF - The PetscSF describing the communication pattern
873 . originalSection - The PetscSection for existing data layout
874 . datatype - The type of data
875 - originalData - The existing data
876 
877   Output Parameters:
878 + newSection - The PetscSection describing the new data layout
879 - newData - The new data
880 
881   Level: developer
882 
883 .seealso: DMPlexDistribute(), DMPlexDistributeField()
884 @*/
885 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
886 {
887   PetscSF        fieldSF;
888   PetscInt      *remoteOffsets, fieldSize;
889   PetscMPIInt    dataSize;
890   PetscErrorCode ierr;
891 
892   PetscFunctionBegin;
893   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
894   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
895 
896   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
897   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
898   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
899 
900   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
901   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
902   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
903   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
904   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
905   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
906   PetscFunctionReturn(0);
907 }
908 
/* Distribute the cone section, cone points, and cone orientations of dm to dmParallel over
   the migration SF, renumbering cone points into the new local numbering, then symmetrize
   and stratify the parallel mesh.
   - original: optional local-to-global map for the source points (NULL if source numbering is global)
   - renumbering: global-to-local map for the target points */
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);

  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    /* Track the maximum cone size over the newly received points */
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  /* If a source local-to-global map was given, translate cone points to global numbers first */
  if (original) {
    PetscInt numCones;

    ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
  } else {
    globCones = cones;
  }
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
  if (original) {
    ierr = PetscFree(globCones);CHKERRQ(ierr);
  }
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  /* Map received global cone points into the new local numbering; IS_GTOLM_MASK marks unmapped points with -1 */
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    /* Any -1 left by the masking above means a cone point was not covered by the migration SF */
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  /* Cone orientations need no renumbering, only communication */
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify DMPlex */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  {
    /* Propagate adjacency settings from the source mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1004 
/* Distribute the local coordinate section and coordinate vector of dm to dmParallel over
   the migration SF, and copy the periodicity information */
static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  PetscBool        isper;
  const char      *name;
  const PetscReal *maxCell, *L;
  const DMBoundaryType *bd;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    /* Local coordinates are a serial (per-process) vector */
    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Carry over the block size (spatial dimension); the DM holds its own reference to newCoordinates */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1040 
/* Distribute all labels of dm to dmParallel over the migration SF.
   Here we are assuming that process 0 always has everything: label count, label names,
   and the "depth" test are all broadcast from rank 0. */
static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  DM_Plex         *mesh = (DM_Plex*) dm->data;
  MPI_Comm         comm;
  DMLabel          depthLabel;
  PetscMPIInt      rank;
  PetscInt         depth, d, numLabels, numLocalLabels, l;
  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
  PetscObjectState depthState = -1;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* If the user has changed the depth label, communicate it instead */
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
  if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
  /* Send the depth label if any process saw its state change since it was recorded in the mesh */
  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
  if (sendDepth) {
    /* Drop the depth label created by stratification so it can be replaced by the distributed one */
    ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
    ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
  }
  /* Everyone must have either the same number of labels, or none */
  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
    const char *name = NULL;

    if (hasLabels) {
      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
      ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
    }
    /* Rank 0 decides whether this label is the depth label */
    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isDepth && !sendDepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    if (isDepth) {
      /* Put in any missing strata which can occur if users are managing the depth label themselves */
      PetscInt gdepth;

      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
      for (d = 0; d <= gdepth; ++d) {
        PetscBool has;

        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
      }
    }
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    /* Put the output flag in the new label */
    if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1113 
/* Mark each local point of dm as hybrid or not, communicate the flags over the migration SF,
   and reconstruct the hybrid point bounds (hybridPointMax per depth) on dmParallel */
static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscBool      *isHybrid, *isHybridParallel;
  PetscInt        dim, depth, d;
  PetscInt        pStart, pEnd, pStartP, pEndP;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
  /* Calloc zero-initializes, so points default to non-hybrid */
  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= depth; d++) {
    /* For uninterpolated meshes (depth 1), stratum 1 holds the cells, indexed by dim */
    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];

    if (hybridMax >= 0) {
      PetscInt sStart, sEnd, p;

      /* All points at or past hybridMax in this stratum are hybrid */
      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
    }
  }
  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
  for (d = 0; d <= depth; d++) {
    PetscInt sStart, sEnd, p, dd;

    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
    dd = (depth == 1 && d == 1) ? dim : d;
    /* The first hybrid point in a stratum marks the hybrid bound; hybrid points trail the stratum */
    for (p = sStart; p < sEnd; p++) {
      if (isHybridParallel[p-pStartP]) {
        pmesh->hybridPointMax[dd] = p;
        break;
      }
    }
  }
  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1160 
/* Distribute the mesh tree (parent/child constraint relations) of dm to dmParallel over the
   migration SF, renumbering parent points into the new local numbering.
   - original: optional local-to-global map for the source points (NULL if source numbering is global)
   - renumbering: global-to-local map for the target points */
static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);

  /* Set up tree */
  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs, *globParents;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
    ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
    /* If a source local-to-global map was given, translate parent points to global numbers first */
    if (original) {
      PetscInt numParents;

      ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
      ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
    }
    else {
      globParents = origParents;
    }
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
    if (original) {
      ierr = PetscFree(globParents);CHKERRQ(ierr);
    }
    /* Child IDs need no renumbering, only communication */
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    /* Map received global parent points into the new local numbering; IS_GTOLM_MASK marks unmapped points with -1 */
    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    {
      /* Any -1 left by the masking above means a parent point was not covered by the migration SF */
      PetscInt  p;
      PetscBool valid = PETSC_TRUE;
      for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
      }
      if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
    }
#endif
    ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
    }
    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
  }
  pmesh->useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}
1239 
/* Build the point SF of dmParallel from the migration SF: resolve a unique owner for each
   migrated point via an MPI_MAXLOC reduction, then record every non-owned local point as a
   ghost leaf pointing at its owner */
PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscMPIInt            rank, size;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* Start with all roots unclaimed (-1) and broadcast so leaves see current claims */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* MAXLOC reduction picks the highest-ranked bid as the owner; broadcast the result back to leaves */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    /* Every leaf owned by another process becomes a ghost point in the point SF */
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  {
    /* Propagate adjacency settings from the source mesh */
    PetscBool useCone, useClosure, useAnchors;

    ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
    ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1316 
1317 
1318 /*@
1319   DMPlexRebalanceSharedPoints - Redistribute points in the plex that are shared in order to achieve better balancing.
1320 
1321   Input parameters:
1322   + dm - The DMPlex object
1323 
1324   Level: user
1325 
1326 @*/
1327 
1328 PetscErrorCode DMPlexRebalanceSharedPoints(DM dm)
1329 {
1330   PetscSF sf;
1331   PetscInt ierr, i, j;
1332   PetscInt vBegin, vEnd, nroots, nleafs, pStart, pEnd, sumDegrees;
1333   const PetscInt *degree, *ilocal;
1334   const PetscSFNode *iremote; /* Do I need to free these? */
1335   PetscBool *toBalance, *isLeaf, *isExclusivelyOwned, *isNonExclusivelyOwned;
1336   PetscInt numExclusivelyOwned, numNonExclusivelyOwned, numLocalVertices;
1337   PetscMPIInt rank, size;
1338   PetscInt *numLocalVerticesAllProcesses, *cumSumVertices, *cumSumDegrees, *globalNumbersOfLocalOwnedVertices, *locationsOfLeafs, *rankOnLeafs, *remoteLocalPointOfLeafs, *points, *leafGlobalNumbers;
1339   PetscInt offset, counter;
1340   PetscInt lenadjncy, numNonExclusivelyOwnedConnectTo, numLeafs;
1341   PetscInt *xadj, *adjncy, *vtxwgt;
1342   PetscInt lenxadj;
1343 
1344   PetscInt *adjwgt=NULL;
1345   PetscInt *part, *options;
1346   PetscInt nparts, wgtflag, numflag, ncon, ncommonnodes, edgecut;
1347   real_t *ubvec;
1348   PetscInt *firstVertices, *renumbering;
1349   PetscInt failed, failedGlobal;
1350   MPI_Comm comm;
1351   PetscInt leafCounter;
1352   PetscInt *leafsNew;
1353   PetscSFNode *leafLocationsNew;
1354 
1355   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1356 
1357   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1358   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1359 
1360   /* Figure out all points in the plex that we are interested in balancing.  In
1361    * this case we are just interested in vertices. Should generalize that at
1362    * some point.  */
1363   ierr = DMPlexGetDepthStratum(dm, 0, &vBegin, &vEnd);CHKERRQ(ierr);
1364   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1365   ierr = PetscMalloc1(pEnd-pStart, &toBalance);CHKERRQ(ierr);
1366   for(i=0; i<pEnd-pStart; i++) {
1367     toBalance[i] = (i-pStart>=vBegin && i-pStart<vEnd);
1368   }
1369 
1370   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
1371   ierr = PetscSFComputeDegreeBegin(sf, &degree);CHKERRQ(ierr);
1372   ierr = PetscSFComputeDegreeEnd(sf, &degree);CHKERRQ(ierr);
1373   sumDegrees = 0;
1374   for(i=0; i<pEnd-pStart; i++) {
1375     sumDegrees += degree[i];
1376   }
1377 
1378 
1379   ierr = PetscSFGetGraph(sf, &nroots, &nleafs, &ilocal, &iremote); CHKERRQ(ierr);
1380   ierr = PetscMalloc1(pEnd-pStart, &isLeaf);CHKERRQ(ierr);
1381   ierr = PetscMalloc1(pEnd-pStart, &isNonExclusivelyOwned);CHKERRQ(ierr);
1382   ierr = PetscMalloc1(pEnd-pStart, &isExclusivelyOwned);CHKERRQ(ierr);
1383   for(i=0; i<pEnd-pStart; i++) {
1384     isLeaf[i] = PETSC_FALSE;
1385     isNonExclusivelyOwned[i] = PETSC_FALSE;
1386     isExclusivelyOwned[i] = PETSC_FALSE;
1387   }
1388   for(i=0; i<nleafs; i++) {
1389     isLeaf[ilocal[i]] = PETSC_TRUE;
1390   }
1391 
1392   numExclusivelyOwned=0;
1393   numNonExclusivelyOwned=0;
1394   for(i=0; i<pEnd-pStart; i++) {
1395     if(toBalance[i]) {
1396       if(degree[i] > 0) {
1397         isNonExclusivelyOwned[i] = PETSC_TRUE;
1398         numNonExclusivelyOwned += 1;
1399       } else {
1400         if(!isLeaf[i]) {
1401           isExclusivelyOwned[i] = PETSC_TRUE;
1402           numExclusivelyOwned += 1;
1403         }
1404       }
1405     }
1406   }
1407 
1408 
1409   /* We build a graph with one vertex per core representing the exclusively owned points
1410    * and then one vertex per nonExclusively owned point. */
1411   ierr = PetscMalloc1(size, &numLocalVerticesAllProcesses);CHKERRQ(ierr);
1412   numLocalVerticesAllProcesses[rank] = 1 + numNonExclusivelyOwned;
1413   /* For numbering purposes we need to figure out how many vertices are on each core. */
1414   MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,numLocalVerticesAllProcesses,1,MPIU_INT,comm);CHKERRQ(ierr);
1415 
1416   ierr = PetscMalloc1(size+1, &cumSumVertices);CHKERRQ(ierr);
1417   cumSumVertices[0] = 0;
1418   for(i=1; i<=size; i++) {
1419     cumSumVertices[i] = cumSumVertices[i-1] + numLocalVerticesAllProcesses[i-1];
1420   }
1421   ierr = PetscMalloc1(pEnd-pStart+1, &cumSumDegrees);CHKERRQ(ierr);
1422   cumSumDegrees[0] = 0;
1423   for(i=1; i<=pEnd-pStart; i++) {
1424     cumSumDegrees[i] = cumSumDegrees[i-1] + degree[i-1];
1425   }
1426   ierr = PetscMalloc1(pEnd-pStart, &globalNumbersOfLocalOwnedVertices);CHKERRQ(ierr);
1427 
1428   offset = cumSumVertices[rank];
1429   counter = 0;
1430   for(i=0; i<pEnd-pStart; i++) {
1431     if(toBalance[i]) {
1432       if(degree[i] > 0) {
1433         globalNumbersOfLocalOwnedVertices[i] = counter + 1 + offset;
1434         counter++;
1435       }
1436     }
1437   }
1438 
1439   /* get the location of my leafs */
1440   ierr = PetscMalloc1(sumDegrees, &locationsOfLeafs);CHKERRQ(ierr);
1441   ierr = PetscMalloc1(pEnd-pStart, &rankOnLeafs);CHKERRQ(ierr);
1442   for(i=0; i<pEnd-pStart; i++) {
1443     rankOnLeafs[i] = rank;
1444   }
1445   ierr = PetscSFGatherBegin(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
1446   ierr = PetscSFGatherEnd(sf, MPIU_INT, rankOnLeafs, locationsOfLeafs);CHKERRQ(ierr);
1447 
1448   /* get the remote local points of my leaves */
1449   ierr = PetscMalloc1(sumDegrees, &remoteLocalPointOfLeafs);CHKERRQ(ierr);
1450   ierr = PetscMalloc1(pEnd-pStart, &points);CHKERRQ(ierr);
1451   for(i=0; i<pEnd-pStart; i++) {
1452     points[i] = pStart+i;
1453   }
1454   ierr = PetscSFGatherBegin(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
1455   ierr = PetscSFGatherEnd(sf, MPIU_INT, points, remoteLocalPointOfLeafs);CHKERRQ(ierr);
1456 
1457   /* send the global numbers of vertices I own to the leafs so that they know to connect to it */
1458   ierr = PetscMalloc1(pEnd-pStart, &leafGlobalNumbers);CHKERRQ(ierr);
1459   ierr = PetscSFBcastBegin(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
1460   ierr = PetscSFBcastEnd(sf, MPIU_INT, globalNumbersOfLocalOwnedVertices, leafGlobalNumbers);CHKERRQ(ierr);
1461 
1462     /*vtxdist = cum_sum_vertices*/
1463 
1464 
1465   numNonExclusivelyOwnedConnectTo = 0;
1466   numLeafs = 0;
1467   for(i=0; i<pEnd-pStart; i++) {
1468     if(toBalance[i]) {
1469       if(isNonExclusivelyOwned[i]) {
1470         numNonExclusivelyOwnedConnectTo += 1 + degree[i];
1471       }
1472       if(isLeaf[i]) {
1473         numLeafs++;
1474       }
1475     }
1476   }
1477 
1478   lenxadj = 2 + numNonExclusivelyOwned;
1479   ierr = PetscMalloc1(lenxadj, &xadj);CHKERRQ(ierr);
1480   xadj[0] = 0;
1481   xadj[1] = numNonExclusivelyOwned + numLeafs;
1482   counter = 2;
1483   for(i=0; i<pEnd-pStart; i++) {
1484     if(toBalance[i]) {
1485       if(isNonExclusivelyOwned[i]) {
1486         xadj[counter] = xadj[counter-1] + degree[i] + 1;
1487         counter++;
1488       }
1489     }
1490   }
1491   lenadjncy = xadj[counter-1];
1492   ierr = PetscMalloc1(1 + numNonExclusivelyOwned, &vtxwgt);CHKERRQ(ierr);
1493   vtxwgt[0] = numExclusivelyOwned;
1494   for(i=0; i<numNonExclusivelyOwned; i++) {
1495     vtxwgt[i+1] = 1;
1496   }
1497 
1498   ierr = PetscMalloc1(lenadjncy, &adjncy);CHKERRQ(ierr);
1499   counter = 0;
1500   for(i=0; i<pEnd-pStart; i++) {
1501     if(toBalance[i]) {
1502       if(isNonExclusivelyOwned[i]) {
1503         adjncy[counter] = globalNumbersOfLocalOwnedVertices[i];
1504         counter++;
1505       } else if (isLeaf[i]) {
1506         adjncy[counter] = leafGlobalNumbers[i];
1507         counter++;
1508       }
1509     }
1510   }
1511 
1512   for(i=0; i<pEnd-pStart; i++) {
1513     if(toBalance[i]) {
1514       if(isNonExclusivelyOwned[i]) {
1515         adjncy[counter] = cumSumVertices[rank];
1516         counter++;
1517         for(j=0; j<degree[i]; j++) {
1518           adjncy[counter] = cumSumVertices[locationsOfLeafs[cumSumDegrees[i]+j]];
1519           counter++;
1520         }
1521       }
1522     }
1523   }
1524 
1525   nparts = size;
1526   wgtflag = 2;
1527 
1528   numflag = 0;
1529   ncon = 1;
1530   ncommonnodes = 2;
1531   real_t *tpwgts;
1532   ierr = PetscMalloc1(ncon * nparts, &tpwgts);CHKERRQ(ierr);
1533   for(i=0; i<ncon*nparts; i++) {
1534     tpwgts[i] = 1./(ncon*nparts);
1535   }
1536   ierr = PetscMalloc1(1, &ubvec);CHKERRQ(ierr);
1537   ubvec[0] = 1.05;
1538   ierr = PetscMalloc1(1, &options);CHKERRQ(ierr);
1539   options[0] = 0;
1540 
1541   ierr = PetscMalloc1(cumSumVertices[rank+1]-cumSumVertices[rank], &part);CHKERRQ(ierr);
1542   PetscStackPush("ParMETIS_V3_PartKway");
1543   ierr = ParMETIS_V3_PartKway(cumSumVertices, xadj, adjncy, vtxwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &edgecut, part, &comm);
1544   PetscStackPop;
1545 
1546   ierr = PetscFree(ubvec);CHKERRQ(ierr);
1547   ierr = PetscFree(tpwgts);CHKERRQ(ierr);
1548   ierr = PetscFree(options);CHKERRQ(ierr);
1549 
1550   /* Now rename things so that the vertex resembling the exclusively owned points stays on the same rank */
1551 
1552   ierr = PetscMalloc1(size, &firstVertices);CHKERRQ(ierr);
1553   ierr = PetscMalloc1(size, &renumbering);CHKERRQ(ierr);
1554   firstVertices[rank] = part[0];
1555   MPI_Allgather(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,firstVertices,1,MPIU_INT,comm);CHKERRQ(ierr);
1556   for(i=0; i<size; i++) {
1557     renumbering[firstVertices[i]] = i;
1558   }
1559   for(i=0; i<cumSumVertices[rank+1]-cumSumVertices[rank]; i++) {
1560     part[i] = renumbering[part[i]];
1561   }
1562   failed = (PetscInt)(part[0] != rank);
1563   MPI_Allreduce(&failed, &failedGlobal, 1, MPIU_INT, MPI_SUM, comm);
1564 
1565   if(failedGlobal > 0) {
1566     PetscFunctionReturn(1);
1567   }
1568 
1569   ierr = PetscFree(firstVertices);CHKERRQ(ierr);
1570   ierr = PetscFree(renumbering);CHKERRQ(ierr);
1571   ierr = PetscFree(xadj);CHKERRQ(ierr);
1572   ierr = PetscFree(adjncy);CHKERRQ(ierr);
1573 
1574 
1575   PetscInt *newOwners, *newNumbers;
1576   ierr = PetscMalloc1(pEnd-pStart, &newOwners);CHKERRQ(ierr);
1577   ierr = PetscMalloc1(pEnd-pStart, &newNumbers);CHKERRQ(ierr);
1578   for(i=0; i<pEnd-pStart; i++) {
1579     newOwners[i] = -1;
1580     newNumbers[i] = -1;
1581   }
1582   counter = 1;
1583   for(i=0; i<pEnd-pStart; i++) {
1584     if(toBalance[i]) {
1585       if(isNonExclusivelyOwned[i]) {
1586         PetscInt oldNumber, newNumber, oldOwner, newOwner;
1587         oldNumber = i;
1588         oldOwner = rank;
1589         newOwner = part[counter];
1590         if(oldOwner == newOwner) {
1591           newNumber = oldNumber;
1592         } else {
1593           for(j=0; j<degree[i]; j++) {
1594             if(locationsOfLeafs[cumSumDegrees[oldNumber]+j] == newOwner) {
1595               newNumber = remoteLocalPointOfLeafs[cumSumDegrees[oldNumber]+j];
1596               break;
1597             }
1598           }
1599         }
1600         newOwners[oldNumber] = newOwner;
1601         newNumbers[oldNumber] = newNumber;
1602         counter++;
1603       }
1604     }
1605   }
1606 
1607   PetscSFBcastBegin(sf, MPIU_INT, newOwners, newOwners);
1608   PetscSFBcastEnd(sf, MPIU_INT, newOwners, newOwners);
1609   PetscSFBcastBegin(sf, MPIU_INT, newNumbers, newNumbers);
1610   PetscSFBcastEnd(sf, MPIU_INT, newNumbers, newNumbers);
1611 
1612   leafCounter=0;
1613   for(i=0; i<pEnd-pStart; i++) {
1614     if(toBalance[i]){
1615       if(newOwners[i] >= 0 && newOwners[i] != rank) {
1616         leafCounter++;
1617       }
1618     } else {
1619       if(isLeaf[i]) {
1620         leafCounter++;
1621       }
1622     }
1623   }
1624 
1625   ierr = PetscMalloc1(leafCounter, &leafsNew);CHKERRQ(ierr);
1626   ierr = PetscMalloc1(leafCounter, &leafLocationsNew);CHKERRQ(ierr);
1627 
1628   counter = 0;
1629   leafCounter = 0;
1630   for(i=0; i<pEnd-pStart; i++) {
1631     if(toBalance[i]){
1632       if(isLeaf[i]) {
1633         counter++;
1634       }
1635       if(newOwners[i] >= 0 && newOwners[i] != rank) {
1636         leafsNew[leafCounter] = i;
1637         leafLocationsNew[leafCounter].rank = newOwners[i];
1638         leafLocationsNew[leafCounter].index = newNumbers[i];
1639         leafCounter++;
1640       }
1641     } else {
1642       if(isLeaf[i]) {
1643         leafsNew[leafCounter] = i;
1644         leafLocationsNew[leafCounter].rank = iremote[counter].rank;
1645         leafLocationsNew[leafCounter].index = iremote[counter].index;
1646         leafCounter++;
1647         counter++;
1648       }
1649     }
1650   }
1651 
1652   ierr = PetscSFSetGraph(sf, nroots, leafCounter, leafsNew, PETSC_OWN_POINTER, leafLocationsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
1653 
1654 
1655   ierr = PetscFree(toBalance);CHKERRQ(ierr);
1656   ierr = PetscFree(isLeaf);CHKERRQ(ierr);
1657   ierr = PetscFree(numLocalVerticesAllProcesses);CHKERRQ(ierr);
1658   ierr = PetscFree(cumSumVertices);CHKERRQ(ierr);
1659   ierr = PetscFree(locationsOfLeafs);CHKERRQ(ierr);
1660   ierr = PetscFree(rankOnLeafs);CHKERRQ(ierr);
1661   ierr = PetscFree(remoteLocalPointOfLeafs);CHKERRQ(ierr);
1662   ierr = PetscFree(points);CHKERRQ(ierr);
1663   ierr = PetscFree(leafGlobalNumbers);CHKERRQ(ierr);
1664   ierr = PetscFree(newOwners);CHKERRQ(ierr);
1665   ierr = PetscFree(newNumbers);CHKERRQ(ierr);
1666   PetscFunctionReturn(0);
1667 }
1668 
1669 /*@
1670   DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1671 
1672   Input Parameters:
1673 + dm - The DMPlex object
1674 - flg - Balance the partition?
1675 
1676   Level: intermediate
1677 
1678 .seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1679 @*/
1680 PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1681 {
1682   DM_Plex *mesh = (DM_Plex *)dm->data;
1683 
1684   PetscFunctionBegin;
1685   mesh->partitionBalance = flg;
1686   PetscFunctionReturn(0);
1687 }
1688 
1689 /*@
1690   DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1691 
1692   Input Parameter:
1693 + dm - The DMPlex object
1694 
1695   Output Parameter:
1696 + flg - Balance the partition?
1697 
1698   Level: intermediate
1699 
1700 .seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1701 @*/
1702 PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1703 {
1704   DM_Plex *mesh = (DM_Plex *)dm->data;
1705 
1706   PetscFunctionBegin;
1707   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1708   PetscValidIntPointer(flg, 2);
1709   *flg = mesh->partitionBalance;
1710   PetscFunctionReturn(0);
1711 }
1712 
/*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM

  Notes:
  When ownership is true, each process puts in a bid for every shared point it
  holds and the winner of a MAXLOC reduction becomes the owner; with partition
  balancing enabled the bids are randomly shifted ranks so that ownership
  spreads more evenly. When ownership is false, the current owners simply
  publish the new local numbers of their points.

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
{
  PetscMPIInt        rank, size;
  PetscInt           p, nroots, nleaves, idx, npointLeaves;
  PetscInt          *pointLocal;
  const PetscInt    *leaves;
  const PetscSFNode *roots;
  PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
  Vec                shifts;
  const PetscInt     numShifts = 13759;
  const PetscScalar *shift = NULL;
  const PetscBool    shiftDebug = PETSC_FALSE;
  PetscBool          balance;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);

  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
  if (ownership) {
    /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
    if (balance) {
      PetscRandom r;

      /* A fixed-size table (numShifts entries) of random shifts, indexed by the
       * remote point number modulo the table size */
      ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
      ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
      ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
      ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
      ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
      ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
      ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
    }

    /* Point ownership vote: Process with highest rank owns shared points */
    for (p = 0; p < nleaves; ++p) {
      if (shiftDebug) {
        ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr);
      }
      /* Either put in a bid or we know we own it */
      leafNodes[p].rank  = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts]) : 0))%size;
      leafNodes[p].index = p;
    }
    for (p = 0; p < nroots; p++) {
      /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rootNodes[p].rank  = -3;
      rootNodes[p].index = -3;
    }
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
    if (balance) {
      /* We've voted, now we need to get the rank.  When we're balancing the partition, the "rank" in rootNotes is not
       * the rank but rather (rank + random)%size.  So we do another reduction, voting the same way, but sending the
       * rank instead of the index. */
      PetscSFNode *rootRanks = NULL;
      ierr = PetscMalloc1(nroots, &rootRanks);CHKERRQ(ierr);
      for (p = 0; p < nroots; p++) {
        rootRanks[p].rank = -3;
        rootRanks[p].index = -3;
      }
      for (p = 0; p < nleaves; p++) leafNodes[p].index = rank;
      ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
      ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootRanks, MPI_MAXLOC);CHKERRQ(ierr);
      for (p = 0; p < nroots; p++) rootNodes[p].rank = rootRanks[p].index;
      ierr = PetscFree(rootRanks);CHKERRQ(ierr);
    }
  } else {
    /* No vote: current owners keep their points and record the new local ids */
    for (p = 0; p < nroots; p++) {
      rootNodes[p].index = -1;
      rootNodes[p].rank = rank;
    };
    for (p = 0; p < nleaves; p++) {
      /* Write new local id into old location */
      if (roots[p].rank == rank) {
        rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
      }
    }
  }
  /* Tell every leaf who won the vote for the point it references */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);

  /* Leaves whose winning owner is another process become leaves of the point SF */
  for (npointLeaves = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) npointLeaves++;
  }
  ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
  for (idx = 0, p = 0; p < nleaves; p++) {
    if (leafNodes[p].rank != rank) {
      pointLocal[idx] = p;
      pointRemote[idx] = leafNodes[p];
      idx++;
    }
  }
  if (shift) {
    ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
    ierr = VecDestroy(&shifts);CHKERRQ(ierr);
  }
  if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
  /* The new SF takes ownership of pointLocal and pointRemote */
  ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1836 
1837 /*@C
1838   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1839 
1840   Collective on DM and PetscSF
1841 
1842   Input Parameter:
1843 + dm       - The source DMPlex object
1844 . sf       - The star forest communication context describing the migration pattern
1845 
1846   Output Parameter:
1847 - targetDM - The target DMPlex object
1848 
1849   Level: intermediate
1850 
1851 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1852 @*/
1853 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1854 {
1855   MPI_Comm               comm;
1856   PetscInt               dim, cdim, nroots;
1857   PetscSF                sfPoint;
1858   ISLocalToGlobalMapping ltogMigration;
1859   ISLocalToGlobalMapping ltogOriginal = NULL;
1860   PetscBool              flg;
1861   PetscErrorCode         ierr;
1862 
1863   PetscFunctionBegin;
1864   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1865   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1866   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1867   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1868   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1869   ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr);
1870   ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr);
1871 
1872   /* Check for a one-to-all distribution pattern */
1873   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1874   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1875   if (nroots >= 0) {
1876     IS        isOriginal;
1877     PetscInt  n, size, nleaves;
1878     PetscInt  *numbering_orig, *numbering_new;
1879 
1880     /* Get the original point numbering */
1881     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1882     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1883     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1884     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1885     /* Convert to positive global numbers */
1886     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1887     /* Derive the new local-to-global mapping from the old one */
1888     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1889     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1890     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1891     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1892     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1893     ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1894     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1895   } else {
1896     /* One-to-all distribution pattern: We can derive LToG from SF */
1897     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1898   }
1899   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1900   if (flg) {
1901     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1902     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1903   }
1904   /* Migrate DM data to target DM */
1905   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1906   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1907   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1908   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1909   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1910   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1911   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1912   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1913   PetscFunctionReturn(0);
1914 }
1915 
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution, or NULL if not needed
- dmParallel - The distributed DMPlex object

  Note: If the mesh was not distributed, the output dmParallel will be NULL.

  The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
@*/
PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  MPI_Comm               comm;
  PetscPartitioner       partitioner;
  IS                     cellPart;
  PetscSection           cellPartSection;
  DM                     dmCoord;
  DMLabel                lblPartition, lblMigration;
  PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
  PetscBool              flg, balance;
  PetscMPIInt            rank, size, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidLogicalCollectiveInt(dm, overlap, 2);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);

  if (sf) *sf = NULL;
  *dmParallel = NULL;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  /* Nothing to distribute on a single process */
  if (size == 1) PetscFunctionReturn(0);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  /* Create cell partition */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
  ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
  {
    /* Convert partition to DMLabel */
    PetscInt proc, pStart, pEnd, npoints, poffset;
    const PetscInt *points;
    ierr = DMLabelCreate(PETSC_COMM_SELF, "Point Partition", &lblPartition);CHKERRQ(ierr);
    ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (proc = pStart; proc < pEnd; proc++) {
      ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
      ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
      for (p = poffset; p < poffset+npoints; p++) {
        ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
      }
    }
    ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
  }
  /* Close the partition label under the transitive closure of the cells */
  ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
  {
    /* Build a global process SF */
    PetscSFNode *remoteProc;
    ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
    for (p = 0; p < size; ++p) {
      remoteProc[p].rank  = p;
      remoteProc[p].index = rank;
    }
    ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
  /* Stratify the SF in case we are migrating an already parallel plex */
  ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  sfMigration = sfStratified;
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }

  /* Create non-overlapping parallel DM and migrate internal data */
  ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);

  /* Build the point SF without overlap */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
  ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
  ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
  if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
  if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}

  if (overlap > 0) {
    DM                 dmOverlap;
    PetscInt           nroots, nleaves;
    PetscSFNode       *newRemote;
    const PetscSFNode *oldRemote;
    PetscSF            sfOverlap, sfOverlapPoint;
    /* Add the partition overlap to the distributed DM */
    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
    ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
    *dmParallel = dmOverlap;
    if (flg) {
      ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
      ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
    }

    /* Re-map the migration SF to establish the full migration pattern */
    ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
    ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfOverlapPoint;
  }
  /* Cleanup Partition */
  ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Create sfNatural */
  if (dm->useNatural) {
    PetscSection section;

    ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
    ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
    ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
  }
  /* Cleanup */
  if (sf) {*sf = sfMigration;}
  else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2078 
2079 /*@C
2080   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
2081 
2082   Collective on DM
2083 
  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions
2087 
2088   Output Parameter:
2089 + sf - The PetscSF used for point distribution
2090 - dmOverlap - The overlapping distributed DMPlex object, or NULL
2091 
2092   Note: If the mesh was not distributed, the return value is NULL.
2093 
2094   The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
2095   representation on the mesh.
2096 
2097   Level: intermediate
2098 
2099 .keywords: mesh, elements
2100 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
2101 @*/
2102 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
2103 {
2104   MPI_Comm               comm;
2105   PetscMPIInt            size, rank;
2106   PetscSection           rootSection, leafSection;
2107   IS                     rootrank, leafrank;
2108   DM                     dmCoord;
2109   DMLabel                lblOverlap;
2110   PetscSF                sfOverlap, sfStratified, sfPoint;
2111   PetscErrorCode         ierr;
2112 
2113   PetscFunctionBegin;
2114   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2115   if (sf) PetscValidPointer(sf, 3);
2116   PetscValidPointer(dmOverlap, 4);
2117 
2118   if (sf) *sf = NULL;
2119   *dmOverlap  = NULL;
2120   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2121   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2122   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2123   if (size == 1) PetscFunctionReturn(0);
2124 
2125   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
2126   /* Compute point overlap with neighbouring processes on the distributed DM */
2127   ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
2128   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2129   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
2130   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
2131   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
2132   /* Convert overlap label to stratified migration SF */
2133   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
2134   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
2135   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
2136   sfOverlap = sfStratified;
2137   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
2138   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
2139 
2140   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2141   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2142   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
2143   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
2144   ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
2145 
2146   /* Build the overlapping DM */
2147   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
2148   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
2149   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
2150   /* Build the new point SF */
2151   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
2152   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
2153   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
2154   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
2155   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
2156   /* Cleanup overlap partition */
2157   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
2158   if (sf) *sf = sfOverlap;
2159   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
2160   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
2161   PetscFunctionReturn(0);
2162 }
2163 
2164 /*@C
2165   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
2166   root process of the original's communicator.
2167 
2168   Collective on DM
2169 
2170   Input Parameter:
2171 . dm - the original DMPlex object
2172 
2173   Output Parameters:
2174 + sf - the PetscSF used for point distribution (optional)
2175 - gatherMesh - the gathered DM object, or NULL
2176 
2177   Level: intermediate
2178 
2179 .keywords: mesh
2180 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
2181 @*/
2182 PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
2183 {
2184   MPI_Comm       comm;
2185   PetscMPIInt    size;
2186   PetscPartitioner oldPart, gatherPart;
2187   PetscErrorCode ierr;
2188 
2189   PetscFunctionBegin;
2190   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2191   PetscValidPointer(gatherMesh,2);
2192   *gatherMesh = NULL;
2193   if (sf) *sf = NULL;
2194   comm = PetscObjectComm((PetscObject)dm);
2195   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2196   if (size == 1) PetscFunctionReturn(0);
2197   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
2198   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
2199   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
2200   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
2201   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
2202   ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
2203 
2204   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
2205   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
2206   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
2207   PetscFunctionReturn(0);
2208 }
2209 
2210 /*@C
2211   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
2212 
2213   Collective on DM
2214 
2215   Input Parameter:
2216 . dm - the original DMPlex object
2217 
2218   Output Parameters:
2219 + sf - the PetscSF used for point distribution (optional)
2220 - redundantMesh - the redundant DM object, or NULL
2221 
2222   Level: intermediate
2223 
2224 .keywords: mesh
2225 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
2226 @*/
2227 PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
2228 {
2229   MPI_Comm       comm;
2230   PetscMPIInt    size, rank;
2231   PetscInt       pStart, pEnd, p;
2232   PetscInt       numPoints = -1;
2233   PetscSF        migrationSF, sfPoint, gatherSF;
2234   DM             gatherDM, dmCoord;
2235   PetscSFNode    *points;
2236   PetscErrorCode ierr;
2237 
2238   PetscFunctionBegin;
2239   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2240   PetscValidPointer(redundantMesh,2);
2241   *redundantMesh = NULL;
2242   comm = PetscObjectComm((PetscObject)dm);
2243   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2244   if (size == 1) {
2245     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
2246     *redundantMesh = dm;
2247     if (sf) *sf = NULL;
2248     PetscFunctionReturn(0);
2249   }
2250   ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
2251   if (!gatherDM) PetscFunctionReturn(0);
2252   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2253   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
2254   numPoints = pEnd - pStart;
2255   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
2256   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
2257   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
2258   for (p = 0; p < numPoints; p++) {
2259     points[p].index = p;
2260     points[p].rank  = 0;
2261   }
2262   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
2263   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
2264   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
2265   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
2266   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
2267   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
2268   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
2269   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
2270   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
2271   if (sf) {
2272     PetscSF tsf;
2273 
2274     ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
2275     ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
2276     ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
2277   }
2278   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
2279   ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
2280   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
2281   PetscFunctionReturn(0);
2282 }
2283