xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 23193802fd335d5491b609248d055e9c0d2f9073)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
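/*
  Example (illustrative sketch, not part of the library): a user adjacency callback that
  declares each point adjacent to itself and its cone only. The callback name
  MyConeOnlyAdjacency and the surrounding usage are hypothetical; the contract, per
  DMPlexGetAdjacency_Internal() below, is that *adjSize holds the capacity of adj[] on
  entry and the number of adjacent points written on exit.

    static PetscErrorCode MyConeOnlyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
    {
      const PetscInt *cone;
      PetscInt        coneSize, c, n = 0;
      PetscErrorCode  ierr;

      PetscFunctionBegin;
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      if (coneSize + 1 > *adjSize) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Adjacency array too small");
      adj[n++] = p;
      for (c = 0; c < coneSize; ++c) adj[n++] = cone[c];
      *adjSize = n;
      PetscFunctionReturn(0);
    }

  Registering, and later clearing, the callback:

    ierr = DMPlexSetAdjacencyUser(dm, MyConeOnlyAdjacency, NULL);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUser(dm, NULL, NULL);CHKERRQ(ierr);
*/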
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
56 
57 /*@
58   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useCone - Flag to use the cone first
63 
64   Level: intermediate
65 
66   Notes:
67 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
68 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
69 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
70 
71 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
72 @*/
73 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
74 {
75   PetscDS        prob;
76   PetscBool      useClosure;
77   PetscInt       Nf;
78   PetscErrorCode ierr;
79 
80   PetscFunctionBegin;
81   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
82   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
83   if (!Nf) {
84     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, &useClosure);CHKERRQ(ierr);
85     ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
86   } else {
87     ierr = PetscDSGetAdjacency(prob, 0, NULL, &useClosure);CHKERRQ(ierr);
88     ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
89   }
90   PetscFunctionReturn(0);
91 }
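/*
  Example (illustrative sketch): the flag combinations tabulated in the Notes above map
  onto these calls, assuming a DM dm. For finite elements (adjacency through
  closure(star(p))):

    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);

  and for finite volumes (adjacency through support(cone(p))):

    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
*/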
92 
93 /*@
94   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useCone - Flag to use the cone first
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
112 {
113   PetscDS        prob;
114   PetscInt       Nf;
115   PetscErrorCode ierr;
116 
117   PetscFunctionBegin;
118   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
119   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
120   if (!Nf) {
121     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, useCone, NULL);CHKERRQ(ierr);
122   } else {
123     ierr = PetscDSGetAdjacency(prob, 0, useCone, NULL);CHKERRQ(ierr);
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 /*@
129   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
130 
131   Input Parameters:
132 + dm      - The DM object
133 - useClosure - Flag to use the closure
134 
135   Level: intermediate
136 
137   Notes:
138 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
139 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
140 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
141 
142 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
143 @*/
144 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
145 {
146   PetscDS        prob;
147   PetscBool      useCone;
148   PetscInt       Nf;
149   PetscErrorCode ierr;
150 
151   PetscFunctionBegin;
152   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
153   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
154   if (!Nf) {
155     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, &useCone, NULL);CHKERRQ(ierr);
156     ierr = PetscDSSetAdjacency(prob, PETSC_DEFAULT, useCone, useClosure);CHKERRQ(ierr);
157   } else {
158     ierr = PetscDSGetAdjacency(prob, 0, &useCone, NULL);CHKERRQ(ierr);
159     ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
160   }
161   PetscFunctionReturn(0);
162 }
163 
164 /*@
165   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
166 
167   Input Parameter:
168 . dm      - The DM object
169 
170   Output Parameter:
171 . useClosure - Flag to use the closure
172 
173   Level: intermediate
174 
175   Notes:
176 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
177 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
178 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
179 
180 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
181 @*/
182 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
183 {
184   PetscDS        prob;
185   PetscInt       Nf;
186   PetscErrorCode ierr;
187 
188   PetscFunctionBegin;
189   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
190   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
191   if (!Nf) {
192     ierr = PetscDSGetAdjacency(prob, PETSC_DEFAULT, NULL, useClosure);CHKERRQ(ierr);
193   } else {
194     ierr = PetscDSGetAdjacency(prob, 0, NULL, useClosure);CHKERRQ(ierr);
195   }
196   PetscFunctionReturn(0);
197 }
198 
199 /*@
200   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
201 
202   Input Parameters:
203 + dm      - The DM object
204 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
205 
206   Level: intermediate
207 
208 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
209 @*/
210 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
211 {
212   DM_Plex *mesh = (DM_Plex *) dm->data;
213 
214   PetscFunctionBegin;
215   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
216   mesh->useAnchors = useAnchors;
217   PetscFunctionReturn(0);
218 }
219 
220 /*@
221   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
222 
223   Input Parameter:
224 . dm      - The DM object
225 
226   Output Parameter:
227 . useAnchors - Flag to use the anchors.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
228 
229   Level: intermediate
230 
231 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
232 @*/
233 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
234 {
235   DM_Plex *mesh = (DM_Plex *) dm->data;
236 
237   PetscFunctionBegin;
238   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
239   PetscValidIntPointer(useAnchors, 2);
240   *useAnchors = mesh->useAnchors;
241   PetscFunctionReturn(0);
242 }
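/*
  Example (illustrative sketch): once point-to-point constraints have been registered
  with DMPlexSetAnchors(), enabling anchor substitution makes DMPlexGetAdjacency()
  report anchor points in place of constrained points. The names anchorSection and
  anchorIS are hypothetical placeholders for the caller's constraint description.

    ierr = DMPlexSetAnchors(dm, anchorSection, anchorIS);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
*/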
243 
244 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
245 {
246   const PetscInt *cone = NULL;
247   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
248   PetscErrorCode  ierr;
249 
250   PetscFunctionBeginHot;
251   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
252   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
253   for (c = 0; c <= coneSize; ++c) {
254     const PetscInt  point   = !c ? p : cone[c-1];
255     const PetscInt *support = NULL;
256     PetscInt        supportSize, s, q;
257 
258     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
259     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
260     for (s = 0; s < supportSize; ++s) {
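      /* Scan the current adjacency list for support[s]; if the scan falls off the end,
         the comma expression in the loop condition appends support[s] and ends the loop.
         The same search-or-append idiom recurs in the routines below. */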
261       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
262         if (support[s] == adj[q]) break;
263       }
264       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
265     }
266   }
267   *adjSize = numAdj;
268   PetscFunctionReturn(0);
269 }
270 
271 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
272 {
273   const PetscInt *support = NULL;
274   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
275   PetscErrorCode  ierr;
276 
277   PetscFunctionBeginHot;
278   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
279   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
280   for (s = 0; s <= supportSize; ++s) {
281     const PetscInt  point = !s ? p : support[s-1];
282     const PetscInt *cone  = NULL;
283     PetscInt        coneSize, c, q;
284 
285     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
286     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
287     for (c = 0; c < coneSize; ++c) {
288       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
289         if (cone[c] == adj[q]) break;
290       }
291       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
292     }
293   }
294   *adjSize = numAdj;
295   PetscFunctionReturn(0);
296 }
297 
298 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
299 {
300   PetscInt      *star = NULL;
301   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
302   PetscErrorCode ierr;
303 
304   PetscFunctionBeginHot;
305   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
306   for (s = 0; s < starSize*2; s += 2) {
307     const PetscInt *closure = NULL;
308     PetscInt        closureSize, c, q;
309 
310     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
311     for (c = 0; c < closureSize*2; c += 2) {
312       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
313         if (closure[c] == adj[q]) break;
314       }
315       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
316     }
317     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
318   }
319   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
320   *adjSize = numAdj;
321   PetscFunctionReturn(0);
322 }
323 
324 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
325 {
326   static PetscInt asiz = 0;
327   PetscInt maxAnchors = 1;
328   PetscInt aStart = -1, aEnd = -1;
329   PetscInt maxAdjSize;
330   PetscSection aSec = NULL;
331   IS aIS = NULL;
332   const PetscInt *anchors;
333   DM_Plex *mesh = (DM_Plex *)dm->data;
334   PetscErrorCode  ierr;
335 
336   PetscFunctionBeginHot;
337   if (useAnchors) {
338     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
339     if (aSec) {
340       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
341       maxAnchors = PetscMax(1,maxAnchors);
342       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
343       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
344     }
345   }
346   if (!*adj) {
347     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
348 
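    /* Upper bound on the adjacency size: a transitive closure holds at most
       1 + maxC + ... + maxC^depth points (a geometric series), and similarly for
       supports, so star(closure(p)) and closure(star(p)) are bounded by the
       products formed below */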
349     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
350     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
351     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
352     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
353     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
354     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
355     asiz *= maxAnchors;
356     asiz  = PetscMin(asiz,pEnd-pStart);
357     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
358   }
359   if (*adjSize < 0) *adjSize = asiz;
360   maxAdjSize = *adjSize;
361   if (mesh->useradjacency) {
362     ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
363   } else if (useTransitiveClosure) {
364     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
365   } else if (useCone) {
366     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
367   } else {
368     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
369   }
370   if (useAnchors && aSec) {
371     PetscInt origSize = *adjSize;
372     PetscInt numAdj = origSize;
373     PetscInt i = 0, j;
374     PetscInt *orig = *adj;
375 
376     while (i < origSize) {
377       PetscInt p = orig[i];
378       PetscInt aDof = 0;
379 
380       if (p >= aStart && p < aEnd) {
381         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
382       }
383       if (aDof) {
384         PetscInt aOff;
385         PetscInt s, q;
386 
387         for (j = i + 1; j < numAdj; j++) {
388           orig[j - 1] = orig[j];
389         }
390         origSize--;
391         numAdj--;
392         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
393         for (s = 0; s < aDof; ++s) {
394           for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
395             if (anchors[aOff+s] == orig[q]) break;
396           }
397           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
398         }
399       }
400       else {
401         i++;
402       }
403     }
404     *adjSize = numAdj;
405     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
406   }
407   PetscFunctionReturn(0);
408 }
409 
410 /*@
411   DMPlexGetAdjacency - Return all points adjacent to the given point
412 
413   Input Parameters:
414 + dm - The DM object
415 . p  - The point
416 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
417 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
418 
419   Output Parameters:
420 + adjSize - The number of adjacent points
421 - adj - The adjacent points
422 
423   Level: advanced
424 
425   Notes: The user must PetscFree the adj array if it was not passed in.
426 
427 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
428 @*/
429 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
430 {
431   PetscBool      useCone, useClosure, useAnchors;
432   PetscErrorCode ierr;
433 
434   PetscFunctionBeginHot;
435   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
436   PetscValidPointer(adjSize,3);
437   PetscValidPointer(adj,4);
438   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
439   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
440   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
441   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
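/*
  Example (illustrative sketch): letting DMPlexGetAdjacency() size and allocate the
  output array. Passing adj == NULL requests allocation and adjSize == PETSC_DETERMINE
  requests the internal size estimate; the caller then owns the array. The point p is
  assumed to lie in the chart of dm.

    PetscInt *adj = NULL, adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = PetscPrintf(PETSC_COMM_SELF, "%D is adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);}
    ierr = PetscFree(adj);CHKERRQ(ierr);
*/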
444 
445 /*@
446   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
447 
448   Collective on DM
449 
450   Input Parameters:
451 + dm              - The DM
452 . sfPoint         - The PetscSF which encodes point connectivity
    . rootRankSection - The number of leaves for a given root point
    . rootRanks       - The rank of each edge into the root point
    . leafRankSection - The number of processes sharing a given leaf point
    - leafRanks       - The rank of each process sharing a leaf point
453 
454   Output Parameters:
455 + processRanks - A list of process neighbors, or NULL
456 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
457 
458   Level: developer
459 
460 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
461 @*/
462 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
463 {
464   const PetscSFNode *remotePoints;
465   PetscInt          *localPointsNew;
466   PetscSFNode       *remotePointsNew;
467   const PetscInt    *nranks;
468   PetscInt          *ranksNew;
469   PetscBT            neighbors;
470   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
471   PetscMPIInt        size, proc, rank;
472   PetscErrorCode     ierr;
473 
474   PetscFunctionBegin;
475   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
476   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
477   if (processRanks) {PetscValidPointer(processRanks, 7);}
478   if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
479   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
480   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
481   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
482   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
483   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
484   /* Compute root-to-leaf process connectivity */
485   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
486   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
487   for (p = pStart; p < pEnd; ++p) {
488     PetscInt ndof, noff, n;
489 
490     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
491     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
492     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
493   }
494   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
495   /* Compute leaf-to-neighbor process connectivity */
496   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
497   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
498   for (p = pStart; p < pEnd; ++p) {
499     PetscInt ndof, noff, n;
500 
501     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
502     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
503     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
504   }
505   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
506   /* Compute leaf-to-root process connectivity */
507   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
508   /* Calculate edges */
509   PetscBTClear(neighbors, rank);
510   for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
511   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
512   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
513   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
514   for (proc = 0, n = 0; proc < size; ++proc) {
515     if (PetscBTLookup(neighbors, proc)) {
516       ranksNew[n]              = proc;
517       localPointsNew[n]        = proc;
518       remotePointsNew[n].index = rank;
519       remotePointsNew[n].rank  = proc;
520       ++n;
521     }
522   }
523   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
524   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
525   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
526   if (sfProcess) {
527     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
528     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
529     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
530     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
531   }
532   PetscFunctionReturn(0);
533 }
534 
535 /*@
536   DMPlexDistributeOwnership - Compute owner information for shared points. This basically builds two-sided information for the point SF.
537 
538   Collective on DM
539 
540   Input Parameter:
541 . dm - The DM
542 
543   Output Parameters:
544 + rootSection - The number of leaves for a given root point
545 . rootrank    - The rank of each edge into the root point
546 . leafSection - The number of processes sharing a given leaf point
547 - leafrank    - The rank of each process sharing a leaf point
548 
549   Level: developer
550 
551 .seealso: DMPlexCreateOverlap()
552 @*/
553 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
554 {
555   MPI_Comm        comm;
556   PetscSF         sfPoint;
557   const PetscInt *rootdegree;
558   PetscInt       *myrank, *remoterank;
559   PetscInt        pStart, pEnd, p, nedges;
560   PetscMPIInt     rank;
561   PetscErrorCode  ierr;
562 
563   PetscFunctionBegin;
564   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
565   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
566   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
567   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
568   /* Compute number of leaves for each root */
569   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
570   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
571   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
572   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
573   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
574   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
575   /* Gather rank of each leaf to root */
576   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
577   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
578   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
579   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
580   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
581   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
582   ierr = PetscFree(myrank);CHKERRQ(ierr);
583   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
584   /* Distribute remote ranks to leaves */
585   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
586   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
587   PetscFunctionReturn(0);
588 }
589 
590 /*@C
591   DMPlexCreateOverlap - Compute a DMLabel marking the points, by receiving rank, that this process must send to build the requested overlap
592 
593   Collective on DM
594 
595   Input Parameters:
596 + dm          - The DM
597 . levels      - Number of overlap levels
598 . rootSection - The number of leaves for a given root point
599 . rootrank    - The rank of each edge into the root point
600 . leafSection - The number of processes sharing a given leaf point
601 - leafrank    - The rank of each process sharing a leaf point
602 
603   Output Parameter:
604 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
605 
606   Level: developer
607 
608 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
609 @*/
610 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
611 {
612   MPI_Comm           comm;
613   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
614   PetscSF            sfPoint, sfProc;
615   const PetscSFNode *remote;
616   const PetscInt    *local;
617   const PetscInt    *nrank, *rrank;
618   PetscInt          *adj = NULL;
619   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
620   PetscMPIInt        rank, size;
621   PetscBool          useCone, useClosure, flg;
622   PetscErrorCode     ierr;
623 
624   PetscFunctionBegin;
625   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
626   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
627   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
628   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
629   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
630   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
631   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
632   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
633   /* Handle leaves: shared with the root point */
634   for (l = 0; l < nleaves; ++l) {
635     PetscInt adjSize = PETSC_DETERMINE, a;
636 
637     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
638     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
639   }
640   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
641   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
642   /* Handle roots */
643   for (p = pStart; p < pEnd; ++p) {
644     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
645 
646     if ((p >= sStart) && (p < sEnd)) {
647       /* Some leaves share a root with other leaves on different processes */
648       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
649       if (neighbors) {
650         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
651         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
652         for (n = 0; n < neighbors; ++n) {
653           const PetscInt remoteRank = nrank[noff+n];
654 
655           if (remoteRank == rank) continue;
656           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
657         }
658       }
659     }
660     /* Roots are shared with leaves */
661     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
662     if (!neighbors) continue;
663     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
664     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
665     for (n = 0; n < neighbors; ++n) {
666       const PetscInt remoteRank = rrank[noff+n];
667 
668       if (remoteRank == rank) continue;
669       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
670     }
671   }
672   ierr = PetscFree(adj);CHKERRQ(ierr);
673   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
674   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
675   /* Add additional overlap levels */
676   for (l = 1; l < levels; l++) {
677     /* Propagate point donations over SF to capture remote connections */
678     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
679     /* Add next level of point donations to the label */
680     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
681   }
682   /* We require the closure in the overlap */
683   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
684   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
685   if (useCone || !useClosure) {
686     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
687   }
688   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
689   if (flg) {
690     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
691   }
692   /* Make global process SF and invert sender to receiver label */
693   {
694     /* Build a global process SF */
695     PetscSFNode *remoteProc;
696     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
697     for (p = 0; p < size; ++p) {
698       remoteProc[p].rank  = p;
699       remoteProc[p].index = rank;
700     }
701     ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
702     ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
703     ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
704   }
705   ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
706   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
707   /* Add owned points, except for shared local points */
708   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
709   for (l = 0; l < nleaves; ++l) {
710     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
711     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
712   }
713   /* Clean up */
714   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
715   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
716   PetscFunctionReturn(0);
717 }
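/*
  Example (illustrative sketch): the usual pairing of DMPlexDistributeOwnership() and
  DMPlexCreateOverlap() when adding one level of overlap to a distributed mesh. The
  sections are created empty here and filled by the ownership computation.

    PetscSection rootSection, leafSection;
    IS           rootrank, leafrank;
    DMLabel      ovLabel;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
    ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
    ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
    ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/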
718 
719 /*@C
720   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution that realizes the overlap described by the input SF
721 
722   Collective on DM
723 
724   Input Parameters:
725 + dm          - The DM
726 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
727 
728   Output Parameter:
729 . migrationSF - An SF that maps original points in old locations to points in new locations
730 
731   Level: developer
732 
733 .seealso: DMPlexCreateOverlap(), DMPlexDistribute()
734 @*/
735 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
736 {
737   MPI_Comm           comm;
738   PetscMPIInt        rank, size;
739   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
740   PetscInt          *pointDepths, *remoteDepths, *ilocal;
741   PetscInt          *depthRecv, *depthShift, *depthIdx;
742   PetscSFNode       *iremote;
743   PetscSF            pointSF;
744   const PetscInt    *sharedLocal;
745   const PetscSFNode *overlapRemote, *sharedRemote;
746   PetscErrorCode     ierr;
747 
748   PetscFunctionBegin;
749   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
750   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
751   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
752   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
753   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
754 
755   /* Before building the migration SF we need to know the new stratum offsets */
756   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
757   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
758   for (d=0; d<dim+1; d++) {
759     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
760     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
761   }
762   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
763   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
764   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
765 
766   /* Count received points in each stratum and compute the internal strata shift */
767   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
768   for (d=0; d<dim+1; d++) depthRecv[d]=0;
769   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
770   depthShift[dim] = 0;
771   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
772   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
773   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
774   for (d=0; d<dim+1; d++) {
775     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
776     depthIdx[d] = pStart + depthShift[d];
777   }
778 
779   /* From the overlap SF, build an SF that describes the full overlap migration */
780   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
781   newLeaves = pEnd - pStart + nleaves;
782   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
783   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
784   /* First map local points to themselves */
785   for (d=0; d<dim+1; d++) {
786     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
787     for (p=pStart; p<pEnd; p++) {
788       point = p + depthShift[d];
789       ilocal[point] = point;
790       iremote[point].index = p;
791       iremote[point].rank = rank;
792       depthIdx[d]++;
793     }
794   }
795 
796   /* Add in the remote roots for currently shared points */
797   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
798   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
799   for (d=0; d<dim+1; d++) {
800     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
801     for (p=0; p<numSharedPoints; p++) {
802       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
803         point = sharedLocal[p] + depthShift[d];
804         iremote[point].index = sharedRemote[p].index;
805         iremote[point].rank = sharedRemote[p].rank;
806       }
807     }
808   }
809 
810   /* Now add the incoming overlap points */
811   for (p=0; p<nleaves; p++) {
812     point = depthIdx[remoteDepths[p]];
813     ilocal[point] = point;
814     iremote[point].index = overlapRemote[p].index;
815     iremote[point].rank = overlapRemote[p].rank;
816     depthIdx[remoteDepths[p]]++;
817   }
818   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
819 
820   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
821   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
822   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
823   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
824   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
825 
826   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
827   PetscFunctionReturn(0);
828 }
829 
830 /*@
831   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.
832 
833   Input Parameters:
834 + dm          - The DM
835 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
836 
837   Output Parameter:
838 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
839 
840   Level: developer
841 
842 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
843 @*/
844 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
845 {
846   MPI_Comm           comm;
847   PetscMPIInt        rank, size;
848   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
849   PetscInt          *pointDepths, *remoteDepths, *ilocal;
850   PetscInt          *depthRecv, *depthShift, *depthIdx;
851   PetscInt           hybEnd[4];
852   const PetscSFNode *iremote;
853   PetscErrorCode     ierr;
854 
855   PetscFunctionBegin;
856   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
857   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
858   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
859   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
860   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
861   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
862   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", ldepth, depth);
863 
864   /* Before building the migration SF we need to know the new stratum offsets */
865   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
866   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
867   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
868   for (d = 0; d < depth+1; ++d) {
869     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
870     for (p = pStart; p < pEnd; ++p) {
871       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
872         pointDepths[p] = 2 * d;
873       } else {
874         pointDepths[p] = 2 * d + 1;
875       }
876     }
877   }
878   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
879   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
880   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
881   /* Count received points in each stratum and compute the internal strata shift */
882   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
883   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
884   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
885   depthShift[2*depth+1] = 0;
886   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
887   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
888   depthShift[0] += depthRecv[1];
889   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
890   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
891   for (d = 2 * depth-1; d > 2; --d) {
892     PetscInt e;
893 
894     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
895   }
896   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
897   /* Derive a new local permutation based on stratified indices */
898   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
899   for (p = 0; p < nleaves; ++p) {
900     const PetscInt dep = remoteDepths[p];
901 
902     ilocal[p] = depthShift[dep] + depthIdx[dep];
903     depthIdx[dep]++;
904   }
905   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
906   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
907   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
908   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
909   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
910   PetscFunctionReturn(0);
911 }
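/*
  Example (illustrative sketch): stratifying a raw migration SF produced by a
  partitioner and substituting it for the original; sfMigration is a hypothetical
  placeholder for the caller's migration SF.

    PetscSF sfStratified;

    ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
    sfMigration = sfStratified;
*/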
912 
913 /*@
914   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
915 
916   Collective on DM
917 
918   Input Parameters:
919 + dm - The DMPlex object
920 . pointSF - The PetscSF describing the communication pattern
921 . originalSection - The PetscSection for existing data layout
922 - originalVec - The existing data
923 
924   Output Parameters:
925 + newSection - The PetscSection describing the new data layout
926 - newVec - The new data
927 
928   Level: developer
929 
930 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
931 @*/
932 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
933 {
934   PetscSF        fieldSF;
935   PetscInt      *remoteOffsets, fieldSize;
936   PetscScalar   *originalValues, *newValues;
937   PetscErrorCode ierr;
938 
939   PetscFunctionBegin;
940   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
941   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
942 
943   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
944   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
945   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
946 
947   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
948   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
949   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
950   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
951   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
952   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
953   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
954   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
955   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
956   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
957   PetscFunctionReturn(0);
958 }
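/*
  Example (illustrative sketch): migrating a section-organized Vec along a migration SF,
  mirroring the pattern used for coordinates in DMPlexDistributeCoordinates() below. The
  names origSection and origVec are hypothetical; the new section and vector are created
  empty, and DMPlexDistributeField() fills in their layout and sizes.

    PetscSection newSection;
    Vec          newVec;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
    ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
    ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/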
959 
960 /*@
961   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
962 
963   Collective on DM
964 
965   Input Parameters:
966 + dm - The DMPlex object
967 . pointSF - The PetscSF describing the communication pattern
968 . originalSection - The PetscSection for existing data layout
969 - originalIS - The existing data
970 
971   Output Parameters:
972 + newSection - The PetscSection describing the new data layout
973 - newIS - The new data
974 
975   Level: developer
976 
977 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
978 @*/
979 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
980 {
981   PetscSF         fieldSF;
982   PetscInt       *newValues, *remoteOffsets, fieldSize;
983   const PetscInt *originalValues;
984   PetscErrorCode  ierr;
985 
986   PetscFunctionBegin;
987   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
988   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
989 
990   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
991   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
992 
993   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
994   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
995   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
996   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
997   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
998   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
999   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
1000   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
1001   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
1002   PetscFunctionReturn(0);
1003 }
1004 
1005 /*@
1006   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
1007 
1008   Collective on DM
1009 
1010   Input Parameters:
1011 + dm - The DMPlex object
1012 . pointSF - The PetscSF describing the communication pattern
1013 . originalSection - The PetscSection for existing data layout
1014 . datatype - The type of data
1015 - originalData - The existing data
1016 
1017   Output Parameters:
1018 + newSection - The PetscSection describing the new data layout
1019 - newData - The new data
1020 
1021   Level: developer
1022 
1023 .seealso: DMPlexDistribute(), DMPlexDistributeField()
1024 @*/
1025 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1026 {
1027   PetscSF        fieldSF;
1028   PetscInt      *remoteOffsets, fieldSize;
1029   PetscMPIInt    dataSize;
1030   PetscErrorCode ierr;
1031 
1032   PetscFunctionBegin;
1033   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1034   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
1035 
1036   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
1037   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
1038   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
1039 
1040   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
1041   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1042   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1043   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1044   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
1045   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1046   PetscFunctionReturn(0);
1047 }
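/*
  Example (illustrative sketch): migrating raw PetscReal data with an explicit MPI
  datatype. The names origSection and origData are hypothetical; the routine allocates
  *newData, which the caller must eventually free.

    PetscSection newSection;
    PetscReal   *newData;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_REAL, origData, newSection, (void **) &newData);CHKERRQ(ierr);
    ierr = PetscFree(newData);CHKERRQ(ierr);
*/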
1048 
1049 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1050 {
1051   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1052   MPI_Comm               comm;
1053   PetscSF                coneSF;
1054   PetscSection           originalConeSection, newConeSection;
1055   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
1056   PetscBool              flg;
1057   PetscErrorCode         ierr;
1058 
1059   PetscFunctionBegin;
1060   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1061   PetscValidPointer(dmParallel,5);
1062   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1063 
1064   /* Distribute cone section */
1065   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1066   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
1067   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
1068   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
1069   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
1070   {
1071     PetscInt pStart, pEnd, p;
1072 
1073     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
1074     for (p = pStart; p < pEnd; ++p) {
1075       PetscInt coneSize;
1076       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
1077       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
1078     }
1079   }
1080   /* Communicate and renumber cones */
1081   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
1082   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1083   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1084   if (original) {
1085     PetscInt numCones;
1086 
1087     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
1088     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
1089   }
1090   else {
1091     globCones = cones;
1092   }
1093   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1094   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1095   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1096   if (original) {
1097     ierr = PetscFree(globCones);CHKERRQ(ierr);
1098   }
1099   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1100   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1101 #if defined(PETSC_USE_DEBUG)
1102   {
1103     PetscInt  p;
1104     PetscBool valid = PETSC_TRUE;
1105     for (p = 0; p < newConesSize; ++p) {
1106       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1107     }
1108     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1109   }
1110 #endif
1111   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1112   if (flg) {
1113     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1114     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1115     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1116     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1117     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1118   }
1119   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1120   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1121   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1122   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1123   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1124   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1125   /* Create supports and stratify DMPlex */
1126   {
1127     PetscInt pStart, pEnd;
1128 
1129     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1130     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1131   }
1132   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1133   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1134   {
1135     PetscBool useCone, useClosure, useAnchors;
1136 
1137     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1138     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1139     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1140     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1141     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1142     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1143   }
1144   PetscFunctionReturn(0);
1145 }
1146 
1147 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1148 {
1149   MPI_Comm         comm;
1150   PetscSection     originalCoordSection, newCoordSection;
1151   Vec              originalCoordinates, newCoordinates;
1152   PetscInt         bs;
1153   PetscBool        isper;
1154   const char      *name;
1155   const PetscReal *maxCell, *L;
1156   const DMBoundaryType *bd;
1157   PetscErrorCode   ierr;
1158 
1159   PetscFunctionBegin;
1160   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1161   PetscValidPointer(dmParallel, 3);
1162 
1163   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1164   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1165   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1166   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1167   if (originalCoordinates) {
1168     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1169     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1170     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1171 
1172     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1173     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1174     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1175     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1176     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1177   }
1178   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1179   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1180   PetscFunctionReturn(0);
1181 }
1182 
1183 /* Here we are assuming that process 0 always has everything */
1184 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1185 {
1186   DM_Plex         *mesh = (DM_Plex*) dm->data;
1187   MPI_Comm         comm;
1188   DMLabel          depthLabel;
1189   PetscMPIInt      rank;
1190   PetscInt         depth, d, numLabels, numLocalLabels, l;
1191   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1192   PetscObjectState depthState = -1;
1193   PetscErrorCode   ierr;
1194 
1195   PetscFunctionBegin;
1196   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1197   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1198   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1199   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1200   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1201 
1202   /* If the user has changed the depth label, communicate it instead */
1203   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1204   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1205   if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
1206   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1207   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1208   if (sendDepth) {
1209     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1210     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1211   }
1212   /* Everyone must have either the same number of labels, or none */
1213   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1214   numLabels = numLocalLabels;
1215   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1216   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1217   for (l = numLabels-1; l >= 0; --l) {
1218     DMLabel     label = NULL, labelNew = NULL;
1219     PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
1220 
1221     if (hasLabels) {ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);}
1222     /* Skip "depth" because it is recreated */
1223     if (hasLabels) {ierr = PetscStrcmp(label->name, "depth", &isDepth);CHKERRQ(ierr);}
1224     ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1225     if (isDepth && !sendDepth) continue;
1226     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1227     if (isDepth) {
1228       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1229       PetscInt gdepth;
1230 
1231       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1232       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", depth, gdepth);
1233       for (d = 0; d <= gdepth; ++d) {
1234         PetscBool has;
1235 
1236         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1237         if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1238       }
1239     }
1240     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1241     /* Put the output flag in the new label */
1242     if (hasLabels) {ierr = DMGetLabelOutput(dm, label->name, &lisOutput);CHKERRQ(ierr);}
1243     ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
1244     ierr = DMSetLabelOutput(dmParallel, labelNew->name, isOutput);CHKERRQ(ierr);
1245   }
1246   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1247   PetscFunctionReturn(0);
1248 }
1249 
1250 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1251 {
1252   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1253   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1254   PetscBool      *isHybrid, *isHybridParallel;
1255   PetscInt        dim, depth, d;
1256   PetscInt        pStart, pEnd, pStartP, pEndP;
1257   PetscErrorCode  ierr;
1258 
1259   PetscFunctionBegin;
1260   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1261   PetscValidPointer(dmParallel, 4);
1262 
1263   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1264   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1265   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1266   ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1267   ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
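  /* Flag every hybrid point (those at or beyond hybridPointMax in their depth stratum),
     push the flags through the migration SF, and record the first flagged point of each
     stratum on the target as its new hybridPointMax. */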
1268   for (d = 0; d <= depth; d++) {
1269     PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1270 
1271     if (hybridMax >= 0) {
1272       PetscInt sStart, sEnd, p;
1273 
1274       ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1275       for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1276     }
1277   }
1278   ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1279   ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1280   for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1281   for (d = 0; d <= depth; d++) {
1282     PetscInt sStart, sEnd, p, dd;
1283 
1284     ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1285     dd = (depth == 1 && d == 1) ? dim : d;
1286     for (p = sStart; p < sEnd; p++) {
1287       if (isHybridParallel[p-pStartP]) {
1288         pmesh->hybridPointMax[dd] = p;
1289         break;
1290       }
1291     }
1292   }
1293   ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1294   PetscFunctionReturn(0);
1295 }
1296 
1297 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1298 {
1299   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1300   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1301   MPI_Comm        comm;
1302   DM              refTree;
1303   PetscSection    origParentSection, newParentSection;
1304   PetscInt        *origParents, *origChildIDs;
1305   PetscBool       flg;
1306   PetscErrorCode  ierr;
1307 
1308   PetscFunctionBegin;
1309   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1310   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1311   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1312 
1313   /* Set up tree */
1314   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1315   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1316   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1317   if (origParentSection) {
1318     PetscInt        pStart, pEnd;
1319     PetscInt        *newParents, *newChildIDs, *globParents;
1320     PetscInt        *remoteOffsetsParents, newParentSize;
1321     PetscSF         parentSF;
1322 
1323     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1324     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1325     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1326     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1327     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1328     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1329     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1330     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
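    /* Parents are stored as local point numbers. Convert them to global numbers with the
       original local-to-global mapping (when one is given), broadcast them together with
       the child IDs over the section SF, then map them back to local numbers on the
       target with the migration renumbering. */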
1331     if (original) {
1332       PetscInt numParents;
1333 
1334       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1335       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1336       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1337     }
1338     else {
1339       globParents = origParents;
1340     }
1341     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1342     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1343     if (original) {
1344       ierr = PetscFree(globParents);CHKERRQ(ierr);
1345     }
1346     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1347     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1348     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1349 #if defined(PETSC_USE_DEBUG)
1350     {
1351       PetscInt  p;
1352       PetscBool valid = PETSC_TRUE;
1353       for (p = 0; p < newParentSize; ++p) {
1354         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1355       }
1356       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1357     }
1358 #endif
1359     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1360     if (flg) {
1361       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1362       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1363       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1364       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1365       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1366     }
1367     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1368     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1369     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1370     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1371   }
1372   pmesh->useAnchors = mesh->useAnchors;
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1377 {
1378   PetscMPIInt            rank, size;
1379   MPI_Comm               comm;
1380   PetscErrorCode         ierr;
1381 
1382   PetscFunctionBegin;
1383   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1384   PetscValidPointer(dmParallel,3);
1385 
1386   /* Create point SF for parallel mesh */
1387   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1388   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1389   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1390   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1391   {
1392     const PetscInt *leaves;
1393     PetscSFNode    *remotePoints, *rowners, *lowners;
1394     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1395     PetscInt        pStart, pEnd;
1396 
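    /* Ownership vote over the migration SF:
         1) broadcast the (initially unset, rank = -1) owners from roots to leaves;
         2) leaves that see no owner bid with their own rank, leaves that already see an
            owner withdraw (rank = -2), and roots are flagged (rank = -3) so they do not
            participate;
         3) an MPI_MAXLOC reduction elects the highest bidding rank as owner;
         4) the result is broadcast back so every leaf learns its owner. */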
1397     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1398     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1399     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1400     for (p=0; p<numRoots; p++) {
1401       rowners[p].rank  = -1;
1402       rowners[p].index = -1;
1403     }
1404     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1405     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1406     for (p = 0; p < numLeaves; ++p) {
1407       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1408         lowners[p].rank  = rank;
1409         lowners[p].index = leaves ? leaves[p] : p;
1410       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC ignores us */
1411         lowners[p].rank  = -2;
1412         lowners[p].index = -2;
1413       }
1414     }
1415     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1416       rowners[p].rank  = -3;
1417       rowners[p].index = -3;
1418     }
1419     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1420     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1421     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1422     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1423     for (p = 0; p < numLeaves; ++p) {
1424       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1425       if (lowners[p].rank != rank) ++numGhostPoints;
1426     }
1427     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1428     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1429     for (p = 0, gp = 0; p < numLeaves; ++p) {
1430       if (lowners[p].rank != rank) {
1431         ghostPoints[gp]        = leaves ? leaves[p] : p;
1432         remotePoints[gp].rank  = lowners[p].rank;
1433         remotePoints[gp].index = lowners[p].index;
1434         ++gp;
1435       }
1436     }
1437     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1438     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1439     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1440   }
1441   {
1442     PetscBool useCone, useClosure, useAnchors;
1443 
1444     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1445     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1446     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1447     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1448     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1449     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1450   }
1451   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1452   PetscFunctionReturn(0);
1453 }
1454 
1455 /*@C
1456   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1457 
1458   Input Parameters:
1459 + dm          - The source DMPlex object
1460 . migrationSF - The star forest that describes the parallel point remapping
1461 - ownership   - Flag causing a vote to determine point ownership
1462 
1463   Output Parameter:
1464 . pointSF     - The star forest describing the point overlap in the remapped DM
1465 
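  Example (a minimal sketch mirroring the use in DMPlexDistribute(); error checking abbreviated):
$    PetscSF pointSF;
$    ierr = DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
$    ierr = DMSetPointSF(dmParallel, pointSF);CHKERRQ(ierr);
$    ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
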
1466   Level: developer
1467 
1468 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1469 @*/
1470 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1471 {
1472   DM_Plex           *mesh = (DM_Plex *) dm->data;
1473   PetscMPIInt        rank, size;
1474   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1475   PetscInt          *pointLocal;
1476   const PetscInt    *leaves;
1477   const PetscSFNode *roots;
1478   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1479   Vec                shifts;
1480   const PetscInt     numShifts = 37; /* TODO Use larger prime */
1481   const PetscScalar *shift = NULL;
1482   const PetscBool    shiftDebug = PETSC_FALSE;
1483   PetscErrorCode     ierr;
1484 
1485   PetscFunctionBegin;
1486   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1487   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
1488 
1489   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1490   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1491   if (ownership) {
1492     /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1493     if (mesh->partitionBalance) {
1494       PetscRandom r;
1495 
1497       ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
1498       ierr = PetscRandomSetInterval(r, 0, 17*size);CHKERRQ(ierr);
1499       ierr = VecCreate(PETSC_COMM_SELF, &shifts);CHKERRQ(ierr);
1500       ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr);
1501       ierr = VecSetType(shifts, VECSTANDARD);CHKERRQ(ierr);
1502       ierr = VecSetRandom(shifts, r);CHKERRQ(ierr);
1503       ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
1504       ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr);
1505     }
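    /* With partition balancing, every rank bids with (rank + shift) % size rather than its
       raw rank, where the shift is a pseudo-random value indexed by the remote point. For
       example, with size = 4 and a shift of 2, ranks 0,1,2,3 bid as 2,3,0,1, so the MAXLOC
       vote below does not systematically hand shared points to the highest ranks. */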
1506 
1507     /* Point ownership vote: Process with highest rank owns shared points */
1508     for (p = 0; p < nleaves; ++p) {
1509       if (shiftDebug) {
1510         ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) shift[roots[p].index%numShifts], (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size);CHKERRQ(ierr);
1511       }
1512       /* Either put in a bid or we know we own it */
1513       leafNodes[p].rank  = (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size;
1514       leafNodes[p].index = p;
1515     }
1516     for (p = 0; p < nroots; p++) {
1517       /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1518       rootNodes[p].rank  = -3;
1519       rootNodes[p].index = -3;
1520     }
1521     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1522     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1523   } else {
1524     for (p = 0; p < nroots; p++) {
1525       rootNodes[p].index = -1;
1526       rootNodes[p].rank = rank;
1527     }
1528     for (p = 0; p < nleaves; p++) {
1529       /* Write new local id into old location */
1530       if (roots[p].rank == rank) {
1531         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1532       }
1533     }
1534   }
1535   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1536   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1537 
1538   for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1539     if (shiftDebug) {
1540       ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Root %D, Rank %D MyRank %D\n", rank, roots[p].index, leafNodes[p].rank, (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size);CHKERRQ(ierr);
1541     }
1542     if (leafNodes[p].rank != (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size) npointLeaves++;
1543   }
1544   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1545   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1546   for (idx = 0, p = 0; p < nleaves; p++) {
1547     if (leafNodes[p].rank != (rank + (shift ? (PetscInt) shift[roots[p].index%numShifts] : 0))%size) {
1548       pointLocal[idx] = p;
1549       pointRemote[idx] = leafNodes[p];
1550       idx++;
1551     }
1552   }
1553   if (shift) {
1554     ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr);
1555     ierr = VecDestroy(&shifts);CHKERRQ(ierr);
1556   }
1557   if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr);}
1558   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1559   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1560   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1561   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1562   PetscFunctionReturn(0);
1563 }
1564 
1565 /*@C
1566   DMPlexMigrate - Migrates internal DM data over the supplied star forest
1567 
1568   Input Parameters:
1569 + dm       - The source DMPlex object
1570 - sf       - The star forest communication context describing the migration pattern
1571 
1572   Output Parameter:
1573 . targetDM - The target DMPlex object
1574 
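  Example (a minimal sketch mirroring the use in DMPlexDistribute(), assuming sf describes where each point should migrate; error checking abbreviated):
$    DM newdm;
$    ierr = DMPlexCreate(PetscObjectComm((PetscObject) dm), &newdm);CHKERRQ(ierr);
$    ierr = DMPlexMigrate(dm, sf, newdm);CHKERRQ(ierr);
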
1575   Level: intermediate
1576 
1577 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1578 @*/
1579 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1580 {
1581   MPI_Comm               comm;
1582   PetscInt               dim, nroots;
1583   PetscSF                sfPoint;
1584   ISLocalToGlobalMapping ltogMigration;
1585   ISLocalToGlobalMapping ltogOriginal = NULL;
1586   PetscBool              flg;
1587   PetscErrorCode         ierr;
1588 
1589   PetscFunctionBegin;
1590   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1591   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1592   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1593   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1594   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1595 
1596   /* Check for a one-to-all distribution pattern */
1597   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1598   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1599   if (nroots >= 0) {
1600     IS                     isOriginal;
1601     PetscInt               n, size, nleaves;
1602     PetscInt              *numbering_orig, *numbering_new;
1603     /* Get the original point numbering */
1604     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1605     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1606     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1607     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1608     /* Convert to positive global numbers */
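    /* Points not owned by this process are encoded as -(global+1) in the point numbering;
       fold them back to their true global indices before broadcasting. */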
1609     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1610     /* Derive the new local-to-global mapping from the old one */
1611     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1612     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1613     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1614     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1615     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1616     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1617     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1618   } else {
1619     /* One-to-all distribution pattern: We can derive LToG from SF */
1620     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1621   }
1622   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1623   if (flg) {
1624     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1625     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1626   }
1627   /* Migrate DM data to target DM */
1628   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1629   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1630   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1631   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1632   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1633   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1634   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1635   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1636   PetscFunctionReturn(0);
1637 }
1638 
1639 /*@C
1640   DMPlexDistribute - Distributes the mesh and any associated sections.
1641 
1642   Not Collective
1643 
1644   Input Parameter:
1645 + dm  - The original DMPlex object
1646 - overlap - The overlap of partitions, 0 is the default
1647 
1648   Output Parameter:
1649 + sf - The PetscSF used for point distribution
1650 - parallelMesh - The distributed DMPlex object, or NULL
1651 
1652   Note: If the mesh was not distributed, the return value is NULL.
1653 
1654   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1655   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1656   representation on the mesh.
1657 
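  Example (a minimal sketch, assuming dm holds an existing serial DMPlex; error checking abbreviated):
$    DM      dmDist = NULL;
$    PetscSF sfDist = NULL;
$    ierr = DMPlexDistribute(dm, 0, &sfDist, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {ierr = DMDestroy(&dm);CHKERRQ(ierr); dm = dmDist;} /* dmDist is NULL when nothing was distributed */
$    ierr = PetscSFDestroy(&sfDist);CHKERRQ(ierr);
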
1658   Level: intermediate
1659 
1660 .keywords: mesh, elements
1661 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1662 @*/
1663 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1664 {
1665   MPI_Comm               comm;
1666   PetscPartitioner       partitioner;
1667   IS                     cellPart;
1668   PetscSection           cellPartSection;
1669   DM                     dmCoord;
1670   DMLabel                lblPartition, lblMigration;
1671   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1672   PetscBool              flg;
1673   PetscMPIInt            rank, size, p;
1674   PetscErrorCode         ierr;
1675 
1676   PetscFunctionBegin;
1677   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1678   if (sf) PetscValidPointer(sf,3);
1679   PetscValidPointer(dmParallel,4);
1680 
1681   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1682   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1683   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1684   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1685 
1686   if (sf) *sf = NULL;
1687   *dmParallel = NULL;
1688   if (size == 1) {
1689     ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1690     PetscFunctionReturn(0);
1691   }
1692 
1693   /* Create cell partition */
1694   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1695   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1696   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1697   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1698   {
1699     /* Convert partition to DMLabel */
1700     PetscInt proc, pStart, pEnd, npoints, poffset, pt;
1701     const PetscInt *points;
1702     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1703     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1704     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1705     for (proc = pStart; proc < pEnd; proc++) {
1706       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1707       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1708       for (pt = poffset; pt < poffset+npoints; pt++) {
1709         ierr = DMLabelSetValue(lblPartition, points[pt], proc);CHKERRQ(ierr);
1710       }
1711     }
1712     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1713   }
1714   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1715   {
1716     /* Build a global process SF */
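    /* Each process contributes <size> roots and <size> leaves: leaf p on this rank points
       at entry <rank> on process p, so a broadcast over this SF exchanges one item with
       every other process. */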
1717     PetscSFNode *remoteProc;
1718     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1719     for (p = 0; p < size; ++p) {
1720       remoteProc[p].rank  = p;
1721       remoteProc[p].index = rank;
1722     }
1723     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1724     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1725     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1726   }
1727   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1728   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1729   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1730   /* Stratify the SF in case we are migrating an already parallel plex */
1731   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1732   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1733   sfMigration = sfStratified;
1734   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1735   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1736   if (flg) {
1737     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1738     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1739   }
1740 
1741   /* Create non-overlapping parallel DM and migrate internal data */
1742   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1743   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1744   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1745 
1746   /* Build the point SF without overlap */
1747   ((DM_Plex*) (*dmParallel)->data)->partitionBalance = ((DM_Plex*) dm->data)->partitionBalance;
1748   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1749   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1750   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1751   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1752   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1753 
1754   if (overlap > 0) {
1755     DM                 dmOverlap;
1756     PetscInt           nroots, nleaves;
1757     PetscSFNode       *newRemote;
1758     const PetscSFNode *oldRemote;
1759     PetscSF            sfOverlap, sfOverlapPoint;
1760     /* Add the partition overlap to the distributed DM */
1761     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1762     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1763     *dmParallel = dmOverlap;
1764     if (flg) {
1765       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1766       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1767     }
1768 
1769     /* Re-map the migration SF to establish the full migration pattern */
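    /* Broadcasting the original remote points over the overlap SF composes the two
       migrations: each point of the overlapped mesh learns its location in the original
       source mesh. */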
1770     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1771     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1772     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1773     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1774     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1775     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1776     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1777     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1778     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1779     sfMigration = sfOverlapPoint;
1780   }
1781   /* Cleanup Partition */
1782   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1783   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1784   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1785   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1786   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1787   /* Copy BC */
1788   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1789   /* Create sfNatural */
1790   if (dm->useNatural) {
1791     PetscSection section;
1792 
1793     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1794     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1795     ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1796   }
1797   /* Cleanup */
1798   if (sf) {*sf = sfMigration;}
1799   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1800   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1801   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1802   PetscFunctionReturn(0);
1803 }
1804 
1805 /*@C
1806   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1807 
1808   Not Collective
1809 
1810   Input Parameter:
1811 + dm  - The non-overlapping distrbuted DMPlex object
1812 - overlap - The overlap of partitions, 0 is the default
1813 
1814   Output Parameter:
1815 + sf - The PetscSF used for point distribution
1816 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1817 
1818   Note: If the mesh was not distributed, dmOverlap is set to NULL.
1819 
1820   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1821   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1822   representation on the mesh.
1823 
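  Example (a minimal sketch, assuming dm is already distributed without overlap; error checking abbreviated):
$    DM      dmOver = NULL;
$    PetscSF sfOver = NULL;
$    ierr = DMPlexDistributeOverlap(dm, 1, &sfOver, &dmOver);CHKERRQ(ierr);
$    if (dmOver) {ierr = DMDestroy(&dm);CHKERRQ(ierr); dm = dmOver;}
$    ierr = PetscSFDestroy(&sfOver);CHKERRQ(ierr);
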
1824   Level: intermediate
1825 
1826 .keywords: mesh, elements
1827 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1828 @*/
1829 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1830 {
1831   MPI_Comm               comm;
1832   PetscMPIInt            size, rank;
1833   PetscSection           rootSection, leafSection;
1834   IS                     rootrank, leafrank;
1835   DM                     dmCoord;
1836   DMLabel                lblOverlap;
1837   PetscSF                sfOverlap, sfStratified, sfPoint;
1838   PetscErrorCode         ierr;
1839 
1840   PetscFunctionBegin;
1841   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1842   if (sf) PetscValidPointer(sf, 3);
1843   PetscValidPointer(dmOverlap, 4);
1844 
1845   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1846   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1847   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1848   if (size == 1) {*dmOverlap = NULL; PetscFunctionReturn(0);}
1849   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1850 
1851   /* Compute point overlap with neighbouring processes on the distributed DM */
1852   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1853   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1854   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1855   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1856   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1857   /* Convert overlap label to stratified migration SF */
1858   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1859   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1860   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1861   sfOverlap = sfStratified;
1862   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1863   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1864 
1865   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1866   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1867   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1868   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1869   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1870 
1871   /* Build the overlapping DM */
1872   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1873   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1874   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1875   /* Build the new point SF */
1876   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1877   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1878   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1879   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1880   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1881   /* Cleanup overlap partition */
1882   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1883   if (sf) *sf = sfOverlap;
1884   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1885   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1886   PetscFunctionReturn(0);
1887 }
1888 
1889 /*@C
1890   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1891   root process of the original's communicator.
1892 
1893   Input Parameter:
1894 . dm - the original DMPlex object
1895 
1896   Output Parameter:
1897 . gatherMesh - the gathered DM object, or NULL
1898 
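  Example (a minimal sketch; error checking abbreviated):
$    DM gatherMesh = NULL;
$    ierr = DMPlexGetGatherDM(dm, &gatherMesh);CHKERRQ(ierr);
$    if (gatherMesh) { /* NULL on a single process, where no gather is needed */
$      /* ... all points now live on process 0 ... */
$      ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);
$    }
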
1899   Level: intermediate
1900 
1901 .keywords: mesh
1902 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1903 @*/
1904 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh)
1905 {
1906   MPI_Comm       comm;
1907   PetscMPIInt    size;
1908   PetscPartitioner oldPart, gatherPart;
1909   PetscErrorCode ierr;
1910 
1911   PetscFunctionBegin;
1912   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1913   comm = PetscObjectComm((PetscObject)dm);
1914   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1915   *gatherMesh = NULL;
1916   if (size == 1) PetscFunctionReturn(0);
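  /* Temporarily install a gather partitioner, redistribute (which sends every point to
     process 0), then restore the original partitioner. */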
1917   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1918   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1919   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1920   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1921   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1922   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1923   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1924   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1925   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1926   PetscFunctionReturn(0);
1927 }
1928 
1929 /*@C
1930   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1931 
1932   Input Parameter:
1933 . dm - the original DMPlex object
1934 
1935   Output Parameter:
1936 . redundantMesh - the redundant DM object, or NULL
1937 
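  Example (a minimal sketch; error checking abbreviated):
$    DM redundantMesh = NULL;
$    ierr = DMPlexGetRedundantDM(dm, &redundantMesh);CHKERRQ(ierr);
$    /* ... every process now holds the complete mesh ... */
$    ierr = DMDestroy(&redundantMesh);CHKERRQ(ierr);
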
1938   Level: intermediate
1939 
1940 .keywords: mesh
1941 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1942 @*/
1943 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh)
1944 {
1945   MPI_Comm       comm;
1946   PetscMPIInt    size, rank;
1947   PetscInt       pStart, pEnd, p;
1948   PetscInt       numPoints = -1;
1949   PetscSF        migrationSF, sfPoint;
1950   DM             gatherDM, dmCoord;
1951   PetscSFNode    *points;
1952   PetscErrorCode ierr;
1953 
1954   PetscFunctionBegin;
1955   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1956   comm = PetscObjectComm((PetscObject)dm);
1957   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1958   *redundantMesh = NULL;
1959   if (size == 1) {
1960     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1961     *redundantMesh = dm;
1962     PetscFunctionReturn(0);
1963   }
1964   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
1965   if (!gatherDM) PetscFunctionReturn(0);
1966   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1967   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1968   numPoints = pEnd - pStart;
1969   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1970   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1971   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
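  /* Every process takes all points from process 0: the migration SF has numPoints leaves
     on each rank, all rooted at process 0, so DMPlexMigrate() below replicates the
     gathered mesh everywhere. */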
1972   for (p = 0; p < numPoints; p++) {
1973     points[p].index = p;
1974     points[p].rank  = 0;
1975   }
1976   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1977   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1978   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1979   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1980   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1981   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1982   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1983   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1984   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1985   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1986   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1987   PetscFunctionReturn(0);
1988 }
1989