1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
30 
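/* Usage sketch (not part of this file): selecting the adjacency pattern before mesh
   distribution or matrix preallocation.  Assumes "dm" is an existing DMPLEX and "ierr"
   is a PetscErrorCode.  For FEM one typically wants useCone = PETSC_FALSE together with
   useClosure = PETSC_TRUE (see the Notes above); for FVM the opposite pair.

     ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
     ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);
*/
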
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
143 
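/* Usage sketch (assumption, not from this file): when a mesh carries point-to-point
   constraints registered with DMPlexSetAnchors(), enabling anchors makes
   DMPlexGetAdjacency() report the anchor points in place of the constrained points.

     ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
*/
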
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
153 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
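      /* Insert support[s] into adj[] if it is not already present: the loop scans the current
         entries, and once q reaches numAdj without a match the comma expression in the guard
         appends the point and then yields 0, ending the scan */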
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259 {
260   static PetscInt asiz = 0;
261   PetscInt maxAnchors = 1;
262   PetscInt aStart = -1, aEnd = -1;
263   PetscInt maxAdjSize;
264   PetscSection aSec = NULL;
265   IS aIS = NULL;
266   const PetscInt *anchors;
267   PetscErrorCode  ierr;
268 
269   PetscFunctionBeginHot;
270   if (useAnchors) {
271     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
272     if (aSec) {
273       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
274       maxAnchors = PetscMax(1,maxAnchors);
275       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
276       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
277     }
278   }
279   if (!*adj) {
280     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
281 
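    /* Estimate a worst-case adjacency size: the larger of |star(closure(p))| and
       |closure(star(p))| is bounded using geometric series in the maximum cone and
       support sizes over the depth levels, scaled by the maximum number of anchors,
       and finally clamped to the total number of points in the chart */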
282     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
283     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
284     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
285     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288     asiz *= maxAnchors;
289     asiz  = PetscMin(asiz,pEnd-pStart);
290     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
291   }
292   if (*adjSize < 0) *adjSize = asiz;
293   maxAdjSize = *adjSize;
294   if (useTransitiveClosure) {
295     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
296   } else if (useCone) {
297     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
298   } else {
299     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
300   }
301   if (useAnchors && aSec) {
302     PetscInt origSize = *adjSize;
303     PetscInt numAdj = origSize;
304     PetscInt i = 0, j;
305     PetscInt *orig = *adj;
306 
307     while (i < origSize) {
308       PetscInt p = orig[i];
309       PetscInt aDof = 0;
310 
311       if (p >= aStart && p < aEnd) {
312         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
313       }
314       if (aDof) {
315         PetscInt aOff;
316         PetscInt s, q;
317 
318         for (j = i + 1; j < numAdj; j++) {
319           orig[j - 1] = orig[j];
320         }
321         origSize--;
322         numAdj--;
323         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
324         for (s = 0; s < aDof; ++s) {
325           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
326             if (anchors[aOff+s] == orig[q]) break;
327           }
328           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
329         }
330       }
331       else {
332         i++;
333       }
334     }
335     *adjSize = numAdj;
336     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must PetscFree the adj array if it was not passed in.
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
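
/* Usage sketch (assumption, not from this file): computing adjacency for every point.
   Passing adj = NULL and adjSize = PETSC_DETERMINE lets the routine allocate a
   sufficiently large array, which the caller must free with PetscFree().

     PetscInt p, pStart, pEnd, adjSize = PETSC_DETERMINE, *adj = NULL;
     ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
     for (p = pStart; p < pEnd; ++p) {
       adjSize = PETSC_DETERMINE;
       ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
       (use adj[0..adjSize-1] here)
     }
     ierr = PetscFree(adj);CHKERRQ(ierr);
*/
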
374 #undef __FUNCT__
375 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
376 /*@
377   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
378 
379   Collective on DM
380 
  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each leaf attached to a root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
384 
385   Output Parameters:
386 + processRanks - A list of process neighbors, or NULL
387 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
388 
389   Level: developer
390 
391 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
392 @*/
393 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
394 {
395   const PetscSFNode *remotePoints;
396   PetscInt          *localPointsNew;
397   PetscSFNode       *remotePointsNew;
398   const PetscInt    *nranks;
399   PetscInt          *ranksNew;
400   PetscBT            neighbors;
401   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
402   PetscMPIInt        numProcs, proc, rank;
403   PetscErrorCode     ierr;
404 
405   PetscFunctionBegin;
406   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
407   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
408   if (processRanks) {PetscValidPointer(processRanks, 7);}
409   if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
410   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
411   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
412   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
413   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
414   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
415   /* Compute root-to-leaf process connectivity */
416   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
417   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
418   for (p = pStart; p < pEnd; ++p) {
419     PetscInt ndof, noff, n;
420 
421     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
422     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
423     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
424   }
425   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
426   /* Compute leaf-to-neighbor process connectivity */
427   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
428   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
429   for (p = pStart; p < pEnd; ++p) {
430     PetscInt ndof, noff, n;
431 
432     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
433     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
434     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
435   }
436   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
437   /* Compute leaf-to-root process connectivity */
438   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
439   /* Calculate edges */
440   PetscBTClear(neighbors, rank);
441   for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
442   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
443   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
445   for (proc = 0, n = 0; proc < numProcs; ++proc) {
446     if (PetscBTLookup(neighbors, proc)) {
447       ranksNew[n]              = proc;
448       localPointsNew[n]        = proc;
449       remotePointsNew[n].index = rank;
450       remotePointsNew[n].rank  = proc;
451       ++n;
452     }
453   }
454   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
455   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
456   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
457   if (sfProcess) {
458     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
459     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
460     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
461     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
462   }
463   PetscFunctionReturn(0);
464 }
465 
466 #undef __FUNCT__
467 #define __FUNCT__ "DMPlexDistributeOwnership"
468 /*@
469   DMPlexDistributeOwnership - Compute owner information for shared points. This provides the two-sided (root and leaf) view of the sharing encoded in the point SF.
470 
471   Collective on DM
472 
473   Input Parameter:
474 . dm - The DM
475 
476   Output Parameters:
477 + rootSection - The number of leaves for a given root point
478 . rootrank    - The rank of each edge into the root point
479 . leafSection - The number of processes sharing a given leaf point
480 - leafrank    - The rank of each process sharing a leaf point
481 
482   Level: developer
483 
484 .seealso: DMPlexCreateOverlap()
485 @*/
486 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
487 {
488   MPI_Comm        comm;
489   PetscSF         sfPoint;
490   const PetscInt *rootdegree;
491   PetscInt       *myrank, *remoterank;
492   PetscInt        pStart, pEnd, p, nedges;
493   PetscMPIInt     rank;
494   PetscErrorCode  ierr;
495 
496   PetscFunctionBegin;
497   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
498   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
499   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
500   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
501   /* Compute number of leaves for each root */
502   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
503   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
504   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
506   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
507   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
508   /* Gather rank of each leaf to root */
509   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
510   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
511   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
512   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
513   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
514   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscFree(myrank);CHKERRQ(ierr);
516   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
517   /* Distribute remote ranks to leaves */
518   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
519   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
520   PetscFunctionReturn(0);
521 }
522 
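/* Usage sketch (assumption, not from this file): computing the two-sided ownership
   information.  The sections are created empty by the caller and filled in by this
   routine; "comm" is assumed to be the DM's communicator.

     PetscSection rootSection, leafSection;
     IS           rootrank, leafrank;
     ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
     ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
*/
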
523 #undef __FUNCT__
524 #define __FUNCT__ "DMPlexCreateOverlap"
525 /*@C
526   DMPlexCreateOverlap - Compute a DMLabel identifying the points, together with their target ranks, that must be sent to other processes to build the requested overlap
527 
528   Collective on DM
529 
530   Input Parameters:
531 + dm          - The DM
532 . levels      - Number of overlap levels
533 . rootSection - The number of leaves for a given root point
534 . rootrank    - The rank of each edge into the root point
535 . leafSection - The number of processes sharing a given leaf point
536 - leafrank    - The rank of each process sharing a leaf point
537 
538   Output Parameter:
539 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
540 
541   Level: developer
542 
543 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
544 @*/
545 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
546 {
547   MPI_Comm           comm;
548   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
549   PetscSF            sfPoint, sfProc;
550   const PetscSFNode *remote;
551   const PetscInt    *local;
552   const PetscInt    *nrank, *rrank;
553   PetscInt          *adj = NULL;
554   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
555   PetscMPIInt        rank, numProcs;
556   PetscBool          useCone, useClosure, flg;
557   PetscErrorCode     ierr;
558 
559   PetscFunctionBegin;
560   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
561   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
562   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
563   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
564   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
565   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
566   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
567   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
568   /* Handle leaves: shared with the root point */
569   for (l = 0; l < nleaves; ++l) {
570     PetscInt adjSize = PETSC_DETERMINE, a;
571 
572     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
573     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
574   }
575   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
576   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
577   /* Handle roots */
578   for (p = pStart; p < pEnd; ++p) {
579     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
580 
581     if ((p >= sStart) && (p < sEnd)) {
582       /* Some leaves share a root with other leaves on different processes */
583       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
584       if (neighbors) {
585         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
586         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
587         for (n = 0; n < neighbors; ++n) {
588           const PetscInt remoteRank = nrank[noff+n];
589 
590           if (remoteRank == rank) continue;
591           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
592         }
593       }
594     }
595     /* Roots are shared with leaves */
596     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
597     if (!neighbors) continue;
598     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
599     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
600     for (n = 0; n < neighbors; ++n) {
601       const PetscInt remoteRank = rrank[noff+n];
602 
603       if (remoteRank == rank) continue;
604       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
605     }
606   }
607   ierr = PetscFree(adj);CHKERRQ(ierr);
608   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
609   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
610   /* Add additional overlap levels */
611   for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);}
612   /* We require the closure in the overlap */
613   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
614   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
615   if (useCone || !useClosure) {
616     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
617   }
618   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
619   if (flg) {
620     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
621     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
622   }
623   /* Make process SF and invert sender to receiver label */
624   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
625   ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
626   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
627   /* Add owned points, except for shared local points */
628   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
629   for (l = 0; l < nleaves; ++l) {
630     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
631     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
632   }
633   /* Clean up */
634   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
635   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
636   PetscFunctionReturn(0);
637 }
638 
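/* Usage sketch (assumption, not from this file): continuing from the
   DMPlexDistributeOwnership() sketch above, one overlap level is requested and the
   result is a label of point/rank pairs that can be turned into a migration SF.

     DMLabel ovLabel;
     ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
     ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
     ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/
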
639 #undef __FUNCT__
640 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
641 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
642 {
643   MPI_Comm           comm;
644   PetscMPIInt        rank, numProcs;
645   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
646   PetscInt          *pointDepths, *remoteDepths, *ilocal;
647   PetscInt          *depthRecv, *depthShift, *depthIdx;
648   PetscSFNode       *iremote;
649   PetscSF            pointSF;
650   const PetscInt    *sharedLocal;
651   const PetscSFNode *overlapRemote, *sharedRemote;
652   PetscErrorCode     ierr;
653 
654   PetscFunctionBegin;
655   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
656   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
657   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
658   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
659   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
660 
661   /* Before building the migration SF we need to know the new stratum offsets */
662   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
663   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
664   for (d=0; d<dim+1; d++) {
665     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
666     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
667   }
668   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
669   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
670   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
671 
672   /* Count received points in each stratum and compute the internal strata shift */
673   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
674   for (d=0; d<dim+1; d++) depthRecv[d]=0;
675   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
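  /* Each stratum is shifted by the number of received points that land in the strata
     preceding it in the default Plex ordering (cells, then vertices, then faces, then
     edges), so that every stratum remains contiguous after migration */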
676   depthShift[dim] = 0;
677   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
678   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
679   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
680   for (d=0; d<dim+1; d++) {
681     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
682     depthIdx[d] = pStart + depthShift[d];
683   }
684 
685   /* Build an SF that describes the full overlap migration: local points map to themselves, then the received overlap points are appended */
686   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
687   newLeaves = pEnd - pStart + nleaves;
688   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
689   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
690   /* First map local points to themselves */
691   for (d=0; d<dim+1; d++) {
692     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
693     for (p=pStart; p<pEnd; p++) {
694       point = p + depthShift[d];
695       ilocal[point] = point;
696       iremote[point].index = p;
697       iremote[point].rank = rank;
698       depthIdx[d]++;
699     }
700   }
701 
702   /* Add in the remote roots for currently shared points */
703   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
704   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
705   for (d=0; d<dim+1; d++) {
706     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
707     for (p=0; p<numSharedPoints; p++) {
708       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
709         point = sharedLocal[p] + depthShift[d];
710         iremote[point].index = sharedRemote[p].index;
711         iremote[point].rank = sharedRemote[p].rank;
712       }
713     }
714   }
715 
716   /* Now add the incoming overlap points */
717   for (p=0; p<nleaves; p++) {
718     point = depthIdx[remoteDepths[p]];
719     ilocal[point] = point;
720     iremote[point].index = overlapRemote[p].index;
721     iremote[point].rank = overlapRemote[p].rank;
722     depthIdx[remoteDepths[p]]++;
723   }
724   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
725 
726   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
727   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
728   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
729   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
730   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
731 
732   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
733   PetscFunctionReturn(0);
734 }
735 
736 #undef __FUNCT__
737 #define __FUNCT__ "DMPlexStratifyMigrationSF"
738 /*@
739   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF so that the migrated DM remains stratified, i.e. points are ordered by depth stratum
740 
741   Input Parameters:
742 + dm          - The DM
743 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
744 
745   Output Parameter:
746 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
747 
748   Level: developer
749 
750 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
751 @*/
752 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
753 {
754   MPI_Comm           comm;
755   PetscMPIInt        rank, numProcs;
756   PetscInt           d, dim, depth, p, pStart, pEnd, nroots, nleaves;
757   PetscInt          *pointDepths, *remoteDepths, *ilocal;
758   PetscInt          *depthRecv, *depthShift, *depthIdx;
759   const PetscSFNode *iremote;
760   PetscErrorCode     ierr;
761 
762   PetscFunctionBegin;
763   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
764   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
765   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
766   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
767   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
768 
769   /* Before building the migration SF we need to know the new stratum offsets */
770   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
771   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
772   for (d=0; d<dim+1; d++) {
773     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
774     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
775   }
776   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
777   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
778   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
779   /* Count received points in each stratum and compute the internal strata shift */
780   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
781   for (d=0; d<dim+1; d++) depthRecv[d]=0;
782   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
783   depthShift[dim] = 0;
784   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
785   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
786   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
787   for (d=0; d<dim+1; d++) {depthIdx[d] = 0;}
788   /* Derive a new local permutation based on stratified indices */
789   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
790   for (p=0; p<nleaves; p++) {
791     depth = remoteDepths[p];
792     ilocal[p] = depthShift[depth] + depthIdx[depth];
793     depthIdx[depth]++;
794   }
795   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
796   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
797   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
798   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
799   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
800   PetscFunctionReturn(0);
801 }
802 
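/* Usage sketch (assumption, not from this file): a migration SF built from a partition
   label, e.g. by DMPlexPartitionLabelCreateSF(), has leaves in arbitrary order; adding
   the stratified indirection afterwards keeps the migrated DM stratified.  The label
   "lblPartition" is assumed to exist.

     PetscSF sfMigration, sfStratified;
     ierr = DMPlexPartitionLabelCreateSF(dm, lblPartition, &sfMigration);CHKERRQ(ierr);
     ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
*/
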
803 #undef __FUNCT__
804 #define __FUNCT__ "DMPlexDistributeField"
805 /*@
806   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
807 
808   Collective on DM
809 
810   Input Parameters:
811 + dm - The DMPlex object
812 . pointSF - The PetscSF describing the communication pattern
813 . originalSection - The PetscSection for existing data layout
814 - originalVec - The existing data
815 
816   Output Parameters:
817 + newSection - The PetscSection describing the new data layout
818 - newVec - The new data
819 
820   Level: developer
821 
822 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
823 @*/
824 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
825 {
826   PetscSF        fieldSF;
827   PetscInt      *remoteOffsets, fieldSize;
828   PetscScalar   *originalValues, *newValues;
829   PetscErrorCode ierr;
830 
831   PetscFunctionBegin;
832   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
833   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
834 
835   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
836   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
837   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
838 
839   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
840   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
841   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
842   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
843   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
844   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
845   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
846   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
847   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
848   PetscFunctionReturn(0);
849 }
850 
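/* Usage sketch (assumption, not from this file): redistributing a field after a
   migration SF has been produced, e.g. by DMPlexDistribute().  The new section and
   vector are created empty by the caller; this routine sizes and fills them.  The
   names "migrationSF", "origSection", and "origVec" are assumed inputs.

     PetscSection newSection;
     Vec          newVec;
     ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
     ierr = VecCreate(comm, &newVec);CHKERRQ(ierr);
     ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/
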
851 #undef __FUNCT__
852 #define __FUNCT__ "DMPlexDistributeFieldIS"
853 /*@
854   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
855 
856   Collective on DM
857 
858   Input Parameters:
859 + dm - The DMPlex object
860 . pointSF - The PetscSF describing the communication pattern
861 . originalSection - The PetscSection for existing data layout
862 - originalIS - The existing data
863 
864   Output Parameters:
865 + newSection - The PetscSection describing the new data layout
866 - newIS - The new data
867 
868   Level: developer
869 
870 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
871 @*/
872 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
873 {
874   PetscSF         fieldSF;
875   PetscInt       *newValues, *remoteOffsets, fieldSize;
876   const PetscInt *originalValues;
877   PetscErrorCode  ierr;
878 
879   PetscFunctionBegin;
880   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
881   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
882 
883   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
884   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
885 
886   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
887   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
888   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
889   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
890   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
891   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
892   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
893   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
894   PetscFunctionReturn(0);
895 }
896 
897 #undef __FUNCT__
898 #define __FUNCT__ "DMPlexDistributeData"
899 /*@
900   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
901 
902   Collective on DM
903 
904   Input Parameters:
905 + dm - The DMPlex object
906 . pointSF - The PetscSF describing the communication pattern
907 . originalSection - The PetscSection for existing data layout
908 . datatype - The type of data
909 - originalData - The existing data
910 
911   Output Parameters:
912 + newSection - The PetscSection describing the new data layout
913 - newData - The new data
914 
915   Level: developer
916 
917 .seealso: DMPlexDistribute(), DMPlexDistributeField()
918 @*/
919 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
920 {
921   PetscSF        fieldSF;
922   PetscInt      *remoteOffsets, fieldSize;
923   PetscMPIInt    dataSize;
924   PetscErrorCode ierr;
925 
926   PetscFunctionBegin;
927   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
928   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
929 
930   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
931   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
932   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
933 
934   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
935   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
936   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
937   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
938   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
939   PetscFunctionReturn(0);
940 }
941 
942 #undef __FUNCT__
943 #define __FUNCT__ "DMPlexDistributeCones"
944 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
945 {
946   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
947   MPI_Comm               comm;
948   PetscSF                coneSF;
949   PetscSection           originalConeSection, newConeSection;
950   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
951   PetscBool              flg;
952   PetscErrorCode         ierr;
953 
954   PetscFunctionBegin;
955   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
956   PetscValidPointer(dmParallel,4);
957   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
958 
959   /* Distribute cone section */
960   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
961   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
962   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
963   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
964   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
965   {
966     PetscInt pStart, pEnd, p;
967 
968     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
969     for (p = pStart; p < pEnd; ++p) {
970       PetscInt coneSize;
971       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
972       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
973     }
974   }
975   /* Communicate and renumber cones */
976   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
977   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
978   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
979   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
980   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
981   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
982   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
983 #if defined(PETSC_USE_DEBUG)
984   {
985     PetscInt  p;
986     PetscBool valid = PETSC_TRUE;
987     for (p = 0; p < newConesSize; ++p) {
988       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
989     }
990     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
991   }
992 #endif
993   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
994   if (flg) {
995     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
996     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
997     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
998     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
999     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1000   }
1001   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1002   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1003   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1004   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1005   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1006   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1007   /* Create supports and stratify sieve */
1008   {
1009     PetscInt pStart, pEnd;
1010 
1011     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1012     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1013   }
1014   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1015   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 #undef __FUNCT__
1020 #define __FUNCT__ "DMPlexDistributeCoordinates"
1021 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1022 {
1023   MPI_Comm         comm;
1024   PetscSection     originalCoordSection, newCoordSection;
1025   Vec              originalCoordinates, newCoordinates;
1026   PetscInt         bs;
1027   const char      *name;
1028   const PetscReal *maxCell, *L;
1029   PetscErrorCode   ierr;
1030 
1031   PetscFunctionBegin;
1032   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1033   PetscValidPointer(dmParallel, 3);
1034 
1035   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1036   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1037   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1038   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1039   if (originalCoordinates) {
1040     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1041     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1042     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1043 
1044     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1045     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1046     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1047     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1048     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1049   }
1050   ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
1051   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
1052   PetscFunctionReturn(0);
1053 }
1054 
1055 #undef __FUNCT__
1056 #define __FUNCT__ "DMPlexDistributeLabels"
1057 /* Here we are assuming that process 0 always has everything */
1058 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1059 {
1060   MPI_Comm       comm;
1061   PetscMPIInt    rank;
1062   PetscInt       numLabels, numLocalLabels, l;
1063   PetscBool      hasLabels = PETSC_FALSE;
1064   PetscErrorCode ierr;
1065 
1066   PetscFunctionBegin;
1067   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1068   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1069   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1070   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1071   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1072 
1073   /* Everyone must have either the same number of labels, or none */
1074   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1075   numLabels = numLocalLabels;
1076   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1077   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1078   for (l = numLabels-1; l >= 0; --l) {
1079     DMLabel     label = NULL, labelNew = NULL;
1080     PetscBool   isdepth;
1081 
1082     if (hasLabels) {
1083       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1084       /* Skip "depth" because it is recreated */
1085       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1086     }
1087     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1088     if (isdepth) continue;
1089     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1090     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1091   }
1092   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 #undef __FUNCT__
1097 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1098 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1099 {
1100   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1101   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1102   MPI_Comm        comm;
1103   const PetscInt *gpoints;
1104   PetscInt        dim, depth, n, d;
1105   PetscErrorCode  ierr;
1106 
1107   PetscFunctionBegin;
1108   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1109   PetscValidPointer(dmParallel, 4);
1110 
1111   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1112   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1113 
1114   /* Setup hybrid structure */
1115   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1116   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1117   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1118   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1119   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1120   for (d = 0; d <= dim; ++d) {
1121     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1122 
1123     if (pmax < 0) continue;
1124     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1125     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1126     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1127     for (p = 0; p < n; ++p) {
1128       const PetscInt point = gpoints[p];
1129 
1130       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1131     }
1132     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1133     else            pmesh->hybridPointMax[d] = -1;
1134   }
1135   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1136   PetscFunctionReturn(0);
1137 }
1138 
1139 #undef __FUNCT__
1140 #define __FUNCT__ "DMPlexDistributeSetupTree"
1141 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1142 {
1143   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1144   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1145   MPI_Comm        comm;
1146   DM              refTree;
1147   PetscSection    origParentSection, newParentSection;
1148   PetscInt        *origParents, *origChildIDs;
1149   PetscBool       flg;
1150   PetscErrorCode  ierr;
1151 
1152   PetscFunctionBegin;
1153   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1154   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1155   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1156 
1157   /* Set up tree */
1158   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1159   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1160   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1161   if (origParentSection) {
1162     PetscInt        pStart, pEnd;
1163     PetscInt        *newParents, *newChildIDs;
1164     PetscInt        *remoteOffsetsParents, newParentSize;
1165     PetscSF         parentSF;
1166 
1167     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1168     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1169     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1170     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1171     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1172     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1173     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1174     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1175     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1176     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1177     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1178     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1179     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1180     if (flg) {
1181       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1182       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1183       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1184       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1185       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1186     }
1187     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1188     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1189     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1190     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1191   }
1192   pmesh->useAnchors = mesh->useAnchors;
1193   PetscFunctionReturn(0);
1194 }
1195 
1196 #undef __FUNCT__
1197 #define __FUNCT__ "DMPlexDistributeSF"
1198 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1199 {
1200   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1201   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1202   PetscMPIInt            rank, numProcs;
1203   MPI_Comm               comm;
1204   PetscErrorCode         ierr;
1205 
1206   PetscFunctionBegin;
1207   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1208   PetscValidPointer(dmParallel,3);
1209 
1210   /* Create point SF for parallel mesh */
1211   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1212   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1213   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1214   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
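  /* Ownership protocol (summary): every process holding a copy of a point bids for it with
     its own rank, an MPI_MAXLOC reduction over the migration SF selects a single owner, and
     the result is broadcast back so each copy learns the owning rank and remote index */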
1215   {
1216     const PetscInt *leaves;
1217     PetscSFNode    *remotePoints, *rowners, *lowners;
1218     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1219     PetscInt        pStart, pEnd;
1220 
1221     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1222     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1223     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1224     for (p=0; p<numRoots; p++) {
1225       rowners[p].rank  = -1;
1226       rowners[p].index = -1;
1227     }
1228     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1229     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1230     for (p = 0; p < numLeaves; ++p) {
1231       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1232         lowners[p].rank  = rank;
1233         lowners[p].index = leaves ? leaves[p] : p;
1234       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1235         lowners[p].rank  = -2;
1236         lowners[p].index = -2;
1237       }
1238     }
1239     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1240       rowners[p].rank  = -3;
1241       rowners[p].index = -3;
1242     }
1243     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1244     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1245     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1246     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1247     for (p = 0; p < numLeaves; ++p) {
1248       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1249       if (lowners[p].rank != rank) ++numGhostPoints;
1250     }
1251     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1252     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1253     for (p = 0, gp = 0; p < numLeaves; ++p) {
1254       if (lowners[p].rank != rank) {
1255         ghostPoints[gp]        = leaves ? leaves[p] : p;
1256         remotePoints[gp].rank  = lowners[p].rank;
1257         remotePoints[gp].index = lowners[p].index;
1258         ++gp;
1259       }
1260     }
1261     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1262     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1263     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1264   }
1265   pmesh->useCone    = mesh->useCone;
1266   pmesh->useClosure = mesh->useClosure;
1267   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1268   PetscFunctionReturn(0);
1269 }
1270 
1271 #undef __FUNCT__
1272 #define __FUNCT__ "DMPlexMigrate"
1273 /*@C
1274   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1275 
1276   Input Parameters:
1277 + dm       - The source DMPlex object
1278 - sf       - The star forest communication context describing the migration pattern
1279 
1280   Output Parameter:
1281 . targetDM - The target DMPlex object
1282 
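  Example: a minimal usage sketch (comm, dm, and migrationSF are assumed to exist in the caller's
  scope; this mirrors what DMPlexDistribute() does internally rather than prescribing a fixed recipe):
$    DM dmTarget;
$
$    ierr = DMPlexCreate(comm, &dmTarget);CHKERRQ(ierr);
$    ierr = DMPlexMigrate(dm, migrationSF, dmTarget);CHKERRQ(ierr);
$    ierr = DMPlexDistributeSF(dm, migrationSF, dmTarget);CHKERRQ(ierr);
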
1283   Level: intermediate
1284 
1285 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1286 @*/
1287 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1288 {
1289   MPI_Comm               comm;
1290   PetscInt               dim, nroots;
1291   PetscSF                sfPoint;
1292   ISLocalToGlobalMapping ltogMigration;
1293   PetscBool              flg;
1294   PetscErrorCode         ierr;
1295 
1296   PetscFunctionBegin;
1297   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1298   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1299   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1300   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1301 
1302   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1303   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1304   if (nroots > 0) {
1305     IS                     isOriginal;
1306     ISLocalToGlobalMapping ltogOriginal;
1307     PetscInt               n, size, nleaves, conesSize;
1308     PetscInt              *numbering_orig, *numbering_new, *cones;
1309     PetscSection           coneSection;
1310     /* Get the original point numbering */
1311     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1312     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1313     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1314     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1315     /* Convert to positive global numbers */
1316     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1317     /* Derive the new local-to-global mapping from the old one */
1318     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1319     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1320     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1321     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1322     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1323     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1324     /* Convert cones to global numbering before migrating them */
1325     ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1326     ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1327     ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1328     ierr = ISLocalToGlobalMappingApplyBlock(ltogOriginal, conesSize, cones, cones);CHKERRQ(ierr);
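    /* The cones now hold global point numbers; DMPlexDistributeCones() below uses ltogMigration
       to translate them back into the local numbering of the target DM after migration. */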
1329     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1330     ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1331   } else {
1332     /* Assume zero-to-all-ranks pattern and derive LToG from SF */
1333     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1334   }
1335   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1336   if (flg) {
1337     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1338     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1339   }
1340   /* Migrate DM data to target DM */
1341   ierr = DMPlexDistributeCones(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1342   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1343   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1344   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1345   ierr = DMPlexDistributeSetupTree(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1346   ierr = DMPlexCopyBoundary(dm, targetDM);CHKERRQ(ierr);
1347   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1348   PetscFunctionReturn(0);
1349 }
1350 
1351 #undef __FUNCT__
1352 #define __FUNCT__ "DMPlexDistribute"
1353 /*@C
1354   DMPlexDistribute - Distributes the mesh and any associated sections.
1355 
1356   Collective on dm
1357 
1358   Input Parameters:
1359 + dm      - The original DMPlex object
1360 - overlap - The overlap of partitions; 0 is the default
1361 
1362   Output Parameters:
1363 + sf         - The PetscSF used for point distribution
1364 - dmParallel - The distributed DMPlex object, or NULL
1365 
1366   Notes: If the mesh was not distributed, dmParallel is set to NULL.
1367 
1368   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1369   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1370   representation on the mesh.
1371 
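  Example: a minimal usage sketch (dmDist and sfDist are illustrative names; the FEM-style adjacency
  choice is only one possibility, see DMPlexSetAdjacencyUseCone()):
$    DM      dmDist = NULL;
$    PetscSF sfDist = NULL;
$
$    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
$    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);
$    ierr = DMPlexDistribute(dm, 0, &sfDist, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {ierr = DMDestroy(&dm);CHKERRQ(ierr); dm = dmDist;}
$    ierr = PetscSFDestroy(&sfDist);CHKERRQ(ierr);
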
1372   Level: intermediate
1373 
1374 .keywords: mesh, elements
1375 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1376 @*/
1377 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1378 {
1379   MPI_Comm               comm;
1380   PetscPartitioner       partitioner;
1381   IS                     cellPart;
1382   PetscSection           cellPartSection;
1383   DMLabel                lblPartition, lblMigration;
1384   PetscSF                sfProcess, sfMigration;
1385   PetscBool              flg;
1386   PetscMPIInt            rank, numProcs, p;
1387   PetscErrorCode         ierr;
1388 
1389   PetscFunctionBegin;
1390   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1391   if (sf) PetscValidPointer(sf,3);
1392   PetscValidPointer(dmParallel,4);
1393 
1394   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1395   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1396   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1397   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1398 
1399   *dmParallel = NULL;
1400   if (numProcs == 1) PetscFunctionReturn(0);
1401 
1402   /* Create cell partition */
1403   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1404   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1405   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1406   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1407   {
1408     /* Convert partition to DMLabel */
1409     PetscInt proc, pStart, pEnd, npoints, poffset;
1410     const PetscInt *points;
1411     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1412     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1413     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1414     for (proc = pStart; proc < pEnd; proc++) {
1415       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1416       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1417       for (p = poffset; p < poffset+npoints; p++) {
1418         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1419       }
1420     }
1421     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1422   }
1423   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1424   {
1425     /* Build a global process SF */
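    /* Each rank contributes numProcs roots and numProcs leaves: leaf p on this rank points at
       root "rank" on process p, i.e. a complete exchange graph over the communicator, which
       DMPlexPartitionLabelInvert() uses below to exchange the partition information. */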
1426     PetscSFNode *remoteProc;
1427     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1428     for (p = 0; p < numProcs; ++p) {
1429       remoteProc[p].rank  = p;
1430       remoteProc[p].index = rank;
1431     }
1432     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1433     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1434     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1435   }
1436   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1437   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1438   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1439   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1440   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1441   if (flg) {
1442     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
1443     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1444     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1445   }
1446 
1447   /* Create non-overlapping parallel DM and migrate internal data */
1448   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1449   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1450   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1451 
1452   /* Build the point SF without overlap */
1453   ierr = DMPlexDistributeSF(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1454   if (flg) {
1455     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1456     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1457   }
1458 
1459   if (overlap > 0) {
1460     DM                 dmOverlap;
1461     PetscInt           nroots, nleaves;
1462     PetscSFNode       *newRemote;
1463     const PetscSFNode *oldRemote;
1464     PetscSF            sfOverlap, sfOverlapPoint;
1465     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1466     /* Add the partition overlap to the distributed DM */
1467     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1468     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1469     *dmParallel = dmOverlap;
1470     if (flg) {
1471       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1472       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1473     }
1474 
1475     /* Re-map the migration SF to establish the full migration pattern */
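    /* Broadcasting the original remote points through sfOverlap gives, for every point of the
       overlapped DM, its (rank, index) in the original numbering, so the replacement sfMigration
       maps the original points directly onto the overlapped parallel mesh. */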
1476     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1477     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1478     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1479     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1480     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1481     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1482     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1483     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1484     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1485     sfMigration = sfOverlapPoint;
1486     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1487   }
1488   /* Cleanup Partition */
1489   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1490   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1491   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1492   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1493   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1494   if (sf) {*sf = sfMigration;}
1495   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1496   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1497   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1498   PetscFunctionReturn(0);
1499 }
1500 
1501 #undef __FUNCT__
1502 #define __FUNCT__ "DMPlexDistributeOverlap"
1503 /*@C
1504   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1505 
1506   Collective on dm
1507 
1508   Input Parameters:
1509 + dm      - The non-overlapping distributed DMPlex object
1510 - overlap - The overlap of partitions; 0 is the default
1511 
1512   Output Parameters:
1513 + sf        - The PetscSF used for point distribution
1514 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1515 
1516   Notes: If the mesh was not distributed, dmOverlap is NULL.
1517 
1518   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1519   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1520   representation on the mesh.
1521 
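  Example: a minimal usage sketch (dmOver and sfOver are illustrative names; dm is assumed to be an
  already distributed, non-overlapping DMPlex and one layer of overlap is requested):
$    DM      dmOver = NULL;
$    PetscSF sfOver = NULL;
$
$    ierr = DMPlexDistributeOverlap(dm, 1, &sfOver, &dmOver);CHKERRQ(ierr);
$    if (dmOver) {ierr = DMDestroy(&dm);CHKERRQ(ierr); dm = dmOver;}
$    ierr = PetscSFDestroy(&sfOver);CHKERRQ(ierr);
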
1522   Level: intermediate
1523 
1524 .keywords: mesh, elements
1525 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1526 @*/
1527 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1528 {
1529   MPI_Comm               comm;
1530   PetscMPIInt            rank;
1531   PetscSection           rootSection, leafSection;
1532   IS                     rootrank, leafrank;
1533   DMLabel                lblOverlap;
1534   PetscSF                sfOverlap, sfStratified, newPointSF;
1535   PetscSFNode           *ghostRemote;
1536   const PetscSFNode     *overlapRemote;
1537   const PetscInt        *overlapLocal;
1538   PetscInt               p, pStart, pEnd, idx;
1539   PetscInt               numGhostPoints, numLocalPoints, overlapLeaves;
1540   PetscInt              *ghostLocal, *pointIDs, *recvPointIDs;
1541   PetscErrorCode         ierr;
1542 
1543   PetscFunctionBegin;
1544   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1545   if (sf) PetscValidPointer(sf, 3);
1546   PetscValidPointer(dmOverlap, 4);
1547 
1548   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1549   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1550 
1551   /* Compute point overlap with neighbouring processes on the distributed DM */
1552   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1553   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1554   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1555   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1556   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
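  /* The ownership information (which ranks see each root/leaf point) feeds DMPlexCreateOverlap(),
     which records in lblOverlap the points this rank must send to each neighbouring rank to
     realize the requested overlap. */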
1557   /* Convert overlap label to stratified migration SF */
1558   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1559   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1560   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1561   sfOverlap = sfStratified;
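  /* DMPlexStratifyMigrationSF() reorders the migration leaves so that received points arrive
     grouped by depth stratum, preserving the stratified point numbering DMPlex expects. */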
1562   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1563   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1564 
1565   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1566   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1567   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1568   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1569   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1570 
1571   /* Build the overlapping DM */
1572   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1573   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1574   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1575 
1576   /* Build the new point SF: propagate the owners' local point numbers through the overlap SF to generate remote root indices for the ghost points */
1577   ierr = PetscSFGetGraph(sfOverlap, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1578   ierr = DMLabelGetStratumSize(lblOverlap, rank, &numLocalPoints);CHKERRQ(ierr);
1579   numGhostPoints = overlapLeaves - numLocalPoints;
1580   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1581   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1582   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1583 
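  /* For every point this rank keeps for itself through the overlap SF, record its new local ID
     in pointIDs (indexed by the original point); broadcasting pointIDs over sfOverlap then hands
     each receiving rank the owner's local index (recvPointIDs), which becomes the remote index
     of the corresponding ghost point below. */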
1584   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
1585   for (p=0; p<overlapLeaves; p++) {
1586     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1587   }
1588   ierr = PetscSFBcastBegin(sfOverlap, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1589   ierr = PetscSFBcastEnd(sfOverlap, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1590   for (idx=0, p=0; p<overlapLeaves; p++) {
1591     if (overlapRemote[p].rank != rank) {
1592       ghostLocal[idx] = overlapLocal[p];
1593       ghostRemote[idx].index = recvPointIDs[p];
1594       ghostRemote[idx].rank = overlapRemote[p].rank;
1595       idx++;
1596     }
1597   }
1598   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1599   ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1600   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1601   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1602   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1603   /* Cleanup overlap partition */
1604   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1605   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1606   if (sf) *sf = sfOverlap;
1607   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1608   PetscFunctionReturn(0);
1609 }
1610