xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision aa3148a82a7e5c6a33bbee11fe8ef07dd40fca45)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
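/*
  A minimal usage sketch for the adjacency flags above, assuming a DMPlex dm has already been
  created; the flag combinations follow the Notes in the manual pages above:

    FEM adjacency, q \in closure(star(p)):
      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);

    FVM adjacency, q \in support(p+cone(p)):
      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
*/
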
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
153 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
169 
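/*
  A short sketch of enabling anchor-aware adjacency, assuming point-to-point constraints have
  already been attached to the DMPlex dm with DMPlexSetAnchors():

    ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);

  Constrained points are then replaced by their anchor points in DMPlexGetAdjacency().
*/
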
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) { /* append support[s] to adj[] if it is not already present */
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) { /* append cone[c] to adj[] if it is not already present */
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) { /* append closure[c] to adj[] if it is not already present */
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259 {
260   static PetscInt asiz = 0;
261   PetscInt maxAnchors = 1;
262   PetscInt aStart = -1, aEnd = -1;
263   PetscInt maxAdjSize;
264   PetscSection aSec = NULL;
265   IS aIS = NULL;
266   const PetscInt *anchors;
267   PetscErrorCode  ierr;
268 
269   PetscFunctionBeginHot;
270   if (useAnchors) {
271     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
272     if (aSec) {
273       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
274       maxAnchors = PetscMax(1,maxAnchors);
275       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
276       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
277     }
278   }
279   if (!*adj) {
280     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
281 
282     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
283     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
284     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
285     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288     asiz *= maxAnchors;
289     asiz  = PetscMin(asiz,pEnd-pStart);
290     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
291   }
292   if (*adjSize < 0) *adjSize = asiz;
293   maxAdjSize = *adjSize;
294   if (useTransitiveClosure) {
295     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
296   } else if (useCone) {
297     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
298   } else {
299     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
300   }
301   if (useAnchors && aSec) {
302     PetscInt origSize = *adjSize;
303     PetscInt numAdj = origSize;
304     PetscInt i = 0, j;
305     PetscInt *orig = *adj;
306 
307     while (i < origSize) {
308       PetscInt p = orig[i];
309       PetscInt aDof = 0;
310 
311       if (p >= aStart && p < aEnd) {
312         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
313       }
314       if (aDof) {
315         PetscInt aOff;
316         PetscInt s, q;
317 
318         for (j = i + 1; j < numAdj; j++) {
319           orig[j - 1] = orig[j];
320         }
321         origSize--;
322         numAdj--;
323         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
324         for (s = 0; s < aDof; ++s) {
325           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) { /* append the anchor point if it is not already present */
326             if (anchors[aOff+s] == orig[q]) break;
327           }
328           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
329         }
330       }
331       else {
332         i++;
333       }
334     }
335     *adjSize = numAdj;
336     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must PetscFree the adj array if it was not passed in.
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
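
/*
  A minimal usage sketch for DMPlexGetAdjacency(), mirroring its use in DMPlexCreateOverlap()
  below: passing adj = NULL and adjSize = PETSC_DETERMINE lets the routine allocate the array,
  which the caller must later PetscFree():

    PetscInt *adj = NULL, adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      ... use adj[a] ...
    }
    ierr = PetscFree(adj);CHKERRQ(ierr);
*/
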
374 #undef __FUNCT__
375 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
376 /*@
377   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
378 
379   Collective on DM
380 
381   Input Parameters:
382 + dm              - The DM
383 . sfPoint         - The PetscSF which encodes point connectivity
    . rootRankSection - The number of leaves for a given root point
    . rootRanks       - The rank of each edge into the root point
    . leafRankSection - The number of processes sharing a given leaf point
    - leafRanks       - The rank of each process sharing a leaf point
384 
385   Output Parameters:
386 + processRanks - A list of process neighbors, or NULL
387 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
388 
389   Level: developer
390 
391 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
392 @*/
393 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
394 {
395   const PetscSFNode *remotePoints;
396   PetscInt          *localPointsNew;
397   PetscSFNode       *remotePointsNew;
398   const PetscInt    *nranks;
399   PetscInt          *ranksNew;
400   PetscBT            neighbors;
401   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
402   PetscMPIInt        numProcs, proc, rank;
403   PetscErrorCode     ierr;
404 
405   PetscFunctionBegin;
406   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
407   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
408   if (processRanks) {PetscValidPointer(processRanks, 3);}
409   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
410   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
411   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
412   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
413   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
414   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
415   /* Compute root-to-leaf process connectivity */
416   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
417   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
418   for (p = pStart; p < pEnd; ++p) {
419     PetscInt ndof, noff, n;
420 
421     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
422     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
423     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
424   }
425   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
426   /* Compute leaf-to-neighbor process connectivity */
427   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
428   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
429   for (p = pStart; p < pEnd; ++p) {
430     PetscInt ndof, noff, n;
431 
432     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
433     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
434     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
435   }
436   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
437   /* Compute leaf-to-root process connectivity */
438   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
439   /* Calculate edges */
440   PetscBTClear(neighbors, rank);
441   for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
442   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
443   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
445   for (proc = 0, n = 0; proc < numProcs; ++proc) {
446     if (PetscBTLookup(neighbors, proc)) {
447       ranksNew[n]              = proc;
448       localPointsNew[n]        = proc;
449       remotePointsNew[n].index = rank;
450       remotePointsNew[n].rank  = proc;
451       ++n;
452     }
453   }
454   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
455   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
456   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
457   if (sfProcess) {
458     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
459     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
460     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
461     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
462   }
463   PetscFunctionReturn(0);
464 }
465 
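/*
  A sketch of how this routine is driven, following its use in DMPlexCreateOverlap() below: the
  rank sections and index sets come from DMPlexDistributeOwnership(), and the point SF comes
  from the DM itself.

    PetscSF sfPoint, sfProcess;

    ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
    ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProcess);CHKERRQ(ierr);
    ...
    ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
*/
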
466 #undef __FUNCT__
467 #define __FUNCT__ "DMPlexDistributeOwnership"
468 /*@
469   DMPlexDistributeOwnership - Compute owner information for shared points. This essentially makes the one-sided point SF information two-sided, so that roots also know which ranks reference them.
470 
471   Collective on DM
472 
473   Input Parameter:
474 . dm - The DM
475 
476   Output Parameters:
477 + rootSection - The number of leaves for a given root point
478 . rootrank    - The rank of each edge into the root point
479 . leafSection - The number of processes sharing a given leaf point
480 - leafrank    - The rank of each process sharing a leaf point
481 
482   Level: developer
483 
484 .seealso: DMPlexCreateOverlap()
485 @*/
486 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
487 {
488   MPI_Comm        comm;
489   PetscSF         sfPoint;
490   const PetscInt *rootdegree;
491   PetscInt       *myrank, *remoterank;
492   PetscInt        pStart, pEnd, p, nedges;
493   PetscMPIInt     rank;
494   PetscErrorCode  ierr;
495 
496   PetscFunctionBegin;
497   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
498   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
499   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
500   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
501   /* Compute number of leaves for each root */
502   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
503   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
504   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
506   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
507   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
508   /* Gather rank of each leaf to root */
509   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
510   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
511   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
512   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
513   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
514   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscFree(myrank);CHKERRQ(ierr);
516   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
517   /* Distribute remote ranks to leaves */
518   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
519   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
520   PetscFunctionReturn(0);
521 }
522 
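/*
  A minimal calling sketch, assuming dm already carries a point SF: the caller creates the two
  sections, and the routine fills them along with the two rank index sets.

    PetscSection rootSection, leafSection;
    IS           rootrank, leafrank;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
    ...
    ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
    ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/
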
523 #undef __FUNCT__
524 #define __FUNCT__ "DMPlexCreateOverlap"
525 /*@C
526   DMPlexCreateOverlap - Compute an SF describing the points that each process must receive from its neighbors in order to build the requested mesh overlap
527 
528   Collective on DM
529 
530   Input Parameters:
531 + dm          - The DM
532 . rootSection - The number of leaves for a given root point
533 . rootrank    - The rank of each edge into the root point
534 . leafSection - The number of processes sharing a given leaf point
535 - leafrank    - The rank of each process sharing a leaf point
536 
537   Output Parameter:
538 . overlapSF - An SF whose leaves are the new overlap points this process must receive, and whose roots are the owners of those points on neighboring processes
542 
543   Level: developer
544 
545 .seealso: DMPlexDistributeOwnership()
546 @*/
547 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
548 {
549   MPI_Comm           comm;
550   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
551   PetscSF            sfPoint, sfProc;
552   IS                 valueIS;
553   DMLabel            ovLeafLabel;
554   const PetscSFNode *remote;
555   const PetscInt    *local;
556   const PetscInt    *nrank, *rrank, *neighbors;
557   PetscSFNode       *ovRootPoints, *ovLeafPoints;
558   PetscSection       ovRootSection, ovLeafSection;
559   PetscInt          *adj = NULL;
560   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
561   PetscMPIInt        rank, numProcs;
562   PetscBool          useCone, useClosure, flg;
563   PetscErrorCode     ierr;
564 
565   PetscFunctionBegin;
566   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
567   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
568   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
569   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
570   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
571   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
572   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
573   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
574   /* Handle leaves: shared with the root point */
575   for (l = 0; l < nleaves; ++l) {
576     PetscInt adjSize = PETSC_DETERMINE, a;
577 
578     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
579     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
580   }
581   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
582   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
583   /* Handle roots */
584   for (p = pStart; p < pEnd; ++p) {
585     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
586 
587     if ((p >= sStart) && (p < sEnd)) {
588       /* Some leaves share a root with other leaves on different processes */
589       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
590       if (neighbors) {
591         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
592         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
593         for (n = 0; n < neighbors; ++n) {
594           const PetscInt remoteRank = nrank[noff+n];
595 
596           if (remoteRank == rank) continue;
597           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
598         }
599       }
600     }
601     /* Roots are shared with leaves */
602     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
603     if (!neighbors) continue;
604     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
605     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
606     for (n = 0; n < neighbors; ++n) {
607       const PetscInt remoteRank = rrank[noff+n];
608 
609       if (remoteRank == rank) continue;
610       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
611     }
612   }
613   ierr = PetscFree(adj);CHKERRQ(ierr);
614   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
615   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
616   /* We require the closure in the overlap */
617   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
618   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
619   if (useCone || !useClosure) {
620     IS              rankIS,   pointIS;
621     const PetscInt *ranks,   *points;
622     PetscInt        numRanks, numPoints, r, p;
623 
624     ierr = DMLabelGetValueIS(ovAdjByRank, &rankIS);CHKERRQ(ierr);
625     ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
626     ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
627     for (r = 0; r < numRanks; ++r) {
628       const PetscInt rank = ranks[r];
629 
630       ierr = DMLabelGetStratumIS(ovAdjByRank, rank, &pointIS);CHKERRQ(ierr);
631       ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
632       ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
633       for (p = 0; p < numPoints; ++p) {
634         PetscInt *closure = NULL, closureSize, c;
635 
636         ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
637         for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(ovAdjByRank, closure[c], rank);CHKERRQ(ierr);}
638         ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
639       }
640       ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
641       ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
642     }
643     ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
644     ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
645   }
646   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
647   if (flg) {
648     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
649     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
650   }
651   /* Convert to (point, rank) and use actual owners */
652   ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
653   ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
654   ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
655   ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
656   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
657   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
658   for (n = 0; n < numNeighbors; ++n) {
659     PetscInt numPoints;
660 
661     ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
662     ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
663   }
664   ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
665   ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
666   ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
667   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
668   for (n = 0; n < numNeighbors; ++n) {
669     IS              pointIS;
670     const PetscInt *points;
671     PetscInt        off, numPoints, p;
672 
673     ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
674     ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
675     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
676     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
677     for (p = 0; p < numPoints; ++p) {
678       ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
679       if (l >= 0) {ovRootPoints[off+p] = remote[l];}
680       else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
681     }
682     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
683     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
684   }
685   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
686   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
687   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
688   /* Make process SF */
689   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
690   if (flg) {
691     ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
692   }
693   /* Communicate overlap */
694   ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
695   ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
696   ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
697   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
698   ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
699   ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
700   for (p = 0; p < ovSize; p++) {
701     /* Don't import points from yourself */
702     if (ovLeafPoints[p].rank == rank) continue;
703     ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
704   }
705   /* Don't import points already in the pointSF */
706   for (l = 0; l < nleaves; ++l) {
707     ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
708   }
709   /* Convert receiver label to SF */
710   ierr = DMPlexPartitionLabelCreateSF(dm, ovLeafLabel, overlapSF);CHKERRQ(ierr);
711   ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
712   ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
713   if (flg) {
714     ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
715     ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
716   }
717   /* Clean up */
718   ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
719   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
720   ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
721   ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
722   ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
723   ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
724   PetscFunctionReturn(0);
725 }
726 
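/*
  A sketch of the overlap pipeline, assuming the ownership data was produced as in the sketch
  following DMPlexDistributeOwnership() above:

    PetscSF overlapSF;

    ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
    ...
    ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
*/
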
727 #undef __FUNCT__
728 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
729 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
730 {
731   MPI_Comm           comm;
732   PetscMPIInt        rank, numProcs;
733   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
734   PetscInt          *pointDepths, *remoteDepths, *ilocal;
735   PetscInt          *depthRecv, *depthShift, *depthIdx;
736   PetscSFNode       *iremote;
737   PetscSF            pointSF;
738   const PetscInt    *sharedLocal;
739   const PetscSFNode *overlapRemote, *sharedRemote;
740   PetscErrorCode     ierr;
741 
742   PetscFunctionBegin;
743   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
744   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
745   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
746   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
747   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
748 
749   /* Before building the migration SF we need to know the new stratum offsets */
750   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
751   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
752   for (d=0; d<dim+1; d++) {
753     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
754     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
755   }
756   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
757   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
758   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
759 
760   /* Count received points in each stratum and compute the internal strata shift */
761   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
762   for (d=0; d<dim+1; d++) depthRecv[d]=0;
763   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
764   depthShift[dim] = 0;
765   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
766   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
767   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
768   for (d=0; d<dim+1; d++) {
769     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
770     depthIdx[d] = pStart + depthShift[d];
771   }
772 
773   /* From the overlap SF, build an SF that describes the full overlap migration */
774   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
775   newLeaves = pEnd - pStart + nleaves;
776   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
777   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
778   /* First map local points to themselves */
779   for (d=0; d<dim+1; d++) {
780     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
781     for (p=pStart; p<pEnd; p++) {
782       point = p + depthShift[d];
783       ilocal[point] = point;
784       iremote[point].index = p;
785       iremote[point].rank = rank;
786       depthIdx[d]++;
787     }
788   }
789 
790   /* Add in the remote roots for currently shared points */
791   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
792   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
793   for (d=0; d<dim+1; d++) {
794     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
795     for (p=0; p<numSharedPoints; p++) {
796       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
797         point = sharedLocal[p] + depthShift[d];
798         iremote[point].index = sharedRemote[p].index;
799         iremote[point].rank = sharedRemote[p].rank;
800       }
801     }
802   }
803 
804   /* Now add the incoming overlap points */
805   for (p=0; p<nleaves; p++) {
806     point = depthIdx[remoteDepths[p]];
807     ilocal[point] = point;
808     iremote[point].index = overlapRemote[p].index;
809     iremote[point].rank = overlapRemote[p].rank;
810     depthIdx[remoteDepths[p]]++;
811   }
812   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
813 
814   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
815   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
816   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
817   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
818   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
819 
820   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
821   PetscFunctionReturn(0);
822 }
823 
824 #undef __FUNCT__
825 #define __FUNCT__ "DMPlexDistributeField"
826 /*@
827   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
828 
829   Collective on DM
830 
831   Input Parameters:
832 + dm - The DMPlex object
833 . pointSF - The PetscSF describing the communication pattern
834 . originalSection - The PetscSection for existing data layout
835 - originalVec - The existing data
836 
837   Output Parameters:
838 + newSection - The PetscSection describing the new data layout
839 - newVec - The new data
840 
841   Level: developer
842 
843 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
844 @*/
845 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
846 {
847   PetscSF        fieldSF;
848   PetscInt      *remoteOffsets, fieldSize;
849   PetscScalar   *originalValues, *newValues;
850   PetscErrorCode ierr;
851 
852   PetscFunctionBegin;
853   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
854   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
855 
856   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
857   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
858   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
859 
860   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
861   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
862   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
863   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
864   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
865   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
866   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
867   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
868   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
869   PetscFunctionReturn(0);
870 }
871 
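/*
  A minimal sketch of distributing a Vec of point data, modeled on the coordinate handling in
  DMPlexDistributeCoordinates() below; it assumes originalSection/originalVec describe the
  existing data and newSection is an already-created PetscSection for the target layout. The
  routine sizes and fills newVec.

    Vec newVec;

    ierr = VecCreate(PetscObjectComm((PetscObject) dm), &newVec);CHKERRQ(ierr);
    ierr = DMPlexDistributeField(dm, pointSF, originalSection, originalVec, newSection, newVec);CHKERRQ(ierr);
    ...
    ierr = VecDestroy(&newVec);CHKERRQ(ierr);
*/
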
872 #undef __FUNCT__
873 #define __FUNCT__ "DMPlexDistributeFieldIS"
874 /*@
875   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
876 
877   Collective on DM
878 
879   Input Parameters:
880 + dm - The DMPlex object
881 . pointSF - The PetscSF describing the communication pattern
882 . originalSection - The PetscSection for existing data layout
883 - originalIS - The existing data
884 
885   Output Parameters:
886 + newSection - The PetscSection describing the new data layout
887 - newIS - The new data
888 
889   Level: developer
890 
891 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
892 @*/
893 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
894 {
895   PetscSF         fieldSF;
896   PetscInt       *newValues, *remoteOffsets, fieldSize;
897   const PetscInt *originalValues;
898   PetscErrorCode  ierr;
899 
900   PetscFunctionBegin;
901   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
902   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
903 
904   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
905   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
906 
907   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
908   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
909   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
910   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
911   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
912   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
913   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
914   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
915   PetscFunctionReturn(0);
916 }
917 
918 #undef __FUNCT__
919 #define __FUNCT__ "DMPlexDistributeData"
920 /*@
921   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
922 
923   Collective on DM
924 
925   Input Parameters:
926 + dm - The DMPlex object
927 . pointSF - The PetscSF describing the communication pattern
928 . originalSection - The PetscSection for existing data layout
929 . datatype - The type of data
930 - originalData - The existing data
931 
932   Output Parameters:
933 + newSection - The PetscSection describing the new data layout
934 - newData - The new data
935 
936   Level: developer
937 
938 .seealso: DMPlexDistribute(), DMPlexDistributeField()
939 @*/
940 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
941 {
942   PetscSF        fieldSF;
943   PetscInt      *remoteOffsets, fieldSize;
944   PetscMPIInt    dataSize;
945   PetscErrorCode ierr;
946 
947   PetscFunctionBegin;
948   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
949   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
950 
951   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
952   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
953   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
954 
955   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
956   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
957   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
958   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
959   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
960   PetscFunctionReturn(0);
961 }
962 
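/*
  A short sketch of DMPlexDistributeData(), following its use in DMPlexCreateOverlap() above:
  arbitrary per-point data (here PetscSFNode pairs sent as MPIU_2INT) is migrated, and the
  routine allocates newData, which the caller must PetscFree().

    PetscSection newSection;
    PetscSFNode *newData;

    ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, sfProc, origSection, MPIU_2INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
    ...
    ierr = PetscFree(newData);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr);
*/
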
963 #undef __FUNCT__
964 #define __FUNCT__ "DMPlexDistributeCones"
965 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
966 {
967   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
968   MPI_Comm               comm;
969   PetscSF                coneSF;
970   PetscSection           originalConeSection, newConeSection;
971   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
972   PetscBool              flg;
973   PetscErrorCode         ierr;
974 
975   PetscFunctionBegin;
976   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
977   PetscValidPointer(dmParallel,4);
978   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
979 
980   /* Distribute cone section */
981   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
982   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
983   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
984   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
985   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
986   {
987     PetscInt pStart, pEnd, p;
988 
989     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
990     for (p = pStart; p < pEnd; ++p) {
991       PetscInt coneSize;
992       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
993       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
994     }
995   }
996   /* Communicate and renumber cones */
997   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
998   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
999   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1000   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1001   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1002   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1003   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1004 #if PETSC_USE_DEBUG
1005   {
1006     PetscInt  p;
1007     PetscBool valid = PETSC_TRUE;
1008     for (p = 0; p < newConesSize; ++p) {
1009       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1010     }
1011     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1012   }
1013 #endif
1014   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1015   if (flg) {
1016     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1017     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1018     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1019     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1020     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1021   }
1022   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1023   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1024   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1025   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1026   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1027   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1028   /* Create supports and stratify sieve */
1029   {
1030     PetscInt pStart, pEnd;
1031 
1032     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1033     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1034   }
1035   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1036   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1037   PetscFunctionReturn(0);
1038 }
1039 
1040 #undef __FUNCT__
1041 #define __FUNCT__ "DMPlexDistributeCoordinates"
1042 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1043 {
1044   MPI_Comm         comm;
1045   PetscSection     originalCoordSection, newCoordSection;
1046   Vec              originalCoordinates, newCoordinates;
1047   PetscInt         bs;
1048   const char      *name;
1049   const PetscReal *maxCell, *L;
1050   PetscErrorCode   ierr;
1051 
1052   PetscFunctionBegin;
1053   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1054   PetscValidPointer(dmParallel, 3);
1055 
1056   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1057   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1058   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1059   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1060   if (originalCoordinates) {
1061     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1062     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1063     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1064 
1065     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1066     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1067     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1068     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1069     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1070   }
1071   ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
1072   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
1073   PetscFunctionReturn(0);
1074 }
1075 
1076 #undef __FUNCT__
1077 #define __FUNCT__ "DMPlexDistributeLabels"
1078 /* Here we are assuming that process 0 always has everything */
1079 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1080 {
1081   MPI_Comm       comm;
1082   PetscMPIInt    rank;
1083   PetscInt       numLabels, numLocalLabels, l;
1084   PetscBool      hasLabels = PETSC_FALSE;
1085   PetscErrorCode ierr;
1086 
1087   PetscFunctionBegin;
1088   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1089   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1090   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1091   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1092   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1093 
1094   /* Everyone must have either the same number of labels, or none */
1095   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1096   numLabels = numLocalLabels;
1097   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1098   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1099   for (l = numLabels-1; l >= 0; --l) {
1100     DMLabel     label = NULL, labelNew = NULL;
1101     PetscBool   isdepth;
1102 
1103     if (hasLabels) {
1104       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1105       /* Skip "depth" because it is recreated */
1106       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1107     }
1108     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1109     if (isdepth) continue;
1110     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1111     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1112   }
1113   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1114   PetscFunctionReturn(0);
1115 }
1116 
1117 #undef __FUNCT__
1118 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1119 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1120 {
1121   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1122   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1123   MPI_Comm        comm;
1124   const PetscInt *gpoints;
1125   PetscInt        dim, depth, n, d;
1126   PetscErrorCode  ierr;
1127 
1128   PetscFunctionBegin;
1129   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1130   PetscValidPointer(dmParallel, 4);
1131 
1132   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1133   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1134 
1135   /* Setup hybrid structure */
1136   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1137   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1138   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1139   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1140   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1141   for (d = 0; d <= dim; ++d) {
1142     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1143 
1144     if (pmax < 0) continue;
1145     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1146     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1147     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1148     for (p = 0; p < n; ++p) {
1149       const PetscInt point = gpoints[p];
1150 
1151       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1152     }
1153     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1154     else            pmesh->hybridPointMax[d] = -1;
1155   }
1156   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1157   PetscFunctionReturn(0);
1158 }
1159 
1160 #undef __FUNCT__
1161 #define __FUNCT__ "DMPlexDistributeSetupTree"
1162 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1163 {
1164   MPI_Comm        comm;
1165   DM              refTree;
1166   PetscSection    origParentSection, newParentSection;
1167   PetscInt        *origParents, *origChildIDs;
1168   PetscBool       flg;
1169   PetscErrorCode  ierr;
1170 
1171   PetscFunctionBegin;
1172   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1173   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1174   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1175 
1176   /* Set up tree */
1177   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1178   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1179   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1180   if (origParentSection) {
1181     PetscInt        pStart, pEnd;
1182     PetscInt        *newParents, *newChildIDs;
1183     PetscInt        *remoteOffsetsParents, newParentSize;
1184     PetscSF         parentSF;
1185 
1186     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1187     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1188     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1189     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1190     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1191     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1192     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1193     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1194     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1195     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1196     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1197     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1198     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1199     if (flg) {
1200       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1201       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1202       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1203       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1204       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1205     }
1206     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1207     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1208     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1209     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1210   }
1211   PetscFunctionReturn(0);
1212 }
1213 
1214 #undef __FUNCT__
1215 #define __FUNCT__ "DMPlexDistributeSF"
1216 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
1217 {
1218   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1219   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1220   PetscMPIInt            rank, numProcs;
1221   MPI_Comm               comm;
1222   PetscErrorCode         ierr;
1223 
1224   PetscFunctionBegin;
1225   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1226   PetscValidPointer(dmParallel,7);
1227 
1228   /* Create point SF for parallel mesh */
1229   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1230   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1231   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1232   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
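  /* Ownership of shared points is resolved in two sweeps over the migration SF: each leaf first bids
     for the points it received (unless an owner from the original partition was already broadcast
     down from the roots), an MPI_MAXLOC reduction then picks exactly one winning rank per root point,
     and a final broadcast of the winners tells every rank which of its points are ghosts owned by
     another process. */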
1233   {
1234     const PetscInt *leaves;
1235     PetscSFNode    *remotePoints, *rowners, *lowners;
1236     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1237     PetscInt        pStart, pEnd;
1238 
1239     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1240     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1241     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1242     for (p=0; p<numRoots; p++) {
1243       rowners[p].rank  = -1;
1244       rowners[p].index = -1;
1245     }
1246     if (origPart) {
1247       /* Make sure points in the original partition are not assigned to other procs */
1248       const PetscInt *origPoints;
1249 
1250       ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
1251       for (p = 0; p < numProcs; ++p) {
1252         PetscInt dof, off, d;
1253 
1254         ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
1255         ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
1256         for (d = off; d < off+dof; ++d) {
1257           rowners[origPoints[d]].rank = p;
1258         }
1259       }
1260       ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
1261     }
1262     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1263     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1264     for (p = 0; p < numLeaves; ++p) {
1265       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1266         lowners[p].rank  = rank;
1267         lowners[p].index = leaves ? leaves[p] : p;
1268       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC ignores our bid */
1269         lowners[p].rank  = -2;
1270         lowners[p].index = -2;
1271       }
1272     }
1273     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction, so flag them so that MAXLOC ignores them */
1274       rowners[p].rank  = -3;
1275       rowners[p].index = -3;
1276     }
1277     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1278     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1279     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1280     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1281     for (p = 0; p < numLeaves; ++p) {
1282       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1283       if (lowners[p].rank != rank) ++numGhostPoints;
1284     }
1285     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1286     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1287     for (p = 0, gp = 0; p < numLeaves; ++p) {
1288       if (lowners[p].rank != rank) {
1289         ghostPoints[gp]        = leaves ? leaves[p] : p;
1290         remotePoints[gp].rank  = lowners[p].rank;
1291         remotePoints[gp].index = lowners[p].index;
1292         ++gp;
1293       }
1294     }
1295     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1296     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1297     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1298   }
1299   pmesh->useCone    = mesh->useCone;
1300   pmesh->useClosure = mesh->useClosure;
1301   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1302   PetscFunctionReturn(0);
1303 }
1304 
1305 #undef __FUNCT__
1306 #define __FUNCT__ "DMPlexDistribute"
1307 /*@C
1308   DMPlexDistribute - Distributes the mesh and any associated sections.
1309 
1310   Collective on DM
1311 
1312   Input Parameters:
1313 + dm  - The original DMPlex object
1314 - overlap - The overlap of partitions, 0 is the default
1315 
1316   Output Parameters:
1317 + sf - The PetscSF used for point distribution
1318 - dmParallel - The distributed DMPlex object, or NULL
1319 
1320   Note: If the mesh was not distributed, *dmParallel is set to NULL.
1321 
1322   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1323   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1324   representation on the mesh.
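
  A minimal usage sketch (assuming an existing serial DMPlex dm; on a single process no distribution
  takes place and the output DM is set to NULL):
$    DM dmDist = NULL;
$
$    ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }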
1325 
1326   Level: intermediate
1327 
1328 .keywords: mesh, elements
1329 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1330 @*/
1331 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1332 {
1333   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1334   MPI_Comm               comm;
1335   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1336   DM                     dmOverlap;
1337   IS                     cellPart,        part;
1338   PetscSection           cellPartSection, partSection;
1339   PetscSFNode           *remoteRanks, *newRemote;
1340   const PetscSFNode     *oldRemote;
1341   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1342   ISLocalToGlobalMapping renumbering;
1343   PetscBool              flg;
1344   PetscMPIInt            rank, numProcs, p;
1345   PetscErrorCode         ierr;
1346 
1347   PetscFunctionBegin;
1348   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1349   if (sf) PetscValidPointer(sf,3);
1350   PetscValidPointer(dmParallel,4);
1351 
1352   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1353   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1354   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1355   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1356 
1357   *dmParallel = NULL;
1358   if (numProcs == 1) PetscFunctionReturn(0);
1359 
1360   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1361   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1362   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1363   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1364   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1365   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1366   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1367   if (!rank) numRemoteRanks = numProcs;
1368   else       numRemoteRanks = 0;
1369   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1370   for (p = 0; p < numRemoteRanks; ++p) {
1371     remoteRanks[p].rank  = p;
1372     remoteRanks[p].index = 0;
1373   }
1374   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1375   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
1376   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1377   if (flg) {
1378     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1379     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1380     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1381     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1382   }
1383   /* Close the partition over the mesh */
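  /* DMPlexCreatePartitionClosure() expands each rank's cell set to its transitive closure in the
     Hasse diagram, so the faces, edges, and vertices of every assigned cell travel with it. */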
1384   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1385   /* Create new mesh */
1386   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1387   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1388   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1389   pmesh = (DM_Plex*) (*dmParallel)->data;
1390   pmesh->useAnchors = mesh->useAnchors;
1391 
1392   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1393   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1394   if (flg) {
1395     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1396     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1397     ierr = ISView(part, NULL);CHKERRQ(ierr);
1398     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1399     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1400     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1401   }
1402   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1403 
1404   /* Migrate data to a non-overlapping parallel DM */
1405   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1406   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1407   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1408   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1409   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1410 
1411   /* Build the point SF without overlap */
1412   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1413   if (flg) {
1414     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1415     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1416   }
1417 
1418   if (overlap > 0) {
1419     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1420     /* Add the partition overlap to the distributed DM */
1421     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1422     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1423     *dmParallel = dmOverlap;
1424     if (flg) {
1425       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1426       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1427     }
1428 
1429     /* Re-map the pointSF to establish the full migration pattern */
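    /* Pushing the original remote entries of pointSF through overlapSF composes the two migrations,
       so the resulting overlapPointSF maps every point of the overlapping mesh directly back to its
       root in the original serial numbering. */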
1430     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1431     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1432     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1433     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1434     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1435     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1436     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1437     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1438     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1439     pointSF = overlapPointSF;
1440     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1441   }
1442   /* Cleanup Partition */
1443   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1444   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1445   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1446   ierr = ISDestroy(&part);CHKERRQ(ierr);
1447   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1448   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1449   /* Copy BC */
1450   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1451   /* Cleanup */
1452   if (sf) {*sf = pointSF;}
1453   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1454   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1455   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1456   PetscFunctionReturn(0);
1457 }
1458 
1459 #undef __FUNCT__
1460 #define __FUNCT__ "DMPlexDistributeOverlap"
1461 /*@C
1462   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1463 
1464   Collective on DM
1465 
1466   Input Parameters:
1467 + dm  - The non-overlapping distributed DMPlex object
1468 . overlap - The overlap of partitions (only a single level of overlap is currently supported)
- renumbering - The point renumbering (ISLocalToGlobalMapping) produced when the mesh was first distributed
1469 
1470   Output Parameters:
1471 + sf - The PetscSF used for point distribution
1472 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1473 
1474   Note: If the mesh was not distributed, *dmOverlap is set to NULL.
1475 
1476   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1477   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1478   representation on the mesh.
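
  A minimal sketch of the intended call sequence, mirroring the call made inside DMPlexDistribute()
  (here renumbering is assumed to be the ISLocalToGlobalMapping produced when the mesh was first
  distributed, and dmDist the non-overlapping parallel mesh):
$    DM dmOverlap = NULL;
$
$    ierr = DMPlexDistributeOverlap(dmDist, 1, renumbering, NULL, &dmOverlap);CHKERRQ(ierr);
$    ierr = DMDestroy(&dmDist);CHKERRQ(ierr);
$    dmDist = dmOverlap;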
1479 
1480   Level: intermediate
1481 
1482 .keywords: mesh, elements
1483 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1484 @*/
1485 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1486 {
1487   MPI_Comm               comm;
1488   PetscMPIInt            rank;
1489   PetscSection           rootSection, leafSection;
1490   IS                     rootrank, leafrank;
1491   PetscSection           coneSection;
1492   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1493   PetscSFNode           *ghostRemote;
1494   const PetscSFNode     *overlapRemote;
1495   ISLocalToGlobalMapping overlapRenumbering;
1496   const PetscInt        *renumberingArray, *overlapLocal;
1497   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1498   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1499   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1500   PetscErrorCode         ierr;
1501 
1502   PetscFunctionBegin;
1503   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1504   if (sf) PetscValidPointer(sf, 4);
1505   PetscValidPointer(dmOverlap, 5);
1506 
1507   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1508   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1509   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1510 
1511   /* Compute point overlap with neighbouring processes on the distributed DM */
1512   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1513   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1514   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1515   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1516   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1517   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1518   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1519   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1520   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1521   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1522 
1523   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1524   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1525 
1526   /* Convert cones to global numbering before migrating them */
1527   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1528   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1529   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1530   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1531 
1532   /* Derive the new local-to-global mapping from the old one */
1533   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1534   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1535   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1536   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1537   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1538   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1539 
1540   /* Build the overlapping DM */
1541   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1542   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1543   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1544   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1545   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1546   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1547   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1548 
1549   /* Build the new point SF by propagating the owners' new local point indices to generate the remote root indices */
1550   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1551   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1552   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1553   numGhostPoints = numSharedPoints + numOverlapPoints;
1554   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1555   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1556   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
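  /* For points whose migration source is local, record the new local index they receive in the
     overlapping mesh; broadcasting these indices over the migration SF gives every receiver, for each
     ghost point, the owner's local index that becomes the remote root of the new point SF. */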
1557   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
1558   for (p=0; p<overlapLeaves; p++) {
1559     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1560   }
1561   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1562   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1563   for (idx=0, p=0; p<overlapLeaves; p++) {
1564     if (overlapRemote[p].rank != rank) {
1565       ghostLocal[idx] = overlapLocal[p];
1566       ghostRemote[idx].index = recvPointIDs[p];
1567       ghostRemote[idx].rank = overlapRemote[p].rank;
1568       idx++;
1569     }
1570   }
1571   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1572   ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1573   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1574   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1575   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1576   /* Cleanup overlap partition */
1577   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1578   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1579   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1580   if (sf) *sf = migrationSF;
1581   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1582   PetscFunctionReturn(0);
1583 }
1584