xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 933c14ac84e02798fb956f63d1e52edc5d8c09a5)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petscsf.h>
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
124 /*@
125   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
126 
127   Input Parameters:
128 + dm      - The DM object
129 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
130 
131   Level: intermediate
132 
133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134 @*/
135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136 {
137   DM_Plex *mesh = (DM_Plex *) dm->data;
138 
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
141   mesh->useAnchors = useAnchors;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
147 /*@
148   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
149 
150   Input Parameter:
151 . dm      - The DM object
152 
153   Output Parameter:
154 . useAnchors - Flag to use the closure.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
155 
156   Level: intermediate
157 
158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159 @*/
160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161 {
162   DM_Plex *mesh = (DM_Plex *) dm->data;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
166   PetscValidIntPointer(useAnchors, 2);
167   *useAnchors = mesh->useAnchors;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174 {
175   const PetscInt *cone = NULL;
176   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177   PetscErrorCode  ierr;
178 
179   PetscFunctionBeginHot;
180   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
181   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
182   for (c = 0; c <= coneSize; ++c) {
183     const PetscInt  point   = !c ? p : cone[c-1];
184     const PetscInt *support = NULL;
185     PetscInt        supportSize, s, q;
186 
187     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
188     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
189     for (s = 0; s < supportSize; ++s) {
190       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
191         if (support[s] == adj[q]) break;
192       }
193       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
194     }
195   }
196   *adjSize = numAdj;
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
202 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
203 {
204   const PetscInt *support = NULL;
205   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
206   PetscErrorCode  ierr;
207 
208   PetscFunctionBeginHot;
209   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
210   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
211   for (s = 0; s <= supportSize; ++s) {
212     const PetscInt  point = !s ? p : support[s-1];
213     const PetscInt *cone  = NULL;
214     PetscInt        coneSize, c, q;
215 
216     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
217     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
218     for (c = 0; c < coneSize; ++c) {
219       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
220         if (cone[c] == adj[q]) break;
221       }
222       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
223     }
224   }
225   *adjSize = numAdj;
226   PetscFunctionReturn(0);
227 }
228 
229 #undef __FUNCT__
230 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
231 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
232 {
233   PetscInt      *star = NULL;
234   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
235   PetscErrorCode ierr;
236 
237   PetscFunctionBeginHot;
238   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
239   for (s = 0; s < starSize*2; s += 2) {
240     const PetscInt *closure = NULL;
241     PetscInt        closureSize, c, q;
242 
243     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
244     for (c = 0; c < closureSize*2; c += 2) {
245       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
246         if (closure[c] == adj[q]) break;
247       }
248       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
249     }
250     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
251   }
252   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
253   *adjSize = numAdj;
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
259 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
260 {
261   static PetscInt asiz = 0;
262   PetscInt maxAnchors = 1;
263   PetscInt aStart = -1, aEnd = -1;
264   PetscInt maxAdjSize;
265   PetscSection aSec = NULL;
266   IS aIS = NULL;
267   const PetscInt *anchors;
268   PetscErrorCode  ierr;
269 
270   PetscFunctionBeginHot;
271   if (useAnchors) {
272     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
273     if (aSec) {
274       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
275       maxAnchors = PetscMax(1,maxAnchors);
276       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
277       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
278     }
279   }
280   if (!*adj) {
281     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
282 
283     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
284     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
285     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
286     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
287     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
288     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
289     asiz *= maxAnchors;
290     asiz  = PetscMin(asiz,pEnd-pStart);
291     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
292   }
293   if (*adjSize < 0) *adjSize = asiz;
294   maxAdjSize = *adjSize;
295   if (useTransitiveClosure) {
296     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
297   } else if (useCone) {
298     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
299   } else {
300     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
301   }
302   if (useAnchors && aSec) {
303     PetscInt origSize = *adjSize;
304     PetscInt numAdj = origSize;
305     PetscInt i = 0, j;
306     PetscInt *orig = *adj;
307 
308     while (i < origSize) {
309       PetscInt p = orig[i];
310       PetscInt aDof = 0;
311 
312       if (p >= aStart && p < aEnd) {
313         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
314       }
315       if (aDof) {
316         PetscInt aOff;
317         PetscInt s, q;
318 
319         for (j = i + 1; j < numAdj; j++) {
320           orig[j - 1] = orig[j];
321         }
322         origSize--;
323         numAdj--;
324         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
325         for (s = 0; s < aDof; ++s) {
326           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
327             if (anchors[aOff+s] == orig[q]) break;
328           }
329           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
330         }
331       }
332       else {
333         i++;
334       }
335     }
336     *adjSize = numAdj;
337     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
338   }
339   PetscFunctionReturn(0);
340 }
341 
342 #undef __FUNCT__
343 #define __FUNCT__ "DMPlexGetAdjacency"
344 /*@
345   DMPlexGetAdjacency - Return all points adjacent to the given point
346 
347   Input Parameters:
348 + dm - The DM object
349 . p  - The point
350 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
351 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
352 
353   Output Parameters:
354 + adjSize - The number of adjacent points
355 - adj - The adjacent points
356 
357   Level: advanced
358 
359   Notes: The user must PetscFree the adj array if it was not passed in.
360 
361 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
362 @*/
363 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
364 {
365   DM_Plex       *mesh = (DM_Plex *) dm->data;
366   PetscErrorCode ierr;
367 
368   PetscFunctionBeginHot;
369   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
370   PetscValidPointer(adjSize,3);
371   PetscValidPointer(adj,4);
372   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 #undef __FUNCT__
376 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
377 /*@
378   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
379 
380   Collective on DM
381 
  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - Section giving the number of leaf ranks for each root point
. rootRanks       - IS of the leaf ranks for each root point
. leafRankSection - Section giving the number of ranks sharing each leaf point
- leafRanks       - IS of the ranks sharing each leaf point
385 
386   Output Parameters:
387 + processRanks - A list of process neighbors, or NULL
388 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
389 
390   Level: developer
391 
392 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
393 @*/
394 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
395 {
396   const PetscSFNode *remotePoints;
397   PetscInt          *localPointsNew;
398   PetscSFNode       *remotePointsNew;
399   const PetscInt    *nranks;
400   PetscInt          *ranksNew;
401   PetscBT            neighbors;
402   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
403   PetscMPIInt        numProcs, proc, rank;
404   PetscErrorCode     ierr;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
408   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
409   if (processRanks) {PetscValidPointer(processRanks, 3);}
410   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
411   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
412   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
413   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
414   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
415   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
416   /* Compute root-to-leaf process connectivity */
417   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
418   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
419   for (p = pStart; p < pEnd; ++p) {
420     PetscInt ndof, noff, n;
421 
422     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
423     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
424     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
425   }
426   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
427   /* Compute leaf-to-neighbor process connectivity */
428   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
429   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
430   for (p = pStart; p < pEnd; ++p) {
431     PetscInt ndof, noff, n;
432 
433     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
434     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
435     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
436   }
437   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
438   /* Compute leaf-to-root process connectivity */
439   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
440   /* Calculate edges */
441   PetscBTClear(neighbors, rank);
442   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
443   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
445   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
446   for(proc = 0, n = 0; proc < numProcs; ++proc) {
447     if (PetscBTLookup(neighbors, proc)) {
448       ranksNew[n]              = proc;
449       localPointsNew[n]        = proc;
450       remotePointsNew[n].index = rank;
451       remotePointsNew[n].rank  = proc;
452       ++n;
453     }
454   }
455   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
456   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
457   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
458   if (sfProcess) {
459     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
460     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
461     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
462     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
463   }
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "DMPlexDistributeOwnership"
469 /*@
470   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
471 
472   Collective on DM
473 
474   Input Parameter:
475 . dm - The DM
476 
477   Output Parameters:
478 + rootSection - The number of leaves for a given root point
479 . rootrank    - The rank of each edge into the root point
480 . leafSection - The number of processes sharing a given leaf point
481 - leafrank    - The rank of each process sharing a leaf point
482 
483   Level: developer
484 
485 .seealso: DMPlexCreateOverlap()
486 @*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;      /* the DM's point SF */
  const PetscInt *rootdegree;   /* number of leaves attached to each local root point */
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root: rootSection gets dof = degree of each chart point */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root: every local point reports this process's rank,
     and the SF gather concatenates one entry per attached leaf into remoterank */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  /* *rootrank takes ownership of the remoterank array (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves, producing leafSection/*leafrank */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "DMPlexCreateOverlap"
526 /*@C
  DMPlexCreateOverlap - Compute an SF describing the cell overlap: the points each process must receive from its neighbors, derived from the shared-point ownership information.
528 
529   Collective on DM
530 
531   Input Parameters:
532 + dm          - The DM
533 . rootSection - The number of leaves for a given root point
534 . rootrank    - The rank of each edge into the root point
535 . leafSection - The number of processes sharing a given leaf point
536 - leafrank    - The rank of each process sharing a leaf point
537 
  Output Parameter:
. overlapSF - An SF mapping the new local overlap points to their owning points on remote processes
543 
544   Level: developer
545 
546 .seealso: DMPlexDistributeOwnership()
547 @*/
548 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
549 {
550   MPI_Comm           comm;
551   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
552   PetscSF            sfPoint, sfProc;
553   IS                 valueIS;
554   DMLabel            ovLeafLabel;
555   const PetscSFNode *remote;
556   const PetscInt    *local;
557   const PetscInt    *nrank, *rrank, *neighbors;
558   PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
559   PetscSection       ovRootSection, ovLeafSection;
560   PetscInt          *adj = NULL;
561   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
562   PetscInt           idx, numRemote;
563   PetscMPIInt        rank, numProcs;
564   PetscBool          useCone, useClosure, flg;
565   PetscErrorCode     ierr;
566 
567   PetscFunctionBegin;
568   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
569   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
570   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
571   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
572   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
573   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
574   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
575   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
576   /* Handle leaves: shared with the root point */
577   for (l = 0; l < nleaves; ++l) {
578     PetscInt adjSize = PETSC_DETERMINE, a;
579 
580     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
581     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
582   }
583   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
584   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
585   /* Handle roots */
586   for (p = pStart; p < pEnd; ++p) {
587     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
588 
589     if ((p >= sStart) && (p < sEnd)) {
590       /* Some leaves share a root with other leaves on different processes */
591       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
592       if (neighbors) {
593         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
594         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
595         for (n = 0; n < neighbors; ++n) {
596           const PetscInt remoteRank = nrank[noff+n];
597 
598           if (remoteRank == rank) continue;
599           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
600         }
601       }
602     }
603     /* Roots are shared with leaves */
604     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
605     if (!neighbors) continue;
606     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
607     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
608     for (n = 0; n < neighbors; ++n) {
609       const PetscInt remoteRank = rrank[noff+n];
610 
611       if (remoteRank == rank) continue;
612       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
613     }
614   }
615   ierr = PetscFree(adj);CHKERRQ(ierr);
616   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
617   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
618   /* We require the closure in the overlap */
619   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
620   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
621   if (useCone || !useClosure) {
622     IS              rankIS,   pointIS;
623     const PetscInt *ranks,   *points;
624     PetscInt        numRanks, numPoints, r, p;
625 
626     ierr = DMLabelGetValueIS(ovAdjByRank, &rankIS);CHKERRQ(ierr);
627     ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
628     ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
629     for (r = 0; r < numRanks; ++r) {
630       const PetscInt rank = ranks[r];
631 
632       ierr = DMLabelGetStratumIS(ovAdjByRank, rank, &pointIS);CHKERRQ(ierr);
633       ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
634       ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
635       for (p = 0; p < numPoints; ++p) {
636         PetscInt *closure = NULL, closureSize, c;
637 
638         ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
639         for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(ovAdjByRank, closure[c], rank);CHKERRQ(ierr);}
640         ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
641       }
642       ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
643       ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
644     }
645     ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
646     ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
647   }
648   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
649   if (flg) {
650     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
651     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
652   }
653   /* Convert to (point, rank) and use actual owners */
654   ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
655   ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
656   ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
657   ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
658   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
659   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
660   for (n = 0; n < numNeighbors; ++n) {
661     PetscInt numPoints;
662 
663     ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
664     ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
665   }
666   ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
667   ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
668   ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
669   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
670   for (n = 0; n < numNeighbors; ++n) {
671     IS              pointIS;
672     const PetscInt *points;
673     PetscInt        off, numPoints, p;
674 
675     ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
676     ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
677     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
678     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
679     for (p = 0; p < numPoints; ++p) {
680       ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
681       if (l >= 0) {ovRootPoints[off+p] = remote[l];}
682       else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
683     }
684     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
685     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
686   }
687   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
688   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
689   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
690   /* Make process SF */
691   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
692   if (flg) {
693     ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
694   }
695   /* Communicate overlap */
696   ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
697   ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
698   ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
699   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
700   ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
701   ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
702   for (p = 0; p < ovSize; p++) {
703     /* Don't import points from yourself */
704     if (ovLeafPoints[p].rank == rank) continue;
705     ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
706   }
707   /* Don't import points already in the pointSF */
708   for (l = 0; l < nleaves; ++l) {
709     ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
710   }
711   for (numRemote = 0, n = 0; n < numProcs; ++n) {
712     PetscInt numPoints;
713     ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
714     numRemote += numPoints;
715   }
716   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
717   for (idx = 0, n = 0; n < numProcs; ++n) {
718     IS remoteRootIS;
719     PetscInt numPoints;
720     const PetscInt *remoteRoots;
721     ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
722     if (numPoints <= 0) continue;
723     ierr = DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);CHKERRQ(ierr);
724     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
725     for (p = 0; p < numPoints; p++) {
726       remotePoints[idx].index = remoteRoots[p];
727       remotePoints[idx].rank = n;
728       idx++;
729     }
730     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
731     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
732   }
733   ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
734   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);CHKERRQ(ierr);
735   ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
736   ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
737   ierr = PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
738   if (flg) {
739     ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
740     ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
741   }
742   /* Clean up */
743   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
744   ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
745   ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
746   ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
747   ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
748   PetscFunctionReturn(0);
749 }
750 
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
/* DMPlexCreateOverlapMigrationSF - Build a migration SF mapping every current local point plus
   every incoming overlap point (the leaves of overlapSF) into a new contiguous local numbering
   in which each depth stratum stays contiguous: received points are appended at the end of
   their stratum and the following strata are shifted accordingly. */
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  /* Tag every local point with its depth, then pull the depth of each incoming overlap point */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  /* Shift each stratum by the number of received points in strata numbered before it in the
     chart ordering (cells = depth dim first, then vertices = depth 0, then the remaining
     strata in decreasing depth).
     NOTE(review): the last loop adds only depthRecv[d+1] without accumulating lower terms,
     which is sufficient for dim <= 3 but looks suspect for higher dimensions -- confirm. */
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  /* depthIdx[d] tracks the next free index in stratum d of the new numbering; the received
     points are appended there below */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: every pre-existing local point plus
     every received overlap point becomes a leaf of the migration SF */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves, shifted into the new numbering */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points, overwriting the self-mapping so
     shared points keep pointing at their true owners */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now append the incoming overlap points at the end of their respective strata */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  /* The SF takes ownership of ilocal and iremote */
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
847 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeField"
/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  /* Lay out the new section and obtain, for each leaf point, the offset of its data at the root */
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  /* Size the output vector from the distributed layout; the vector type follows the DM */
  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  /* Expand the point SF to a per-dof SF and move the values root -> leaf */
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  /* NOTE(review): remoteOffsets allocated by PetscSFDistributeSection() is not freed here --
     confirm whether the caller or the SF owns it */
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
895 
896 #undef __FUNCT__
897 #define __FUNCT__ "DMPlexDistributeFieldIS"
898 /*@
899   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
900 
901   Collective on DM
902 
903   Input Parameters:
904 + dm - The DMPlex object
905 . pointSF - The PetscSF describing the communication pattern
906 . originalSection - The PetscSection for existing data layout
907 - originalIS - The existing data
908 
909   Output Parameters:
910 + newSection - The PetscSF describing the new data layout
911 - newIS - The new data
912 
913   Level: developer
914 
915 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
916 @*/
917 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
918 {
919   PetscSF         fieldSF;
920   PetscInt       *newValues, *remoteOffsets, fieldSize;
921   const PetscInt *originalValues;
922   PetscErrorCode  ierr;
923 
924   PetscFunctionBegin;
925   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
926   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
927 
928   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
929   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
930 
931   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
932   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
933   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
934   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
935   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
936   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
937   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
938   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
939   PetscFunctionReturn(0);
940 }
941 
942 #undef __FUNCT__
943 #define __FUNCT__ "DMPlexDistributeData"
944 /*@
945   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
946 
947   Collective on DM
948 
949   Input Parameters:
950 + dm - The DMPlex object
951 . pointSF - The PetscSF describing the communication pattern
952 . originalSection - The PetscSection for existing data layout
953 . datatype - The type of data
954 - originalData - The existing data
955 
956   Output Parameters:
957 + newSection - The PetscSection describing the new data layout
958 - newData - The new data
959 
960   Level: developer
961 
962 .seealso: DMPlexDistribute(), DMPlexDistributeField()
963 @*/
964 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
965 {
966   PetscSF        fieldSF;
967   PetscInt      *remoteOffsets, fieldSize;
968   PetscMPIInt    dataSize;
969   PetscErrorCode ierr;
970 
971   PetscFunctionBegin;
972   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
973   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
974 
975   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
976   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
977   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
978 
979   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
980   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
981   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
982   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
983   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
984   PetscFunctionReturn(0);
985 }
986 
987 #undef __FUNCT__
988 #define __FUNCT__ "DMPlexDistributeCones"
989 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
990 {
991   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
992   MPI_Comm               comm;
993   PetscSF                coneSF;
994   PetscSection           originalConeSection, newConeSection;
995   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
996   PetscBool              flg;
997   PetscErrorCode         ierr;
998 
999   PetscFunctionBegin;
1000   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1001   PetscValidPointer(dmParallel,4);
1002   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1003 
1004   /* Distribute cone section */
1005   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1006   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
1007   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
1008   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
1009   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
1010   {
1011     PetscInt pStart, pEnd, p;
1012 
1013     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
1014     for (p = pStart; p < pEnd; ++p) {
1015       PetscInt coneSize;
1016       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
1017       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
1018     }
1019   }
1020   /* Communicate and renumber cones */
1021   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
1022   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1023   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1024   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1025   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1026   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1027   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1028 #if PETSC_USE_DEBUG
1029   {
1030     PetscInt  p;
1031     PetscBool valid = PETSC_TRUE;
1032     for (p = 0; p < newConesSize; ++p) {
1033       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1034     }
1035     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1036   }
1037 #endif
1038   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1039   if (flg) {
1040     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1041     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1042     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1043     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1044     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1045   }
1046   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1047   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1048   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1049   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1050   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1051   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1052   /* Create supports and stratify sieve */
1053   {
1054     PetscInt pStart, pEnd;
1055 
1056     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1057     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1058   }
1059   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1060   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1061   PetscFunctionReturn(0);
1062 }
1063 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCoordinates"
/* Distribute the local coordinate section and coordinate vector from dm to dmParallel using
   the migration SF, and carry over any periodicity information. */
PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *L;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  /* The DM may have no local coordinates yet; skip distribution in that case */
  if (originalCoordinates) {
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    /* Keep the original vector's name on the distributed copy */
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    /* Preserve the block size of the coordinate vector */
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    /* Drop our reference; dmParallel retains its own after DMSetCoordinatesLocal() */
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Copy periodicity data (maxCell/L) when the original mesh declares it */
  ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
  if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
1099 
1100 #undef __FUNCT__
1101 #define __FUNCT__ "DMPlexDistributeLabels"
1102 /* Here we are assuming that process 0 always has everything */
1103 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1104 {
1105   MPI_Comm       comm;
1106   PetscMPIInt    rank;
1107   PetscInt       numLabels, numLocalLabels, l;
1108   PetscBool      hasLabels = PETSC_FALSE;
1109   PetscErrorCode ierr;
1110 
1111   PetscFunctionBegin;
1112   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1113   PetscValidHeaderSpecific(dm, DM_CLASSID, 3);
1114   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1115   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1116   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1117 
1118   /* Everyone must have either the same number of labels, or none */
1119   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1120   numLabels = numLocalLabels;
1121   if (numLocalLabels > 1) hasLabels = PETSC_TRUE;
1122   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1123   if (hasLabels && (numLabels != numLocalLabels)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Invalid number of labels %d != %d", numLocalLabels, numLabels);
1124   for (l = numLabels-1; l >= 0; --l) {
1125     DMLabel     label = NULL, labelNew = NULL;
1126     PetscBool   isdepth;
1127 
1128     if (hasLabels) {
1129       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1130       /* Skip "depth" because it is recreated */
1131       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1132     }
1133     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1134     if (isdepth) continue;
1135     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1136     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1137   }
1138   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1139   PetscFunctionReturn(0);
1140 }
1141 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupHybrid"
/* Recompute the per-depth hybrid-point bounds (hybridPointMax) on the distributed mesh.
   Hybrid points occupy the tail [pmax, stratumEnd) of each serial stratum; each process
   counts how many of its renumbered points fall in that tail and places the new bound so
   those points again sit at the end of the parallel stratum. */
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  /* Rank 0 is assumed to hold the full serial mesh, so its bounds are authoritative */
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    if (pmax < 0) continue; /* no hybrid points at this depth */
    /* Serial stratum bounds, clamping d for uninterpolated meshes (depth < dim) */
    ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
    ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
    ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
    /* Count local points whose original (serial) number lies in the hybrid tail */
    for (p = 0; p < n; ++p) {
      const PetscInt point = gpoints[p];

      if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
    }
    if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
    else            pmesh->hybridPointMax[d] = -1;
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1184 
1185 #undef __FUNCT__
1186 #define __FUNCT__ "DMPlexDistributeSetupTree"
1187 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1188 {
1189   MPI_Comm        comm;
1190   DM              refTree;
1191   PetscSection    origParentSection, newParentSection;
1192   PetscInt        *origParents, *origChildIDs;
1193   PetscBool       flg;
1194   PetscErrorCode  ierr;
1195 
1196   PetscFunctionBegin;
1197   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1198   PetscValidHeaderSpecific(dm, DM_CLASSID, 4);
1199   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1200 
1201   /* Set up tree */
1202   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1203   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1204   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1205   if (origParentSection) {
1206     PetscInt        pStart, pEnd;
1207     PetscInt        *newParents, *newChildIDs;
1208     PetscInt        *remoteOffsetsParents, newParentSize;
1209     PetscSF         parentSF;
1210 
1211     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1212     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1213     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1214     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1215     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1216     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1217     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1218     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1219     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1220     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1221     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1222     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1223     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1224     if (flg) {
1225       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1226       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1227       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1228       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1229       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1230     }
1231     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1232     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1233     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1234     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1235   }
1236   PetscFunctionReturn(0);
1237 }
1238 
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSF"
/* Build the point SF of the parallel mesh: decide a unique owner for every migrated point
   via a MAXLOC reduction over the migration SF, then record each non-owned local point as a
   ghost leaf pointing at the owner's copy. */
PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
{
  DM_Plex               *mesh  = (DM_Plex*) dm->data;
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscMPIInt            rank, numProcs;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,7);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    /* -1 at a root marks the point as unclaimed */
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
    }
    /* Push the current claims down to the leaves */
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    /* MAXLOC selects the highest bidding rank as owner; the result is broadcast back so every
       process learns the owner of each of its leaves */
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    /* Every leaf owned by another rank becomes a ghost point in the new point SF */
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    /* The SF takes ownership of ghostPoints and remotePoints */
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  /* Carry the adjacency definition over to the distributed mesh */
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1329 
1330 #undef __FUNCT__
1331 #define __FUNCT__ "DMPlexDistribute"
1332 /*@C
1333   DMPlexDistribute - Distributes the mesh and any associated sections.
1334 
1335   Not Collective
1336 
1337   Input Parameter:
1338 + dm  - The original DMPlex object
1339 - overlap - The overlap of partitions, 0 is the default
1340 
1341   Output Parameter:
1342 + sf - The PetscSF used for point distribution
1343 - parallelMesh - The distributed DMPlex object, or NULL
1344 
1345   Note: If the mesh was not distributed, the return value is NULL.
1346 
1347   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1348   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1349   representation on the mesh.
1350 
1351   Level: intermediate
1352 
1353 .keywords: mesh, elements
1354 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1355 @*/
1356 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1357 {
1358   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1359   MPI_Comm               comm;
1360   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1361   DM                     dmOverlap;
1362   IS                     cellPart,        part;
1363   PetscSection           cellPartSection, partSection;
1364   PetscSFNode           *remoteRanks, *newRemote;
1365   const PetscSFNode     *oldRemote;
1366   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1367   ISLocalToGlobalMapping renumbering;
1368   PetscBool              flg;
1369   PetscMPIInt            rank, numProcs, p;
1370   PetscErrorCode         ierr;
1371 
1372   PetscFunctionBegin;
1373   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1374   if (sf) PetscValidPointer(sf,4);
1375   PetscValidPointer(dmParallel,5);
1376 
1377   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1378   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1379   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1380   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1381 
1382   *dmParallel = NULL;
1383   if (numProcs == 1) PetscFunctionReturn(0);
1384 
1385   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1386   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1387   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1388   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1389   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1390   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1391   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1392   if (!rank) numRemoteRanks = numProcs;
1393   else       numRemoteRanks = 0;
1394   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1395   for (p = 0; p < numRemoteRanks; ++p) {
1396     remoteRanks[p].rank  = p;
1397     remoteRanks[p].index = 0;
1398   }
1399   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1400   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
1401   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1402   if (flg) {
1403     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1404     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1405     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1406     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1407   }
1408   /* Close the partition over the mesh */
1409   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1410   /* Create new mesh */
1411   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1412   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1413   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1414   pmesh = (DM_Plex*) (*dmParallel)->data;
1415   pmesh->useAnchors = mesh->useAnchors;
1416 
1417   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1418   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1419   if (flg) {
1420     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1421     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1422     ierr = ISView(part, NULL);CHKERRQ(ierr);
1423     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1424     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1425     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1426   }
1427   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1428 
1429   /* Migrate data to a non-overlapping parallel DM */
1430   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1431   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1432   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1433   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1434   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1435 
1436   /* Build the point SF without overlap */
1437   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1438   if (flg) {
1439     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1440     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1441   }
1442 
1443   if (overlap > 0) {
1444     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1445     /* Add the partition overlap to the distributed DM */
1446     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1447     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1448     *dmParallel = dmOverlap;
1449     if (flg) {
1450       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1451       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1452     }
1453 
1454     /* Re-map the pointSF to establish the full migration pattern */
1455     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1456     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1457     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1458     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1459     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1460     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1461     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1462     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1463     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1464     pointSF = overlapPointSF;
1465     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1466   }
1467   /* Cleanup Partition */
1468   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1469   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1470   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1471   ierr = ISDestroy(&part);CHKERRQ(ierr);
1472   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1473   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1474   /* Copy BC */
1475   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1476   /* Cleanup */
1477   if (sf) {*sf = pointSF;}
1478   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1479   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1480   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1481   PetscFunctionReturn(0);
1482 }
1483 
1484 #undef __FUNCT__
1485 #define __FUNCT__ "DMPlexDistributeOverlap"
1486 /*@C
1487   DMPlexDistribute - Add partition overlap to a distributed non-overlapping DM.
1488 
1489   Not Collective
1490 
1491   Input Parameter:
1492 + dm  - The non-overlapping distrbuted DMPlex object
1493 - overlap - The overlap of partitions, 0 is the default
1494 
1495   Output Parameter:
1496 + sf - The PetscSF used for point distribution
1497 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1498 
1499   Note: If the mesh was not distributed, the return value is NULL.
1500 
1501   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
1502   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1503   representation on the mesh.
1504 
1505   Level: intermediate
1506 
1507 .keywords: mesh, elements
1508 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1509 @*/
1510 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1511 {
1512   MPI_Comm               comm;
1513   PetscMPIInt            rank;
1514   PetscSection           rootSection, leafSection;
1515   IS                     rootrank, leafrank;
1516   PetscSection           coneSection;
1517   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1518   PetscSFNode           *ghostRemote;
1519   const PetscSFNode     *overlapRemote;
1520   ISLocalToGlobalMapping overlapRenumbering;
1521   const PetscInt        *renumberingArray, *overlapLocal;
1522   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1523   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1524   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1525   PetscErrorCode         ierr;
1526 
1527   PetscFunctionBegin;
1528   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1529   if (sf) PetscValidPointer(sf, 3);
1530   PetscValidPointer(dmOverlap, 4);
1531 
1532   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1533   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1534   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1535 
1536   /* Compute point overlap with neighbouring processes on the distributed DM */
1537   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1538   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1539   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1540   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1541   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1542   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1543   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1544   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1545   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1546   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1547 
1548   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1549   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1550 
1551   /* Convert cones to global numbering before migrating them */
1552   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1553   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1554   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1555   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1556 
1557   /* Derive the new local-to-global mapping from the old one */
1558   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1559   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1560   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1561   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1562   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1563   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1564 
1565   /* Build the overlapping DM */
1566   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1567   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1568   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1569   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1570   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1571   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1572   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1573 
1574   /* Build the new point SF by propagating the depthShift generate remote root indices */
1575   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1576   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1577   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1578   numGhostPoints = numSharedPoints + numOverlapPoints;
1579   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1580   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1581   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1582   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
1583   for (p=0; p<overlapLeaves; p++) {
1584     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1585   }
1586   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1587   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1588   for (idx=0, p=0; p<overlapLeaves; p++) {
1589     if (overlapRemote[p].rank != rank) {
1590       ghostLocal[idx] = overlapLocal[p];
1591       ghostRemote[idx].index = recvPointIDs[p];
1592       ghostRemote[idx].rank = overlapRemote[p].rank;
1593       idx++;
1594     }
1595   }
1596   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1597   ierr = PetscSFCreate(comm, &newPointSF);;CHKERRQ(ierr);
1598   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1599   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1600   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1601   /* Cleanup overlap partition */
1602   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1603   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1604   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1605   if (sf) *sf = migrationSF;
1606   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1607   PetscFunctionReturn(0);
1608 }
1609