xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 3533c52b29edbc7b9f52f0c179567251e6920429)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petscsf.h>
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
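/*
  A minimal usage sketch (illustrative only; "dm" is assumed to be an existing DMPlex):
  select FEM-style adjacency, following the flag combinations listed in the Notes above.

    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);

  Reversing both flags (useCone = PETSC_TRUE, useClosure = PETSC_FALSE) selects the
  FVM-style adjacency instead.
*/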
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
124 /*@
125   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
126 
127   Input Parameters:
128 + dm      - The DM object
129 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
130 
131   Level: intermediate
132 
133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134 @*/
135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136 {
137   DM_Plex *mesh = (DM_Plex *) dm->data;
138 
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
141   mesh->useAnchors = useAnchors;
142   PetscFunctionReturn(0);
143 }
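/*
  Usage sketch (illustrative only): once anchors have been registered with
  DMPlexSetAnchors(), request that adjacency queries report anchor points in place of
  constrained points.

    ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
*/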
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
147 /*@
148   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
149 
150   Input Parameter:
151 . dm      - The DM object
152 
153   Output Parameter:
154 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
155 
156   Level: intermediate
157 
158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159 @*/
160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161 {
162   DM_Plex *mesh = (DM_Plex *) dm->data;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
166   PetscValidIntPointer(useAnchors, 2);
167   *useAnchors = mesh->useAnchors;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174 {
175   const PetscInt *cone = NULL;
176   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177   PetscErrorCode  ierr;
178 
179   PetscFunctionBeginHot;
180   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
181   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
182   for (c = 0; c <= coneSize; ++c) {
183     const PetscInt  point   = !c ? p : cone[c-1];
184     const PetscInt *support = NULL;
185     PetscInt        supportSize, s, q;
186 
187     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
188     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
189     for (s = 0; s < supportSize; ++s) {
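      /* Append support[s] only if it is not already in adj[]: either the scan finds a
         duplicate and breaks, or it falls off the end and the comma expression in the
         loop condition stores the new point at adj[numAdj++]. */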
190       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
191         if (support[s] == adj[q]) break;
192       }
193       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
194     }
195   }
196   *adjSize = numAdj;
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
202 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
203 {
204   const PetscInt *support = NULL;
205   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
206   PetscErrorCode  ierr;
207 
208   PetscFunctionBeginHot;
209   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
210   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
211   for (s = 0; s <= supportSize; ++s) {
212     const PetscInt  point = !s ? p : support[s-1];
213     const PetscInt *cone  = NULL;
214     PetscInt        coneSize, c, q;
215 
216     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
217     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
218     for (c = 0; c < coneSize; ++c) {
219       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
220         if (cone[c] == adj[q]) break;
221       }
222       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
223     }
224   }
225   *adjSize = numAdj;
226   PetscFunctionReturn(0);
227 }
228 
229 #undef __FUNCT__
230 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
231 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
232 {
233   PetscInt      *star = NULL;
234   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
235   PetscErrorCode ierr;
236 
237   PetscFunctionBeginHot;
238   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
239   for (s = 0; s < starSize*2; s += 2) {
240     const PetscInt *closure = NULL;
241     PetscInt        closureSize, c, q;
242 
243     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
244     for (c = 0; c < closureSize*2; c += 2) {
245       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
246         if (closure[c] == adj[q]) break;
247       }
248       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
249     }
250     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
251   }
252   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
253   *adjSize = numAdj;
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
259 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
260 {
261   static PetscInt asiz = 0;
262   PetscInt maxAnchors = 1;
263   PetscInt aStart = -1, aEnd = -1;
264   PetscInt maxAdjSize;
265   PetscSection aSec = NULL;
266   IS aIS = NULL;
267   const PetscInt *anchors;
268   PetscErrorCode  ierr;
269 
270   PetscFunctionBeginHot;
271   if (useAnchors) {
272     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
273     if (aSec) {
274       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
275       maxAnchors = PetscMax(1,maxAnchors);
276       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
277       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
278     }
279   }
280   if (!*adj) {
281     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
282 
283     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
284     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
285     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
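    /* Bound the adjacency size: coneSeries and supportSeries are the geometric sums
       1 + max + ... + max^depth, which bound the closure and star sizes; their products
       bound closure(star(p)) and star(closure(p)), scaled by the maximum number of
       anchors per point and capped by the chart size. */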
286     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
287     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
288     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
289     asiz *= maxAnchors;
290     asiz  = PetscMin(asiz,pEnd-pStart);
291     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
292   }
293   if (*adjSize < 0) *adjSize = asiz;
294   maxAdjSize = *adjSize;
295   if (useTransitiveClosure) {
296     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
297   } else if (useCone) {
298     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
299   } else {
300     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
301   }
302   if (useAnchors && aSec) {
303     PetscInt origSize = *adjSize;
304     PetscInt numAdj = origSize;
305     PetscInt i = 0, j;
306     PetscInt *orig = *adj;
307 
308     while (i < origSize) {
309       PetscInt p = orig[i];
310       PetscInt aDof = 0;
311 
312       if (p >= aStart && p < aEnd) {
313         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
314       }
315       if (aDof) {
316         PetscInt aOff;
317         PetscInt s, q;
318 
319         for (j = i + 1; j < numAdj; j++) {
320           orig[j - 1] = orig[j];
321         }
322         origSize--;
323         numAdj--;
324         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
325         for (s = 0; s < aDof; ++s) {
326           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
327             if (anchors[aOff+s] == orig[q]) break;
328           }
329           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
330         }
331       }
332       else {
333         i++;
334       }
335     }
336     *adjSize = numAdj;
337     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
338   }
339   PetscFunctionReturn(0);
340 }
341 
342 #undef __FUNCT__
343 #define __FUNCT__ "DMPlexGetAdjacency"
344 /*@
345   DMPlexGetAdjacency - Return all points adjacent to the given point
346 
347   Input Parameters:
348 + dm - The DM object
349 . p  - The point
350 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
351 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
352 
353   Output Parameters:
354 + adjSize - The number of adjacent points
355 - adj - The adjacent points
356 
357   Level: advanced
358 
359   Notes: The user must PetscFree the adj array if it was not passed in.
360 
361 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
362 @*/
363 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
364 {
365   DM_Plex       *mesh = (DM_Plex *) dm->data;
366   PetscErrorCode ierr;
367 
368   PetscFunctionBeginHot;
369   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
370   PetscValidPointer(adjSize,3);
371   PetscValidPointer(adj,4);
372   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
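/*
  Usage sketch (illustrative; "dm" and the point "p" are assumed to exist): let
  DMPlexGetAdjacency() allocate the adjacency array, then free it afterwards.

    PetscInt *adj = NULL, adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) ... use adj[a] ...
    ierr = PetscFree(adj);CHKERRQ(ierr);

  When querying many points, the same array can be reused by passing it back in with
  adjSize reset to PETSC_DETERMINE, as DMPlexCreateOverlap() below does.
*/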
375 #undef __FUNCT__
376 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
377 /*@
378   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
379 
380   Collective on DM
381 
382   Input Parameters:
383 + dm      - The DM
384 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
385 
386   Output Parameters:
387 + processRanks - A list of process neighbors, or NULL
388 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
389 
390   Level: developer
391 
392 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
393 @*/
394 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
395 {
396   const PetscSFNode *remotePoints;
397   PetscInt          *localPointsNew;
398   PetscSFNode       *remotePointsNew;
399   const PetscInt    *nranks;
400   PetscInt          *ranksNew;
401   PetscBT            neighbors;
402   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
403   PetscMPIInt        numProcs, proc, rank;
404   PetscErrorCode     ierr;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
408   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
409   if (processRanks) {PetscValidPointer(processRanks, 3);}
410   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
411   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
412   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
413   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
414   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
415   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
416   /* Compute root-to-leaf process connectivity */
417   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
418   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
419   for (p = pStart; p < pEnd; ++p) {
420     PetscInt ndof, noff, n;
421 
422     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
423     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
424     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
425   }
426   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
427   /* Compute leaf-to-neighbor process connectivity */
428   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
429   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
430   for (p = pStart; p < pEnd; ++p) {
431     PetscInt ndof, noff, n;
432 
433     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
434     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
435     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
436   }
437   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
438   /* Compute leaf-to-root process connectivity */
439   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
440   /* Calculate edges */
441   PetscBTClear(neighbors, rank);
442   for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
443   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
445   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
446   for (proc = 0, n = 0; proc < numProcs; ++proc) {
447     if (PetscBTLookup(neighbors, proc)) {
448       ranksNew[n]              = proc;
449       localPointsNew[n]        = proc;
450       remotePointsNew[n].index = rank;
451       remotePointsNew[n].rank  = proc;
452       ++n;
453     }
454   }
455   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
456   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
457   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
458   if (sfProcess) {
459     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
460     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
461     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
462     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
463   }
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "DMPlexDistributeOwnership"
469 /*@
470   DMPlexDistributeOwnership - Compute owner information for shared points. This provides the two-sided (root and leaf) view of ownership for the point SF.
471 
472   Collective on DM
473 
474   Input Parameter:
475 . dm - The DM
476 
477   Output Parameters:
478 + rootSection - The number of leaves for a given root point
479 . rootrank    - The rank of each edge into the root point
480 . leafSection - The number of processes sharing a given leaf point
481 - leafrank    - The rank of each process sharing a leaf point
482 
483   Level: developer
484 
485 .seealso: DMPlexCreateOverlap()
486 @*/
487 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
488 {
489   MPI_Comm        comm;
490   PetscSF         sfPoint;
491   const PetscInt *rootdegree;
492   PetscInt       *myrank, *remoterank;
493   PetscInt        pStart, pEnd, p, nedges;
494   PetscMPIInt     rank;
495   PetscErrorCode  ierr;
496 
497   PetscFunctionBegin;
498   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
499   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
500   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
501   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
502   /* Compute number of leaves for each root */
503   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
504   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
506   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
507   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
508   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
509   /* Gather rank of each leaf to root */
510   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
511   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
512   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
513   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
514   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
516   ierr = PetscFree(myrank);CHKERRQ(ierr);
517   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
518   /* Distribute remote ranks to leaves */
519   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
520   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
521   PetscFunctionReturn(0);
522 }
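/*
  Usage sketch (illustrative): the typical call chain pairs DMPlexDistributeOwnership()
  with DMPlexCreateOverlap() below; "dm" is assumed to be a distributed DMPlex on the
  communicator "comm".

    PetscSection rootSection, leafSection;
    IS           rootrank, leafrank;
    PetscSF      overlapSF;

    ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
    ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
    ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
    ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
    ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/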
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "DMPlexCreateOverlap"
526 /*@C
527   DMPlexCreateOverlap - Compute an SF describing the points that must be received from neighboring processes to form the mesh overlap
528 
529   Collective on DM
530 
531   Input Parameters:
532 + dm          - The DM
533 . rootSection - The number of leaves for a given root point
534 . rootrank    - The rank of each edge into the root point
535 . leafSection - The number of processes sharing a given leaf point
536 - leafrank    - The rank of each process sharing a leaf point
537 
538   Output Parameter:
539 . overlapSF - An SF whose leaves are the new overlap points to be received by this process and whose roots are the owning points on neighboring processes
543 
544   Level: developer
545 
546 .seealso: DMPlexDistributeOwnership()
547 @*/
548 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
549 {
550   MPI_Comm           comm;
551   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
552   PetscSF            sfPoint, sfProc;
553   IS                 valueIS;
554   DMLabel            ovLeafLabel;
555   const PetscSFNode *remote;
556   const PetscInt    *local;
557   const PetscInt    *nrank, *rrank, *neighbors;
558   PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
559   PetscSection       ovRootSection, ovLeafSection;
560   PetscInt          *adj = NULL;
561   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
562   PetscInt           idx, numRemote;
563   PetscMPIInt        rank, numProcs;
564   PetscBool          flg;
565   PetscErrorCode     ierr;
566 
567   PetscFunctionBegin;
568   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
569   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
570   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
571   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
572   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
573   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
574   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
575   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
576   /* Handle leaves: shared with the root point */
577   for (l = 0; l < nleaves; ++l) {
578     PetscInt adjSize = PETSC_DETERMINE, a;
579 
580     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
581     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
582   }
583   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
584   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
585   /* Handle roots */
586   for (p = pStart; p < pEnd; ++p) {
587     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
588 
589     if ((p >= sStart) && (p < sEnd)) {
590       /* Some leaves share a root with other leaves on different processes */
591       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
592       if (neighbors) {
593         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
594         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
595         for (n = 0; n < neighbors; ++n) {
596           const PetscInt remoteRank = nrank[noff+n];
597 
598           if (remoteRank == rank) continue;
599           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
600         }
601       }
602     }
603     /* Roots are shared with leaves */
604     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
605     if (!neighbors) continue;
606     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
607     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
608     for (n = 0; n < neighbors; ++n) {
609       const PetscInt remoteRank = rrank[noff+n];
610 
611       if (remoteRank == rank) continue;
612       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
613     }
614   }
615   ierr = PetscFree(adj);CHKERRQ(ierr);
616   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
617   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
618   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
619   if (flg) {
620     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
621     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
622   }
623   /* Convert to (point, rank) and use actual owners */
624   ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
625   ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
626   ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
627   ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
628   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
629   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
630   for (n = 0; n < numNeighbors; ++n) {
631     PetscInt numPoints;
632 
633     ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
634     ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
635   }
636   ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
637   ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
638   ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
639   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
640   for (n = 0; n < numNeighbors; ++n) {
641     IS              pointIS;
642     const PetscInt *points;
643     PetscInt        off, numPoints, p;
644 
645     ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
646     ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
647     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
648     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
649     for (p = 0; p < numPoints; ++p) {
650       ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
651       if (l >= 0) {ovRootPoints[off+p] = remote[l];}
652       else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
653     }
654     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
655     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
656   }
657   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
658   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
659   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
660   /* Make process SF */
661   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
662   if (flg) {
663     ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
664   }
665   /* Communicate overlap */
666   ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
667   ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
668   ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
669   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
670   ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
671   ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
672   for (p = 0; p < ovSize; p++) {
673     /* Don't import points from yourself */
674     if (ovLeafPoints[p].rank == rank) continue;
675     ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
676   }
677   /* Don't import points already in the pointSF */
678   for (l = 0; l < nleaves; ++l) {
679     ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
680   }
681   for (numRemote = 0, n = 0; n < numProcs; ++n) {
682     PetscInt numPoints;
683     ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
684     numRemote += numPoints;
685   }
686   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
687   for (idx = 0, n = 0; n < numProcs; ++n) {
688     IS remoteRootIS;
689     PetscInt numPoints;
690     const PetscInt *remoteRoots;
691     ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
692     if (numPoints <= 0) continue;
693     ierr = DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);CHKERRQ(ierr);
694     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
695     for (p = 0; p < numPoints; p++) {
696       remotePoints[idx].index = remoteRoots[p];
697       remotePoints[idx].rank = n;
698       idx++;
699     }
700     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
701     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
702   }
703   ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
704   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);CHKERRQ(ierr);
705   ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
706   ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
707   ierr = PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
708   if (flg) {
709     ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
710     ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
711   }
712   /* Clean up */
713   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
714   ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
715   ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
716   ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
717   ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
718   PetscFunctionReturn(0);
719 }
720 
721 #undef __FUNCT__
722 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
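/* Build the migration SF for an overlap update: every currently local point maps to
   itself, shifted so that each depth stratum leaves room for the incoming overlap
   points delivered by overlapSF, which are appended to their stratum; points already
   shared through the point SF keep their existing remote roots. */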
723 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
724 {
725   MPI_Comm           comm;
726   PetscMPIInt        rank, numProcs;
727   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
728   PetscInt          *pointDepths, *remoteDepths, *ilocal;
729   PetscInt          *depthRecv, *depthShift, *depthIdx;
730   PetscSFNode       *iremote;
731   PetscSF            pointSF;
732   const PetscInt    *sharedLocal;
733   const PetscSFNode *overlapRemote, *sharedRemote;
734   PetscErrorCode     ierr;
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
738   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
739   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
740   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
741   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
742 
743   /* Before building the migration SF we need to know the new stratum offsets */
744   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
745   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
746   for (d=0; d<dim+1; d++) {
747     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
748     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
749   }
750   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
751   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
752   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
753 
754   /* Count received points in each stratum and compute the internal strata shifts */
755   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
756   for (d=0; d<dim+1; d++) depthRecv[d]=0;
757   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
758   depthShift[dim] = 0;
759   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
760   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
761   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
762   for (d=0; d<dim+1; d++) {
763     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
764     depthIdx[d] = pStart + depthShift[d];
765   }
766 
767   /* Build an SF that describes the full overlap migration: all local points plus the incoming overlap points */
768   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
769   newLeaves = pEnd - pStart + nleaves;
770   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
771   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
772   /* First map local points to themselves */
773   for (d=0; d<dim+1; d++) {
774     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
775     for (p=pStart; p<pEnd; p++) {
776       point = p + depthShift[d];
777       ilocal[point] = point;
778       iremote[point].index = p;
779       iremote[point].rank = rank;
780       depthIdx[d]++;
781     }
782   }
783 
784   /* Add in the remote roots for currently shared points */
785   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
786   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
787   for (d=0; d<dim+1; d++) {
788     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
789     for (p=0; p<numSharedPoints; p++) {
790       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
791         point = sharedLocal[p] + depthShift[d];
792         iremote[point].index = sharedRemote[p].index;
793         iremote[point].rank = sharedRemote[p].rank;
794       }
795     }
796   }
797 
798   /* Now add the incoming overlap points */
799   for (p=0; p<nleaves; p++) {
800     point = depthIdx[remoteDepths[p]];
801     ilocal[point] = point;
802     iremote[point].index = overlapRemote[p].index;
803     iremote[point].rank = overlapRemote[p].rank;
804     depthIdx[remoteDepths[p]]++;
805   }
806   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
807 
808   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
809   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
810   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
811   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
812   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
813 
814   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
815   PetscFunctionReturn(0);
816 }
817 
818 #undef __FUNCT__
819 #define __FUNCT__ "DMPlexDistributeField"
820 /*@
821   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
822 
823   Collective on DM
824 
825   Input Parameters:
826 + dm - The DMPlex object
827 . pointSF - The PetscSF describing the communication pattern
828 . originalSection - The PetscSection for existing data layout
829 - originalVec - The existing data
830 
831   Output Parameters:
832 + newSection - The PetscSection describing the new data layout
833 - newVec - The new data
834 
835   Level: developer
836 
837 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
838 @*/
839 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
840 {
841   PetscSF        fieldSF;
842   PetscInt      *remoteOffsets, fieldSize;
843   PetscScalar   *originalValues, *newValues;
844   PetscErrorCode ierr;
845 
846   PetscFunctionBegin;
847   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
848   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
849 
850   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
851   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
852   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
853 
854   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
855   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
856   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
857   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
858   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
859   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
860   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
861   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
862   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
863   PetscFunctionReturn(0);
864 }
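/*
  Usage sketch (illustrative): redistribute a Vec laid out by "originalSection" using a
  point migration SF "sf" (for example the SF optionally returned by DMPlexDistribute()).
  The caller creates the new section and vector; DMPlexDistributeField() fills both.

    PetscSection newSection;
    Vec          newVec;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
    ierr = VecCreate(PetscObjectComm((PetscObject) dm), &newVec);CHKERRQ(ierr);
    ierr = DMPlexDistributeField(dm, sf, originalSection, originalVec, newSection, newVec);CHKERRQ(ierr);

  DMPlexDistributeFieldIS() below is the analogous routine for integer data stored in an IS.
*/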
865 
866 #undef __FUNCT__
867 #define __FUNCT__ "DMPlexDistributeFieldIS"
868 /*@
869   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
870 
871   Collective on DM
872 
873   Input Parameters:
874 + dm - The DMPlex object
875 . pointSF - The PetscSF describing the communication pattern
876 . originalSection - The PetscSection for existing data layout
877 - originalIS - The existing data
878 
879   Output Parameters:
880 + newSection - The PetscSection describing the new data layout
881 - newIS - The new data
882 
883   Level: developer
884 
885 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
886 @*/
887 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
888 {
889   PetscSF         fieldSF;
890   PetscInt       *newValues, *remoteOffsets, fieldSize;
891   const PetscInt *originalValues;
892   PetscErrorCode  ierr;
893 
894   PetscFunctionBegin;
895   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
896   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
897 
898   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
899   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
900 
901   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
902   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
903   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
904   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
905   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
906   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
907   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
908   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
909   PetscFunctionReturn(0);
910 }
911 
912 #undef __FUNCT__
913 #define __FUNCT__ "DMPlexDistributeData"
914 /*@
915   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
916 
917   Collective on DM
918 
919   Input Parameters:
920 + dm - The DMPlex object
921 . pointSF - The PetscSF describing the communication pattern
922 . originalSection - The PetscSection for existing data layout
923 . datatype - The type of data
924 - originalData - The existing data
925 
926   Output Parameters:
927 + newSection - The PetscSection describing the new data layout
928 - newData - The new data
929 
930   Level: developer
931 
932 .seealso: DMPlexDistribute(), DMPlexDistributeField()
933 @*/
934 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
935 {
936   PetscSF        fieldSF;
937   PetscInt      *remoteOffsets, fieldSize;
938   PetscMPIInt    dataSize;
939   PetscErrorCode ierr;
940 
941   PetscFunctionBegin;
942   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
943   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
944 
945   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
946   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
947   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
948 
949   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
950   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
951   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
952   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
953   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
954   PetscFunctionReturn(0);
955 }
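/*
  Usage sketch (illustrative): DMPlexDistributeData() is the raw-buffer variant used
  internally, for example by DMPlexCreateOverlap() above to ship PetscSFNode pairs:

    ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints,
                                ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);

  The output buffer is allocated with PetscMalloc() and must be freed by the caller.
*/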
956 
957 #undef __FUNCT__
958 #define __FUNCT__ "DMPlexDistributeCones"
959 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
960 {
961   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
962   MPI_Comm               comm;
963   PetscSF                coneSF;
964   PetscSection           originalConeSection, newConeSection;
965   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
966   PetscBool              flg;
967   PetscErrorCode         ierr;
968 
969   PetscFunctionBegin;
970   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
971   PetscValidPointer(dmParallel,4);
972   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
973 
974   /* Distribute cone section */
975   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
976   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
977   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
978   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
979   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
980   {
981     PetscInt pStart, pEnd, p;
982 
983     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
984     for (p = pStart; p < pEnd; ++p) {
985       PetscInt coneSize;
986       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
987       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
988     }
989   }
990   /* Communicate and renumber cones */
991   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
992   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
993   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
994   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
995   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
996   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
997   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
998 #if defined(PETSC_USE_DEBUG)
999   {
1000     PetscInt  p;
1001     PetscBool valid = PETSC_TRUE;
1002     for (p = 0; p < newConesSize; ++p) {
1003       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1004     }
1005     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1006   }
1007 #endif
1008   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1009   if (flg) {
1010     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1011     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1012     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1013     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1014     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1015   }
1016   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1017   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1018   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1019   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1020   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1021   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1022   /* Create supports and stratify sieve */
1023   {
1024     PetscInt pStart, pEnd;
1025 
1026     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1027     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1028   }
1029   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1030   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1031   PetscFunctionReturn(0);
1032 }
1033 
1034 #undef __FUNCT__
1035 #define __FUNCT__ "DMPlexDistributeCoordinates"
1036 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1037 {
1038   MPI_Comm         comm;
1039   PetscSection     originalCoordSection, newCoordSection;
1040   Vec              originalCoordinates, newCoordinates;
1041   PetscInt         bs;
1042   const char      *name;
1043   const PetscReal *maxCell, *L;
1044   PetscErrorCode   ierr;
1045 
1046   PetscFunctionBegin;
1047   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1048   PetscValidPointer(dmParallel, 3);
1049 
1050   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1051   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1052   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1053   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1054   if (originalCoordinates) {
1055     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1056     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1057     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1058 
1059     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1060     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1061     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1062     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1063     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1064   }
1065   ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
1066   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
1067   PetscFunctionReturn(0);
1068 }
1069 
1070 #undef __FUNCT__
1071 #define __FUNCT__ "DMPlexDistributeLabels"
1072 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1073 {
1074   MPI_Comm       comm;
1075   PetscMPIInt    rank;
1076   PetscInt       numLabels, l;
1077   PetscErrorCode ierr;
1078 
1079   PetscFunctionBegin;
1080   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1081   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1082   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1083   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1084   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1085 
1086   /* Bcast number of labels */
1087   ierr = DMPlexGetNumLabels(dm, &numLabels);CHKERRQ(ierr);
1088   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1089   for (l = numLabels-1; l >= 0; --l) {
1090     DMLabel     label = NULL, labelNew = NULL;
1091     PetscBool   isdepth;
1092 
1093     if (!rank) {
1094       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1095       /* Skip "depth" because it is recreated */
1096       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1097     }
1098     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1099     if (isdepth) continue;
1100     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1101     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1102   }
1103   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1104   PetscFunctionReturn(0);
1105 }
1106 
1107 #undef __FUNCT__
1108 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1109 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1110 {
1111   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1112   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1113   MPI_Comm        comm;
1114   const PetscInt *gpoints;
1115   PetscInt        dim, depth, n, d;
1116   PetscErrorCode  ierr;
1117 
1118   PetscFunctionBegin;
1119   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1120   PetscValidPointer(dmParallel, 4);
1121 
1122   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1123   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1124 
1125   /* Setup hybrid structure */
1126   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1127   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1128   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1129   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1130   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1131   for (d = 0; d <= dim; ++d) {
1132     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1133 
1134     if (pmax < 0) continue;
1135     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1136     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1137     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1138     for (p = 0; p < n; ++p) {
1139       const PetscInt point = gpoints[p];
1140 
1141       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1142     }
1143     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1144     else            pmesh->hybridPointMax[d] = -1;
1145   }
1146   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1147   PetscFunctionReturn(0);
1148 }
1149 
1150 #undef __FUNCT__
1151 #define __FUNCT__ "DMPlexDistributeSetupTree"
1152 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1153 {
1154   MPI_Comm        comm;
1155   DM              refTree;
1156   PetscSection    origParentSection, newParentSection;
1157   PetscInt        *origParents, *origChildIDs;
1158   PetscBool       flg;
1159   PetscErrorCode  ierr;
1160 
1161   PetscFunctionBegin;
1162   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1163   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1164   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1165 
1166   /* Set up tree */
1167   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1168   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1169   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1170   if (origParentSection) {
1171     PetscInt        pStart, pEnd;
1172     PetscInt        *newParents, *newChildIDs;
1173     PetscInt        *remoteOffsetsParents, newParentSize;
1174     PetscSF         parentSF;
1175 
1176     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1177     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1178     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1179     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1180     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1181     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1182     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1183     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1184     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1185     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1186     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1187     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1188     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1189     if (flg) {
1190       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1191       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1192       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1193       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1194       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1195     }
1196     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1197     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1198     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1199     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1200   }
1201   PetscFunctionReturn(0);
1202 }
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "DMPlexDistributeSF"
1206 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
1207 {
1208   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1209   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1210   PetscMPIInt            rank, numProcs;
1211   MPI_Comm               comm;
1212   PetscErrorCode         ierr;
1213 
1214   PetscFunctionBegin;
1215   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1216   PetscValidPointer(dmParallel,7);
1217 
1218   /* Create point SF for parallel mesh */
1219   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1220   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1221   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1222   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1223   {
1224     const PetscInt *leaves;
1225     PetscSFNode    *remotePoints, *rowners, *lowners;
1226     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1227     PetscInt        pStart, pEnd;
1228 
1229     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1230     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1231     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1232     for (p=0; p<numRoots; p++) {
1233       rowners[p].rank  = -1;
1234       rowners[p].index = -1;
1235     }
1236     if (origPart) {
1237       /* Make sure points in the original partition are not assigned to other procs */
1238       const PetscInt *origPoints;
1239 
1240       ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
1241       for (p = 0; p < numProcs; ++p) {
1242         PetscInt dof, off, d;
1243 
1244         ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
1245         ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
1246         for (d = off; d < off+dof; ++d) {
1247           rowners[origPoints[d]].rank = p;
1248         }
1249       }
1250       ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
1251     }
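    /*
       Ownership is negotiated in two sweeps: the current root owners are
       broadcast to the leaves; each leaf then either bids for ownership with
       its own (rank, index) or withdraws by storing negative values, and the
       bids are combined back at the roots with MPI_MAXLOC. A final broadcast
       tells every leaf which rank won, so the ghost points are exactly the
       leaves owned by another rank.
    */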
1252     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1253     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1254     for (p = 0; p < numLeaves; ++p) {
1255       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1256         lowners[p].rank  = rank;
1257         lowners[p].index = leaves ? leaves[p] : p;
1258       } else if (lowners[p].rank >= 0) { /* Point already claimed by another rank; flag it so that MAXLOC ignores our entry */
1259         lowners[p].rank  = -2;
1260         lowners[p].index = -2;
1261       }
1262     }
1263     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1264       rowners[p].rank  = -3;
1265       rowners[p].index = -3;
1266     }
1267     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1268     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1269     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1270     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1271     for (p = 0; p < numLeaves; ++p) {
1272       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1273       if (lowners[p].rank != rank) ++numGhostPoints;
1274     }
1275     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1276     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1277     for (p = 0, gp = 0; p < numLeaves; ++p) {
1278       if (lowners[p].rank != rank) {
1279         ghostPoints[gp]        = leaves ? leaves[p] : p;
1280         remotePoints[gp].rank  = lowners[p].rank;
1281         remotePoints[gp].index = lowners[p].index;
1282         ++gp;
1283       }
1284     }
1285     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1286     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1287     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1288   }
1289   pmesh->useCone    = mesh->useCone;
1290   pmesh->useClosure = mesh->useClosure;
1291   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1292   PetscFunctionReturn(0);
1293 }
1294 
1295 #undef __FUNCT__
1296 #define __FUNCT__ "DMPlexDistribute"
1297 /*@C
1298   DMPlexDistribute - Distributes the mesh and any associated sections.
1299 
1300   Collective on dm
1301 
1302   Input Parameters:
1303 + dm  - The original DMPlex object
1304 - overlap - The overlap of partitions, 0 is the default
1305 
1306   Output Parameters:
1307 + sf - The PetscSF used for point distribution
1308 - dmParallel - The distributed DMPlex object, or NULL
1309 
1310   Note: If the mesh was not distributed (for example, the communicator has a single process), dmParallel is set to NULL.
1311 
1312   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1313   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1314   representation on the mesh.
1315 
1316   Level: intermediate
1317 
1318 .keywords: mesh, elements
1319 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1320 @*/
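/*
  A minimal usage sketch (illustrative only, not part of the man page): it
  assumes a DMPlex "dm" has already been created on PETSC_COMM_WORLD, e.g. by
  a mesh generator or a file reader, and distributes it with no overlap.

    DM      dmDist = NULL;
    PetscSF sf     = NULL;

    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMPlexDistribute(dm, 0, &sf, &dmDist);CHKERRQ(ierr);
    if (dmDist) {
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = dmDist;
    }
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
*/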
1321 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1322 {
1323   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1324   MPI_Comm               comm;
1325   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1326   DM                     dmOverlap;
1327   IS                     cellPart,        part;
1328   PetscSection           cellPartSection, partSection;
1329   PetscSFNode           *remoteRanks, *newRemote;
1330   const PetscSFNode     *oldRemote;
1331   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1332   ISLocalToGlobalMapping renumbering;
1333   PetscBool              flg;
1334   PetscMPIInt            rank, numProcs, p;
1335   PetscErrorCode         ierr;
1336 
1337   PetscFunctionBegin;
1338   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1339   if (sf) PetscValidPointer(sf,3);
1340   PetscValidPointer(dmParallel,4);
1341 
1342   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1343   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1344   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1345   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1346 
1347   *dmParallel = NULL;
1348   if (numProcs == 1) PetscFunctionReturn(0);
1349 
1350   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1351   /* Create cell partition - TODO: rewrite this to use IS and the MatPartitioning infrastructure */
1352   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1353   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1354   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1355   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1356   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1357   if (!rank) numRemoteRanks = numProcs;
1358   else       numRemoteRanks = 0;
1359   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1360   for (p = 0; p < numRemoteRanks; ++p) {
1361     remoteRanks[p].rank  = p;
1362     remoteRanks[p].index = 0;
1363   }
1364   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1365   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
1366   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1367   if (flg) {
1368     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1369     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1370     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1371     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1372   }
1373   /* Close the partition over the mesh */
1374   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1375   /* Create new mesh */
1376   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1377   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1378   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1379   pmesh = (DM_Plex*) (*dmParallel)->data;
1380   pmesh->useAnchors = mesh->useAnchors;
1381 
1382   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1383   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1384   if (flg) {
1385     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1386     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1387     ierr = ISView(part, NULL);CHKERRQ(ierr);
1388     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1389     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1390     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1391   }
1392   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1393 
1394   /* Migrate data to a non-overlapping parallel DM */
1395   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1396   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1397   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1398   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1399   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1400 
1401   /* Build the point SF without overlap */
1402   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1403   if (flg) {
1404     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1405     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1406   }
1407 
1408   if (overlap > 0) {
1409     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1410     /* Add the partition overlap to the distributed DM */
1411     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1412     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1413     *dmParallel = dmOverlap;
1414     if (flg) {
1415       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1416       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1417     }
1418 
1419     /* Re-map the pointSF to establish the full migration pattern */
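    /*
       pointSF maps the original points to the non-overlapping parallel
       points, while overlapSF maps those to the overlapping parallel points;
       broadcasting the old remote roots through overlapSF composes the two,
       so overlapPointSF migrates directly from the original mesh to the
       overlapping parallel mesh.
    */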
1420     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1421     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1422     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1423     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1424     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1425     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1426     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1427     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1428     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1429     pointSF = overlapPointSF;
1430     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1431   }
1432   /* Cleanup Partition */
1433   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1434   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1435   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1436   ierr = ISDestroy(&part);CHKERRQ(ierr);
1437   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1438   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1439   /* Copy BC */
1440   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1441   /* Cleanup */
1442   if (sf) {*sf = pointSF;}
1443   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1444   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1445   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1446   PetscFunctionReturn(0);
1447 }
1448 
1449 #undef __FUNCT__
1450 #define __FUNCT__ "DMPlexDistributeOverlap"
1451 /*@C
1452   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1453 
1454   Collective on dm
1455 
1456   Input Parameters:
1457 + dm  - The non-overlapping distributed DMPlex object
1458 - overlap - The overlap of partitions, 0 is the default
1459 
1460   Output Parameters:
1461 + sf - The PetscSF used for point distribution
1462 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1463 
1464   Note: The input dm must already have been distributed without overlap (see DMPlexDistribute()).
1465 
1466   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1467   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1468   representation on the mesh.
1469 
1470   Level: intermediate
1471 
1472 .keywords: mesh, elements
1473 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1474 @*/
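/*
  Usage note (an illustrative sketch, not a definitive recipe): this routine
  requires the point renumbering produced while the mesh was distributed, so
  it is normally reached through DMPlexDistribute() with a positive overlap
  rather than being called directly. Assuming "dm" is an existing serial
  DMPlex:

    DM dmDist = NULL;

    ierr = DMPlexDistribute(dm, 1, NULL, &dmDist);CHKERRQ(ierr);
    if (dmDist) {
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = dmDist;
    }
*/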
1475 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1476 {
1477   MPI_Comm               comm;
1478   PetscMPIInt            rank;
1479   PetscSection           rootSection, leafSection;
1480   IS                     rootrank, leafrank;
1481   PetscSection           coneSection;
1482   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1483   PetscSFNode           *ghostRemote;
1484   const PetscSFNode     *overlapRemote;
1485   ISLocalToGlobalMapping overlapRenumbering;
1486   const PetscInt        *renumberingArray, *overlapLocal;
1487   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1488   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1489   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1490   PetscErrorCode         ierr;
1491 
1492   PetscFunctionBegin;
1493   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1494   if (sf) PetscValidPointer(sf, 4);
1495   PetscValidPointer(dmOverlap, 5);
1496 
1497   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1498   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1499   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1500 
1501   /* Compute point overlap with neighbouring processes on the distributed DM */
1502   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1503   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1504   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1505   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1506   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1507   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1508   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1509   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1510   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1511   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1512 
1513   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1514   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1515 
1516   /* Convert cones to global numbering before migrating them */
1517   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1518   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1519   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1520   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1521 
1522   /* Derive the new local-to-global mapping from the old one */
1523   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1524   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1525   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1526   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1527   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1528   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1529 
1530   /* Build the overlapping DM */
1531   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1532   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1533   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1534   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1535   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1536   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1537   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1538 
1539   /* Build the new point SF by propagating the new local point numbers through the migration SF to obtain the remote root indices */
1540   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1541   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1542   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1543   numGhostPoints = numSharedPoints + numOverlapPoints;
1544   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1545   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1546   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1547   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
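  /*
     pointIDs records, for each original local point whose root stays on this
     rank, its new local number in the overlapping DM; broadcasting it over
     the migration SF gives every received point the local index it has on
     its owning rank (recvPointIDs), which becomes the remote index in the
     new point SF built below.
  */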
1548   for (p=0; p<overlapLeaves; p++) {
1549     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1550   }
1551   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1552   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1553   for (idx=0, p=0; p<overlapLeaves; p++) {
1554     if (overlapRemote[p].rank != rank) {
1555       ghostLocal[idx] = overlapLocal[p];
1556       ghostRemote[idx].index = recvPointIDs[p];
1557       ghostRemote[idx].rank = overlapRemote[p].rank;
1558       idx++;
1559     }
1560   }
1561   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1562   ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1563   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1564   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1565   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1566   /* Cleanup overlap partition */
1567   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1568   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1569   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1570   if (sf) *sf = migrationSF;
1571   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1572   ierr = DMSetFromOptions(*dmOverlap);CHKERRQ(ierr);
1573   PetscFunctionReturn(0);
1574 }
1575