xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 4b6b44bdd4a658e9c0a8d6caed02b5a1ad1ead7b)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petscsf.h>
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
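/*
  A minimal configuration sketch (hypothetical caller code, not part of this file): before
  distribution or preallocation one typically selects FEM- or FVM-style adjacency using the two
  setters, matching the table in the docstrings above. The DM "dm" is assumed to be a DMPLEX
  created elsewhere.

    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);    // FEM: closure(star(p))
    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);  // FEM
    // For FVM instead: useCone = PETSC_TRUE, useClosure = PETSC_FALSE, giving support(cone(p))
*/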
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
124 /*@
125   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
126 
127   Input Parameters:
128 + dm      - The DM object
129 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
130 
131   Level: intermediate
132 
133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134 @*/
135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136 {
137   DM_Plex *mesh = (DM_Plex *) dm->data;
138 
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
141   mesh->useAnchors = useAnchors;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
147 /*@
148   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
149 
150   Input Parameter:
151 . dm      - The DM object
152 
153   Output Parameter:
154 . useAnchors - Flag to use the anchors (point-to-point constraints).  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
155 
156   Level: intermediate
157 
158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159 @*/
160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161 {
162   DM_Plex *mesh = (DM_Plex *) dm->data;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
166   PetscValidIntPointer(useAnchors, 2);
167   *useAnchors = mesh->useAnchors;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174 {
175   const PetscInt *cone = NULL;
176   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177   PetscErrorCode  ierr;
178 
179   PetscFunctionBeginHot;
180   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
181   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
182   for (c = 0; c <= coneSize; ++c) {
183     const PetscInt  point   = !c ? p : cone[c-1];
184     const PetscInt *support = NULL;
185     PetscInt        supportSize, s, q;
186 
187     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
188     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
189     for (s = 0; s < supportSize; ++s) {
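      /* Append support[s] to adj if it is not already present: the comma expression in the loop
         condition below performs the insertion exactly when the scan reaches numAdj without a match */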
190       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
191         if (support[s] == adj[q]) break;
192       }
193       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
194     }
195   }
196   *adjSize = numAdj;
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
202 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
203 {
204   const PetscInt *support = NULL;
205   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
206   PetscErrorCode  ierr;
207 
208   PetscFunctionBeginHot;
209   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
210   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
211   for (s = 0; s <= supportSize; ++s) {
212     const PetscInt  point = !s ? p : support[s-1];
213     const PetscInt *cone  = NULL;
214     PetscInt        coneSize, c, q;
215 
216     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
217     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
218     for (c = 0; c < coneSize; ++c) {
219       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
220         if (cone[c] == adj[q]) break;
221       }
222       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
223     }
224   }
225   *adjSize = numAdj;
226   PetscFunctionReturn(0);
227 }
228 
229 #undef __FUNCT__
230 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
231 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
232 {
233   PetscInt      *star = NULL;
234   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
235   PetscErrorCode ierr;
236 
237   PetscFunctionBeginHot;
238   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
239   for (s = 0; s < starSize*2; s += 2) {
240     const PetscInt *closure = NULL;
241     PetscInt        closureSize, c, q;
242 
243     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
244     for (c = 0; c < closureSize*2; c += 2) {
245       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
246         if (closure[c] == adj[q]) break;
247       }
248       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
249     }
250     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
251   }
252   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
253   *adjSize = numAdj;
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
259 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
260 {
261   static PetscInt asiz = 0;
262   PetscInt maxAnchors = 1;
263   PetscInt aStart = -1, aEnd = -1;
264   PetscInt maxAdjSize;
265   PetscSection aSec = NULL;
266   IS aIS = NULL;
267   const PetscInt *anchors;
268   PetscErrorCode  ierr;
269 
270   PetscFunctionBeginHot;
271   if (useAnchors) {
272     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
273     if (aSec) {
274       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
275       maxAnchors = PetscMax(1,maxAnchors);
276       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
277       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
278     }
279   }
280   if (!*adj) {
281     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
282 
283     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
284     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
285     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
286     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
287     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
288     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
289     asiz *= maxAnchors;
290     asiz  = PetscMin(asiz,pEnd-pStart);
291     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
292   }
293   if (*adjSize < 0) *adjSize = asiz;
294   maxAdjSize = *adjSize;
295   if (useTransitiveClosure) {
296     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
297   } else if (useCone) {
298     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
299   } else {
300     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
301   }
302   if (useAnchors && aSec) {
303     PetscInt origSize = *adjSize;
304     PetscInt numAdj = origSize;
305     PetscInt i = 0, j;
306     PetscInt *orig = *adj;
307 
308     while (i < origSize) {
309       PetscInt p = orig[i];
310       PetscInt aDof = 0;
311 
312       if (p >= aStart && p < aEnd) {
313         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
314       }
315       if (aDof) {
316         PetscInt aOff;
317         PetscInt s, q;
318 
319         for (j = i + 1; j < numAdj; j++) {
320           orig[j - 1] = orig[j];
321         }
322         origSize--;
323         numAdj--;
324         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
325         for (s = 0; s < aDof; ++s) {
326           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
327             if (anchors[aOff+s] == orig[q]) break;
328           }
329           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
330         }
331       }
332       else {
333         i++;
334       }
335     }
336     *adjSize = numAdj;
337     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
338   }
339   PetscFunctionReturn(0);
340 }
341 
342 #undef __FUNCT__
343 #define __FUNCT__ "DMPlexGetAdjacency"
344 /*@
345   DMPlexGetAdjacency - Return all points adjacent to the given point
346 
347   Input Parameters:
348 + dm - The DM object
349 . p  - The point
350 . adjSize - The maximum size of adj if adj is non-NULL, or PETSC_DETERMINE to have it determined automatically
351 - adj - Either NULL so that the array is allocated, or an existing array of size adjSize
352 
353   Output Parameters:
354 + adjSize - The number of adjacent points
355 - adj - The adjacent points
356 
357   Level: advanced
358 
359   Notes: The user must PetscFree the adj array if it was not passed in.
360 
361 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
362 @*/
363 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
364 {
365   DM_Plex       *mesh = (DM_Plex *) dm->data;
366   PetscErrorCode ierr;
367 
368   PetscFunctionBeginHot;
369   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
370   PetscValidPointer(adjSize,3);
371   PetscValidPointer(adj,4);
372   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
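/*
  A small usage sketch (hypothetical caller code, not part of this file): with adj == NULL and
  adjSize == PETSC_DETERMINE the routine allocates the array itself, and the caller is then
  responsible for freeing it, as noted in the docstring above.

    PetscInt adjSize = PETSC_DETERMINE, *adj = NULL, a;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      // adj[a] is a point adjacent to p under the current useCone/useClosure/useAnchors settings
    }
    ierr = PetscFree(adj);CHKERRQ(ierr);
*/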
375 #undef __FUNCT__
376 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
377 /*@
378   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
379 
380   Collective on DM
381 
382   Input Parameters:
383 + dm      - The DM
384 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point (see DMPlexDistributeOwnership())
. rootRanks       - The rank of each leaf attached to a root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
385 
386   Output Parameters:
387 + processRanks - A list of process neighbors, or NULL
388 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
389 
390   Level: developer
391 
392 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
393 @*/
394 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
395 {
396   const PetscSFNode *remotePoints;
397   PetscInt          *localPointsNew;
398   PetscSFNode       *remotePointsNew;
399   const PetscInt    *nranks;
400   PetscInt          *ranksNew;
401   PetscBT            neighbors;
402   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
403   PetscMPIInt        numProcs, proc, rank;
404   PetscErrorCode     ierr;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
408   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
409   if (processRanks) {PetscValidPointer(processRanks, 3);}
410   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
411   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
412   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
413   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
414   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
415   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
416   /* Compute root-to-leaf process connectivity */
417   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
418   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
419   for (p = pStart; p < pEnd; ++p) {
420     PetscInt ndof, noff, n;
421 
422     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
423     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
424     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
425   }
426   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
427   /* Compute leaf-to-neighbor process connectivity */
428   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
429   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
430   for (p = pStart; p < pEnd; ++p) {
431     PetscInt ndof, noff, n;
432 
433     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
434     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
435     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
436   }
437   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
438   /* Compute leaf-to-root process connectivity */
439   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
440   /* Calculate edges */
441   PetscBTClear(neighbors, rank);
442   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
443   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
445   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
446   for(proc = 0, n = 0; proc < numProcs; ++proc) {
447     if (PetscBTLookup(neighbors, proc)) {
448       ranksNew[n]              = proc;
449       localPointsNew[n]        = proc;
450       remotePointsNew[n].index = rank;
451       remotePointsNew[n].rank  = proc;
452       ++n;
453     }
454   }
455   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
456   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
457   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
458   if (sfProcess) {
459     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
460     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
461     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
462     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
463   }
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "DMPlexDistributeOwnership"
469 /*@
470   DMPlexDistributeOwnership - Compute owner information for shared points. This makes the one-sided ownership information in the point SF available on both sides (roots and leaves).
471 
472   Collective on DM
473 
474   Input Parameter:
475 . dm - The DM
476 
477   Output Parameters:
478 + rootSection - The number of leaves for a given root point
479 . rootrank    - The rank of each edge into the root point
480 . leafSection - The number of processes sharing a given leaf point
481 - leafrank    - The rank of each process sharing a leaf point
482 
483   Level: developer
484 
485 .seealso: DMPlexCreateOverlap()
486 @*/
487 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
488 {
489   MPI_Comm        comm;
490   PetscSF         sfPoint;
491   const PetscInt *rootdegree;
492   PetscInt       *myrank, *remoterank;
493   PetscInt        pStart, pEnd, p, nedges;
494   PetscMPIInt     rank;
495   PetscErrorCode  ierr;
496 
497   PetscFunctionBegin;
498   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
499   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
500   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
501   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
502   /* Compute number of leaves for each root */
503   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
504   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
506   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
507   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
508   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
509   /* Gather rank of each leaf to root */
510   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
511   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
512   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
513   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
514   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
516   ierr = PetscFree(myrank);CHKERRQ(ierr);
517   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
518   /* Distribute remote ranks to leaves */
519   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
520   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
521   PetscFunctionReturn(0);
522 }
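/*
  A minimal calling sketch (hypothetical caller code; dm and comm are assumed to exist): the
  sections are created empty by the caller and filled here, and the resulting sections and index
  sets are then consumed by DMPlexCreateOverlap() below.

    PetscSection rootSection, leafSection;
    IS           rootrank, leafrank;
    ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
    ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
*/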
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "DMPlexCreateOverlap"
526 /*@C
527   DMPlexCreateOverlap - Compute the points that must be added to each process to form the parallel overlap, encoded as a star forest.
528 
529   Collective on DM
530 
531   Input Parameters:
532 + dm          - The DM
533 . rootSection - The number of leaves for a given root point
534 . rootrank    - The rank of each edge into the root point
535 . leafSection - The number of processes sharing a given leaf point
536 - leafrank    - The rank of each process sharing a leaf point
537 
538   Output Parameter:
539 . overlapSF - An SF whose leaves are the overlap points to be received by this process and whose roots are the owning points on neighboring processes
543 
544   Level: developer
545 
546 .seealso: DMPlexDistributeOwnership()
547 @*/
548 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
549 {
550   MPI_Comm           comm;
551   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
552   PetscSF            sfPoint, sfProc;
553   IS                 valueIS;
554   DMLabel            ovLeafLabel;
555   const PetscSFNode *remote;
556   const PetscInt    *local;
557   const PetscInt    *nrank, *rrank, *neighbors;
558   PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
559   PetscSection       ovRootSection, ovLeafSection;
560   PetscInt          *adj = NULL;
561   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
562   PetscInt           idx, numRemote;
563   PetscMPIInt        rank, numProcs;
564   PetscBool          flg;
565   PetscErrorCode     ierr;
566 
567   PetscFunctionBegin;
568   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
569   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
570   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
571   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
572   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
573   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
574   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
575   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
576   /* Handle leaves: shared with the root point */
577   for (l = 0; l < nleaves; ++l) {
578     PetscInt adjSize = PETSC_DETERMINE, a;
579 
580     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
581     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
582   }
583   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
584   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
585   /* Handle roots */
586   for (p = pStart; p < pEnd; ++p) {
587     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
588 
589     if ((p >= sStart) && (p < sEnd)) {
590       /* Some leaves share a root with other leaves on different processes */
591       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
592       if (neighbors) {
593         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
594         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
595         for (n = 0; n < neighbors; ++n) {
596           const PetscInt remoteRank = nrank[noff+n];
597 
598           if (remoteRank == rank) continue;
599           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
600         }
601       }
602     }
603     /* Roots are shared with leaves */
604     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
605     if (!neighbors) continue;
606     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
607     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
608     for (n = 0; n < neighbors; ++n) {
609       const PetscInt remoteRank = rrank[noff+n];
610 
611       if (remoteRank == rank) continue;
612       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
613     }
614   }
615   ierr = PetscFree(adj);CHKERRQ(ierr);
616   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
617   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
618   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
619   if (flg) {
620     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
621     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
622   }
623   /* Convert to (point, rank) and use actual owners */
624   ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
625   ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
626   ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
627   ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
628   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
629   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
630   for (n = 0; n < numNeighbors; ++n) {
631     PetscInt numPoints;
632 
633     ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
634     ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
635   }
636   ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
637   ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
638   ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
639   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
640   for (n = 0; n < numNeighbors; ++n) {
641     IS              pointIS;
642     const PetscInt *points;
643     PetscInt        off, numPoints, p;
644 
645     ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
646     ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
647     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
648     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
649     for (p = 0; p < numPoints; ++p) {
650       ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
651       if (l >= 0) {ovRootPoints[off+p] = remote[l];}
652       else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
653     }
654     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
655     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
656   }
657   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
658   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
659   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
660   /* Make process SF */
661   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
662   if (flg) {
663     ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
664   }
665   /* Communicate overlap */
666   ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
667   ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
668   ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
669   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
670   ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
671   ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
672   for (p = 0; p < ovSize; p++) {
673     /* Don't import points from yourself */
674     if (ovLeafPoints[p].rank == rank) continue;
675     ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
676   }
677   /* Don't import points already in the pointSF */
678   for (l = 0; l < nleaves; ++l) {
679     ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
680   }
681   for (numRemote = 0, n = 0; n < numProcs; ++n) {
682     PetscInt numPoints;
683     ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
684     numRemote += numPoints;
685   }
686   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
687   for (idx = 0, n = 0; n < numProcs; ++n) {
688     IS remoteRootIS;
689     PetscInt numPoints;
690     const PetscInt *remoteRoots;
691     ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
692     if (numPoints <= 0) continue;
693     ierr = DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);CHKERRQ(ierr);
694     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
695     for (p = 0; p < numPoints; p++) {
696       remotePoints[idx].index = remoteRoots[p];
697       remotePoints[idx].rank = n;
698       idx++;
699     }
700     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
701     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
702   }
703   ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
704   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);CHKERRQ(ierr);
705   ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
706   ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
707   ierr = PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
708   if (flg) {
709     ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
710     ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
711   }
712   /* Clean up */
713   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
714   ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
715   ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
716   ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
717   ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
718   PetscFunctionReturn(0);
719 }
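/*
  Continuing the sketch after DMPlexDistributeOwnership() (hypothetical caller code): the
  ownership data feeds DMPlexCreateOverlap(), and the resulting overlap SF can be converted into
  a migration SF with DMPlexCreateOverlapMigrationSF() below.

    PetscSF overlapSF, migrationSF;
    ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
    ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
*/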
720 
721 #undef __FUNCT__
722 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
723 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
724 {
725   MPI_Comm           comm;
726   PetscMPIInt        rank, numProcs;
727   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
728   PetscInt          *pointDepths, *remoteDepths, *ilocal;
729   PetscInt          *depthRecv, *depthShift, *depthIdx;
730   PetscSFNode       *iremote;
731   PetscSF            pointSF;
732   const PetscInt    *sharedLocal;
733   const PetscSFNode *overlapRemote, *sharedRemote;
734   PetscErrorCode     ierr;
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
738   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
739   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
740   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
741   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
742 
743   /* Before building the migration SF we need to know the new stratum offsets */
744   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
745   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
746   for (d=0; d<dim+1; d++) {
747     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
748     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
749   }
750   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
751   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
752   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
753 
754   /* Count received points in each stratum and compute the internal strata shift */
755   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
756   for (d=0; d<dim+1; d++) depthRecv[d]=0;
757   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
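  /* The shifts below append the received points of each stratum after the local points of that
     stratum, following the default Plex stratum ordering (cells first, then vertices, then faces and edges) */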
758   depthShift[dim] = 0;
759   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
760   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
761   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
762   for (d=0; d<dim+1; d++) {
763     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
764     depthIdx[d] = pStart + depthShift[d];
765   }
766 
767   /* Build an SF that describes the full overlap migration: every local point plus the incoming overlap points */
768   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
769   newLeaves = pEnd - pStart + nleaves;
770   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
771   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
772   /* First map local points to themselves */
773   for (d=0; d<dim+1; d++) {
774     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
775     for (p=pStart; p<pEnd; p++) {
776       point = p + depthShift[d];
777       ilocal[point] = point;
778       iremote[point].index = p;
779       iremote[point].rank = rank;
780       depthIdx[d]++;
781     }
782   }
783 
784   /* Add in the remote roots for currently shared points */
785   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
786   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
787   for (d=0; d<dim+1; d++) {
788     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
789     for (p=0; p<numSharedPoints; p++) {
790       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
791         point = sharedLocal[p] + depthShift[d];
792         iremote[point].index = sharedRemote[p].index;
793         iremote[point].rank = sharedRemote[p].rank;
794       }
795     }
796   }
797 
798   /* Now add the incoming overlap points */
799   for (p=0; p<nleaves; p++) {
800     point = depthIdx[remoteDepths[p]];
801     ilocal[point] = point;
802     iremote[point].index = overlapRemote[p].index;
803     iremote[point].rank = overlapRemote[p].rank;
804     depthIdx[remoteDepths[p]]++;
805   }
806   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
807 
808   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
809   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
810   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
811   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
812   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
813 
814   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
815   PetscFunctionReturn(0);
816 }
817 
818 #undef __FUNCT__
819 #define __FUNCT__ "DMPlexDistributeField"
820 /*@
821   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
822 
823   Collective on DM
824 
825   Input Parameters:
826 + dm - The DMPlex object
827 . pointSF - The PetscSF describing the communication pattern
828 . originalSection - The PetscSection for existing data layout
829 - originalVec - The existing data
830 
831   Output Parameters:
832 + newSection - The PetscSection describing the new data layout
833 - newVec - The new data
834 
835   Level: developer
836 
837 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
838 @*/
839 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
840 {
841   PetscSF        fieldSF;
842   PetscInt      *remoteOffsets, fieldSize;
843   PetscScalar   *originalValues, *newValues;
844   PetscErrorCode ierr;
845 
846   PetscFunctionBegin;
847   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
848   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
849 
850   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
851   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
852   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
853 
854   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
855   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
856   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
857   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
858   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
859   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
860   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
861   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
862   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
863   PetscFunctionReturn(0);
864 }
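/*
  A usage sketch (hypothetical caller code; migrationSF, origSection, and origVec are assumed to
  exist): newSection and newVec are created empty by the caller and filled here. This is the same
  pattern used for coordinates in DMPlexDistributeCoordinates() below.

    PetscSection newSection;
    Vec          newVec;
    ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
    ierr = VecCreate(comm, &newVec);CHKERRQ(ierr);
    ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/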
865 
866 #undef __FUNCT__
867 #define __FUNCT__ "DMPlexDistributeFieldIS"
868 /*@
869   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
870 
871   Collective on DM
872 
873   Input Parameters:
874 + dm - The DMPlex object
875 . pointSF - The PetscSF describing the communication pattern
876 . originalSection - The PetscSection for existing data layout
877 - originalIS - The existing data
878 
879   Output Parameters:
880 + newSection - The PetscSection describing the new data layout
881 - newIS - The new data
882 
883   Level: developer
884 
885 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
886 @*/
887 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
888 {
889   PetscSF         fieldSF;
890   PetscInt       *newValues, *remoteOffsets, fieldSize;
891   const PetscInt *originalValues;
892   PetscErrorCode  ierr;
893 
894   PetscFunctionBegin;
895   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
896   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
897 
898   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
899   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
900 
901   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
902   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
903   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
904   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
905   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
906   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
907   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
908   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
909   PetscFunctionReturn(0);
910 }
911 
912 #undef __FUNCT__
913 #define __FUNCT__ "DMPlexDistributeData"
914 /*@
915   DMPlexDistributeData - Distribute data of a given MPI datatype to match a given PetscSF, usually the SF from mesh distribution
916 
917   Collective on DM
918 
919   Input Parameters:
920 + dm - The DMPlex object
921 . pointSF - The PetscSF describing the communication pattern
922 . originalSection - The PetscSection for existing data layout
923 . datatype - The type of data
924 - originalData - The existing data
925 
926   Output Parameters:
927 + newSection - The PetscSection describing the new data layout
928 - newData - The new data
929 
930   Level: developer
931 
932 .seealso: DMPlexDistribute(), DMPlexDistributeField()
933 @*/
934 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
935 {
936   PetscSF        fieldSF;
937   PetscInt      *remoteOffsets, fieldSize;
938   PetscMPIInt    dataSize;
939   PetscErrorCode ierr;
940 
941   PetscFunctionBegin;
942   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
943   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
944 
945   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
946   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
947   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
948 
949   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
950   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
951   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
952   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
953   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
954   PetscFunctionReturn(0);
955 }
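/*
  A usage sketch (hypothetical caller code; migrationSF, origSection, and origData are assumed to
  exist): any MPI datatype can be shipped this way; this file uses MPIU_2INT to move PetscSFNode
  pairs in DMPlexCreateOverlap() above.

    PetscSection newSection;
    PetscInt    *newData = NULL;
    ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
    // newData is allocated inside DMPlexDistributeData() and must be freed by the caller with PetscFree()
*/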
956 
957 #undef __FUNCT__
958 #define __FUNCT__ "DMPlexDistributeCones"
959 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
960 {
961   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
962   MPI_Comm               comm;
963   PetscSF                coneSF;
964   PetscSection           originalConeSection, newConeSection;
965   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
966   PetscBool              flg;
967   PetscErrorCode         ierr;
968 
969   PetscFunctionBegin;
970   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
971   PetscValidPointer(dmParallel,4);
972   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
973 
974   /* Distribute cone section */
975   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
976   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
977   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
978   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
979   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
980   {
981     PetscInt pStart, pEnd, p;
982 
983     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
984     for (p = pStart; p < pEnd; ++p) {
985       PetscInt coneSize;
986       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
987       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
988     }
989   }
990   /* Communicate and renumber cones */
991   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
992   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
993   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
994   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
995   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
996   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
997   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
998   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
999   if (flg) {
1000     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1001     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1002     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1003     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1004     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1005   }
1006   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1007   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1008   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1009   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1010   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1011   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1012   /* Create supports and stratify sieve */
1013   {
1014     PetscInt pStart, pEnd;
1015 
1016     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1017     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1018   }
1019   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1020   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1021   PetscFunctionReturn(0);
1022 }
1023 
1024 #undef __FUNCT__
1025 #define __FUNCT__ "DMPlexDistributeCoordinates"
1026 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1027 {
1028   MPI_Comm         comm;
1029   PetscSection     originalCoordSection, newCoordSection;
1030   Vec              originalCoordinates, newCoordinates;
1031   PetscInt         bs;
1032   const char      *name;
1033   const PetscReal *maxCell, *L;
1034   PetscErrorCode   ierr;
1035 
1036   PetscFunctionBegin;
1037   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1038   PetscValidPointer(dmParallel, 3);
1039 
1040   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1041   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1042   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1043   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1044   if (originalCoordinates) {
1045     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1046     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1047     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1048 
1049     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1050     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1051     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1052     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1053     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1054   }
1055   ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
1056   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
1057   PetscFunctionReturn(0);
1058 }
1059 
1060 #undef __FUNCT__
1061 #define __FUNCT__ "DMPlexDistributeLabels"
1062 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1063 {
1064   MPI_Comm       comm;
1065   PetscMPIInt    rank;
1066   PetscInt       numLabels, l;
1067   PetscErrorCode ierr;
1068 
1069   PetscFunctionBegin;
1070   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1071   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1072   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1073   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1074   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1075 
1076   /* Bcast number of labels */
1077   ierr = DMPlexGetNumLabels(dm, &numLabels);CHKERRQ(ierr);
1078   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1079   for (l = numLabels-1; l >= 0; --l) {
1080     DMLabel     label = NULL, labelNew = NULL;
1081     PetscBool   isdepth;
1082 
1083     if (!rank) {
1084       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1085       /* Skip "depth" because it is recreated */
1086       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1087     }
1088     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1089     if (isdepth) continue;
1090     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1091     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1092   }
1093   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1094   PetscFunctionReturn(0);
1095 }
1096 
1097 #undef __FUNCT__
1098 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1099 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1100 {
1101   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1102   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1103   MPI_Comm        comm;
1104   const PetscInt *gpoints;
1105   PetscInt        dim, depth, n, d;
1106   PetscErrorCode  ierr;
1107 
1108   PetscFunctionBegin;
1109   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1110   PetscValidPointer(dmParallel, 4);
1111 
1112   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1113   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1114 
1115   /* Setup hybrid structure */
1116   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1117   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1118   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1119   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1120   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1121   for (d = 0; d <= dim; ++d) {
1122     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1123 
1124     if (pmax < 0) continue;
1125     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1126     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1127     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1128     for (p = 0; p < n; ++p) {
1129       const PetscInt point = gpoints[p];
1130 
1131       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1132     }
1133     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1134     else            pmesh->hybridPointMax[d] = -1;
1135   }
1136   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1137   PetscFunctionReturn(0);
1138 }
1139 
1140 #undef __FUNCT__
1141 #define __FUNCT__ "DMPlexDistributeSetupTree"
1142 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1143 {
1144   MPI_Comm        comm;
1145   DM              refTree;
1146   PetscSection    origParentSection, newParentSection;
1147   PetscInt        *origParents, *origChildIDs;
1148   PetscBool       flg;
1149   PetscErrorCode  ierr;
1150 
1151   PetscFunctionBegin;
1152   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1153   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1154   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1155 
1156   /* Set up tree */
1157   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1158   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1159   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1160   if (origParentSection) {
1161     PetscInt        pStart, pEnd;
1162     PetscInt        *newParents, *newChildIDs;
1163     PetscInt        *remoteOffsetsParents, newParentSize;
1164     PetscSF         parentSF;
1165 
1166     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1167     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1168     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1169     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1170     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1171     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1172     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1173     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1174     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1175     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1176     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1177     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1178     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1179     if (flg) {
1180       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1181       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1182       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1183       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1184       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1185     }
1186     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1187     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1188     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1189     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1190   }
1191   PetscFunctionReturn(0);
1192 }
1193 
1194 #undef __FUNCT__
1195 #define __FUNCT__ "DMPlexDistributeSF"
1196 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
1197 {
1198   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1199   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1200   PetscMPIInt            rank, numProcs;
1201   MPI_Comm               comm;
1202   PetscErrorCode         ierr;
1203 
1204   PetscFunctionBegin;
1205   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1206   PetscValidPointer(dmParallel,7);
1207 
1208   /* Create point SF for parallel mesh */
1209   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1210   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1211   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1212   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1213   {
1214     const PetscInt *leaves;
1215     PetscSFNode    *remotePoints, *rowners, *lowners;
1216     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1217     PetscInt        pStart, pEnd;
1218 
1219     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1220     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1221     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1222     for (p=0; p<numRoots; p++) {
1223       rowners[p].rank  = -1;
1224       rowners[p].index = -1;
1225     }
1226     if (origPart) {
1227       /* Make sure points in the original partition are not assigned to other procs */
1228       const PetscInt *origPoints;
1229 
1230       ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
1231       for (p = 0; p < numProcs; ++p) {
1232         PetscInt dof, off, d;
1233 
1234         ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
1235         ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
1236         for (d = off; d < off+dof; ++d) {
1237           rowners[origPoints[d]].rank = p;
1238         }
1239       }
1240       ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
1241     }
1242     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1243     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1244     for (p = 0; p < numLeaves; ++p) {
1245       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1246         lowners[p].rank  = rank;
1247         lowners[p].index = leaves ? leaves[p] : p;
1248       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1249         lowners[p].rank  = -2;
1250         lowners[p].index = -2;
1251       }
1252     }
1253     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC does not use them */
1254       rowners[p].rank  = -3;
1255       rowners[p].index = -3;
1256     }
1257     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1258     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1259     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1260     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1261     for (p = 0; p < numLeaves; ++p) {
1262       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1263       if (lowners[p].rank != rank) ++numGhostPoints;
1264     }
1265     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1266     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1267     for (p = 0, gp = 0; p < numLeaves; ++p) {
1268       if (lowners[p].rank != rank) {
1269         ghostPoints[gp]        = leaves ? leaves[p] : p;
1270         remotePoints[gp].rank  = lowners[p].rank;
1271         remotePoints[gp].index = lowners[p].index;
1272         ++gp;
1273       }
1274     }
1275     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1276     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1277     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1278   }
1279   pmesh->useCone    = mesh->useCone;
1280   pmesh->useClosure = mesh->useClosure;
1281   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1282   PetscFunctionReturn(0);
1283 }
1284 
1285 #undef __FUNCT__
1286 #define __FUNCT__ "DMPlexDistribute"
1287 /*@C
1288   DMPlexDistribute - Distributes the mesh and any associated sections.
1289 
1290   Collective on dm
1291 
1292   Input Parameters:
1293 + dm  - The original DMPlex object
1294 - overlap - The overlap of partitions, 0 is the default
1295 
1296   Output Parameters:
1297 + sf - The PetscSF used for point distribution
1298 - dmParallel - The distributed DMPlex object, or NULL
1299 
1300   Note: If the mesh is not distributed (e.g. the communicator has only one rank), *dmParallel is set to NULL.
1301 
1302   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1303   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1304   representation on the mesh.
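
  A minimal usage sketch (assuming dm is an existing serial DMPlex and ierr is a PetscErrorCode; the adjacency
  calls are optional and only illustrate the remark above):
$    DM dmDist = NULL;
$    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
$    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);
$    ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }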
1305 
1306   Level: intermediate
1307 
1308 .keywords: mesh, elements
1309 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1310 @*/
1311 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1312 {
1313   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1314   MPI_Comm               comm;
1315   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1316   DM                     dmOverlap;
1317   IS                     cellPart,        part;
1318   PetscSection           cellPartSection, partSection;
1319   PetscSFNode           *remoteRanks, *newRemote;
1320   const PetscSFNode     *oldRemote;
1321   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1322   ISLocalToGlobalMapping renumbering;
1323   PetscBool              flg;
1324   PetscMPIInt            rank, numProcs, p;
1325   PetscErrorCode         ierr;
1326 
1327   PetscFunctionBegin;
1328   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1329   if (sf) PetscValidPointer(sf,3);
1330   PetscValidPointer(dmParallel,4);
1331 
1332   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1333   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1334   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1335   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1336 
1337   *dmParallel = NULL;
1338   if (numProcs == 1) PetscFunctionReturn(0);
1339 
1340   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1341   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1342   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1343   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1344   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1345   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1346   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1347   if (!rank) numRemoteRanks = numProcs;
1348   else       numRemoteRanks = 0;
1349   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1350   for (p = 0; p < numRemoteRanks; ++p) {
1351     remoteRanks[p].rank  = p;
1352     remoteRanks[p].index = 0;
1353   }
1354   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1355   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
1356   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1357   if (flg) {
1358     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1359     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1360     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1361     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1362   }
1363   /* Close the partition over the mesh */
1364   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1365   /* Create new mesh */
1366   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1367   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1368   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1369   pmesh = (DM_Plex*) (*dmParallel)->data;
1370   pmesh->useAnchors = mesh->useAnchors;
1371 
1372   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1373   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1374   if (flg) {
1375     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1376     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1377     ierr = ISView(part, NULL);CHKERRQ(ierr);
1378     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1379     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1380     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1381   }
1382   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1383 
1384   /* Migrate data to a non-overlapping parallel DM */
1385   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1386   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1387   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1388   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1389   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1390 
1391   /* Build the point SF without overlap */
1392   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1393   if (flg) {
1394     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1395     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1396   }
1397 
1398   if (overlap > 0) {
1399     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1400     /* Add the partition overlap to the distributed DM */
1401     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1402     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1403     *dmParallel = dmOverlap;
1404     if (flg) {
1405       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1406       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1407     }
1408 
1409     /* Re-map the pointSF to establish the full migration pattern */
1410     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1411     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1412     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1413     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1414     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1415     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1416     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1417     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1418     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1419     pointSF = overlapPointSF;
1420     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1421   }
1422   /* Cleanup Partition */
1423   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1424   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1425   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1426   ierr = ISDestroy(&part);CHKERRQ(ierr);
1427   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1428   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1429   /* Copy BC */
1430   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1431   /* Cleanup */
1432   if (sf) {*sf = pointSF;}
1433   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1434   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1435   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1436   PetscFunctionReturn(0);
1437 }
1438 
1439 #undef __FUNCT__
1440 #define __FUNCT__ "DMPlexDistributeOverlap"
1441 /*@C
1442   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1443 
1444   Collective on dm
1445 
1446   Input Parameters:
1447 + dm  - The non-overlapping distributed DMPlex object
1448 . overlap - The overlap of partitions, 0 is the default
     - renumbering - The local-to-global point numbering created when dm was first distributed
1449 
1450   Output Parameters:
1451 + sf - The PetscSF used for point migration
1452 - dmOverlap - The overlapping distributed DMPlex object
1453 
1454   Note: The renumbering is normally created by DMPlexDistribute(), which calls this routine when a nonzero overlap is requested.
1455 
1456   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1457   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1458   representation on the mesh.
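
  As noted above, the renumbering is produced during the initial distribution, so the typical way to obtain an
  overlapping mesh is to request the overlap from DMPlexDistribute(), which calls this routine internally
  (a sketch, assuming dm is a serial DMPlex and ierr is a PetscErrorCode):
$    DM dmDist = NULL;
$    ierr = DMPlexDistribute(dm, 1, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }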
1459 
1460   Level: intermediate
1461 
1462 .keywords: mesh, elements
1463 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1464 @*/
1465 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1466 {
1467   MPI_Comm               comm;
1468   PetscMPIInt            rank;
1469   PetscSection           rootSection, leafSection;
1470   IS                     rootrank, leafrank;
1471   PetscSection           coneSection;
1472   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1473   PetscSFNode           *ghostRemote;
1474   const PetscSFNode     *overlapRemote;
1475   ISLocalToGlobalMapping overlapRenumbering;
1476   const PetscInt        *renumberingArray, *overlapLocal;
1477   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1478   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1479   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1480   PetscErrorCode         ierr;
1481 
1482   PetscFunctionBegin;
1483   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1484   if (sf) PetscValidPointer(sf, 4);
1485   PetscValidPointer(dmOverlap, 5);
1486 
1487   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1488   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1489   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1490 
1491   /* Compute point overlap with neighbouring processes on the distributed DM */
1492   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1493   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1494   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1495   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1496   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1497   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1498   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1499   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1500   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1501   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1502 
1503   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1504   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1505 
1506   /* Convert cones to global numbering before migrating them */
1507   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1508   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1509   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1510   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1511 
1512   /* Derive the new local-to-global mapping from the old one */
1513   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1514   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1515   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1516   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1517   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1518   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1519 
1520   /* Build the overlapping DM */
1521   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1522   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1523   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1524   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1525   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1526   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1527   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1528 
1529   /* Build the new point SF by propagating local point numbers through the migration SF to obtain the remote root indices */
1530   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1531   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1532   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1533   numGhostPoints = numSharedPoints + numOverlapPoints;
1534   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1535   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1536   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1537   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
1538   for (p=0; p<overlapLeaves; p++) {
1539     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1540   }
1541   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1542   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1543   for (idx=0, p=0; p<overlapLeaves; p++) {
1544     if (overlapRemote[p].rank != rank) {
1545       ghostLocal[idx] = overlapLocal[p];
1546       ghostRemote[idx].index = recvPointIDs[p];
1547       ghostRemote[idx].rank = overlapRemote[p].rank;
1548       idx++;
1549     }
1550   }
1551   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1552   ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1553   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1554   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1555   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1556   /* Cleanup overlap partition */
1557   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1558   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1559   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1560   if (sf) *sf = migrationSF;
1561   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1562   ierr = DMSetFromOptions(*dmOverlap);CHKERRQ(ierr);
1563   PetscFunctionReturn(0);
1564 }
1565