xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 8d3c1932cd76dc2d1ac854cce48898cc6ac885a3)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
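
  For example, FVM-style adjacency can be selected with (a minimal sketch; dm is an existing DMPlex):
$     ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
$     ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);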
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
153 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
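  /* Visit p itself (c == 0) and then each of its cone points; for every support point
     encountered, scan adj[] linearly, and if the scan runs off the end without finding a
     match, the comma expression in the loop guard appends the point and ends the scan */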
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259 {
260   static PetscInt asiz = 0;
261   PetscInt maxAnchors = 1;
262   PetscInt aStart = -1, aEnd = -1;
263   PetscInt maxAdjSize;
264   PetscSection aSec = NULL;
265   IS aIS = NULL;
266   const PetscInt *anchors;
267   PetscErrorCode  ierr;
268 
269   PetscFunctionBeginHot;
270   if (useAnchors) {
271     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
272     if (aSec) {
273       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
274       maxAnchors = PetscMax(1,maxAnchors);
275       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
276       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
277     }
278   }
279   if (!*adj) {
280     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
281 
282     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
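    /* Bound the adjacency size with geometric series: a closure holds at most
       coneSeries = (maxC^(depth+1)-1)/(maxC-1) points and a star at most supportSeries,
       so closures of the (at most maxS^depth) top points of a star, or stars of the
       (at most maxC^depth) bottom points of a closure, give the bounds taken below */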
283     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
284     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
285     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288     asiz *= maxAnchors;
289     asiz  = PetscMin(asiz,pEnd-pStart);
290     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
291   }
292   if (*adjSize < 0) *adjSize = asiz;
293   maxAdjSize = *adjSize;
294   if (useTransitiveClosure) {
295     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
296   } else if (useCone) {
297     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
298   } else {
299     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
300   }
301   if (useAnchors && aSec) {
302     PetscInt origSize = *adjSize;
303     PetscInt numAdj = origSize;
304     PetscInt i = 0, j;
305     PetscInt *orig = *adj;
306 
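    /* Replace each constrained (anchored) point in place: slide the remaining entries
       down to delete it, then append its anchors unless they are already present */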
307     while (i < origSize) {
308       PetscInt p = orig[i];
309       PetscInt aDof = 0;
310 
311       if (p >= aStart && p < aEnd) {
312         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
313       }
314       if (aDof) {
315         PetscInt aOff;
316         PetscInt s, q;
317 
318         for (j = i + 1; j < numAdj; j++) {
319           orig[j - 1] = orig[j];
320         }
321         origSize--;
322         numAdj--;
323         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
324         for (s = 0; s < aDof; ++s) {
325           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
326             if (anchors[aOff+s] == orig[q]) break;
327           }
328           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
329         }
330       }
331       else {
332         i++;
333       }
334     }
335     *adjSize = numAdj;
336     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must PetscFree the adj array if it was not passed in.
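
  For example (a minimal sketch; dm and p are assumed to exist):
$    PetscInt  adjSize = PETSC_DETERMINE;
$    PetscInt *adj     = NULL;
$    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
$    /* ... use adjSize entries of adj ... */
$    ierr = PetscFree(adj);CHKERRQ(ierr);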
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
374 
375 #undef __FUNCT__
376 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
377 /*@
378   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
379 
380   Collective on DM
381 
382   Input Parameters:
383 + dm              - The DM
384 . sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
385 
386   Output Parameters:
387 + processRanks - A list of process neighbors, or NULL
388 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
389 
390   Level: developer
391 
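  Notes:
  The rank sections and ISs are those computed by DMPlexDistributeOwnership(); a minimal sketch of a call requesting only the process SF:
$    ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProcess);CHKERRQ(ierr);
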
392 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
393 @*/
394 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
395 {
396   const PetscSFNode *remotePoints;
397   PetscInt          *localPointsNew;
398   PetscSFNode       *remotePointsNew;
399   const PetscInt    *nranks;
400   PetscInt          *ranksNew;
401   PetscBT            neighbors;
402   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
403   PetscMPIInt        numProcs, proc, rank;
404   PetscErrorCode     ierr;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
408   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
409   if (processRanks) {PetscValidPointer(processRanks, 3);}
410   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
411   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
412   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
413   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
414   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
415   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
416   /* Compute root-to-leaf process connectivity */
417   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
418   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
419   for (p = pStart; p < pEnd; ++p) {
420     PetscInt ndof, noff, n;
421 
422     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
423     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
424     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
425   }
426   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
427   /* Compute leaf-to-neighbor process connectivity */
428   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
429   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
430   for (p = pStart; p < pEnd; ++p) {
431     PetscInt ndof, noff, n;
432 
433     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
434     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
435     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
436   }
437   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
438   /* Compute leaf-to-root process connectivity */
439   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
440   /* Calculate edges */
441   PetscBTClear(neighbors, rank);
442   for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
443   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
445   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
446   for (proc = 0, n = 0; proc < numProcs; ++proc) {
447     if (PetscBTLookup(neighbors, proc)) {
448       ranksNew[n]              = proc;
449       localPointsNew[n]        = proc;
450       remotePointsNew[n].index = rank;
451       remotePointsNew[n].rank  = proc;
452       ++n;
453     }
454   }
455   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
456   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
457   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
458   if (sfProcess) {
459     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
460     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
461     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
462     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
463   }
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "DMPlexDistributeOwnership"
469 /*@
470   DMPlexDistributeOwnership - Compute owner information for shared points. This basically makes the one-sided PetscSF information two-sided.
471 
472   Collective on DM
473 
474   Input Parameter:
475 . dm - The DM
476 
477   Output Parameters:
478 + rootSection - The number of leaves for a given root point
479 . rootrank    - The rank of each edge into the root point
480 . leafSection - The number of processes sharing a given leaf point
481 - leafrank    - The rank of each process sharing a leaf point
482 
483   Level: developer
484 
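  Notes:
  A typical call sequence (a minimal sketch; comm is the communicator of dm, and the caller destroys the sections and ISs):
$    ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
$    ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
$    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
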
485 .seealso: DMPlexCreateOverlap()
486 @*/
487 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
488 {
489   MPI_Comm        comm;
490   PetscSF         sfPoint;
491   const PetscInt *rootdegree;
492   PetscInt       *myrank, *remoterank;
493   PetscInt        pStart, pEnd, p, nedges;
494   PetscMPIInt     rank;
495   PetscErrorCode  ierr;
496 
497   PetscFunctionBegin;
498   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
499   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
500   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
501   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
502   /* Compute number of leaves for each root */
503   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
504   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
506   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
507   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
508   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
509   /* Gather rank of each leaf to root */
510   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
511   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
512   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
513   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
514   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
516   ierr = PetscFree(myrank);CHKERRQ(ierr);
517   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
518   /* Distribute remote ranks to leaves */
519   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
520   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "DMPlexCreateOverlap"
526 /*@C
527   DMPlexCreateOverlap - Compute a DMLabel which encodes the partition overlap as point/rank pairings
528 
529   Collective on DM
530 
531   Input Parameters:
532 + dm          - The DM
533 . levels      - Number of overlap levels
534 . rootSection - The number of leaves for a given root point
535 . rootrank    - The rank of each edge into the root point
536 . leafSection - The number of processes sharing a given leaf point
537 - leafrank    - The rank of each process sharing a leaf point
538 
539   Output Parameter:
540 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
541 
542   Level: developer
543 
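  Notes:
  The sections and ISs are those computed by DMPlexDistributeOwnership(), e.g. (a minimal sketch):
$    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
$    ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
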
544 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
545 @*/
546 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
547 {
548   MPI_Comm           comm;
549   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
550   PetscSF            sfPoint, sfProc;
551   const PetscSFNode *remote;
552   const PetscInt    *local;
553   const PetscInt    *nrank, *rrank;
554   PetscInt          *adj = NULL;
555   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
556   PetscMPIInt        rank, numProcs;
557   PetscBool          useCone, useClosure, flg;
558   PetscErrorCode     ierr;
559 
560   PetscFunctionBegin;
561   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
562   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
563   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
564   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
565   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
566   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
567   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
568   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
569   /* Handle leaves: shared with the root point */
570   for (l = 0; l < nleaves; ++l) {
571     PetscInt adjSize = PETSC_DETERMINE, a;
572 
573     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
574     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
575   }
576   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
577   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
578   /* Handle roots */
579   for (p = pStart; p < pEnd; ++p) {
580     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
581 
582     if ((p >= sStart) && (p < sEnd)) {
583       /* Some leaves share a root with other leaves on different processes */
584       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
585       if (neighbors) {
586         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
587         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
588         for (n = 0; n < neighbors; ++n) {
589           const PetscInt remoteRank = nrank[noff+n];
590 
591           if (remoteRank == rank) continue;
592           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
593         }
594       }
595     }
596     /* Roots are shared with leaves */
597     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
598     if (!neighbors) continue;
599     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
600     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
601     for (n = 0; n < neighbors; ++n) {
602       const PetscInt remoteRank = rrank[noff+n];
603 
604       if (remoteRank == rank) continue;
605       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
606     }
607   }
608   ierr = PetscFree(adj);CHKERRQ(ierr);
609   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
610   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
611   /* Add additional overlap levels */
612   for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);}
613   /* We require the closure in the overlap */
614   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
615   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
616   if (useCone || !useClosure) {
617     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
618   }
619   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
620   if (flg) {
621     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
622     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
623   }
624   /* Make process SF and invert sender to receiver label */
625   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
626   ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
627   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
628   /* Add owned points, except for shared local points */
629   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
630   for (l = 0; l < nleaves; ++l) {
631     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
632     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
633   }
634   /* Clean up */
635   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
636   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
637   PetscFunctionReturn(0);
638 }
639 
640 #undef __FUNCT__
641 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
642 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
643 {
644   MPI_Comm           comm;
645   PetscMPIInt        rank, numProcs;
646   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
647   PetscInt          *pointDepths, *remoteDepths, *ilocal;
648   PetscInt          *depthRecv, *depthShift, *depthIdx;
649   PetscSFNode       *iremote;
650   PetscSF            pointSF;
651   const PetscInt    *sharedLocal;
652   const PetscSFNode *overlapRemote, *sharedRemote;
653   PetscErrorCode     ierr;
654 
655   PetscFunctionBegin;
656   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
657   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
658   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
659   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
660   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
661 
662   /* Before building the migration SF we need to know the new stratum offsets */
663   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
664   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
665   for (d=0; d<dim+1; d++) {
666     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
667     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
668   }
669   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
670   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
671   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
672 
673   /* Count received points in each stratum and compute the internal strata shift */
674   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
675   for (d=0; d<dim+1; d++) depthRecv[d]=0;
676   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
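  /* The shifts below reproduce the canonical Plex chart ordering, cells first, then
     vertices, then faces and edges, so that received points land inside their strata */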
677   depthShift[dim] = 0;
678   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
679   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
680   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
681   for (d=0; d<dim+1; d++) {
682     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
683     depthIdx[d] = pStart + depthShift[d];
684   }
685 
686   /* From the overlap SF, build an SF that describes the full overlap migration */
687   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
688   newLeaves = pEnd - pStart + nleaves;
689   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
690   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
691   /* First map local points to themselves */
692   for (d=0; d<dim+1; d++) {
693     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
694     for (p=pStart; p<pEnd; p++) {
695       point = p + depthShift[d];
696       ilocal[point] = point;
697       iremote[point].index = p;
698       iremote[point].rank = rank;
699       depthIdx[d]++;
700     }
701   }
702 
703   /* Add in the remote roots for currently shared points */
704   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
705   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
706   for (d=0; d<dim+1; d++) {
707     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
708     for (p=0; p<numSharedPoints; p++) {
709       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
710         point = sharedLocal[p] + depthShift[d];
711         iremote[point].index = sharedRemote[p].index;
712         iremote[point].rank = sharedRemote[p].rank;
713       }
714     }
715   }
716 
717   /* Now add the incoming overlap points */
718   for (p=0; p<nleaves; p++) {
719     point = depthIdx[remoteDepths[p]];
720     ilocal[point] = point;
721     iremote[point].index = overlapRemote[p].index;
722     iremote[point].rank = overlapRemote[p].rank;
723     depthIdx[remoteDepths[p]]++;
724   }
725   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
726 
727   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
728   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
729   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
730   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
731   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
732 
733   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
734   PetscFunctionReturn(0);
735 }
736 
737 #undef __FUNCT__
738 #define __FUNCT__ "DMPlexStratifyMigrationSF"
739 /*@
740   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF so that the resulting DM is stratified
741 
742   Input Parameters:
743 + dm          - The DM
744 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
745 
746   Output Parameter:
747 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
748 
749   Level: developer
750 
751 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
752 @*/
753 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
754 {
755   MPI_Comm           comm;
756   PetscMPIInt        rank, numProcs;
757   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
758   PetscInt          *pointDepths, *remoteDepths, *ilocal;
759   PetscInt          *depthRecv, *depthShift, *depthIdx;
760   const PetscSFNode *iremote;
761   PetscErrorCode     ierr;
762 
763   PetscFunctionBegin;
764   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
765   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
766   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
767   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
768   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
769   ierr = MPI_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
770   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", ldepth, depth);
771 
772   /* Before building the migration SF we need to know the new stratum offsets */
773   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
774   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
775   for (d = 0; d < depth+1; ++d) {
776     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
777     for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
778   }
779   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
780   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
781   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
782   /* Count received points in each stratum and compute the internal strata shift */
783   ierr = PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);CHKERRQ(ierr);
784   for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
785   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
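  /* As in DMPlexCreateOverlapMigrationSF() above, the shifts reproduce the canonical Plex
     chart ordering: cells, then vertices, then faces and edges */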
786   depthShift[depth] = 0;
787   for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
788   for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
789   for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
790   for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
791   /* Derive a new local permutation based on stratified indices */
792   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
793   for (p = 0; p < nleaves; ++p) {
794     const PetscInt dep = remoteDepths[p];
795 
796     ilocal[p] = depthShift[dep] + depthIdx[dep];
797     depthIdx[dep]++;
798   }
799   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
800   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
801   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
802   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
803   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
804   PetscFunctionReturn(0);
805 }
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "DMPlexDistributeField"
809 /*@
810   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
811 
812   Collective on DM
813 
814   Input Parameters:
815 + dm - The DMPlex object
816 . pointSF - The PetscSF describing the communication pattern
817 . originalSection - The PetscSection for existing data layout
818 - originalVec - The existing data
819 
820   Output Parameters:
821 + newSection - The PetscSection describing the new data layout
822 - newVec - The new data
823 
824   Level: developer
825 
826 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
827 @*/
828 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
829 {
830   PetscSF        fieldSF;
831   PetscInt      *remoteOffsets, fieldSize;
832   PetscScalar   *originalValues, *newValues;
833   PetscErrorCode ierr;
834 
835   PetscFunctionBegin;
836   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
837   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
838 
839   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
840   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
841   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
842 
843   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
844   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
845   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
846   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
847   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
848   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
849   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
850   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
851   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
852   PetscFunctionReturn(0);
853 }
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "DMPlexDistributeFieldIS"
857 /*@
858   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
859 
860   Collective on DM
861 
862   Input Parameters:
863 + dm - The DMPlex object
864 . pointSF - The PetscSF describing the communication pattern
865 . originalSection - The PetscSection for existing data layout
866 - originalIS - The existing data
867 
868   Output Parameters:
869 + newSection - The PetscSection describing the new data layout
870 - newIS - The new data
871 
872   Level: developer
873 
874 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
875 @*/
876 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
877 {
878   PetscSF         fieldSF;
879   PetscInt       *newValues, *remoteOffsets, fieldSize;
880   const PetscInt *originalValues;
881   PetscErrorCode  ierr;
882 
883   PetscFunctionBegin;
884   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
885   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
886 
887   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
888   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
889 
890   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
891   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
892   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
893   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
894   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
895   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
896   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
897   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
898   PetscFunctionReturn(0);
899 }
900 
901 #undef __FUNCT__
902 #define __FUNCT__ "DMPlexDistributeData"
903 /*@
904   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
905 
906   Collective on DM
907 
908   Input Parameters:
909 + dm - The DMPlex object
910 . pointSF - The PetscSF describing the communication pattern
911 . originalSection - The PetscSection for existing data layout
912 . datatype - The type of data
913 - originalData - The existing data
914 
915   Output Parameters:
916 + newSection - The PetscSection describing the new data layout
917 - newData - The new data
918 
919   Level: developer
920 
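  Notes:
  Example (a minimal sketch; origSection and origData are placeholders for an existing layout and its PetscInt payload):
$    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
$    ierr = DMPlexDistributeData(dm, pointSF, origSection, MPIU_INT, (void *) origData, newSection, (void **) &newData);CHKERRQ(ierr);
$    ierr = PetscFree(newData);CHKERRQ(ierr);
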
921 .seealso: DMPlexDistribute(), DMPlexDistributeField()
922 @*/
923 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
924 {
925   PetscSF        fieldSF;
926   PetscInt      *remoteOffsets, fieldSize;
927   PetscMPIInt    dataSize;
928   PetscErrorCode ierr;
929 
930   PetscFunctionBegin;
931   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
932   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
933 
934   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
935   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
936   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
937 
938   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
939   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
940   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
941   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
942   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
943   PetscFunctionReturn(0);
944 }
945 
946 #undef __FUNCT__
947 #define __FUNCT__ "DMPlexDistributeCones"
948 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
949 {
950   DM_Plex               *mesh  = (DM_Plex*) dm->data;
951   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
952   MPI_Comm               comm;
953   PetscSF                coneSF;
954   PetscSection           originalConeSection, newConeSection;
955   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
956   PetscBool              flg;
957   PetscErrorCode         ierr;
958 
959   PetscFunctionBegin;
960   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
961   PetscValidPointer(dmParallel, 5);
962   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
963 
964   /* Distribute cone section */
965   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
966   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
967   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
968   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
969   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
970   {
971     PetscInt pStart, pEnd, p;
972 
973     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
974     for (p = pStart; p < pEnd; ++p) {
975       PetscInt coneSize;
976       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
977       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
978     }
979   }
980   /* Communicate and renumber cones */
981   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
982   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
983   if (original) {
984     PetscInt numCones;
985 
986     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr);
    ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
987     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
988   }
989   else {
990     globCones = cones;
991   }
992   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
993   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
994   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
995   if (original) {
996     ierr = PetscFree(globCones);CHKERRQ(ierr);
997   }
998   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
999   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1000 #if defined(PETSC_USE_DEBUG)
1001   {
1002     PetscInt  p;
1003     PetscBool valid = PETSC_TRUE;
1004     for (p = 0; p < newConesSize; ++p) {
1005       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1006     }
1007     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1008   }
1009 #endif
1010   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1011   if (flg) {
1012     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1013     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1014     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1015     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1016     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1017   }
1018   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1019   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1020   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1021   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1022   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1023   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1024   /* Create supports and stratify sieve */
1025   {
1026     PetscInt pStart, pEnd;
1027 
1028     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1029     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1030   }
1031   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1032   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1033   pmesh->useCone    = mesh->useCone;
1034   pmesh->useClosure = mesh->useClosure;
1035   PetscFunctionReturn(0);
1036 }
1037 
1038 #undef __FUNCT__
1039 #define __FUNCT__ "DMPlexDistributeCoordinates"
1040 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1041 {
1042   MPI_Comm         comm;
1043   PetscSection     originalCoordSection, newCoordSection;
1044   Vec              originalCoordinates, newCoordinates;
1045   PetscInt         bs;
1046   const char      *name;
1047   const PetscReal *maxCell, *L;
1048   const DMBoundaryType *bd;
1049   PetscErrorCode   ierr;
1050 
1051   PetscFunctionBegin;
1052   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1053   PetscValidPointer(dmParallel, 3);
1054 
1055   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1056   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1057   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1058   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1059   if (originalCoordinates) {
1060     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1061     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1062     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1063 
1064     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1065     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1066     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1067     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1068     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1069   }
1070   ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
1071   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
1072   PetscFunctionReturn(0);
1073 }
1074 
1075 #undef __FUNCT__
1076 #define __FUNCT__ "DMPlexDistributeLabels"
1077 /* Here we are assuming that process 0 always has everything */
1078 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1079 {
1080   MPI_Comm       comm;
1081   PetscMPIInt    rank;
1082   PetscInt       numLabels, numLocalLabels, l;
1083   PetscBool      hasLabels = PETSC_FALSE;
1084   PetscErrorCode ierr;
1085 
1086   PetscFunctionBegin;
1087   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1088   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1089   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1090   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1091   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1092 
1093   /* Everyone must have either the same number of labels, or none */
1094   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1095   numLabels = numLocalLabels;
1096   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1097   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1098   for (l = numLabels-1; l >= 0; --l) {
1099     DMLabel     label = NULL, labelNew = NULL;
1100     PetscBool   isdepth;
1101 
1102     if (hasLabels) {
1103       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1104       /* Skip "depth" because it is recreated */
1105       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1106     }
1107     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1108     if (isdepth) continue;
1109     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1110     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1111   }
1112   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1113   PetscFunctionReturn(0);
1114 }
1115 
1116 #undef __FUNCT__
1117 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1118 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1119 {
1120   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1121   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1122   MPI_Comm        comm;
1123   const PetscInt *gpoints;
1124   PetscInt        dim, depth, n, d;
1125   PetscErrorCode  ierr;
1126 
1127   PetscFunctionBegin;
1128   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1129   PetscValidPointer(dmParallel, 4);
1130 
1131   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1132   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1133 
1134   /* Setup hybrid structure */
1135   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1136   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1137   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1138   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1139   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1140   for (d = 0; d <= dim; ++d) {
1141     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1142 
1143     if (pmax < 0) continue;
1144     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1145     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1146     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1147     for (p = 0; p < n; ++p) {
1148       const PetscInt point = gpoints[p];
1149 
1150       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1151     }
1152     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1153     else            pmesh->hybridPointMax[d] = -1;
1154   }
1155   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1156   PetscFunctionReturn(0);
1157 }
1158 
1159 #undef __FUNCT__
1160 #define __FUNCT__ "DMPlexDistributeSetupTree"
1161 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1162 {
1163   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1164   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1165   MPI_Comm        comm;
1166   DM              refTree;
1167   PetscSection    origParentSection, newParentSection;
1168   PetscInt        *origParents, *origChildIDs;
1169   PetscBool       flg;
1170   PetscErrorCode  ierr;
1171 
1172   PetscFunctionBegin;
1173   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1174   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1175   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1176 
1177   /* Set up tree */
1178   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1179   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1180   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1181   if (origParentSection) {
1182     PetscInt        pStart, pEnd;
1183     PetscInt        *newParents, *newChildIDs, *globParents;
1184     PetscInt        *remoteOffsetsParents, newParentSize;
1185     PetscSF         parentSF;
1186 
1187     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1188     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1189     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1190     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1191     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1192     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1193     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1194     if (original) {
1195       PetscInt numParents;
1196 
1197       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1198       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1199       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1200     }
1201     else {
1202       globParents = origParents;
1203     }
1204     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1205     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1206     if (original) {
1207       ierr = PetscFree(globParents);CHKERRQ(ierr);
1208     }
1209     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1210     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1211     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1212 #if defined(PETSC_USE_DEBUG)
1213     {
1214       PetscInt  p;
1215       PetscBool valid = PETSC_TRUE;
1216       for (p = 0; p < newParentSize; ++p) {
1217         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1218       }
1219       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1220     }
1221 #endif
1222     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1223     if (flg) {
1224       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1225       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1226       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1227       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1228       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1229     }
1230     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1231     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1232     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1233     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1234   }
1235   pmesh->useAnchors = mesh->useAnchors;
1236   PetscFunctionReturn(0);
1237 }
1238 
1239 #undef __FUNCT__
1240 #define __FUNCT__ "DMPlexDistributeSF"
1241 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1242 {
1243   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1244   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1245   PetscMPIInt            rank, numProcs;
1246   MPI_Comm               comm;
1247   PetscErrorCode         ierr;
1248 
1249   PetscFunctionBegin;
1250   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1251   PetscValidPointer(dmParallel,3);
1252 
1253   /* Create point SF for parallel mesh */
1254   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1255   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1256   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1257   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
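  /* Ownership vote: each leaf bids (rank, new local number) for the point it received;
     an MPI_MAXLOC reduction over the migration SF awards every shared point to the
     highest bidding rank, and a broadcast tells all copies who won. Leaves whose winner
     is a different rank become the ghost points of the parallel point SF. */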
1258   {
1259     const PetscInt *leaves;
1260     PetscSFNode    *remotePoints, *rowners, *lowners;
1261     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1262     PetscInt        pStart, pEnd;
1263 
1264     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1265     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1266     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1267     for (p=0; p<numRoots; p++) {
1268       rowners[p].rank  = -1;
1269       rowners[p].index = -1;
1270     }
1271     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1272     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1273     for (p = 0; p < numLeaves; ++p) {
1274       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1275         lowners[p].rank  = rank;
1276         lowners[p].index = leaves ? leaves[p] : p;
1277       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1278         lowners[p].rank  = -2;
1279         lowners[p].index = -2;
1280       }
1281     }
1282     for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use it */
1283       rowners[p].rank  = -3;
1284       rowners[p].index = -3;
1285     }
1286     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1287     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1288     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1289     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1290     for (p = 0; p < numLeaves; ++p) {
1291       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1292       if (lowners[p].rank != rank) ++numGhostPoints;
1293     }
1294     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1295     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1296     for (p = 0, gp = 0; p < numLeaves; ++p) {
1297       if (lowners[p].rank != rank) {
1298         ghostPoints[gp]        = leaves ? leaves[p] : p;
1299         remotePoints[gp].rank  = lowners[p].rank;
1300         remotePoints[gp].index = lowners[p].index;
1301         ++gp;
1302       }
1303     }
1304     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1305     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1306     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1307   }
1308   pmesh->useCone    = mesh->useCone;
1309   pmesh->useClosure = mesh->useClosure;
1310   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1311   PetscFunctionReturn(0);
1312 }
1313 
1314 #undef __FUNCT__
1315 #define __FUNCT__ "DMPlexCreatePointSF"
1316 /*@C
1317   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1318 
1319   Input Parameters:
1320 + dm          - The source DMPlex object
1321 . migrationSF - The star forest that describes the parallel point remapping
1322 - ownership   - Flag causing a vote to determine point ownership
1323 
1324   Output Parameter:
1325 . pointSF     - The star forest describing the point overlap in the remapped DM
1326 
1327   Level: developer
1328 
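  Example:
  A minimal usage sketch, assuming migrationSF came from DMPlexDistribute() or
  DMPlexPartitionLabelCreateSF() (error checking abbreviated):
.vb
  PetscSF pointSF;

  ierr = DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
  ierr = DMSetPointSF(dmParallel, pointSF);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
.ve
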
1329 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1330 @*/
1331 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1332 {
1333   PetscMPIInt        rank;
1334   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1335   PetscInt          *pointLocal;
1336   const PetscInt    *leaves;
1337   const PetscSFNode *roots;
1338   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1339   PetscErrorCode     ierr;
1340 
1341   PetscFunctionBegin;
1342   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1343   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1344 
1345   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1346   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1347   if (ownership) {
1348     /* Point ownership vote: the process with the highest rank owns shared points */
1349     for (p = 0; p < nleaves; ++p) {
1350       /* Every leaf bids (rank, new local number) for its point */
1351       leafNodes[p].rank  = rank;
1352       leafNodes[p].index = p;
1353     }
1354     for (p = 0; p < nroots; p++) {
1355       /* Root must not participate in the reduction, flag so that MAXLOC does not use it */
1356       rootNodes[p].rank  = -3;
1357       rootNodes[p].index = -3;
1358     }
1359     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1360     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1361   } else {
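    /* No vote: ownership is inherited from the source mesh. Each root keeps its
       original owner's rank, and roots that stay on-rank record the corresponding
       leaf's new local number so the broadcast below can point ghost copies at it. */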
1362     for (p = 0; p < nroots; p++) {
1363       rootNodes[p].index = -1;
1364       rootNodes[p].rank = rank;
1365     }
1366     for (p = 0; p < nleaves; p++) {
1367       /* Write new local id into old location */
1368       if (roots[p].rank == rank) {
1369         rootNodes[roots[p].index].index = leaves[p];
1370       }
1371     }
1372   }
1373   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1374   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1375 
1376   for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
1377   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1378   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1379   for (idx = 0, p = 0; p < nleaves; p++) {
1380     if (leafNodes[p].rank != rank) {
1381       pointLocal[idx] = p;
1382       pointRemote[idx] = leafNodes[p];
1383       idx++;
1384     }
1385   }
1386   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1387   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1388   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1389   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 #undef __FUNCT__
1394 #define __FUNCT__ "DMPlexMigrate"
1395 /*@C
1396   DMPlexMigrate - Migrates internal DM data over the supplied star forest
1397 
1398   Input Parameters:
1399 + dm       - The source DMPlex object
1400 - sf       - The star forest communication context describing the migration pattern
1401 
1402   Output Parameter:
1403 . targetDM - The target DMPlex object
1404 
1405   Level: intermediate
1406 
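  Example:
  A minimal sketch of a manual migration, assuming sf was built from a partition
  label with DMPlexPartitionLabelCreateSF() (error checking abbreviated):
.vb
  DM targetDM;

  ierr = DMPlexCreate(comm, &targetDM);CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sf, targetDM);CHKERRQ(ierr);
.ve
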
1407 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1408 @*/
1409 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1410 {
1411   MPI_Comm               comm;
1412   PetscInt               dim, nroots;
1413   PetscSF                sfPoint;
1414   ISLocalToGlobalMapping ltogMigration;
1415   ISLocalToGlobalMapping ltogOriginal = NULL;
1416   PetscBool              flg;
1417   PetscErrorCode         ierr;
1418 
1419   PetscFunctionBegin;
1420   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1421   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1422   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1423   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1424   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1425 
1426   /* If the source mesh is already distributed (it has a point SF), derive the new numbering from a global point numbering; otherwise this is a one-to-all distribution */
1427   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1428   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1429   if (nroots >= 0) {
1430     IS                     isOriginal;
1431     PetscInt               n, size, nleaves;
1432     PetscInt              *numbering_orig, *numbering_new;
1433     /* Get the original point numbering */
1434     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1435     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1436     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1437     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1438     /* Convert to positive global numbers: unowned points are encoded as -(gid+1) in the point numbering */
1439     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1440     /* Derive the new local-to-global mapping from the old one */
1441     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1442     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1443     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1444     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1445     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1446     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1447     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1448   } else {
1449     /* One-to-all distribution pattern: We can derive LToG from SF */
1450     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1451   }
1452   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1453   if (flg) {
1454     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1455     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1456   }
1457   /* Migrate DM data to target DM */
1458   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1459   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1460   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1461   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1462   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1463   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1464   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1465   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1466   PetscFunctionReturn(0);
1467 }
1468 
1469 #undef __FUNCT__
1470 #define __FUNCT__ "DMPlexDistribute"
1471 /*@C
1472   DMPlexDistribute - Distributes the mesh and any associated sections.
1473 
1474   Collective on dm
1475 
1476   Input Parameters:
1477 + dm  - The original DMPlex object
1478 - overlap - The overlap of partitions, 0 is the default
1479 
1480   Output Parameters:
1481 + sf - The PetscSF used for point distribution
1482 - dmParallel - The distributed DMPlex object, or NULL
1483 
1484   Note: If the mesh was not distributed, dmParallel is set to NULL.
1485 
1486   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1487   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1488   representation on the mesh.
1489 
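  Example:
  The usual pattern replaces the serial mesh with its distributed counterpart
  (a minimal sketch; error checking abbreviated):
.vb
  DM      dmDist = NULL;
  PetscSF sf     = NULL;

  ierr = DMPlexDistribute(dm, 0, &sf, &dmDist);CHKERRQ(ierr);
  if (dmDist) {
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }
.ve
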
1490   Level: intermediate
1491 
1492 .keywords: mesh, elements
1493 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1494 @*/
1495 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1496 {
1497   MPI_Comm               comm;
1498   PetscPartitioner       partitioner;
1499   IS                     cellPart;
1500   PetscSection           cellPartSection;
1501   DM                     dmCoord;
1502   DMLabel                lblPartition, lblMigration;
1503   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1504   PetscBool              flg;
1505   PetscMPIInt            rank, numProcs, p;
1506   PetscErrorCode         ierr;
1507 
1508   PetscFunctionBegin;
1509   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1510   if (sf) PetscValidPointer(sf,3);
1511   PetscValidPointer(dmParallel,4);
1512 
1513   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1514   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1515   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1516   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1517 
1518   *dmParallel = NULL;
1519   if (numProcs == 1) {ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr); PetscFunctionReturn(0);}
1520 
1521   /* Create cell partition */
1522   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1523   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1524   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1525   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1526   {
1527     /* Convert partition to DMLabel */
1528     PetscInt proc, pStart, pEnd, npoints, poffset;
1529     const PetscInt *points;
1530     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1531     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1532     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1533     for (proc = pStart; proc < pEnd; proc++) {
1534       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1535       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1536       for (p = poffset; p < poffset+npoints; p++) {
1537         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1538       }
1539     }
1540     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1541   }
1542   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1543   {
1544     /* Build a global process SF: every process is both a root and a leaf of every other, so the partition label can be inverted by all-to-all communication */
1545     PetscSFNode *remoteProc;
1546     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1547     for (p = 0; p < numProcs; ++p) {
1548       remoteProc[p].rank  = p;
1549       remoteProc[p].index = rank;
1550     }
1551     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1552     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1553     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1554   }
1555   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1556   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1557   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1558   /* Stratify the SF in case we are migrating an already parallel plex */
1559   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1560   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1561   sfMigration = sfStratified;
1562   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1563   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1564   if (flg) {
1565     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
1566     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1567     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1568   }
1569 
1570   /* Create non-overlapping parallel DM and migrate internal data */
1571   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1572   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1573   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1574 
1575   /* Build the point SF without overlap */
1576   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1577   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1578   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1579   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1580   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1581 
1582   if (overlap > 0) {
1583     DM                 dmOverlap;
1584     PetscInt           nroots, nleaves;
1585     PetscSFNode       *newRemote;
1586     const PetscSFNode *oldRemote;
1587     PetscSF            sfOverlap, sfOverlapPoint;
1588     /* Add the partition overlap to the distributed DM */
1589     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1590     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1591     *dmParallel = dmOverlap;
1592     if (flg) {
1593       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1594       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1595     }
1596 
1597     /* Re-map the migration SF to establish the full migration pattern: compose the original migration with the overlap migration, so that sfMigration maps the original points directly onto the overlapping DM */
1598     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1599     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1600     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1601     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1602     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1603     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1604     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1605     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1606     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1607     sfMigration = sfOverlapPoint;
1608   }
1609   /* Cleanup Partition */
1610   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1611   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1612   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1613   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1614   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1615   /* Copy BC */
1616   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1617   /* Create sfNatural */
1618   if (dm->useNatural) {
1619     PetscSection section;
1620 
1621     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1622     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1623   }
1624   /* Cleanup */
1625   if (sf) {*sf = sfMigration;}
1626   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1627   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1628   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1629   PetscFunctionReturn(0);
1630 }
1631 
1632 #undef __FUNCT__
1633 #define __FUNCT__ "DMPlexDistributeOverlap"
1634 /*@C
1635   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1636 
1637   Collective on dm
1638 
1639   Input Parameters:
1640 + dm  - The non-overlapping distributed DMPlex object
1641 - overlap - The overlap of partitions, 0 is the default
1642 
1643   Output Parameters:
1644 + sf - The PetscSF used for point distribution
1645 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1646 
1647   Note: If the mesh was not distributed, dmOverlap is set to NULL.
1648 
1649   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1650   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1651   representation on the mesh.
1652 
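  Example:
  A minimal sketch adding one layer of overlap to an already distributed mesh
  (error checking abbreviated):
.vb
  DM dmOverlap = NULL;

  ierr = DMPlexDistributeOverlap(dm, 1, NULL, &dmOverlap);CHKERRQ(ierr);
  if (dmOverlap) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmOverlap;
  }
.ve
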
1653   Level: intermediate
1654 
1655 .keywords: mesh, elements
1656 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1657 @*/
1658 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1659 {
1660   MPI_Comm               comm;
1661   PetscMPIInt            rank;
1662   PetscSection           rootSection, leafSection;
1663   IS                     rootrank, leafrank;
1664   DM                     dmCoord;
1665   DMLabel                lblOverlap;
1666   PetscSF                sfOverlap, sfStratified, sfPoint;
1667   PetscErrorCode         ierr;
1668 
1669   PetscFunctionBegin;
1670   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1671   if (sf) PetscValidPointer(sf, 3);
1672   PetscValidPointer(dmOverlap, 4);
1673 
1674   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1675   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1676   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1677 
1678   /* Compute point overlap with neighbouring processes on the distributed DM */
1679   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1680   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1681   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1682   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1683   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1684   /* Convert overlap label to stratified migration SF */
1685   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1686   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1687   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1688   sfOverlap = sfStratified;
1689   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1690   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1691 
1692   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1693   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1694   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1695   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1696   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1697 
1698   /* Build the overlapping DM */
1699   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1700   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1701   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1702   /* Build the new point SF */
1703   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1704   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1705   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1706   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1707   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1708   /* Cleanup overlap partition */
1709   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1710   if (sf) *sf = sfOverlap;
1711   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1712   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1713   PetscFunctionReturn(0);
1714 }
1715