xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision be200f8d7b9f2c03ad91139b9b00a1aef64bd73e)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
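/*
  Usage sketch (not part of the library source): choosing the FVM-style adjacency from
  the Notes table above on an existing DMPlex "dm" before distribution. The "ierr" and
  CHKERRQ error handling follow the usual PETSc pattern; "dm" is assumed valid.

    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);

  The FEM default corresponds to useCone = PETSC_FALSE, useClosure = PETSC_TRUE.
*/
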
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
124 /*@
125   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
126 
127   Input Parameters:
128 + dm      - The DM object
129 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
130 
131   Level: intermediate
132 
133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134 @*/
135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136 {
137   DM_Plex *mesh = (DM_Plex *) dm->data;
138 
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
141   mesh->useAnchors = useAnchors;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
147 /*@
148   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
149 
150   Input Parameter:
151 . dm      - The DM object
152 
153   Output Parameter:
154 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
155 
156   Level: intermediate
157 
158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159 @*/
160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161 {
162   DM_Plex *mesh = (DM_Plex *) dm->data;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
166   PetscValidIntPointer(useAnchors, 2);
167   *useAnchors = mesh->useAnchors;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174 {
175   const PetscInt *cone = NULL;
176   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177   PetscErrorCode  ierr;
178 
179   PetscFunctionBeginHot;
180   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
181   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
182   for (c = 0; c <= coneSize; ++c) {
183     const PetscInt  point   = !c ? p : cone[c-1];
184     const PetscInt *support = NULL;
185     PetscInt        supportSize, s, q;
186 
187     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
188     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
189     for (s = 0; s < supportSize; ++s) {
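      /* Linear scan of adj[] for support[s]; if the scan completes with no match, the
         comma expression appends support[s] and, evaluating to 0, terminates the loop */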
190       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
191         if (support[s] == adj[q]) break;
192       }
193       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
194     }
195   }
196   *adjSize = numAdj;
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
202 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
203 {
204   const PetscInt *support = NULL;
205   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
206   PetscErrorCode  ierr;
207 
208   PetscFunctionBeginHot;
209   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
210   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
211   for (s = 0; s <= supportSize; ++s) {
212     const PetscInt  point = !s ? p : support[s-1];
213     const PetscInt *cone  = NULL;
214     PetscInt        coneSize, c, q;
215 
216     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
217     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
218     for (c = 0; c < coneSize; ++c) {
219       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
220         if (cone[c] == adj[q]) break;
221       }
222       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
223     }
224   }
225   *adjSize = numAdj;
226   PetscFunctionReturn(0);
227 }
228 
229 #undef __FUNCT__
230 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
231 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
232 {
233   PetscInt      *star = NULL;
234   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
235   PetscErrorCode ierr;
236 
237   PetscFunctionBeginHot;
238   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
239   for (s = 0; s < starSize*2; s += 2) {
240     const PetscInt *closure = NULL;
241     PetscInt        closureSize, c, q;
242 
243     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
244     for (c = 0; c < closureSize*2; c += 2) {
245       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
246         if (closure[c] == adj[q]) break;
247       }
248       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
249     }
250     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
251   }
252   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
253   *adjSize = numAdj;
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
259 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
260 {
261   static PetscInt asiz = 0;
262   PetscInt maxAnchors = 1;
263   PetscInt aStart = -1, aEnd = -1;
264   PetscInt maxAdjSize;
265   PetscSection aSec = NULL;
266   IS aIS = NULL;
267   const PetscInt *anchors;
268   PetscErrorCode  ierr;
269 
270   PetscFunctionBeginHot;
271   if (useAnchors) {
272     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
273     if (aSec) {
274       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
275       maxAnchors = PetscMax(1,maxAnchors);
276       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
277       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
278     }
279   }
280   if (!*adj) {
281     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
282 
283     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
284     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
285     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
286     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
287     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
288     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
289     asiz *= maxAnchors;
290     asiz  = PetscMin(asiz,pEnd-pStart);
291     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
292   }
293   if (*adjSize < 0) *adjSize = asiz;
294   maxAdjSize = *adjSize;
295   if (useTransitiveClosure) {
296     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
297   } else if (useCone) {
298     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
299   } else {
300     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
301   }
302   if (useAnchors && aSec) {
303     PetscInt origSize = *adjSize;
304     PetscInt numAdj = origSize;
305     PetscInt i = 0, j;
306     PetscInt *orig = *adj;
307 
308     while (i < origSize) {
309       PetscInt p = orig[i];
310       PetscInt aDof = 0;
311 
312       if (p >= aStart && p < aEnd) {
313         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
314       }
315       if (aDof) {
316         PetscInt aOff;
317         PetscInt s, q;
318 
319         for (j = i + 1; j < numAdj; j++) {
320           orig[j - 1] = orig[j];
321         }
322         origSize--;
323         numAdj--;
324         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
325         for (s = 0; s < aDof; ++s) {
326           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
327             if (anchors[aOff+s] == orig[q]) break;
328           }
329           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
330         }
331       }
332       else {
333         i++;
334       }
335     }
336     *adjSize = numAdj;
337     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
338   }
339   PetscFunctionReturn(0);
340 }
341 
342 #undef __FUNCT__
343 #define __FUNCT__ "DMPlexGetAdjacency"
344 /*@
345   DMPlexGetAdjacency - Return all points adjacent to the given point
346 
347   Input Parameters:
348 + dm - The DM object
349 . p  - The point
350 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
351 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
352 
353   Output Parameters:
354 + adjSize - The number of adjacent points
355 - adj - The adjacent points
356 
357   Level: advanced
358 
359   Notes: The user must PetscFree the adj array if it was not passed in.
360 
361 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
362 @*/
363 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
364 {
365   DM_Plex       *mesh = (DM_Plex *) dm->data;
366   PetscErrorCode ierr;
367 
368   PetscFunctionBeginHot;
369   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
370   PetscValidPointer(adjSize,3);
371   PetscValidPointer(adj,4);
372   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
373   PetscFunctionReturn(0);
374 }
375 
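/*
  Usage sketch (assumes a valid DMPlex "dm" and point "p"; "ierr" declared as usual):
  letting DMPlexGetAdjacency() size and allocate the array, then freeing it as the
  Notes above require.

    PetscInt  adjSize = PETSC_DETERMINE;
    PetscInt *adj     = NULL;
    PetscInt  a;

    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) { ... process adj[a] ... }
    ierr = PetscFree(adj);CHKERRQ(ierr);
*/
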
376 #undef __FUNCT__
377 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
378 /*@
379   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
380 
381   Collective on DM
382 
383   Input Parameters:
384 + dm              - The DM
385 . sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
386 
387   Output Parameters:
388 + processRanks - A list of process neighbors, or NULL
389 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
390 
391   Level: developer
392 
393 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
394 @*/
395 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
396 {
397   const PetscSFNode *remotePoints;
398   PetscInt          *localPointsNew;
399   PetscSFNode       *remotePointsNew;
400   const PetscInt    *nranks;
401   PetscInt          *ranksNew;
402   PetscBT            neighbors;
403   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
404   PetscMPIInt        numProcs, proc, rank;
405   PetscErrorCode     ierr;
406 
407   PetscFunctionBegin;
408   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
409   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
410   if (processRanks) {PetscValidPointer(processRanks, 3);}
411   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
412   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
413   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
414   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
415   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
416   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
417   /* Compute root-to-leaf process connectivity */
418   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
419   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
420   for (p = pStart; p < pEnd; ++p) {
421     PetscInt ndof, noff, n;
422 
423     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
424     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
425     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
426   }
427   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
428   /* Compute leaf-to-neighbor process connectivity */
429   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
430   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
431   for (p = pStart; p < pEnd; ++p) {
432     PetscInt ndof, noff, n;
433 
434     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
435     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
436     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
437   }
438   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
439   /* Compute leaf-to-root process connectivity */
440   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
441   /* Calculate edges */
442   PetscBTClear(neighbors, rank);
443   for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
444   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
445   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
446   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
447   for (proc = 0, n = 0; proc < numProcs; ++proc) {
448     if (PetscBTLookup(neighbors, proc)) {
449       ranksNew[n]              = proc;
450       localPointsNew[n]        = proc;
451       remotePointsNew[n].index = rank;
452       remotePointsNew[n].rank  = proc;
453       ++n;
454     }
455   }
456   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
457   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
458   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
459   if (sfProcess) {
460     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
461     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
462     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
463     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
464   }
465   PetscFunctionReturn(0);
466 }
467 
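/*
  Usage sketch (assumes the sections/ISs produced by DMPlexDistributeOwnership() below
  and the point SF from DMGetPointSF()): extracting only the process-level connectivity.
  Either output may be requested alone by passing NULL for the other.

    IS      processRanks;
    PetscSF sfProcess;

    ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, &processRanks, &sfProcess);CHKERRQ(ierr);
    ... inspect neighbors ...
    ierr = ISDestroy(&processRanks);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
*/
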
468 #undef __FUNCT__
469 #define __FUNCT__ "DMPlexDistributeOwnership"
470 /*@
471   DMPlexDistributeOwnership - Compute owner information for shared points. This basically turns the one-sided information in the point SF into two-sided information.
472 
473   Collective on DM
474 
475   Input Parameter:
476 . dm - The DM
477 
478   Output Parameters:
479 + rootSection - The number of leaves for a given root point
480 . rootrank    - The rank of each edge into the root point
481 . leafSection - The number of processes sharing a given leaf point
482 - leafrank    - The rank of each process sharing a leaf point
483 
484   Level: developer
485 
486 .seealso: DMPlexCreateOverlap()
487 @*/
488 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
489 {
490   MPI_Comm        comm;
491   PetscSF         sfPoint;
492   const PetscInt *rootdegree;
493   PetscInt       *myrank, *remoterank;
494   PetscInt        pStart, pEnd, p, nedges;
495   PetscMPIInt     rank;
496   PetscErrorCode  ierr;
497 
498   PetscFunctionBegin;
499   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
500   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
501   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
502   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
503   /* Compute number of leaves for each root */
504   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
505   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
506   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
507   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
508   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
509   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
510   /* Gather rank of each leaf to root */
511   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
512   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
513   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
514   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
515   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
516   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
517   ierr = PetscFree(myrank);CHKERRQ(ierr);
518   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
519   /* Distribute remote ranks to leaves */
520   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
521   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
522   PetscFunctionReturn(0);
523 }
524 
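/*
  Usage sketch (assumes a distributed DMPlex "dm" on communicator "comm"): the sections
  and ISs produced here are exactly the inputs expected by DMPlexCreateOverlap() below.

    PetscSection rootSection, leafSection;
    IS           rootrank, leafrank;
    DMLabel      ovLabel;

    ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
    ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
    ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
    ... use ovLabel, e.g. with DMPlexPartitionLabelCreateSF() ...
    ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
    ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/
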
525 #undef __FUNCT__
526 #define __FUNCT__ "DMPlexCreateOverlap"
527 /*@C
528   DMPlexCreateOverlap - Compute the set of points which must be sent to other processes to build the requested overlap, returned as point/rank pairings in a DMLabel
529 
530   Collective on DM
531 
532   Input Parameters:
533 + dm          - The DM
534 . levels      - Number of overlap levels
535 . rootSection - The number of leaves for a given root point
536 . rootrank    - The rank of each edge into the root point
537 . leafSection - The number of processes sharing a given leaf point
538 - leafrank    - The rank of each process sharing a leaf point
539 
540   Output Parameter:
541 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
542 
543   Level: developer
544 
545 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
546 @*/
547 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
548 {
549   MPI_Comm           comm;
550   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
551   PetscSF            sfPoint, sfProc;
552   const PetscSFNode *remote;
553   const PetscInt    *local;
554   const PetscInt    *nrank, *rrank;
555   PetscInt          *adj = NULL;
556   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
557   PetscMPIInt        rank, numProcs;
558   PetscBool          useCone, useClosure, flg;
559   PetscErrorCode     ierr;
560 
561   PetscFunctionBegin;
562   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
563   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
564   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
565   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
566   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
567   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
568   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
569   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
570   /* Handle leaves: shared with the root point */
571   for (l = 0; l < nleaves; ++l) {
572     PetscInt adjSize = PETSC_DETERMINE, a;
573 
574     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
575     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
576   }
577   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
578   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
579   /* Handle roots */
580   for (p = pStart; p < pEnd; ++p) {
581     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
582 
583     if ((p >= sStart) && (p < sEnd)) {
584       /* Some leaves share a root with other leaves on different processes */
585       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
586       if (neighbors) {
587         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
588         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
589         for (n = 0; n < neighbors; ++n) {
590           const PetscInt remoteRank = nrank[noff+n];
591 
592           if (remoteRank == rank) continue;
593           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
594         }
595       }
596     }
597     /* Roots are shared with leaves */
598     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
599     if (!neighbors) continue;
600     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
601     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
602     for (n = 0; n < neighbors; ++n) {
603       const PetscInt remoteRank = rrank[noff+n];
604 
605       if (remoteRank == rank) continue;
606       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
607     }
608   }
609   ierr = PetscFree(adj);CHKERRQ(ierr);
610   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
611   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
612   /* Add additional overlap levels */
613   for (l = 1; l < levels; l++) {
614     /* Propagate point donations over SF to capture remote connections */
615     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
616     /* Add next level of point donations to the label */
617     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
618   }
619   /* We require the closure in the overlap */
620   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
621   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
622   if (useCone || !useClosure) {
623     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
624   }
625   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
626   if (flg) {
627     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
628   }
629   /* Make process SF and invert sender to receiver label */
630   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
631   ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
632   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
633   /* Add owned points, except for shared local points */
634   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
635   for (l = 0; l < nleaves; ++l) {
636     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
637     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
638   }
639   /* Clean up */
640   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
641   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
642   PetscFunctionReturn(0);
643 }
644 
645 #undef __FUNCT__
646 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
647 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
648 {
649   MPI_Comm           comm;
650   PetscMPIInt        rank, numProcs;
651   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
652   PetscInt          *pointDepths, *remoteDepths, *ilocal;
653   PetscInt          *depthRecv, *depthShift, *depthIdx;
654   PetscSFNode       *iremote;
655   PetscSF            pointSF;
656   const PetscInt    *sharedLocal;
657   const PetscSFNode *overlapRemote, *sharedRemote;
658   PetscErrorCode     ierr;
659 
660   PetscFunctionBegin;
661   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
662   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
663   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
664   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
665   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
666 
667   /* Before building the migration SF we need to know the new stratum offsets */
668   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
669   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
670   for (d=0; d<dim+1; d++) {
671     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
672     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
673   }
674   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
675   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
676   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
677 
678   /* Count received points in each stratum and compute the internal strata shift */
679   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
680   for (d=0; d<dim+1; d++) depthRecv[d]=0;
681   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
682   depthShift[dim] = 0;
683   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
684   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
685   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
686   for (d=0; d<dim+1; d++) {
687     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
688     depthIdx[d] = pStart + depthShift[d];
689   }
690 
691   /* Build an SF that describes the full overlap migration */
692   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
693   newLeaves = pEnd - pStart + nleaves;
694   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
695   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
696   /* First map local points to themselves */
697   for (d=0; d<dim+1; d++) {
698     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
699     for (p=pStart; p<pEnd; p++) {
700       point = p + depthShift[d];
701       ilocal[point] = point;
702       iremote[point].index = p;
703       iremote[point].rank = rank;
704       depthIdx[d]++;
705     }
706   }
707 
708   /* Add in the remote roots for currently shared points */
709   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
710   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
711   for (d=0; d<dim+1; d++) {
712     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
713     for (p=0; p<numSharedPoints; p++) {
714       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
715         point = sharedLocal[p] + depthShift[d];
716         iremote[point].index = sharedRemote[p].index;
717         iremote[point].rank = sharedRemote[p].rank;
718       }
719     }
720   }
721 
722   /* Now add the incoming overlap points */
723   for (p=0; p<nleaves; p++) {
724     point = depthIdx[remoteDepths[p]];
725     ilocal[point] = point;
726     iremote[point].index = overlapRemote[p].index;
727     iremote[point].rank = overlapRemote[p].rank;
728     depthIdx[remoteDepths[p]]++;
729   }
730   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
731 
732   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
733   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
734   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
735   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
736   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
737 
738   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
739   PetscFunctionReturn(0);
740 }
741 
742 #undef __FUNCT__
743 #define __FUNCT__ "DMPlexStratifyMigrationSF"
744 /*@
745   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF so that the DM created from it is stratified by point depth.
746 
747   Input Parameters:
748 + dm          - The DM
749 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
750 
751   Output Parameter:
752 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
753 
754   Level: developer
755 
756 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
757 @*/
758 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
759 {
760   MPI_Comm           comm;
761   PetscMPIInt        rank, numProcs;
762   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
763   PetscInt          *pointDepths, *remoteDepths, *ilocal;
764   PetscInt          *depthRecv, *depthShift, *depthIdx;
765   const PetscSFNode *iremote;
766   PetscErrorCode     ierr;
767 
768   PetscFunctionBegin;
769   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
770   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
771   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
772   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
773   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
774   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
775   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %D != %D", ldepth, depth);
776 
777   /* Before building the migration SF we need to know the new stratum offsets */
778   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
779   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
780   for (d = 0; d < depth+1; ++d) {
781     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
782     for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
783   }
784   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
785   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
786   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
787   /* Count received points in each stratum and compute the internal strata shift */
788   ierr = PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);CHKERRQ(ierr);
789   for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
790   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
791   depthShift[depth] = 0;
792   for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
793   for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
794   for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
795   for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
796   /* Derive a new local permutation based on stratified indices */
797   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
798   for (p = 0; p < nleaves; ++p) {
799     const PetscInt dep = remoteDepths[p];
800 
801     ilocal[p] = depthShift[dep] + depthIdx[dep];
802     depthIdx[dep]++;
803   }
804   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
805   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
806   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
807   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
808   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
809   PetscFunctionReturn(0);
810 }
811 
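/*
  Usage sketch (assumes "sf" is a point migration SF, e.g. built with
  DMPlexPartitionLabelCreateSF()): leaves of "sf" may arrive in arbitrary order, so they
  are permuted here before the SF is used to migrate the mesh.

    PetscSF migrationSF;

    ierr = DMPlexStratifyMigrationSF(dm, sf, &migrationSF);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
    ... migrate with migrationSF ...
*/
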
812 #undef __FUNCT__
813 #define __FUNCT__ "DMPlexDistributeField"
814 /*@
815   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
816 
817   Collective on DM
818 
819   Input Parameters:
820 + dm - The DMPlex object
821 . pointSF - The PetscSF describing the communication pattern
822 . originalSection - The PetscSection for existing data layout
823 - originalVec - The existing data
824 
825   Output Parameters:
826 + newSection - The PetscSection describing the new data layout
827 - newVec - The new data
828 
829   Level: developer
830 
831 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
832 @*/
833 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
834 {
835   PetscSF        fieldSF;
836   PetscInt      *remoteOffsets, fieldSize;
837   PetscScalar   *originalValues, *newValues;
838   PetscErrorCode ierr;
839 
840   PetscFunctionBegin;
841   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
842   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
843 
844   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
845   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
846   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
847 
848   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
849   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
850   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
851   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
852   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
853   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
854   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
855   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
856   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
857   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
858   PetscFunctionReturn(0);
859 }
860 
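/*
  Usage sketch (assumes "migrationSF" from mesh distribution and a serial layout in
  "origSection"/"origVec"; "comm" is the target communicator): the caller creates the
  output objects and this routine fills in their layout and values.

    PetscSection newSection;
    Vec          newVec;

    ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
    ierr = VecCreate(comm, &newVec);CHKERRQ(ierr);
    ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/
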
861 #undef __FUNCT__
862 #define __FUNCT__ "DMPlexDistributeFieldIS"
863 /*@
864   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
865 
866   Collective on DM
867 
868   Input Parameters:
869 + dm - The DMPlex object
870 . pointSF - The PetscSF describing the communication pattern
871 . originalSection - The PetscSection for existing data layout
872 - originalIS - The existing data
873 
874   Output Parameters:
875 + newSection - The PetscSection describing the new data layout
876 - newIS - The new data
877 
878   Level: developer
879 
880 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
881 @*/
882 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
883 {
884   PetscSF         fieldSF;
885   PetscInt       *newValues, *remoteOffsets, fieldSize;
886   const PetscInt *originalValues;
887   PetscErrorCode  ierr;
888 
889   PetscFunctionBegin;
890   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
891   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
892 
893   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
894   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
895 
896   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
897   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
898   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
899   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
900   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
901   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
902   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
903   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
904   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
905   PetscFunctionReturn(0);
906 }
907 
908 #undef __FUNCT__
909 #define __FUNCT__ "DMPlexDistributeData"
910 /*@
911   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
912 
913   Collective on DM
914 
915   Input Parameters:
916 + dm - The DMPlex object
917 . pointSF - The PetscSF describing the communication pattern
918 . originalSection - The PetscSection for existing data layout
919 . datatype - The type of data
920 - originalData - The existing data
921 
922   Output Parameters:
923 + newSection - The PetscSection describing the new data layout
924 - newData - The new data
925 
926   Level: developer
927 
928 .seealso: DMPlexDistribute(), DMPlexDistributeField()
929 @*/
930 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
931 {
932   PetscSF        fieldSF;
933   PetscInt      *remoteOffsets, fieldSize;
934   PetscMPIInt    dataSize;
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
939   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
940 
941   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
942   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
943   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
944 
945   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
946   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
947   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
948   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
949   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
950   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
951   PetscFunctionReturn(0);
952 }
953 
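/*
  Usage sketch (assumes "origData" is a PetscInt array laid out by "origSection" and
  "migrationSF" from mesh distribution): *newData is allocated with PetscMalloc() inside
  the routine, so the caller must free it.

    PetscSection newSection;
    PetscInt    *newData;

    ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
    ... use newData ...
    ierr = PetscFree(newData);CHKERRQ(ierr);
*/
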
954 #undef __FUNCT__
955 #define __FUNCT__ "DMPlexDistributeCones"
956 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
957 {
958   DM_Plex               *mesh  = (DM_Plex*) dm->data;
959   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
960   MPI_Comm               comm;
961   PetscSF                coneSF;
962   PetscSection           originalConeSection, newConeSection;
963   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
964   PetscBool              flg;
965   PetscErrorCode         ierr;
966 
967   PetscFunctionBegin;
968   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
969   PetscValidPointer(dmParallel,5);
970   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
971 
972   /* Distribute cone section */
973   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
974   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
975   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
976   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
977   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
978   {
979     PetscInt pStart, pEnd, p;
980 
981     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
982     for (p = pStart; p < pEnd; ++p) {
983       PetscInt coneSize;
984       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
985       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
986     }
987   }
988   /* Communicate and renumber cones */
989   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
990   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
991   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
992   if (original) {
993     PetscInt numCones;
994 
995     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
996     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
997   }
998   else {
999     globCones = cones;
1000   }
1001   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1002   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1003   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1004   if (original) {
1005     ierr = PetscFree(globCones);CHKERRQ(ierr);
1006   }
1007   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1008   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1009 #if defined(PETSC_USE_DEBUG)
1010   {
1011     PetscInt  p;
1012     PetscBool valid = PETSC_TRUE;
1013     for (p = 0; p < newConesSize; ++p) {
1014       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1015     }
1016     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1017   }
1018 #endif
1019   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1020   if (flg) {
1021     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1022     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1023     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1024     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1025     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1026   }
1027   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1028   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1029   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1030   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1031   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1032   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1033   /* Create supports and stratify sieve */
1034   {
1035     PetscInt pStart, pEnd;
1036 
1037     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1038     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1039   }
1040   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1041   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1042   pmesh->useCone    = mesh->useCone;
1043   pmesh->useClosure = mesh->useClosure;
1044   PetscFunctionReturn(0);
1045 }
1046 
1047 #undef __FUNCT__
1048 #define __FUNCT__ "DMPlexDistributeCoordinates"
1049 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1050 {
1051   MPI_Comm         comm;
1052   PetscSection     originalCoordSection, newCoordSection;
1053   Vec              originalCoordinates, newCoordinates;
1054   PetscInt         bs;
1055   const char      *name;
1056   const PetscReal *maxCell, *L;
1057   const DMBoundaryType *bd;
1058   PetscErrorCode   ierr;
1059 
1060   PetscFunctionBegin;
1061   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1062   PetscValidPointer(dmParallel, 3);
1063 
1064   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1065   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1066   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1067   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1068   if (originalCoordinates) {
1069     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1070     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1071     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1072 
1073     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1074     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1075     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1076     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1077     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1078   }
1079   ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
1080   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
1081   PetscFunctionReturn(0);
1082 }
1083 
1084 #undef __FUNCT__
1085 #define __FUNCT__ "DMPlexDistributeLabels"
1086 /* Here we are assuming that process 0 always has everything */
1087 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1088 {
1089   DM_Plex         *mesh = (DM_Plex*) dm->data;
1090   MPI_Comm         comm;
1091   DMLabel          depth;
1092   PetscMPIInt      rank;
1093   PetscInt         numLabels, numLocalLabels, l;
1094   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1095   PetscObjectState depthState = -1;
1096   PetscErrorCode   ierr;
1097 
1098   PetscFunctionBegin;
1099   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1100   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1101   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1102   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1103   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1104 
1105   /* If the user has changed the depth label, communicate it instead */
1106   ierr = DMPlexGetDepthLabel(dm, &depth);CHKERRQ(ierr);
1107   if (depth) {ierr = DMLabelGetState(depth, &depthState);CHKERRQ(ierr);}
1108   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1109   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1110   if (sendDepth) {
1111     ierr = DMRemoveLabel(dmParallel, "depth", &depth);CHKERRQ(ierr);
1112     ierr = DMLabelDestroy(&depth);CHKERRQ(ierr);
1113   }
1114   /* Everyone must have either the same number of labels, or none */
1115   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1116   numLabels = numLocalLabels;
1117   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1118   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1119   for (l = numLabels-1; l >= 0; --l) {
1120     DMLabel     label = NULL, labelNew = NULL;
1121     PetscBool   isdepth;
1122 
1123     if (hasLabels) {
1124       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1125       /* Skip "depth" because it is recreated */
1126       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1127     }
1128     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1129     if (isdepth && !sendDepth) continue;
1130     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1131     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1132   }
1133   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1134   PetscFunctionReturn(0);
1135 }
1136 
1137 #undef __FUNCT__
1138 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1139 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1140 {
1141   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1142   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1143   MPI_Comm        comm;
1144   const PetscInt *gpoints;
1145   PetscInt        dim, depth, n, d;
1146   PetscErrorCode  ierr;
1147 
1148   PetscFunctionBegin;
1149   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1150   PetscValidPointer(dmParallel, 4);
1151 
1152   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1153   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1154 
1155   /* Setup hybrid structure */
1156   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1157   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1158   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1159   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1160   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1161   for (d = 0; d <= dim; ++d) {
1162     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1163 
1164     if (pmax < 0) continue;
1165     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1166     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1167     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1168     for (p = 0; p < n; ++p) {
1169       const PetscInt point = gpoints[p];
1170 
1171       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1172     }
1173     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1174     else            pmesh->hybridPointMax[d] = -1;
1175   }
1176   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1177   PetscFunctionReturn(0);
1178 }
1179 
1180 #undef __FUNCT__
1181 #define __FUNCT__ "DMPlexDistributeSetupTree"
1182 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1183 {
1184   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1185   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1186   MPI_Comm        comm;
1187   DM              refTree;
1188   PetscSection    origParentSection, newParentSection;
1189   PetscInt        *origParents, *origChildIDs;
1190   PetscBool       flg;
1191   PetscErrorCode  ierr;
1192 
1193   PetscFunctionBegin;
1194   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1195   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1196   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1197 
1198   /* Set up tree */
1199   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1200   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1201   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1202   if (origParentSection) {
1203     PetscInt        pStart, pEnd;
1204     PetscInt        *newParents, *newChildIDs, *globParents;
1205     PetscInt        *remoteOffsetsParents, newParentSize;
1206     PetscSF         parentSF;
1207 
1208     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1209     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1210     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1211     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1212     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1213     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1214     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1215     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1216     if (original) {
1217       PetscInt numParents;
1218 
1219       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1220       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1221       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1222     }
1223     else {
1224       globParents = origParents;
1225     }
1226     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1227     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1228     if (original) {
1229       ierr = PetscFree(globParents);CHKERRQ(ierr);
1230     }
1231     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1232     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1233     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
1235     {
1236       PetscInt  p;
1237       PetscBool valid = PETSC_TRUE;
1238       for (p = 0; p < newParentSize; ++p) {
        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1240       }
1241       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1242     }
1243 #endif
1244     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1245     if (flg) {
1246       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1247       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1248       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1249       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1250       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1251     }
1252     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1253     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1254     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1255     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1256   }
1257   pmesh->useAnchors = mesh->useAnchors;
1258   PetscFunctionReturn(0);
1259 }
1260 
1261 #undef __FUNCT__
1262 #define __FUNCT__ "DMPlexDistributeSF"
1263 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1264 {
1265   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1266   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1267   PetscMPIInt            rank, numProcs;
1268   MPI_Comm               comm;
1269   PetscErrorCode         ierr;
1270 
1271   PetscFunctionBegin;
1272   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,3);
1274 
1275   /* Create point SF for parallel mesh */
1276   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1277   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1278   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1279   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1280   {
1281     const PetscInt *leaves;
1282     PetscSFNode    *remotePoints, *rowners, *lowners;
1283     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1284     PetscInt        pStart, pEnd;
1285 
1286     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1287     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1288     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
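    /* Ownership is settled by an MPI_MAXLOC vote over the migration SF: rank -1 marks an
       unclaimed point, -2 a leaf whose owner is already known and must not outbid, and -3 a
       root that only collects the vote */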
1289     for (p=0; p<numRoots; p++) {
1290       rowners[p].rank  = -1;
1291       rowners[p].index = -1;
1292     }
1293     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1294     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1295     for (p = 0; p < numLeaves; ++p) {
1296       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1297         lowners[p].rank  = rank;
1298         lowners[p].index = leaves ? leaves[p] : p;
1299       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1300         lowners[p].rank  = -2;
1301         lowners[p].index = -2;
1302       }
1303     }
    for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction, flag them so that MAXLOC ignores them */
1305       rowners[p].rank  = -3;
1306       rowners[p].index = -3;
1307     }
1308     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1309     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1310     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1311     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1312     for (p = 0; p < numLeaves; ++p) {
1313       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1314       if (lowners[p].rank != rank) ++numGhostPoints;
1315     }
1316     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1317     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1318     for (p = 0, gp = 0; p < numLeaves; ++p) {
1319       if (lowners[p].rank != rank) {
1320         ghostPoints[gp]        = leaves ? leaves[p] : p;
1321         remotePoints[gp].rank  = lowners[p].rank;
1322         remotePoints[gp].index = lowners[p].index;
1323         ++gp;
1324       }
1325     }
1326     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1327     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1328     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1329   }
1330   pmesh->useCone    = mesh->useCone;
1331   pmesh->useClosure = mesh->useClosure;
1332   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1333   PetscFunctionReturn(0);
1334 }
1335 
1336 #undef __FUNCT__
1337 #define __FUNCT__ "DMPlexCreatePointSF"
1338 /*@C
  DMPlexCreatePointSF - Build a point SF from an SF describing a point migration

  Input Parameters:
+ dm          - The source DMPlex object
. migrationSF - The star forest that describes the parallel point remapping
- ownership   - Flag causing a vote to determine point ownership

  Output Parameter:
. pointSF     - The star forest describing the point overlap in the remapped DM
1348 
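  Example:
  A sketch (error checking abbreviated) of building and installing the point SF after a
  migration; dmParallel and migrationSF stand for the outputs of a preceding DMPlexMigrate():
.vb
  PetscSF pointSF;
  ierr = DMPlexCreatePointSF(dmParallel, migrationSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
  ierr = DMSetPointSF(dmParallel, pointSF);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
.ve
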
1349   Level: developer
1350 
1351 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1352 @*/
1353 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1354 {
1355   PetscMPIInt        rank;
1356   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1357   PetscInt          *pointLocal;
1358   const PetscInt    *leaves;
1359   const PetscSFNode *roots;
1360   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1361   PetscErrorCode     ierr;
1362 
1363   PetscFunctionBegin;
1364   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1365   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1366 
1367   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1368   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1369   if (ownership) {
    /* Point ownership vote: the process with the highest rank owns shared points */
1371     for (p = 0; p < nleaves; ++p) {
      /* Every process bids for each point it received */
1373       leafNodes[p].rank  = rank;
1374       leafNodes[p].index = p;
1375     }
1376     for (p = 0; p < nroots; p++) {
1377       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1378       rootNodes[p].rank  = -3;
1379       rootNodes[p].index = -3;
1380     }
1381     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1382     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1383   } else {
1384     for (p = 0; p < nroots; p++) {
1385       rootNodes[p].index = -1;
1386       rootNodes[p].rank = rank;
    }
1388     for (p = 0; p < nleaves; p++) {
1389       /* Write new local id into old location */
1390       if (roots[p].rank == rank) {
1391         rootNodes[roots[p].index].index = leaves[p];
1392       }
1393     }
1394   }
1395   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1396   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1397 
1398   for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
1399   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1400   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1401   for (idx = 0, p = 0; p < nleaves; p++) {
1402     if (leafNodes[p].rank != rank) {
1403       pointLocal[idx] = p;
1404       pointRemote[idx] = leafNodes[p];
1405       idx++;
1406     }
1407   }
1408   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1409   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1410   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1411   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1412   PetscFunctionReturn(0);
1413 }
1414 
1415 #undef __FUNCT__
1416 #define __FUNCT__ "DMPlexMigrate"
1417 /*@C
  DMPlexMigrate - Migrates internal DM data over the supplied star forest

  Input Parameters:
+ dm       - The source DMPlex object
- sf       - The star forest communication context describing the migration pattern

  Output Parameter:
. targetDM - The target DMPlex object
1426 
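  Example:
  A sketch of the expected calling sequence (error checking abbreviated); sf is assumed to map
  old points to their destinations, e.g. as produced by DMPlexPartitionLabelCreateSF():
.vb
  DM targetDM;
  ierr = DMPlexCreate(comm, &targetDM);CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, sf, targetDM);CHKERRQ(ierr);
.ve
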
1427   Level: intermediate
1428 
1429 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1430 @*/
1431 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1432 {
1433   MPI_Comm               comm;
1434   PetscInt               dim, nroots;
1435   PetscSF                sfPoint;
1436   ISLocalToGlobalMapping ltogMigration;
1437   ISLocalToGlobalMapping ltogOriginal = NULL;
1438   PetscBool              flg;
1439   PetscErrorCode         ierr;
1440 
1441   PetscFunctionBegin;
1442   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1443   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1444   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1445   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1446   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1447 
  /* If the source DM already carries a point SF (nroots >= 0) it is itself distributed; otherwise we have a one-to-all distribution pattern */
1449   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1450   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1451   if (nroots >= 0) {
1452     IS                     isOriginal;
1453     PetscInt               n, size, nleaves;
1454     PetscInt              *numbering_orig, *numbering_new;
1455     /* Get the original point numbering */
1456     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1457     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1458     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1459     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1460     /* Convert to positive global numbers */
1461     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1462     /* Derive the new local-to-global mapping from the old one */
1463     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1464     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1465     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1466     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1467     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1469     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1470   } else {
1471     /* One-to-all distribution pattern: We can derive LToG from SF */
1472     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1473   }
1474   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1475   if (flg) {
1476     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1477     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1478   }
1479   /* Migrate DM data to target DM */
1480   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1481   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1482   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1483   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1484   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1485   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1486   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1487   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1488   PetscFunctionReturn(0);
1489 }
1490 
1491 #undef __FUNCT__
1492 #define __FUNCT__ "DMPlexDistribute"
1493 /*@C
1494   DMPlexDistribute - Distributes the mesh and any associated sections.
1495 
  Collective on dm
1497 
  Input Parameters:
+ dm  - The original DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL
1505 
  Note: If the mesh was not distributed, dmParallel is set to NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
1511 
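  Example:
  The typical pattern (a sketch, error checking abbreviated) distributes with one level of
  overlap, replaces the serial DM, and discards the migration SF by passing NULL for sf:
.vb
  DM dmDist = NULL;
  ierr = DMPlexDistribute(dm, 1, NULL, &dmDist);CHKERRQ(ierr);
  if (dmDist) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }
.ve
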
1512   Level: intermediate
1513 
1514 .keywords: mesh, elements
1515 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1516 @*/
1517 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1518 {
1519   MPI_Comm               comm;
1520   PetscPartitioner       partitioner;
1521   IS                     cellPart;
1522   PetscSection           cellPartSection;
1523   DM                     dmCoord;
1524   DMLabel                lblPartition, lblMigration;
1525   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1526   PetscBool              flg;
1527   PetscMPIInt            rank, numProcs, p;
1528   PetscErrorCode         ierr;
1529 
1530   PetscFunctionBegin;
1531   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);
1534 
1535   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1536   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1537   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1538   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1539 
1540   *dmParallel = NULL;
1541   if (numProcs == 1) PetscFunctionReturn(0);
1542 
1543   /* Create cell partition */
1544   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1545   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1546   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1547   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1548   {
1549     /* Convert partition to DMLabel */
1550     PetscInt proc, pStart, pEnd, npoints, poffset;
1551     const PetscInt *points;
1552     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1553     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1554     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1555     for (proc = pStart; proc < pEnd; proc++) {
1556       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1557       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1558       for (p = poffset; p < poffset+npoints; p++) {
1559         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1560       }
1561     }
1562     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1563   }
1564   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1565   {
1566     /* Build a global process SF */
1567     PetscSFNode *remoteProc;
1568     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1569     for (p = 0; p < numProcs; ++p) {
1570       remoteProc[p].rank  = p;
1571       remoteProc[p].index = rank;
1572     }
1573     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1574     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1575     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1576   }
1577   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1578   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1579   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1580   /* Stratify the SF in case we are migrating an already parallel plex */
1581   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1582   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1583   sfMigration = sfStratified;
1584   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1585   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1586   if (flg) {
1587     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1588     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1589   }
1590 
1591   /* Create non-overlapping parallel DM and migrate internal data */
1592   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1593   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1594   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1595 
1596   /* Build the point SF without overlap */
1597   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1598   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1599   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1600   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1601   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1602 
1603   if (overlap > 0) {
1604     DM                 dmOverlap;
1605     PetscInt           nroots, nleaves;
1606     PetscSFNode       *newRemote;
1607     const PetscSFNode *oldRemote;
1608     PetscSF            sfOverlap, sfOverlapPoint;
1609     /* Add the partition overlap to the distributed DM */
1610     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1611     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1612     *dmParallel = dmOverlap;
1613     if (flg) {
1614       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1615       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1616     }
1617 
1618     /* Re-map the migration SF to establish the full migration pattern */
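    /* Broadcasting the original remote points over sfOverlap composes the two SFs, giving
       every overlapped point its source location in the original DM */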
1619     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1620     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1621     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1622     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1623     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1624     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1625     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1626     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1627     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1628     sfMigration = sfOverlapPoint;
1629   }
1630   /* Cleanup Partition */
1631   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1632   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1633   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1634   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1635   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1636   /* Copy BC */
1637   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1638   /* Create sfNatural */
1639   if (dm->useNatural) {
1640     PetscSection section;
1641 
1642     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1643     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1644   }
1645   /* Cleanup */
1646   if (sf) {*sf = sfMigration;}
1647   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1648   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1649   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1650   PetscFunctionReturn(0);
1651 }
1652 
1653 #undef __FUNCT__
1654 #define __FUNCT__ "DMPlexDistributeOverlap"
1655 /*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1657 
  Collective on dm
1659 
  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmOverlap - The overlapping distributed DMPlex object, or NULL
1667 
  Note: If the mesh was not distributed, dmOverlap is set to NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
1673 
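  Example:
  A sketch (error checking abbreviated) that adds one layer of overlap to an already
  distributed mesh and replaces the original DM:
.vb
  DM dmOver = NULL;
  ierr = DMPlexDistributeOverlap(dm, 1, NULL, &dmOver);CHKERRQ(ierr);
  if (dmOver) {
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmOver;
  }
.ve
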
1674   Level: intermediate
1675 
1676 .keywords: mesh, elements
1677 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1678 @*/
1679 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1680 {
1681   MPI_Comm               comm;
1682   PetscMPIInt            rank;
1683   PetscSection           rootSection, leafSection;
1684   IS                     rootrank, leafrank;
1685   DM                     dmCoord;
1686   DMLabel                lblOverlap;
1687   PetscSF                sfOverlap, sfStratified, sfPoint;
1688   PetscErrorCode         ierr;
1689 
1690   PetscFunctionBegin;
1691   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1692   if (sf) PetscValidPointer(sf, 3);
1693   PetscValidPointer(dmOverlap, 4);
1694 
1695   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1696   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1697   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1698 
1699   /* Compute point overlap with neighbouring processes on the distributed DM */
1700   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1701   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1702   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1703   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1704   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1705   /* Convert overlap label to stratified migration SF */
1706   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1707   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1708   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1709   sfOverlap = sfStratified;
1710   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1711   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1712 
1713   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1714   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1715   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1716   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1717   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1718 
1719   /* Build the overlapping DM */
1720   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1721   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1722   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1723   /* Build the new point SF */
1724   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1725   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1726   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1727   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1728   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1729   /* Cleanup overlap partition */
1730   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1731   if (sf) *sf = sfOverlap;
1732   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1733   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1734   PetscFunctionReturn(0);
1735 }
1736