1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
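
/* A minimal usage sketch (caller-side code, an assumption rather than part of this file): the two
   adjacency flags are normally set together before distribution or preallocation; FVM-style
   adjacency would be selected as

     ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
     ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
     ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);

   where 'dm' is an existing DMPlex and 'dmDist' receives the distributed mesh. */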
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
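
/* A hedged example (assumed caller code): with anchors enabled, adjacency queries report anchor
   points in place of constrained points, matching the constraint tree set with DMPlexSetAnchors().

     ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
*/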
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
153 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
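      /* Linear membership test on adj[]: if support[s] is not already present, q reaches numAdj and
         the comma expression in the loop condition appends it (and evaluates to 0, ending the loop) */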
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259 {
260   static PetscInt asiz = 0;
261   PetscInt maxAnchors = 1;
262   PetscInt aStart = -1, aEnd = -1;
263   PetscInt maxAdjSize;
264   PetscSection aSec = NULL;
265   IS aIS = NULL;
266   const PetscInt *anchors;
267   PetscErrorCode  ierr;
268 
269   PetscFunctionBeginHot;
270   if (useAnchors) {
271     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
272     if (aSec) {
273       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
274       maxAnchors = PetscMax(1,maxAnchors);
275       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
276       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
277     }
278   }
279   if (!*adj) {
280     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
281 
282     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
283     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
284     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
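    /* coneSeries and supportSeries are geometric-series bounds (in the maximum cone/support size and
       the depth) on closure/star sizes, so asiz below is a worst-case adjacency count, clipped to the
       chart size */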
285     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288     asiz *= maxAnchors;
289     asiz  = PetscMin(asiz,pEnd-pStart);
290     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
291   }
292   if (*adjSize < 0) *adjSize = asiz;
293   maxAdjSize = *adjSize;
294   if (useTransitiveClosure) {
295     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
296   } else if (useCone) {
297     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
298   } else {
299     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
300   }
301   if (useAnchors && aSec) {
302     PetscInt origSize = *adjSize;
303     PetscInt numAdj = origSize;
304     PetscInt i = 0, j;
305     PetscInt *orig = *adj;
306 
307     while (i < origSize) {
308       PetscInt p = orig[i];
309       PetscInt aDof = 0;
310 
311       if (p >= aStart && p < aEnd) {
312         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
313       }
314       if (aDof) {
315         PetscInt aOff;
316         PetscInt s, q;
317 
318         for (j = i + 1; j < numAdj; j++) {
319           orig[j - 1] = orig[j];
320         }
321         origSize--;
322         numAdj--;
323         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
324         for (s = 0; s < aDof; ++s) {
325           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
326             if (anchors[aOff+s] == orig[q]) break;
327           }
328           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: exceeded adjacency allocation (%D)", maxAdjSize);
329         }
330       }
331       else {
332         i++;
333       }
334     }
335     *adjSize = numAdj;
336     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must call PetscFree() on the adj array if it was not passed in.
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
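
/* A minimal usage sketch (caller-side code, an assumption rather than part of this file): passing
   adjSize = PETSC_DETERMINE and adj = NULL lets the routine allocate the array, which the caller
   must then free.

     PetscInt  adjSize = PETSC_DETERMINE, a;
     PetscInt *adj     = NULL;

     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
     for (a = 0; a < adjSize; ++a) {ierr = PetscPrintf(PETSC_COMM_SELF, "point %D is adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);}
     ierr = PetscFree(adj);CHKERRQ(ierr);
*/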
374 #undef __FUNCT__
375 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
376 /*@
377   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
378 
379   Collective on DM
380 
381   Input Parameters:
382 + dm              - The DM
383 . sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
384 
385   Output Parameters:
386 + processRanks - A list of process neighbors, or NULL
387 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
388 
389   Level: developer
390 
391 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
392 @*/
393 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
394 {
395   const PetscSFNode *remotePoints;
396   PetscInt          *localPointsNew;
397   PetscSFNode       *remotePointsNew;
398   const PetscInt    *nranks;
399   PetscInt          *ranksNew;
400   PetscBT            neighbors;
401   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
402   PetscMPIInt        numProcs, proc, rank;
403   PetscErrorCode     ierr;
404 
405   PetscFunctionBegin;
406   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
407   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
408   if (processRanks) {PetscValidPointer(processRanks, 3);}
409   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
410   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
411   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
412   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
413   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
414   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
415   /* Compute root-to-leaf process connectivity */
416   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
417   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
418   for (p = pStart; p < pEnd; ++p) {
419     PetscInt ndof, noff, n;
420 
421     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
422     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
423     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
424   }
425   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
426   /* Compute leaf-to-neighbor process connectivity */
427   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
428   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
429   for (p = pStart; p < pEnd; ++p) {
430     PetscInt ndof, noff, n;
431 
432     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
433     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
434     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
435   }
436   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
437   /* Compute leaf-to-root process connectivity */
438   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
439   /* Calculate edges */
440   PetscBTClear(neighbors, rank);
441   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
442   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
443   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
445   for(proc = 0, n = 0; proc < numProcs; ++proc) {
446     if (PetscBTLookup(neighbors, proc)) {
447       ranksNew[n]              = proc;
448       localPointsNew[n]        = proc;
449       remotePointsNew[n].index = rank;
450       remotePointsNew[n].rank  = proc;
451       ++n;
452     }
453   }
454   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
455   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
456   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
457   if (sfProcess) {
458     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
459     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
460     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
461     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
462   }
463   PetscFunctionReturn(0);
464 }
465 
466 #undef __FUNCT__
467 #define __FUNCT__ "DMPlexDistributeOwnership"
468 /*@
469   DMPlexDistributeOwnership - Compute owner information for shared points. This basically makes the one-sided PetscSF point information two-sided, so that both roots and leaves know which processes share each point.
470 
471   Collective on DM
472 
473   Input Parameter:
474 . dm - The DM
475 
476   Output Parameters:
477 + rootSection - The number of leaves for a given root point
478 . rootrank    - The rank of each edge into the root point
479 . leafSection - The number of processes sharing a given leaf point
480 - leafrank    - The rank of each process sharing a leaf point
481 
482   Level: developer
483 
484 .seealso: DMPlexCreateOverlap()
485 @*/
486 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
487 {
488   MPI_Comm        comm;
489   PetscSF         sfPoint;
490   const PetscInt *rootdegree;
491   PetscInt       *myrank, *remoterank;
492   PetscInt        pStart, pEnd, p, nedges;
493   PetscMPIInt     rank;
494   PetscErrorCode  ierr;
495 
496   PetscFunctionBegin;
497   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
498   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
499   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
500   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
501   /* Compute number of leaves for each root */
502   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
503   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
504   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
506   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
507   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
508   /* Gather rank of each leaf to root */
509   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
510   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
511   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
512   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
513   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
514   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscFree(myrank);CHKERRQ(ierr);
516   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
517   /* Distribute remote ranks to leaves */
518   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
519   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
520   PetscFunctionReturn(0);
521 }
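
/* A hedged sketch of the intended calling sequence (variable names are assumptions): the sections
   and index sets produced here feed directly into DMPlexCreateOverlap() below.

     PetscSection rootSection, leafSection;
     IS           rootrank, leafrank;
     DMLabel      ovLabel;

     ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
     ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
     ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
     ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
     ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/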
522 
523 #undef __FUNCT__
524 #define __FUNCT__ "DMPlexCreateOverlap"
525 /*@C
526   DMPlexCreateOverlap - Compute a DMLabel marking, for each neighboring process, the points that must be sent to it to build the requested overlap.
527 
528   Collective on DM
529 
530   Input Parameters:
531 + dm          - The DM
532 . levels      - Number of overlap levels
533 . rootSection - The number of leaves for a given root point
534 . rootrank    - The rank of each edge into the root point
535 . leafSection - The number of processes sharing a given leaf point
536 - leafrank    - The rank of each process sharing a leaf point
537 
538   Output Parameter:
539 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
540 
541   Level: developer
542 
543 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
544 @*/
545 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
546 {
547   MPI_Comm           comm;
548   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
549   PetscSF            sfPoint, sfProc;
550   const PetscSFNode *remote;
551   const PetscInt    *local;
552   const PetscInt    *nrank, *rrank;
553   PetscInt          *adj = NULL;
554   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
555   PetscMPIInt        rank, numProcs;
556   PetscBool          useCone, useClosure, flg;
557   PetscErrorCode     ierr;
558 
559   PetscFunctionBegin;
560   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
561   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
562   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
563   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
564   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
565   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
566   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
567   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
568   /* Handle leaves: shared with the root point */
569   for (l = 0; l < nleaves; ++l) {
570     PetscInt adjSize = PETSC_DETERMINE, a;
571 
572     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
573     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
574   }
575   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
576   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
577   /* Handle roots */
578   for (p = pStart; p < pEnd; ++p) {
579     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
580 
581     if ((p >= sStart) && (p < sEnd)) {
582       /* Some leaves share a root with other leaves on different processes */
583       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
584       if (neighbors) {
585         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
586         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
587         for (n = 0; n < neighbors; ++n) {
588           const PetscInt remoteRank = nrank[noff+n];
589 
590           if (remoteRank == rank) continue;
591           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
592         }
593       }
594     }
595     /* Roots are shared with leaves */
596     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
597     if (!neighbors) continue;
598     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
599     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
600     for (n = 0; n < neighbors; ++n) {
601       const PetscInt remoteRank = rrank[noff+n];
602 
603       if (remoteRank == rank) continue;
604       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
605     }
606   }
607   ierr = PetscFree(adj);CHKERRQ(ierr);
608   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
609   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
610   /* Add additional overlap levels */
611   for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);}
612   /* We require the closure in the overlap */
613   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
614   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
615   if (useCone || !useClosure) {
616     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
617   }
618   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
619   if (flg) {
620     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
621     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
622   }
623   /* Make process SF and invert sender to receiver label */
624   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
625   ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
626   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
627   /* Add owned points, except for shared local points */
628   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
629   for (l = 0; l < nleaves; ++l) {
630     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
631     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
632   }
633   /* Clean up */
634   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
635   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
636   PetscFunctionReturn(0);
637 }
638 
639 #undef __FUNCT__
640 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
641 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
642 {
643   MPI_Comm           comm;
644   PetscMPIInt        rank, numProcs;
645   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
646   PetscInt          *pointDepths, *remoteDepths, *ilocal;
647   PetscInt          *depthRecv, *depthShift, *depthIdx;
648   PetscSFNode       *iremote;
649   PetscSF            pointSF;
650   const PetscInt    *sharedLocal;
651   const PetscSFNode *overlapRemote, *sharedRemote;
652   PetscErrorCode     ierr;
653 
654   PetscFunctionBegin;
655   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
656   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
657   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
658   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
659   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
660 
661   /* Before building the migration SF we need to know the new stratum offsets */
662   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
663   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
664   for (d=0; d<dim+1; d++) {
665     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
666     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
667   }
668   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
669   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
670   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
671 
672   /* Count received points in each stratum and compute the internal strata shift */
673   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
674   for (d=0; d<dim+1; d++) depthRecv[d]=0;
675   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
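  /* depthShift[d] counts the received points that are placed before local stratum d, so the merged
     chart keeps the usual Plex ordering: cells (depth dim) first, then vertices (depth 0), then
     faces, then edges */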
676   depthShift[dim] = 0;
677   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
678   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
679   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
680   for (d=0; d<dim+1; d++) {
681     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
682     depthIdx[d] = pStart + depthShift[d];
683   }
684 
685   /* Build an SF that describes the full overlap migration: all original local points plus the incoming overlap points */
686   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
687   newLeaves = pEnd - pStart + nleaves;
688   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
689   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
690   /* First map local points to themselves */
691   for (d=0; d<dim+1; d++) {
692     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
693     for (p=pStart; p<pEnd; p++) {
694       point = p + depthShift[d];
695       ilocal[point] = point;
696       iremote[point].index = p;
697       iremote[point].rank = rank;
698       depthIdx[d]++;
699     }
700   }
701 
702   /* Add in the remote roots for currently shared points */
703   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
704   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
705   for (d=0; d<dim+1; d++) {
706     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
707     for (p=0; p<numSharedPoints; p++) {
708       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
709         point = sharedLocal[p] + depthShift[d];
710         iremote[point].index = sharedRemote[p].index;
711         iremote[point].rank = sharedRemote[p].rank;
712       }
713     }
714   }
715 
716   /* Now add the incoming overlap points */
717   for (p=0; p<nleaves; p++) {
718     point = depthIdx[remoteDepths[p]];
719     ilocal[point] = point;
720     iremote[point].index = overlapRemote[p].index;
721     iremote[point].rank = overlapRemote[p].rank;
722     depthIdx[remoteDepths[p]]++;
723   }
724   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
725 
726   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
727   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
728   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
729   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
730   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
731 
732   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
733   PetscFunctionReturn(0);
734 }
735 
736 #undef __FUNCT__
737 #define __FUNCT__ "DMPlexStratifyMigrationSF"
738 /*@
739   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF so that the points of the resulting DM are stratified by depth
740 
741   Input Parameters:
742 + dm          - The DM
743 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
744 
745   Output Parameter:
746 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
747 
748   Level: developer
749 
750 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
751 @*/
752 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
753 {
754   MPI_Comm           comm;
755   PetscMPIInt        rank, numProcs;
756   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
757   PetscInt          *pointDepths, *remoteDepths, *ilocal;
758   PetscInt          *depthRecv, *depthShift, *depthIdx;
759   const PetscSFNode *iremote;
760   PetscErrorCode     ierr;
761 
762   PetscFunctionBegin;
763   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
764   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
765   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
766   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
767   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
768   ierr = MPI_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
769   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
770 
771   /* Before building the migration SF we need to know the new stratum offsets */
772   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
773   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
774   for (d = 0; d < depth+1; ++d) {
775     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
776     for (p = pStart; p < pEnd; ++p) pointDepths[p] = d;
777   }
778   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
779   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
780   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
781   /* Count received points in each stratum and compute the internal strata shift */
782   ierr = PetscMalloc3(depth+1, &depthRecv, depth+1, &depthShift, depth+1, &depthIdx);CHKERRQ(ierr);
783   for (d = 0; d < depth+1; ++d) depthRecv[d] = 0;
784   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
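  /* depthShift[d] is the starting offset of stratum d in the stratified leaf numbering, following
     the usual Plex ordering: cells (maximum depth) first, then vertices (depth 0), then faces, then
     edges */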
785   depthShift[depth] = 0;
786   for (d = 0; d < depth; ++d) depthShift[d] = depthRecv[depth];
787   for (d = 1; d < depth; ++d) depthShift[d] += depthRecv[0];
788   for (d = depth-2; d > 0; --d) depthShift[d] += depthRecv[d+1];
789   for (d = 0; d < depth+1; ++d) {depthIdx[d] = 0;}
790   /* Derive a new local permutation based on stratified indices */
791   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
792   for (p = 0; p < nleaves; ++p) {
793     const PetscInt dep = remoteDepths[p];
794 
795     ilocal[p] = depthShift[dep] + depthIdx[dep];
796     depthIdx[dep]++;
797   }
798   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
799   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
800   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
801   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
802   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
803   PetscFunctionReturn(0);
804 }
805 
806 #undef __FUNCT__
807 #define __FUNCT__ "DMPlexDistributeField"
808 /*@
809   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
810 
811   Collective on DM
812 
813   Input Parameters:
814 + dm - The DMPlex object
815 . pointSF - The PetscSF describing the communication pattern
816 . originalSection - The PetscSection for existing data layout
817 - originalVec - The existing data
818 
819   Output Parameters:
820 + newSection - The PetscSection describing the new data layout
821 - newVec - The new data
822 
823   Level: developer
824 
825 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
826 @*/
827 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
828 {
829   PetscSF        fieldSF;
830   PetscInt      *remoteOffsets, fieldSize;
831   PetscScalar   *originalValues, *newValues;
832   PetscErrorCode ierr;
833 
834   PetscFunctionBegin;
835   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
836   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
837 
838   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
839   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
840   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
841 
842   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
843   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
844   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
845   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
846   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
847   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
848   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
849   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
850   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
851   PetscFunctionReturn(0);
852 }
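
/* A minimal caller-side sketch (assumed names, not taken from this file): the new section and
   vector are created empty by the caller and filled in here, as done for coordinates in
   DMPlexDistributeCoordinates() below.

     PetscSection newSection;
     Vec          newVec;

     ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
     ierr = VecCreate(comm, &newVec);CHKERRQ(ierr);
     ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/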
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "DMPlexDistributeFieldIS"
856 /*@
857   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
858 
859   Collective on DM
860 
861   Input Parameters:
862 + dm - The DMPlex object
863 . pointSF - The PetscSF describing the communication pattern
864 . originalSection - The PetscSection for existing data layout
865 - originalIS - The existing data
866 
867   Output Parameters:
868 + newSection - The PetscSection describing the new data layout
869 - newIS - The new data
870 
871   Level: developer
872 
873 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
874 @*/
875 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
876 {
877   PetscSF         fieldSF;
878   PetscInt       *newValues, *remoteOffsets, fieldSize;
879   const PetscInt *originalValues;
880   PetscErrorCode  ierr;
881 
882   PetscFunctionBegin;
883   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
884   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
885 
886   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
887   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
888 
889   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
890   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
891   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
892   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
893   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
894   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
895   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
896   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
897   PetscFunctionReturn(0);
898 }
899 
900 #undef __FUNCT__
901 #define __FUNCT__ "DMPlexDistributeData"
902 /*@
903   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
904 
905   Collective on DM
906 
907   Input Parameters:
908 + dm - The DMPlex object
909 . pointSF - The PetscSF describing the communication pattern
910 . originalSection - The PetscSection for existing data layout
911 . datatype - The type of data
912 - originalData - The existing data
913 
914   Output Parameters:
915 + newSection - The PetscSection describing the new data layout
916 - newData - The new data
917 
918   Level: developer
919 
920 .seealso: DMPlexDistribute(), DMPlexDistributeField()
921 @*/
922 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
923 {
924   PetscSF        fieldSF;
925   PetscInt      *remoteOffsets, fieldSize;
926   PetscMPIInt    dataSize;
927   PetscErrorCode ierr;
928 
929   PetscFunctionBegin;
930   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
931   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
932 
933   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
934   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
935   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
936 
937   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
938   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
939   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
940   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
941   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
942   PetscFunctionReturn(0);
943 }
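
/* A hedged caller-side sketch (assumed names): the same pattern as DMPlexDistributeField(), but for
   a raw array of any MPI datatype; the routine allocates *newData, which the caller frees.

     PetscSection newSection;
     PetscInt    *newData = NULL;

     ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
     ierr = PetscFree(newData);CHKERRQ(ierr);
*/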
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "DMPlexDistributeCones"
947 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
948 {
949   DM_Plex               *mesh  = (DM_Plex*) dm->data;
950   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
951   MPI_Comm               comm;
952   PetscSF                coneSF;
953   PetscSection           originalConeSection, newConeSection;
954   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
955   PetscBool              flg;
956   PetscErrorCode         ierr;
957 
958   PetscFunctionBegin;
959   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
960   PetscValidPointer(dmParallel,5);
961   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
962 
963   /* Distribute cone section */
964   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
965   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
966   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
967   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
968   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
969   {
970     PetscInt pStart, pEnd, p;
971 
972     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
973     for (p = pStart; p < pEnd; ++p) {
974       PetscInt coneSize;
975       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
976       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
977     }
978   }
979   /* Communicate and renumber cones */
980   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
981   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
982   if (original) {
983     PetscInt numCones;
984 
985     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
986     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
987   }
988   else {
989     globCones = cones;
990   }
991   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
992   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
993   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
994   if (original) {
995     ierr = PetscFree(globCones);CHKERRQ(ierr);
996   }
997   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
998   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
999 #if PETSC_USE_DEBUG
1000   {
1001     PetscInt  p;
1002     PetscBool valid = PETSC_TRUE;
1003     for (p = 0; p < newConesSize; ++p) {
1004       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1005     }
1006     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1007   }
1008 #endif
1009   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1010   if (flg) {
1011     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1012     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1013     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1014     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1015     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1016   }
1017   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1018   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1019   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1020   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1021   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1022   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1023   /* Create supports and stratify sieve */
1024   {
1025     PetscInt pStart, pEnd;
1026 
1027     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1028     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1029   }
1030   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1031   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1032   pmesh->useCone    = mesh->useCone;
1033   pmesh->useClosure = mesh->useClosure;
1034   PetscFunctionReturn(0);
1035 }
1036 
1037 #undef __FUNCT__
1038 #define __FUNCT__ "DMPlexDistributeCoordinates"
1039 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1040 {
1041   MPI_Comm         comm;
1042   PetscSection     originalCoordSection, newCoordSection;
1043   Vec              originalCoordinates, newCoordinates;
1044   PetscInt         bs;
1045   const char      *name;
1046   const PetscReal *maxCell, *L;
1047   const DMBoundaryType *bd;
1048   PetscErrorCode   ierr;
1049 
1050   PetscFunctionBegin;
1051   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1052   PetscValidPointer(dmParallel, 3);
1053 
1054   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1055   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1056   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1057   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1058   if (originalCoordinates) {
1059     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1060     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1061     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1062 
1063     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1064     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1065     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1066     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1067     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1068   }
1069   ierr = DMGetPeriodicity(dm, &maxCell, &L, &bd);CHKERRQ(ierr);
1070   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L, bd);CHKERRQ(ierr);}
1071   PetscFunctionReturn(0);
1072 }
1073 
1074 #undef __FUNCT__
1075 #define __FUNCT__ "DMPlexDistributeLabels"
1076 /* Here we are assuming that process 0 always has everything */
1077 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1078 {
1079   MPI_Comm       comm;
1080   PetscMPIInt    rank;
1081   PetscInt       numLabels, numLocalLabels, l;
1082   PetscBool      hasLabels = PETSC_FALSE;
1083   PetscErrorCode ierr;
1084 
1085   PetscFunctionBegin;
1086   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1087   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1088   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1089   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1090   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1091 
1092   /* Everyone must have either the same number of labels, or none */
1093   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1094   numLabels = numLocalLabels;
1095   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1096   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1097   for (l = numLabels-1; l >= 0; --l) {
1098     DMLabel     label = NULL, labelNew = NULL;
1099     PetscBool   isdepth;
1100 
1101     if (hasLabels) {
1102       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1103       /* Skip "depth" because it is recreated */
1104       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1105     }
1106     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1107     if (isdepth) continue;
1108     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1109     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1110   }
1111   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1112   PetscFunctionReturn(0);
1113 }
1114 
1115 #undef __FUNCT__
1116 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1117 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1118 {
1119   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1120   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1121   MPI_Comm        comm;
1122   const PetscInt *gpoints;
1123   PetscInt        dim, depth, n, d;
1124   PetscErrorCode  ierr;
1125 
1126   PetscFunctionBegin;
1127   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1128   PetscValidPointer(dmParallel, 4);
1129 
1130   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1131   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1132 
1133   /* Setup hybrid structure */
1134   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1135   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1136   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1137   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1138   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1139   for (d = 0; d <= dim; ++d) {
1140     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1141 
1142     if (pmax < 0) continue;
1143     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1144     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1145     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1146     for (p = 0; p < n; ++p) {
1147       const PetscInt point = gpoints[p];
1148 
1149       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1150     }
1151     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1152     else            pmesh->hybridPointMax[d] = -1;
1153   }
1154   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1155   PetscFunctionReturn(0);
1156 }
1157 
1158 #undef __FUNCT__
1159 #define __FUNCT__ "DMPlexDistributeSetupTree"
1160 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1161 {
1162   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1163   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1164   MPI_Comm        comm;
1165   DM              refTree;
1166   PetscSection    origParentSection, newParentSection;
1167   PetscInt        *origParents, *origChildIDs;
1168   PetscBool       flg;
1169   PetscErrorCode  ierr;
1170 
1171   PetscFunctionBegin;
1172   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1173   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1174   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1175 
1176   /* Set up tree */
1177   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1178   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1179   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1180   if (origParentSection) {
1181     PetscInt        pStart, pEnd;
1182     PetscInt        *newParents, *newChildIDs;
1183     PetscInt        *remoteOffsetsParents, newParentSize;
1184     PetscSF         parentSF;
1185 
1186     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1187     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1188     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1189     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1190     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1191     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1192     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1193     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1194     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1195     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1196     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1197     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1198 #if PETSC_USE_DEBUG
1199     {
1200       PetscInt  p;
1201       PetscBool valid = PETSC_TRUE;
1202       for (p = 0; p < newParentSize; ++p) {
1203         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1204       }
1205       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1206     }
1207 #endif
1208     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1209     if (flg) {
1210       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1211       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1212       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1213       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1214       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1215     }
1216     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1217     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1218     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1219     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1220   }
1221   pmesh->useAnchors = mesh->useAnchors;
1222   PetscFunctionReturn(0);
1223 }
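
/* Illustration only, not part of the PETSc API: the section-migration pattern used in
   DMPlexDistributeSetupTree() above, shown in isolation.  A serial PetscSection plus a
   per-point integer payload are pushed over a migration SF: distribute the layout,
   build a section SF, then broadcast the data.  All argument names are hypothetical. */
static PetscErrorCode ExampleMigrateSectionData(PetscSF migrationSF, PetscSection serialSec, const PetscInt serialData[], PetscInt pStart, PetscInt pEnd, PetscSection *parallelSec, PetscInt **parallelData)
{
  PetscSF        sectionSF;
  PetscInt      *remoteOffsets, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) migrationSF), parallelSec);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(*parallelSec, pStart, pEnd);CHKERRQ(ierr);
  /* Distribute the dof layout and remember the remote offsets */
  ierr = PetscSFDistributeSection(migrationSF, serialSec, &remoteOffsets, *parallelSec);CHKERRQ(ierr);
  /* Turn the point SF into an SF over section entries */
  ierr = PetscSFCreateSectionSF(migrationSF, serialSec, remoteOffsets, *parallelSec, &sectionSF);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(*parallelSec, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(size, parallelData);CHKERRQ(ierr);
  /* Move the payload itself */
  ierr = PetscSFBcastBegin(sectionSF, MPIU_INT, serialData, *parallelData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sectionSF, MPIU_INT, serialData, *parallelData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sectionSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}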
1224 
1225 #undef __FUNCT__
1226 #define __FUNCT__ "DMPlexDistributeSF"
1227 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1228 {
1229   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1230   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1231   PetscMPIInt            rank, numProcs;
1232   MPI_Comm               comm;
1233   PetscErrorCode         ierr;
1234 
1235   PetscFunctionBegin;
1236   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1237   PetscValidPointer(dmParallel,3);
1238 
1239   /* Create point SF for parallel mesh */
1240   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1241   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1242   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1243   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1244   {
1245     const PetscInt *leaves;
1246     PetscSFNode    *remotePoints, *rowners, *lowners;
1247     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1248     PetscInt        pStart, pEnd;
1249 
1250     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1251     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1252     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1253     for (p=0; p<numRoots; p++) {
1254       rowners[p].rank  = -1;
1255       rowners[p].index = -1;
1256     }
1257     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1258     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1259     for (p = 0; p < numLeaves; ++p) {
1260       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1261         lowners[p].rank  = rank;
1262         lowners[p].index = leaves ? leaves[p] : p;
1263       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1264         lowners[p].rank  = -2;
1265         lowners[p].index = -2;
1266       }
1267     }
1268     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction, flag so that MAXLOC does not use them */
1269       rowners[p].rank  = -3;
1270       rowners[p].index = -3;
1271     }
1272     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1273     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1274     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1275     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1276     for (p = 0; p < numLeaves; ++p) {
1277       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1278       if (lowners[p].rank != rank) ++numGhostPoints;
1279     }
1280     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1281     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1282     for (p = 0, gp = 0; p < numLeaves; ++p) {
1283       if (lowners[p].rank != rank) {
1284         ghostPoints[gp]        = leaves ? leaves[p] : p;
1285         remotePoints[gp].rank  = lowners[p].rank;
1286         remotePoints[gp].index = lowners[p].index;
1287         ++gp;
1288       }
1289     }
1290     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1291     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1292     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1293   }
1294   pmesh->useCone    = mesh->useCone;
1295   pmesh->useClosure = mesh->useClosure;
1296   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1297   PetscFunctionReturn(0);
1298 }
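
/* Illustration only, not part of the PETSc API: the two-phase ownership vote used in
   DMPlexDistributeSF() above and in DMPlexCreatePointSF() below.  Every leaf bids
   (rank, leaf index); the MPI_MAXLOC reduction keeps the highest-ranked bid at each
   root; the broadcast returns the winner, so a leaf is a ghost exactly when the
   winning rank differs from the local one.  Names are hypothetical; the caller
   provides leafBids[nleaves] and rootWinners[nroots]. */
static PetscErrorCode ExampleVotePointOwnership(PetscSF sf, PetscMPIInt rank, PetscInt nroots, PetscInt nleaves, PetscSFNode rootWinners[], PetscSFNode leafBids[])
{
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  for (p = 0; p < nleaves; ++p) {leafBids[p].rank = rank; leafBids[p].index = p;}
  for (p = 0; p < nroots;  ++p) {rootWinners[p].rank = -3; rootWinners[p].index = -3;} /* roots do not bid */
  ierr = PetscSFReduceBegin(sf, MPIU_2INT, leafBids, rootWinners, MPI_MAXLOC);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf, MPIU_2INT, leafBids, rootWinners, MPI_MAXLOC);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_2INT, rootWinners, leafBids);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_2INT, rootWinners, leafBids);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}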
1299 
1300 #undef __FUNCT__
1301 #define __FUNCT__ "DMPlexCreatePointSF"
1302 /*@C
1303   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1304 
1305   Input Parameters:
1306 + dm          - The source DMPlex object
1307 . migrationSF - The star forest that describes the parallel point remapping
1308 - ownership   - Flag causing a vote to determine point ownership
1309 
1310   Output Parameter:
1311 . pointSF     - The star forest describing the point overlap in the remapped DM
1312 
1313   Level: developer
1314 
1315 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1316 @*/
1317 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1318 {
1319   PetscMPIInt        rank;
1320   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1321   PetscInt          *pointLocal;
1322   const PetscInt    *leaves;
1323   const PetscSFNode *roots;
1324   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1325   PetscErrorCode     ierr;
1326 
1327   PetscFunctionBegin;
1328   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1329   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1330 
1331   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1332   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1333   if (ownership) {
1334     /* Point ownership vote: the process with the highest rank owns shared points */
1335     for (p = 0; p < nleaves; ++p) {
1336       /* Every leaf bids for the point with its own rank */
1337       leafNodes[p].rank  = rank;
1338       leafNodes[p].index = p;
1339     }
1340     for (p = 0; p < nroots; p++) {
1341       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1342       rootNodes[p].rank  = -3;
1343       rootNodes[p].index = -3;
1344     }
1345     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1346     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1347   } else {
1348     for (p = 0; p < nroots; p++) {
1349       rootNodes[p].index = -1;
1350       rootNodes[p].rank = rank;
1351     }
1352     for (p = 0; p < nleaves; p++) {
1353       /* Write new local id into old location */
1354       if (roots[p].rank == rank) {
1355         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1356       }
1357     }
1358   }
1359   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1360   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1361 
1362   for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
1363   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1364   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1365   for (idx = 0, p = 0; p < nleaves; p++) {
1366     if (leafNodes[p].rank != rank) {
1367       pointLocal[idx] = p;
1368       pointRemote[idx] = leafNodes[p];
1369       idx++;
1370     }
1371   }
1372   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1373   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1374   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1375   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1376   PetscFunctionReturn(0);
1377 }
1378 
1379 #undef __FUNCT__
1380 #define __FUNCT__ "DMPlexMigrate"
1381 /*@C
1382   DMPlexMigrate  - Migrates internal DM data over the supplied star forest
1383 
1384   Input Parameters:
1385 + dm       - The source DMPlex object
1386 - sf       - The star forest communication context describing the migration pattern
1387 
1388   Output Parameter:
1389 . targetDM - The target DMPlex object
1390 
1391   Level: intermediate
1392 
1393 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1394 @*/
1395 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1396 {
1397   MPI_Comm               comm;
1398   PetscInt               dim, nroots;
1399   PetscSF                sfPoint;
1400   ISLocalToGlobalMapping ltogMigration;
1401   ISLocalToGlobalMapping ltogOriginal = NULL;
1402   PetscBool              flg;
1403   PetscErrorCode         ierr;
1404 
1405   PetscFunctionBegin;
1406   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1407   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1408   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1409   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1410   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1411 
1412   /* Check for a one-to-all distribution pattern */
1413   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1414   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1415   if (nroots >= 0) {
1416     IS                     isOriginal;
1417     PetscInt               n, size, nleaves;
1418     PetscInt              *numbering_orig, *numbering_new;
1419     /* Get the original point numbering */
1420     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1421     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1422     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1423     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1424     /* Convert to positive global numbers */
1425     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1426     /* Derive the new local-to-global mapping from the old one */
1427     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1428     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1429     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1430     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1431     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1432     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1433     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1434   } else {
1435     /* One-to-all distribution pattern: We can derive LToG from SF */
1436     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1437   }
1438   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1439   if (flg) {
1440     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1441     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1442   }
1443   /* Migrate DM data to target DM */
1444   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1445   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1446   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1447   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1448   ierr = DMPlexDistributeSetupTree(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1449   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1450   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1451   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1452   PetscFunctionReturn(0);
1453 }
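
/* Illustration only, not part of the PETSc API: the typical call sequence around
   DMPlexMigrate(), mirroring what DMPlexDistribute() does below.  Given a migration
   SF built by the caller, create the target DM, migrate the mesh data, and attach a
   freshly derived point SF.  All names are hypothetical. */
static PetscErrorCode ExampleMigrateAndSetPointSF(DM dm, PetscSF migrationSF, DM *dmTarget)
{
  PetscSF        pointSF;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexCreate(PetscObjectComm((PetscObject) dm), dmTarget);CHKERRQ(ierr);
  ierr = DMPlexMigrate(dm, migrationSF, *dmTarget);CHKERRQ(ierr);
  /* PETSC_TRUE requests the ownership vote so that shared points get a unique owner */
  ierr = DMPlexCreatePointSF(*dmTarget, migrationSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
  ierr = DMSetPointSF(*dmTarget, pointSF);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}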
1454 
1455 #undef __FUNCT__
1456 #define __FUNCT__ "DMPlexDistribute"
1457 /*@C
1458   DMPlexDistribute - Distributes the mesh and any associated sections.
1459 
1460   Collective on dm
1461 
1462   Input Parameters:
1463 + dm  - The original DMPlex object
1464 - overlap - The overlap of partitions, 0 is the default
1465 
1466   Output Parameters:
1467 + sf - The PetscSF used for point distribution
1468 - dmParallel - The distributed DMPlex object, or NULL
1469 
1470   Note: If the mesh was not distributed (for example, when running on a single process), dmParallel is set to NULL.
1471 
1472   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1473   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1474   representation on the mesh.
1475 
1476   Level: intermediate
1477 
1478 .keywords: mesh, elements
1479 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1480 @*/
1481 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1482 {
1483   MPI_Comm               comm;
1484   PetscPartitioner       partitioner;
1485   IS                     cellPart;
1486   PetscSection           cellPartSection;
1487   DM                     dmCoord;
1488   DMLabel                lblPartition, lblMigration;
1489   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1490   PetscBool              flg;
1491   PetscMPIInt            rank, numProcs, p;
1492   PetscErrorCode         ierr;
1493 
1494   PetscFunctionBegin;
1495   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1496   if (sf) PetscValidPointer(sf,4);
1497   PetscValidPointer(dmParallel,5);
1498 
1499   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1500   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1501   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1502   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1503 
1504   *dmParallel = NULL;
1505   if (numProcs == 1) PetscFunctionReturn(0);
1506 
1507   /* Create cell partition */
1508   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1509   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1510   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1511   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1512   {
1513     /* Convert partition to DMLabel */
1514     PetscInt proc, pStart, pEnd, npoints, poffset;
1515     const PetscInt *points;
1516     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1517     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1518     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1519     for (proc = pStart; proc < pEnd; proc++) {
1520       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1521       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1522       for (p = poffset; p < poffset+npoints; p++) {
1523         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1524       }
1525     }
1526     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1527   }
1528   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1529   {
1530     /* Build a global process SF */
1531     PetscSFNode *remoteProc;
1532     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
1533     for (p = 0; p < numProcs; ++p) {
1534       remoteProc[p].rank  = p;
1535       remoteProc[p].index = rank;
1536     }
1537     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1538     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1539     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1540   }
1541   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1542   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1543   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1544   /* Stratify the SF in case we are migrating an already parallel plex */
1545   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1546   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1547   sfMigration = sfStratified;
1548   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1549   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1550   if (flg) {
1551     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
1552     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1553     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1554   }
1555 
1556   /* Create non-overlapping parallel DM and migrate internal data */
1557   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1558   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1559   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1560 
1561   /* Build the point SF without overlap */
1562   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1563   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1564   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1565   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1566   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1567 
1568   if (overlap > 0) {
1569     DM                 dmOverlap;
1570     PetscInt           nroots, nleaves;
1571     PetscSFNode       *newRemote;
1572     const PetscSFNode *oldRemote;
1573     PetscSF            sfOverlap, sfOverlapPoint;
1574     /* Add the partition overlap to the distributed DM */
1575     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1576     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1577     *dmParallel = dmOverlap;
1578     if (flg) {
1579       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1580       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1581     }
1582 
1583     /* Re-map the migration SF to establish the full migration pattern */
1584     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1585     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1586     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1587     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1588     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1589     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1590     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1591     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1592     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1593     sfMigration = sfOverlapPoint;
1594   }
1595   /* Cleanup Partition */
1596   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1597   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1598   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1599   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1600   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1601   /* Copy BC */
1602   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1603   /* Cleanup */
1604   if (sf) {*sf = sfMigration;}
1605   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1606   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1607   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1608   PetscFunctionReturn(0);
1609 }
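
/* Illustration only, not part of the PETSc API: the common way to call
   DMPlexDistribute() from application code.  The serial mesh is replaced in place by
   the distributed one; on a single process (or if nothing was distributed) dmDist is
   NULL and the original DM is kept.  Passing NULL for the SF discards the migration SF. */
static PetscErrorCode ExampleDistributeInPlace(DM *dm, PetscInt overlap)
{
  DM             dmDist = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexDistribute(*dm, overlap, NULL, &dmDist);CHKERRQ(ierr);
  if (dmDist) {
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = dmDist;
  }
  PetscFunctionReturn(0);
}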
1610 
1611 #undef __FUNCT__
1612 #define __FUNCT__ "DMPlexDistributeOverlap"
1613 /*@C
1614   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1615 
1616   Collective on dm
1617 
1618   Input Parameters:
1619 + dm  - The non-overlapping distributed DMPlex object
1620 - overlap - The overlap of partitions, 0 is the default
1621 
1622   Output Parameters:
1623 + sf - The PetscSF used for point distribution
1624 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1625 
1626   Note: dmOverlap is always created, even if the requested overlap turns out to be empty.
1627 
1628   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1629   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1630   representation on the mesh.
1631 
1632   Level: intermediate
1633 
1634 .keywords: mesh, elements
1635 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1636 @*/
1637 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1638 {
1639   MPI_Comm               comm;
1640   PetscMPIInt            rank;
1641   PetscSection           rootSection, leafSection;
1642   IS                     rootrank, leafrank;
1643   DM                     dmCoord;
1644   DMLabel                lblOverlap;
1645   PetscSF                sfOverlap, sfStratified, sfPoint;
1646   PetscErrorCode         ierr;
1647 
1648   PetscFunctionBegin;
1649   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1650   if (sf) PetscValidPointer(sf, 3);
1651   PetscValidPointer(dmOverlap, 4);
1652 
1653   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1654   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1655   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1656 
1657   /* Compute point overlap with neighbouring processes on the distributed DM */
1658   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1659   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1660   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1661   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1662   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1663   /* Convert overlap label to stratified migration SF */
1664   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1665   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1666   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1667   sfOverlap = sfStratified;
1668   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1669   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1670 
1671   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1672   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1673   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1674   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1675   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1676 
1677   /* Build the overlapping DM */
1678   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1679   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1680   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1681   /* Build the new point SF */
1682   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1683   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1684   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1685   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1686   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1687   /* Cleanup overlap partition */
1688   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1689   if (sf) *sf = sfOverlap;
1690   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1691   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1692   PetscFunctionReturn(0);
1693 }
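
/* Illustration only, not part of the PETSc API: adding overlap after a non-overlapping
   distribution, in the same replace-in-place style as the DMPlexDistribute() sketch
   above.  The names are hypothetical. */
static PetscErrorCode ExampleAddOverlapInPlace(DM *dm, PetscInt overlap)
{
  DM             dmOver = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexDistributeOverlap(*dm, overlap, NULL, &dmOver);CHKERRQ(ierr);
  if (dmOver) {
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = dmOver;
  }
  PetscFunctionReturn(0);
}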
1694