xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 1fd9873a3ede9133033f76980bf9ca7cb689c104)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
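
/*
  A minimal usage sketch (error checking abbreviated; dm is assumed to be an existing DMPlex):
  selecting the adjacency pattern before distribution or matrix preallocation, following the
  FEM/FVM table in the Notes above.  The pair of flags below selects the FVM pattern.

      DM             dmDist = NULL;
      PetscErrorCode ierr;

      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
      ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
*/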
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
153 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
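
/*
  A minimal sketch of how anchor-aware adjacency is typically used, for a mesh point p (error
  checking abbreviated; anchorSection and anchorIS are assumed to have been built by the caller):

      PetscInt *adj = NULL, adjSize = PETSC_DETERMINE;

      ierr = DMPlexSetAnchors(dm, anchorSection, anchorIS);CHKERRQ(ierr);
      ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
      ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);

  With the flag set, constrained points are removed from adj[] and replaced by their anchors.
*/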
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259 {
260   static PetscInt asiz = 0;
261   PetscInt maxAnchors = 1;
262   PetscInt aStart = -1, aEnd = -1;
263   PetscInt maxAdjSize;
264   PetscSection aSec = NULL;
265   IS aIS = NULL;
266   const PetscInt *anchors;
267   PetscErrorCode  ierr;
268 
269   PetscFunctionBeginHot;
270   if (useAnchors) {
271     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
272     if (aSec) {
273       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
274       maxAnchors = PetscMax(1,maxAnchors);
275       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
276       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
277     }
278   }
279   if (!*adj) {
280     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
281 
282     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
283     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
284     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
285     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288     asiz *= maxAnchors;
289     asiz  = PetscMin(asiz,pEnd-pStart);
290     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
291   }
292   if (*adjSize < 0) *adjSize = asiz;
293   maxAdjSize = *adjSize;
294   if (useTransitiveClosure) {
295     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
296   } else if (useCone) {
297     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
298   } else {
299     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
300   }
301   if (useAnchors && aSec) {
302     PetscInt origSize = *adjSize;
303     PetscInt numAdj = origSize;
304     PetscInt i = 0, j;
305     PetscInt *orig = *adj;
306 
307     while (i < origSize) {
308       PetscInt p = orig[i];
309       PetscInt aDof = 0;
310 
311       if (p >= aStart && p < aEnd) {
312         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
313       }
314       if (aDof) {
315         PetscInt aOff;
316         PetscInt s, q;
317 
318         for (j = i + 1; j < numAdj; j++) {
319           orig[j - 1] = orig[j];
320         }
321         origSize--;
322         numAdj--;
323         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
324         for (s = 0; s < aDof; ++s) {
325           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
326             if (anchors[aOff+s] == orig[q]) break;
327           }
328           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
329         }
330       }
331       else {
332         i++;
333       }
334     }
335     *adjSize = numAdj;
336     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must PetscFree the adj array if it was not passed in.
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
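
/*
  A minimal usage sketch (error checking abbreviated): passing adj = NULL makes the routine
  allocate a workspace that can be reused for every point and must be freed by the caller,
  as in the loops inside DMPlexCreateOverlap() below.

      PetscInt *adj = NULL, adjSize, pStart, pEnd, p;

      ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
      for (p = pStart; p < pEnd; ++p) {
        adjSize = PETSC_DETERMINE;
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        ... use adj[0..adjSize-1] ...
      }
      ierr = PetscFree(adj);CHKERRQ(ierr);
*/
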
374 #undef __FUNCT__
375 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
376 /*@
377   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
378 
379   Collective on DM
380 
381   Input Parameters:
382 + dm      - The DM
383 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
384 
385   Output Parameters:
386 + processRanks - A list of process neighbors, or NULL
387 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
388 
389   Level: developer
390 
391 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
392 @*/
393 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
394 {
395   const PetscSFNode *remotePoints;
396   PetscInt          *localPointsNew;
397   PetscSFNode       *remotePointsNew;
398   const PetscInt    *nranks;
399   PetscInt          *ranksNew;
400   PetscBT            neighbors;
401   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
402   PetscMPIInt        numProcs, proc, rank;
403   PetscErrorCode     ierr;
404 
405   PetscFunctionBegin;
406   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
407   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
408   if (processRanks) {PetscValidPointer(processRanks, 3);}
409   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
410   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
411   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
412   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
413   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
414   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
415   /* Compute root-to-leaf process connectivity */
416   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
417   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
418   for (p = pStart; p < pEnd; ++p) {
419     PetscInt ndof, noff, n;
420 
421     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
422     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
423     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
424   }
425   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
426   /* Compute leaf-to-neighbor process connectivity */
427   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
428   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
429   for (p = pStart; p < pEnd; ++p) {
430     PetscInt ndof, noff, n;
431 
432     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
433     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
434     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
435   }
436   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
437   /* Compute leaf-to-root process connectivity */
438   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
439   /* Calculate edges */
440   PetscBTClear(neighbors, rank);
441   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
442   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
443   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
445   for(proc = 0, n = 0; proc < numProcs; ++proc) {
446     if (PetscBTLookup(neighbors, proc)) {
447       ranksNew[n]              = proc;
448       localPointsNew[n]        = proc;
449       remotePointsNew[n].index = rank;
450       remotePointsNew[n].rank  = proc;
451       ++n;
452     }
453   }
454   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
455   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
456   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
457   if (sfProcess) {
458     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
459     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
460     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
461     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
462   }
463   PetscFunctionReturn(0);
464 }
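
/*
  A minimal sketch of the intended call sequence (error checking abbreviated): the rank
  sections and index sets are those produced by DMPlexDistributeOwnership() below, and the
  point SF comes from DMGetPointSF().

      PetscSF sfPoint, sfProcess;
      IS      processRanks;

      ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
      ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank,
                                           &processRanks, &sfProcess);CHKERRQ(ierr);
*/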
465 
466 #undef __FUNCT__
467 #define __FUNCT__ "DMPlexDistributeOwnership"
468 /*@
469   DMPlexDistributeOwnership - Compute owner information for shared points. This basically creates the two-sided (root and leaf) ownership information for the point SF.
470 
471   Collective on DM
472 
473   Input Parameter:
474 . dm - The DM
475 
476   Output Parameters:
477 + rootSection - The number of leaves for a given root point
478 . rootrank    - The rank of each edge into the root point
479 . leafSection - The number of processes sharing a given leaf point
480 - leafrank    - The rank of each process sharing a leaf point
481 
482   Level: developer
483 
484 .seealso: DMPlexCreateOverlap()
485 @*/
486 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
487 {
488   MPI_Comm        comm;
489   PetscSF         sfPoint;
490   const PetscInt *rootdegree;
491   PetscInt       *myrank, *remoterank;
492   PetscInt        pStart, pEnd, p, nedges;
493   PetscMPIInt     rank;
494   PetscErrorCode  ierr;
495 
496   PetscFunctionBegin;
497   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
498   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
499   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
500   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
501   /* Compute number of leaves for each root */
502   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
503   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
504   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
506   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
507   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
508   /* Gather rank of each leaf to root */
509   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
510   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
511   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
512   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
513   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
514   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscFree(myrank);CHKERRQ(ierr);
516   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
517   /* Distribute remote ranks to leaves */
518   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
519   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
520   PetscFunctionReturn(0);
521 }
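
/*
  A minimal sketch of how this routine is combined with DMPlexCreateOverlap() below (error
  checking abbreviated; comm is assumed to be the communicator of dm, and the sections must
  be created empty):

      PetscSection rootSection, leafSection;
      IS           rootrank, leafrank;
      PetscSF      overlapSF;

      ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
      ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
      ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
      ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
*/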
522 
523 #undef __FUNCT__
524 #define __FUNCT__ "DMPlexCreateOverlap"
525 /*@C
526   DMPlexCreateOverlap - Compute the star forest describing the points that must be added to each rank's overlap (halo), using the ownership information from DMPlexDistributeOwnership()
527 
528   Collective on DM
529 
530   Input Parameters:
531 + dm          - The DM
532 . rootSection - The number of leaves for a given root point
533 . rootrank    - The rank of each edge into the root point
534 . leafSection - The number of processes sharing a given leaf point
535 - leafrank    - The rank of each process sharing a leaf point
536 
537   Output Parameter:
538 . overlapSF   - The star forest mapping remote overlap points into a contiguous leaf space
539 
540   Level: developer
541 
542 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
543 @*/
544 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
545 {
546   MPI_Comm           comm;
547   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
548   PetscSF            sfPoint, sfProc;
549   DMLabel            ovLeafLabel;
550   const PetscSFNode *remote;
551   const PetscInt    *local;
552   const PetscInt    *nrank, *rrank;
553   PetscInt          *adj = NULL;
554   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
555   PetscMPIInt        rank, numProcs;
556   PetscBool          useCone, useClosure, flg;
557   PetscErrorCode     ierr;
558 
559   PetscFunctionBegin;
560   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
561   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
562   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
563   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
564   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
565   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
566   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
567   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
568   /* Handle leaves: shared with the root point */
569   for (l = 0; l < nleaves; ++l) {
570     PetscInt adjSize = PETSC_DETERMINE, a;
571 
572     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
573     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
574   }
575   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
576   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
577   /* Handle roots */
578   for (p = pStart; p < pEnd; ++p) {
579     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
580 
581     if ((p >= sStart) && (p < sEnd)) {
582       /* Some leaves share a root with other leaves on different processes */
583       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
584       if (neighbors) {
585         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
586         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
587         for (n = 0; n < neighbors; ++n) {
588           const PetscInt remoteRank = nrank[noff+n];
589 
590           if (remoteRank == rank) continue;
591           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
592         }
593       }
594     }
595     /* Roots are shared with leaves */
596     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
597     if (!neighbors) continue;
598     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
599     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
600     for (n = 0; n < neighbors; ++n) {
601       const PetscInt remoteRank = rrank[noff+n];
602 
603       if (remoteRank == rank) continue;
604       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
605     }
606   }
607   ierr = PetscFree(adj);CHKERRQ(ierr);
608   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
609   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
610   /* We require the closure in the overlap */
611   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
612   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
613   if (useCone || !useClosure) {
614     IS              rankIS,   pointIS;
615     const PetscInt *ranks,   *points;
616     PetscInt        numRanks, numPoints, r, p;
617 
618     ierr = DMLabelGetValueIS(ovAdjByRank, &rankIS);CHKERRQ(ierr);
619     ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
620     ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
621     for (r = 0; r < numRanks; ++r) {
622       const PetscInt rank = ranks[r];
623 
624       ierr = DMLabelGetStratumIS(ovAdjByRank, rank, &pointIS);CHKERRQ(ierr);
625       ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
626       ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
627       for (p = 0; p < numPoints; ++p) {
628         PetscInt *closure = NULL, closureSize, c;
629 
630         ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
631         for (c = 0; c < closureSize*2; c += 2) {ierr = DMLabelSetValue(ovAdjByRank, closure[c], rank);CHKERRQ(ierr);}
632         ierr = DMPlexRestoreTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
633       }
634       ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
635       ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
636     }
637     ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
638     ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
639   }
640   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
641   if (flg) {
642     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
643     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
644   }
645   /* Make process SF and invert sender to receiver label */
646   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
647   ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
648   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, ovLeafLabel);CHKERRQ(ierr);
649   /* Don't import points from yourself */
650   ierr = DMLabelClearStratum(ovLeafLabel, rank);CHKERRQ(ierr);
651   /* Don't import points already in the pointSF */
652   for (l = 0; l < nleaves; ++l) {
653     ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
654   }
655   /* Convert receiver label to SF */
656   ierr = DMPlexPartitionLabelCreateSF(dm, ovLeafLabel, overlapSF);CHKERRQ(ierr);
657   ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
658   ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
659   if (flg) {
660     ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
661     ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
662   }
663   /* Clean up */
664   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
665   ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
666   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
667   PetscFunctionReturn(0);
668 }
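
/*
  A minimal sketch of the next step taken when the overlap is actually distributed (error
  checking abbreviated): the overlap SF computed above is expanded into a migration SF that
  also maps every local point to itself, using DMPlexCreateOverlapMigrationSF() below.

      PetscSF migrationSF;

      ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
*/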
669 
670 #undef __FUNCT__
671 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
672 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
673 {
674   MPI_Comm           comm;
675   PetscMPIInt        rank, numProcs;
676   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
677   PetscInt          *pointDepths, *remoteDepths, *ilocal;
678   PetscInt          *depthRecv, *depthShift, *depthIdx;
679   PetscSFNode       *iremote;
680   PetscSF            pointSF;
681   const PetscInt    *sharedLocal;
682   const PetscSFNode *overlapRemote, *sharedRemote;
683   PetscErrorCode     ierr;
684 
685   PetscFunctionBegin;
686   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
687   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
688   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
689   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
690   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
691 
692   /* Before building the migration SF we need to know the new stratum offsets */
693   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
694   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
695   for (d=0; d<dim+1; d++) {
696     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
697     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
698   }
699   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
700   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
701   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
702 
703   /* Count received points in each stratum and compute the internal strata shift */
704   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
705   for (d=0; d<dim+1; d++) depthRecv[d]=0;
706   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
707   depthShift[dim] = 0;
708   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
709   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
710   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
711   for (d=0; d<dim+1; d++) {
712     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
713     depthIdx[d] = pStart + depthShift[d];
714   }
715 
716   /* Using the overlap SF, build an SF that describes the full overlap migration */
717   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
718   newLeaves = pEnd - pStart + nleaves;
719   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
720   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
721   /* First map local points to themselves */
722   for (d=0; d<dim+1; d++) {
723     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
724     for (p=pStart; p<pEnd; p++) {
725       point = p + depthShift[d];
726       ilocal[point] = point;
727       iremote[point].index = p;
728       iremote[point].rank = rank;
729       depthIdx[d]++;
730     }
731   }
732 
733   /* Add in the remote roots for currently shared points */
734   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
735   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
736   for (d=0; d<dim+1; d++) {
737     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
738     for (p=0; p<numSharedPoints; p++) {
739       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
740         point = sharedLocal[p] + depthShift[d];
741         iremote[point].index = sharedRemote[p].index;
742         iremote[point].rank = sharedRemote[p].rank;
743       }
744     }
745   }
746 
747   /* Now add the incoming overlap points */
748   for (p=0; p<nleaves; p++) {
749     point = depthIdx[remoteDepths[p]];
750     ilocal[point] = point;
751     iremote[point].index = overlapRemote[p].index;
752     iremote[point].rank = overlapRemote[p].rank;
753     depthIdx[remoteDepths[p]]++;
754   }
755   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
756 
757   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
758   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
759   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
760   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
761   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
762 
763   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
764   PetscFunctionReturn(0);
765 }
766 
767 #undef __FUNCT__
768 #define __FUNCT__ "DMPlexDistributeField"
769 /*@
770   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
771 
772   Collective on DM
773 
774   Input Parameters:
775 + dm - The DMPlex object
776 . pointSF - The PetscSF describing the communication pattern
777 . originalSection - The PetscSection for existing data layout
778 - originalVec - The existing data
779 
780   Output Parameters:
781 + newSection - The PetscSection describing the new data layout
782 - newVec - The new data
783 
784   Level: developer
785 
786 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
787 @*/
788 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
789 {
790   PetscSF        fieldSF;
791   PetscInt      *remoteOffsets, fieldSize;
792   PetscScalar   *originalValues, *newValues;
793   PetscErrorCode ierr;
794 
795   PetscFunctionBegin;
796   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
797   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
798 
799   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
800   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
801   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
802 
803   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
804   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
805   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
806   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
807   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
808   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
809   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
810   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
811   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
812   PetscFunctionReturn(0);
813 }
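
/*
  A minimal usage sketch (error checking abbreviated): newSection and newVec must exist before
  the call but must not be sized; the routine sets the local size and type of newVec itself.
  Here sf is assumed to be the point SF returned by DMPlexDistribute(), comm its communicator,
  and origSection/origVec the existing serial layout and data.

      PetscSection newSection;
      Vec          newVec;

      ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
      ierr = VecCreate(comm, &newVec);CHKERRQ(ierr);
      ierr = DMPlexDistributeField(dm, sf, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/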
814 
815 #undef __FUNCT__
816 #define __FUNCT__ "DMPlexDistributeFieldIS"
817 /*@
818   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
819 
820   Collective on DM
821 
822   Input Parameters:
823 + dm - The DMPlex object
824 . pointSF - The PetscSF describing the communication pattern
825 . originalSection - The PetscSection for existing data layout
826 - originalIS - The existing data
827 
828   Output Parameters:
829 + newSection - The PetscSection describing the new data layout
830 - newIS - The new data
831 
832   Level: developer
833 
834 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
835 @*/
836 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
837 {
838   PetscSF         fieldSF;
839   PetscInt       *newValues, *remoteOffsets, fieldSize;
840   const PetscInt *originalValues;
841   PetscErrorCode  ierr;
842 
843   PetscFunctionBegin;
844   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
845   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
846 
847   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
848   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
849 
850   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
851   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
852   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
853   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
854   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
855   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
856   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
857   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
858   PetscFunctionReturn(0);
859 }
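
/*
  A minimal usage sketch (error checking abbreviated): only the new PetscSection must be
  created beforehand; the new IS is created by the routine.

      PetscSection newSection;
      IS           newIS;

      ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
      ierr = DMPlexDistributeFieldIS(dm, sf, origSection, origIS, newSection, &newIS);CHKERRQ(ierr);
*/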
860 
861 #undef __FUNCT__
862 #define __FUNCT__ "DMPlexDistributeData"
863 /*@
864   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
865 
866   Collective on DM
867 
868   Input Parameters:
869 + dm - The DMPlex object
870 . pointSF - The PetscSF describing the communication pattern
871 . originalSection - The PetscSection for existing data layout
872 . datatype - The type of data
873 - originalData - The existing data
874 
875   Output Parameters:
876 + newSection - The PetscSection describing the new data layout
877 - newData - The new data
878 
879   Level: developer
880 
881 .seealso: DMPlexDistribute(), DMPlexDistributeField()
882 @*/
883 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
884 {
885   PetscSF        fieldSF;
886   PetscInt      *remoteOffsets, fieldSize;
887   PetscMPIInt    dataSize;
888   PetscErrorCode ierr;
889 
890   PetscFunctionBegin;
891   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
892   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
893 
894   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
895   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
896   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
897 
898   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
899   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
900   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
901   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
902   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
903   PetscFunctionReturn(0);
904 }
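
/*
  A minimal usage sketch (error checking abbreviated): the output buffer is allocated with
  PetscMalloc() inside the routine and must be freed by the caller.  Here the data are
  assumed to be PetscInt values, so MPIU_INT is used as the datatype.

      PetscSection newSection;
      PetscInt    *newData;

      ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
      ierr = DMPlexDistributeData(dm, sf, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
      ierr = PetscFree(newData);CHKERRQ(ierr);
*/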
905 
906 #undef __FUNCT__
907 #define __FUNCT__ "DMPlexDistributeCones"
908 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
909 {
910   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
911   MPI_Comm               comm;
912   PetscSF                coneSF;
913   PetscSection           originalConeSection, newConeSection;
914   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
915   PetscBool              flg;
916   PetscErrorCode         ierr;
917 
918   PetscFunctionBegin;
919   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
920   PetscValidPointer(dmParallel,4);
921   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
922 
923   /* Distribute cone section */
924   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
925   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
926   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
927   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
928   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
929   {
930     PetscInt pStart, pEnd, p;
931 
932     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
933     for (p = pStart; p < pEnd; ++p) {
934       PetscInt coneSize;
935       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
936       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
937     }
938   }
939   /* Communicate and renumber cones */
940   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
941   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
942   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
943   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
944   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
945   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
946   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
947 #if defined(PETSC_USE_DEBUG)
948   {
949     PetscInt  p;
950     PetscBool valid = PETSC_TRUE;
951     for (p = 0; p < newConesSize; ++p) {
952       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
953     }
954     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
955   }
956 #endif
957   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
958   if (flg) {
959     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
960     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
961     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
962     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
963     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
964   }
965   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
966   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
967   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
968   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
969   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
970   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
971   /* Create supports and stratify sieve */
972   {
973     PetscInt pStart, pEnd;
974 
975     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
976     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
977   }
978   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
979   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
980   PetscFunctionReturn(0);
981 }
982 
983 #undef __FUNCT__
984 #define __FUNCT__ "DMPlexDistributeCoordinates"
985 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
986 {
987   MPI_Comm         comm;
988   PetscSection     originalCoordSection, newCoordSection;
989   Vec              originalCoordinates, newCoordinates;
990   PetscInt         bs;
991   const char      *name;
992   const PetscReal *maxCell, *L;
993   PetscErrorCode   ierr;
994 
995   PetscFunctionBegin;
996   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
997   PetscValidPointer(dmParallel, 3);
998 
999   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1000   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1001   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1002   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1003   if (originalCoordinates) {
1004     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1005     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1006     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1007 
1008     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1009     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1010     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1011     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1012     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1013   }
1014   ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
1015   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 #undef __FUNCT__
1020 #define __FUNCT__ "DMPlexDistributeLabels"
1021 /* Here we are assuming that process 0 always has everything */
1022 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1023 {
1024   MPI_Comm       comm;
1025   PetscMPIInt    rank;
1026   PetscInt       numLabels, numLocalLabels, l;
1027   PetscBool      hasLabels = PETSC_FALSE;
1028   PetscErrorCode ierr;
1029 
1030   PetscFunctionBegin;
1031   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1032   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1033   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1034   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1035   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1036 
1037   /* Everyone must have either the same number of labels, or none */
1038   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1039   numLabels = numLocalLabels;
1040   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1041   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1042   for (l = numLabels-1; l >= 0; --l) {
1043     DMLabel     label = NULL, labelNew = NULL;
1044     PetscBool   isdepth;
1045 
1046     if (hasLabels) {
1047       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1048       /* Skip "depth" because it is recreated */
1049       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1050     }
1051     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1052     if (isdepth) continue;
1053     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1054     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1055   }
1056   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1057   PetscFunctionReturn(0);
1058 }
1059 
1060 #undef __FUNCT__
1061 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1062 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1063 {
1064   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1065   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1066   MPI_Comm        comm;
1067   const PetscInt *gpoints;
1068   PetscInt        dim, depth, n, d;
1069   PetscErrorCode  ierr;
1070 
1071   PetscFunctionBegin;
1072   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1073   PetscValidPointer(dmParallel, 4);
1074 
1075   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1076   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1077 
1078   /* Setup hybrid structure */
1079   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1080   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1081   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1082   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1083   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1084   for (d = 0; d <= dim; ++d) {
1085     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1086 
1087     if (pmax < 0) continue;
1088     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1089     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1090     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1091     for (p = 0; p < n; ++p) {
1092       const PetscInt point = gpoints[p];
1093 
1094       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1095     }
1096     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1097     else            pmesh->hybridPointMax[d] = -1;
1098   }
1099   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1100   PetscFunctionReturn(0);
1101 }
1102 
1103 #undef __FUNCT__
1104 #define __FUNCT__ "DMPlexDistributeSetupTree"
1105 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1106 {
1107   MPI_Comm        comm;
1108   DM              refTree;
1109   PetscSection    origParentSection, newParentSection;
1110   PetscInt        *origParents, *origChildIDs;
1111   PetscBool       flg;
1112   PetscErrorCode  ierr;
1113 
1114   PetscFunctionBegin;
1115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1116   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1117   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1118 
1119   /* Set up tree */
1120   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1121   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1122   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1123   if (origParentSection) {
1124     PetscInt        pStart, pEnd;
1125     PetscInt        *newParents, *newChildIDs;
1126     PetscInt        *remoteOffsetsParents, newParentSize;
1127     PetscSF         parentSF;
1128 
1129     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1130     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1131     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1132     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1133     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1134     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1135     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1136     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1137     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1138     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1139     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1140     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1141     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1142     if (flg) {
1143       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1144       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1145       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1146       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1147       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1148     }
1149     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1150     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1151     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1152     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1153   }
1154   PetscFunctionReturn(0);
1155 }
1156 
1157 #undef __FUNCT__
1158 #define __FUNCT__ "DMPlexDistributeSF"
1159 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
1160 {
1161   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1162   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1163   PetscMPIInt            rank, numProcs;
1164   MPI_Comm               comm;
1165   PetscErrorCode         ierr;
1166 
1167   PetscFunctionBegin;
1168   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1169   PetscValidPointer(dmParallel,7);
1170 
1171   /* Create point SF for parallel mesh */
1172   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1173   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1174   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1175   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1176   {
1177     const PetscInt *leaves;
1178     PetscSFNode    *remotePoints, *rowners, *lowners;
1179     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1180     PetscInt        pStart, pEnd;
1181 
1182     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1183     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1184     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1185     for (p=0; p<numRoots; p++) {
1186       rowners[p].rank  = -1;
1187       rowners[p].index = -1;
1188     }
1189     if (origPart) {
1190       /* Make sure points in the original partition are not assigned to other procs */
1191       const PetscInt *origPoints;
1192 
1193       ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
1194       for (p = 0; p < numProcs; ++p) {
1195         PetscInt dof, off, d;
1196 
1197         ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
1198         ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
1199         for (d = off; d < off+dof; ++d) {
1200           rowners[origPoints[d]].rank = p;
1201         }
1202       }
1203       ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
1204     }
1205     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1206     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1207     for (p = 0; p < numLeaves; ++p) {
1208       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1209         lowners[p].rank  = rank;
1210         lowners[p].index = leaves ? leaves[p] : p;
1211       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1212         lowners[p].rank  = -2;
1213         lowners[p].index = -2;
1214       }
1215     }
1216     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC does not use them */
1217       rowners[p].rank  = -3;
1218       rowners[p].index = -3;
1219     }
1220     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1221     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1222     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1223     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1224     for (p = 0; p < numLeaves; ++p) {
1225       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1226       if (lowners[p].rank != rank) ++numGhostPoints;
1227     }
1228     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1229     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1230     for (p = 0, gp = 0; p < numLeaves; ++p) {
1231       if (lowners[p].rank != rank) {
1232         ghostPoints[gp]        = leaves ? leaves[p] : p;
1233         remotePoints[gp].rank  = lowners[p].rank;
1234         remotePoints[gp].index = lowners[p].index;
1235         ++gp;
1236       }
1237     }
1238     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
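    /* The parallel point SF uses the local chart [pStart, pEnd) as roots and the ghost points as
       leaves, each leaf pointing at the owning (rank, index) resolved above */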
1239     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1240     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1241   }
1242   pmesh->useCone    = mesh->useCone;
1243   pmesh->useClosure = mesh->useClosure;
1244   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1245   PetscFunctionReturn(0);
1246 }
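
/* The following is a minimal, self-contained sketch (not part of the original source) of the
   bid/claim ownership-resolution pattern used in DMPlexDistributeSF() above: existing claims are
   broadcast to the leaves, unclaimed leaves bid with their own rank and local point, a MAXLOC
   reduction over MPIU_2INT picks the highest-ranked bidder, and the winners are broadcast back.
   The function name and the assumption that sf is some existing PetscSF are hypothetical and for
   illustration only; it compiles in the context of this file's includes. */
static PetscErrorCode ResolvePointOwnership_Sketch(PetscSF sf, PetscMPIInt rank)
{
  PetscSFNode    *rowners, *lowners;
  const PetscInt *leaves;
  PetscInt        numRoots, numLeaves, p;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscSFGetGraph(sf, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
  ierr = PetscMalloc2(numRoots, &rowners, numLeaves, &lowners);CHKERRQ(ierr);
  for (p = 0; p < numRoots; ++p) {rowners[p].rank = -1; rowners[p].index = -1;} /* no prior claims */
  ierr = PetscSFBcastBegin(sf, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
  for (p = 0; p < numLeaves; ++p) { /* bid for unclaimed points with our own (rank, point) */
    if (lowners[p].rank < 0) {
      lowners[p].rank  = rank;
      lowners[p].index = leaves ? leaves[p] : p;
    }
  }
  for (p = 0; p < numRoots; ++p) {rowners[p].rank = -3; rowners[p].index = -3;} /* any bid beats -3 */
  ierr = PetscSFReduceBegin(sf, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf, MPIU_2INT, rowners, lowners);CHKERRQ(ierr); /* tell every leaf its owner */
  ierr = PetscSFBcastEnd(sf, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
  ierr = PetscFree2(rowners, lowners);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}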
1247 
1248 #undef __FUNCT__
1249 #define __FUNCT__ "DMPlexDistribute"
1250 /*@C
1251   DMPlexDistribute - Distributes the mesh and any associated sections.
1252 
1253   Collective on DM
1254 
1255   Input Parameters:
1256 + dm  - The original DMPlex object
1257 - overlap - The overlap of partitions, 0 is the default
1258 
1259   Output Parameters:
1260 + sf - The PetscSF used for point distribution (pass NULL if it is not needed)
1261 - dmParallel - The distributed DMPlex object, or NULL
1262 
1263   Note: If the mesh was not distributed (e.g. it is run on a single process), *dmParallel is set to NULL.
1264 
1265   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1266   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1267   representation on the mesh. A usage sketch is given after the function body below.
1268 
1269   Level: intermediate
1270 
1271 .keywords: mesh, elements
1272 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1273 @*/
1274 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1275 {
1276   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1277   MPI_Comm               comm;
1278   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1279   DM                     dmOverlap;
1280   IS                     cellPart,        part;
1281   PetscSection           cellPartSection, partSection;
1282   PetscSFNode           *remoteRanks, *newRemote;
1283   const PetscSFNode     *oldRemote;
1284   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1285   ISLocalToGlobalMapping renumbering;
1286   PetscBool              flg;
1287   PetscMPIInt            rank, numProcs, p;
1288   PetscErrorCode         ierr;
1289 
1290   PetscFunctionBegin;
1291   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1292   if (sf) PetscValidPointer(sf,4);
1293   PetscValidPointer(dmParallel,5);
1294 
1295   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1296   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1297   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1298   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1299 
1300   *dmParallel = NULL;
1301   if (numProcs == 1) PetscFunctionReturn(0);
1302 
1303   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1304   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1305   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1306   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1307   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1308   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1309   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1310   if (!rank) numRemoteRanks = numProcs;
1311   else       numRemoteRanks = 0;
1312   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1313   for (p = 0; p < numRemoteRanks; ++p) {
1314     remoteRanks[p].rank  = p;
1315     remoteRanks[p].index = 0;
1316   }
1317   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1318   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
1319   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1320   if (flg) {
1321     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1322     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1323     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1324     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1325   }
1326   /* Close the partition over the mesh */
1327   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1328   /* Create new mesh */
1329   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1330   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1331   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1332   pmesh = (DM_Plex*) (*dmParallel)->data;
1333   pmesh->useAnchors = mesh->useAnchors;
1334 
1335   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1336   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1337   if (flg) {
1338     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1339     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1340     ierr = ISView(part, NULL);CHKERRQ(ierr);
1341     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1342     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1343     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1344   }
1345   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1346 
1347   /* Migrate data to a non-overlapping parallel DM */
1348   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1349   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1350   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1351   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1352   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1353 
1354   /* Build the point SF without overlap */
1355   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1356   if (flg) {
1357     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1358     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1359   }
1360 
1361   if (overlap > 0) {
1362     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1363     /* Add the partition overlap to the distributed DM */
1364     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1365     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1366     *dmParallel = dmOverlap;
1367     if (flg) {
1368       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1369       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1370     }
1371 
1372     /* Re-map the pointSF to establish the full migration pattern */
1373     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1374     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1375     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1376     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1377     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
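    /* newRemote now points each leaf of the overlapping mesh back to its original serial root,
       composing the overlap migration with the original point migration */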
1378     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1379     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1380     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1381     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1382     pointSF = overlapPointSF;
1383     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1384   }
1385   /* Cleanup Partition */
1386   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1387   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1388   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1389   ierr = ISDestroy(&part);CHKERRQ(ierr);
1390   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1391   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1392   /* Copy BC */
1393   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1394   /* Cleanup */
1395   if (sf) {*sf = pointSF;}
1396   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1397   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1398   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1399   PetscFunctionReturn(0);
1400 }
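
/* A minimal usage sketch (not part of the original source) of DMPlexDistribute(): select the
   adjacency definition, distribute, and replace the serial mesh with the parallel one.  The
   function and variable names below (DistributeMesh_Sketch, dmDist) are illustrative only. */
static PetscErrorCode DistributeMesh_Sketch(DM *dm)
{
  DM             dmDist = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Choose the adjacency relation appropriate for the discretization; see DMPlexSetAdjacencyUseCone() */
  ierr = DMPlexSetAdjacencyUseCone(*dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(*dm, PETSC_TRUE);CHKERRQ(ierr);
  /* Distribute with no partition overlap; the point SF output is not needed here, so pass NULL */
  ierr = DMPlexDistribute(*dm, 0, NULL, &dmDist);CHKERRQ(ierr);
  if (dmDist) { /* NULL on a single process, where no distribution takes place */
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = dmDist;
  }
  PetscFunctionReturn(0);
}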
1401 
1402 #undef __FUNCT__
1403 #define __FUNCT__ "DMPlexDistributeOverlap"
1404 /*@C
1405   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1406 
1407   Collective on DM
1408 
1409   Input Parameters:
1410 + dm  - The non-overlapping distributed DMPlex object
1411 . overlap - The overlap of partitions, 0 is the default
1412 - renumbering - The ISLocalToGlobalMapping from local points of dm to their original global numbers, produced when dm was distributed

1413   Output Parameters:
1414 + sf - The PetscSF used for point migration to the overlapping mesh (pass NULL if it is not needed)
1415 - dmOverlap - The overlapping distributed DMPlex object
1416 
1417   Note: The sf output, if requested, is the migration PetscSF that maps points of the non-overlapping mesh to the overlapping one.
1418 
1419   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1420   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1421   representation on the mesh.
1422 
1423   Level: intermediate
1424 
1425 .keywords: mesh, elements
1426 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1427 @*/
1428 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1429 {
1430   MPI_Comm               comm;
1431   PetscMPIInt            rank;
1432   PetscSection           rootSection, leafSection;
1433   IS                     rootrank, leafrank;
1434   PetscSection           coneSection;
1435   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1436   PetscSFNode           *ghostRemote;
1437   const PetscSFNode     *overlapRemote;
1438   ISLocalToGlobalMapping overlapRenumbering;
1439   const PetscInt        *renumberingArray, *overlapLocal;
1440   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1441   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1442   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1443   PetscErrorCode         ierr;
1444 
1445   PetscFunctionBegin;
1446   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1447   if (sf) PetscValidPointer(sf, 3);
1448   PetscValidPointer(dmOverlap, 4);
1449 
1450   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1451   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1452   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1453 
1454   /* Compute point overlap with neighbouring processes on the distributed DM */
1455   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1456   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1457   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1458   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1459   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1460   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1461   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1462   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1463   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1464   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1465 
1466   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1467   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1468 
1469   /* Convert cones to global numbering before migrating them */
1470   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1471   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1472   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1473   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1474 
1475   /* Derive the new local-to-global mapping from the old one */
1476   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1477   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1478   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1479   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1480   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1481   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1482 
1483   /* Build the overlapping DM */
1484   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1485   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1486   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1487   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1488   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1489   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1490   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1491 
1492   /* Build the new point SF by propagating the new (depth-shifted) local point numbers to generate remote root indices */
1493   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1494   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1495   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1496   numGhostPoints = numSharedPoints + numOverlapPoints;
1497   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1498   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1499   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1500   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
1501   for (p=0; p<overlapLeaves; p++) {
1502     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1503   }
1504   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1505   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
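  /* For each leaf received from another rank, recvPointIDs[p] now holds the owner's new local
     number for the corresponding root point, which becomes the remote index of the ghost below */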
1506   for (idx=0, p=0; p<overlapLeaves; p++) {
1507     if (overlapRemote[p].rank != rank) {
1508       ghostLocal[idx] = overlapLocal[p];
1509       ghostRemote[idx].index = recvPointIDs[p];
1510       ghostRemote[idx].rank = overlapRemote[p].rank;
1511       idx++;
1512     }
1513   }
1514   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1515   ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1516   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1517   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1518   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1519   /* Cleanup overlap partition */
1520   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1521   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1522   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1523   if (sf) *sf = migrationSF;
1524   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1525   PetscFunctionReturn(0);
1526 }
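
/* A minimal sketch (not part of the original source) of the pattern used in
   DMPlexDistributeOverlap() above to carry a local-to-global point numbering through a migration
   PetscSF: the block indices of the old mapping are broadcast from the roots to the leaves of the
   migration SF and wrapped in a new ISLocalToGlobalMapping.  The names (PropagateRenumbering_Sketch,
   migrationSF, oldL2G) are hypothetical, and the sketch assumes the migration SF's leaves are the
   contiguously numbered points of the migrated mesh, as in the dense migration SF built above. */
static PetscErrorCode PropagateRenumbering_Sketch(MPI_Comm comm, PetscSF migrationSF, ISLocalToGlobalMapping oldL2G, ISLocalToGlobalMapping *newL2G)
{
  const PetscInt *oldIndices;
  PetscInt       *newIndices;
  PetscInt        nleaves;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscSFGetGraph(migrationSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves, &newIndices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockIndices(oldL2G, &oldIndices);CHKERRQ(ierr);
  /* Each leaf receives the original global number of the root point it migrates from */
  ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) oldIndices, newIndices);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) oldIndices, newIndices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreBlockIndices(oldL2G, &oldIndices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, newIndices, PETSC_OWN_POINTER, newL2G);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}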
1527