xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision bdd2d7519b0c721ec0e026c4dbaee90bb0134da7)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petscsf.h>
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
6 /*@
7   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
8 
9   Input Parameters:
10 + dm      - The DM object
11 - useCone - Flag to use the cone first
12 
13   Level: intermediate
14 
15   Notes:
16 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
17 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
18 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
19 
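  Sample usage:
  A minimal sketch (assuming dm is a DMPLEX) that selects FVM-style adjacency by combining this flag with DMPlexSetAdjacencyUseClosure():
$    ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
$    ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
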
20 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
21 @*/
22 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
23 {
24   DM_Plex *mesh = (DM_Plex *) dm->data;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
28   mesh->useCone = useCone;
29   PetscFunctionReturn(0);
30 }
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
34 /*@
35   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
36 
37   Input Parameter:
38 . dm      - The DM object
39 
40   Output Parameter:
41 . useCone - Flag to use the cone first
42 
43   Level: intermediate
44 
45   Notes:
46 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
47 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
48 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
49 
50 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
51 @*/
52 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
53 {
54   DM_Plex *mesh = (DM_Plex *) dm->data;
55 
56   PetscFunctionBegin;
57   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
58   PetscValidIntPointer(useCone, 2);
59   *useCone = mesh->useCone;
60   PetscFunctionReturn(0);
61 }
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
65 /*@
66   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
67 
68   Input Parameters:
69 + dm      - The DM object
70 - useClosure - Flag to use the closure
71 
72   Level: intermediate
73 
74   Notes:
75 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
76 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
77 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
78 
79 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
80 @*/
81 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
82 {
83   DM_Plex *mesh = (DM_Plex *) dm->data;
84 
85   PetscFunctionBegin;
86   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
87   mesh->useClosure = useClosure;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
93 /*@
94   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
95 
96   Input Parameter:
97 . dm      - The DM object
98 
99   Output Parameter:
100 . useClosure - Flag to use the closure
101 
102   Level: intermediate
103 
104   Notes:
105 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
106 $     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
107 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
108 
109 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
110 @*/
111 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
112 {
113   DM_Plex *mesh = (DM_Plex *) dm->data;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
117   PetscValidIntPointer(useClosure, 2);
118   *useClosure = mesh->useClosure;
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
124 /*@
125   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
126 
127   Input Parameters:
128 + dm      - The DM object
129 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
130 
131   Level: intermediate
132 
133 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
134 @*/
135 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
136 {
137   DM_Plex *mesh = (DM_Plex *) dm->data;
138 
139   PetscFunctionBegin;
140   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
141   mesh->useAnchors = useAnchors;
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
147 /*@
148   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
149 
150   Input Parameter:
151 . dm      - The DM object
152 
153   Output Parameter:
154 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
155 
156   Level: intermediate
157 
158 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
159 @*/
160 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
161 {
162   DM_Plex *mesh = (DM_Plex *) dm->data;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
166   PetscValidIntPointer(useAnchors, 2);
167   *useAnchors = mesh->useAnchors;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
173 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
174 {
175   const PetscInt *cone = NULL;
176   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
177   PetscErrorCode  ierr;
178 
179   PetscFunctionBeginHot;
180   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
181   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
182   for (c = 0; c < coneSize; ++c) {
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, cone[c], &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, cone[c], &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
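      /* Linear search: append support[s] to adj[] only if it is not already present */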
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s < supportSize; ++s) {
211     const PetscInt *cone = NULL;
212     PetscInt        coneSize, c, q;
213 
214     ierr = DMPlexGetConeSize(dm, support[s], &coneSize);CHKERRQ(ierr);
215     ierr = DMPlexGetCone(dm, support[s], &cone);CHKERRQ(ierr);
216     for (c = 0; c < coneSize; ++c) {
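      /* Linear search: append cone[c] to adj[] only if it is not already present */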
217       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
218         if (cone[c] == adj[q]) break;
219       }
220       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
221     }
222   }
223   *adjSize = numAdj;
224   PetscFunctionReturn(0);
225 }
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
229 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
230 {
231   PetscInt      *star = NULL;
232   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
233   PetscErrorCode ierr;
234 
235   PetscFunctionBeginHot;
236   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
237   for (s = 0; s < starSize*2; s += 2) {
238     const PetscInt *closure = NULL;
239     PetscInt        closureSize, c, q;
240 
241     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
242     for (c = 0; c < closureSize*2; c += 2) {
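      /* Linear search: append closure[c] to adj[] only if it is not already present */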
243       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
244         if (closure[c] == adj[q]) break;
245       }
246       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
247     }
248     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
249   }
250   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
251   *adjSize = numAdj;
252   PetscFunctionReturn(0);
253 }
254 
255 #undef __FUNCT__
256 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
257 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
258 {
259   static PetscInt asiz = 0;
260   PetscInt maxAnchors = 1;
261   PetscInt aStart = -1, aEnd = -1;
262   PetscInt maxAdjSize;
263   PetscSection aSec = NULL;
264   IS aIS = NULL;
265   const PetscInt *anchors;
266   PetscErrorCode  ierr;
267 
268   PetscFunctionBeginHot;
269   if (useAnchors) {
270     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
271     if (aSec) {
272       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
273       maxAnchors = PetscMax(1,maxAnchors);
274       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
275       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
276     }
277   }
278   if (!*adj) {
279     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
280 
281     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
282     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
283     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
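    /* Conservative upper bound on the adjacency size: geometric series over the cone/support fan-out through the full depth of the DAG, later scaled by the maximum number of anchors and clipped to the chart size */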
284     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
285     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
286     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
287     asiz *= maxAnchors;
288     asiz  = PetscMin(asiz,pEnd-pStart);
289     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
290   }
291   if (*adjSize < 0) *adjSize = asiz;
292   maxAdjSize = *adjSize;
293   if (useTransitiveClosure) {
294     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
295   } else if (useCone) {
296     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
297   } else {
298     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
299   }
300   if (useAnchors && aSec) {
301     PetscInt origSize = *adjSize;
302     PetscInt numAdj = origSize;
303     PetscInt i = 0, j;
304     PetscInt *orig = *adj;
305 
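    /* Replace each anchored (constrained) point in adj[] with its anchor points, keeping the list free of duplicates */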
306     while (i < origSize) {
307       PetscInt p = orig[i];
308       PetscInt aDof = 0;
309 
310       if (p >= aStart && p < aEnd) {
311         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
312       }
313       if (aDof) {
314         PetscInt aOff;
315         PetscInt s, q;
316 
317         for (j = i + 1; j < numAdj; j++) {
318           orig[j - 1] = orig[j];
319         }
320         origSize--;
321         numAdj--;
322         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
323         for (s = 0; s < aDof; ++s) {
324           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
325             if (anchors[aOff+s] == orig[q]) break;
326           }
327           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
328         }
329       }
330       else {
331         i++;
332       }
333     }
334     *adjSize = numAdj;
335     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
336   }
337   PetscFunctionReturn(0);
338 }
339 
340 #undef __FUNCT__
341 #define __FUNCT__ "DMPlexGetAdjacency"
342 /*@
343   DMPlexGetAdjacency - Return all points adjacent to the given point
344 
345   Input Parameters:
346 + dm - The DM object
347 . p  - The point
348 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
349 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
350 
351   Output Parameters:
352 + adjSize - The number of adjacent points
353 - adj - The adjacent points
354 
355   Level: advanced
356 
357   Notes: The user must PetscFree the adj array if it was not passed in.
358 
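  Sample usage:
  A minimal sketch (assuming dm is a DMPLEX and p is a valid mesh point) that lets the routine allocate the array and then frees it:
$    PetscInt *adj = NULL, adjSize = PETSC_DETERMINE, a;
$    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
$    for (a = 0; a < adjSize; ++a) {/* use adj[a] */}
$    ierr = PetscFree(adj);CHKERRQ(ierr);
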
359 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
360 @*/
361 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
362 {
363   DM_Plex       *mesh = (DM_Plex *) dm->data;
364   PetscErrorCode ierr;
365 
366   PetscFunctionBeginHot;
367   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
368   PetscValidPointer(adjSize,3);
369   PetscValidPointer(adj,4);
370   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
371   PetscFunctionReturn(0);
372 }
373 #undef __FUNCT__
374 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
375 /*@
376   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
377 
378   Collective on DM
379 
380   Input Parameters:
381 + dm      - The DM
382 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
383 
384   Output Parameters:
385 + processRanks - A list of process neighbors, or NULL
386 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
387 
388   Level: developer
389 
390 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
391 @*/
392 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
393 {
394   const PetscSFNode *remotePoints;
395   PetscInt          *localPointsNew;
396   PetscSFNode       *remotePointsNew;
397   const PetscInt    *nranks;
398   PetscInt          *ranksNew;
399   PetscBT            neighbors;
400   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
401   PetscMPIInt        numProcs, proc, rank;
402   PetscErrorCode     ierr;
403 
404   PetscFunctionBegin;
405   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
406   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
407   if (processRanks) {PetscValidPointer(processRanks, 3);}
408   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
409   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
410   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
411   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
412   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
413   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
414   /* Compute root-to-leaf process connectivity */
415   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
416   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
417   for (p = pStart; p < pEnd; ++p) {
418     PetscInt ndof, noff, n;
419 
420     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
421     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
422     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
423   }
424   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
425   /* Compute leaf-to-neighbor process connectivity */
426   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
427   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
428   for (p = pStart; p < pEnd; ++p) {
429     PetscInt ndof, noff, n;
430 
431     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
432     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
433     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
434   }
435   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
436   /* Compute leaf-to-root process connectivity */
437   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
438   /* Calculate edges */
439   PetscBTClear(neighbors, rank);
440   for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
441   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
442   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
443   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
444   for (proc = 0, n = 0; proc < numProcs; ++proc) {
445     if (PetscBTLookup(neighbors, proc)) {
446       ranksNew[n]              = proc;
447       localPointsNew[n]        = proc;
448       remotePointsNew[n].index = rank;
449       remotePointsNew[n].rank  = proc;
450       ++n;
451     }
452   }
453   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
454   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
455   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
456   if (sfProcess) {
457     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
458     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
459     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
460     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
461   }
462   PetscFunctionReturn(0);
463 }
464 
465 #undef __FUNCT__
466 #define __FUNCT__ "DMPlexDistributeOwnership"
467 /*@
468   DMPlexDistributeOwnership - Compute owner information for shared points. This provides the two-sided (root and leaf) view of ownership that the one-sided point PetscSF does not carry.
469 
470   Collective on DM
471 
472   Input Parameter:
473 . dm - The DM
474 
475   Output Parameters:
476 + rootSection - The number of leaves for a given root point
477 . rootrank    - The rank of each edge into the root point
478 . leafSection - The number of processes sharing a given leaf point
479 - leafrank    - The rank of each process sharing a leaf point
480 
481   Level: developer
482 
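  Sample usage:
  A sketch of the intended calling sequence, feeding the ownership information to DMPlexCreateOverlap(); comm and the local variables are assumed to be set up by the caller:
$    PetscSection rootSection, leafSection;
$    IS           rootrank, leafrank;
$    PetscSF      overlapSF;
$    ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
$    ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
$    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
$    ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
$    /* destroy the sections, index sets, and SF when no longer needed */
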
483 .seealso: DMPlexCreateOverlap()
484 @*/
485 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
486 {
487   MPI_Comm        comm;
488   PetscSF         sfPoint;
489   const PetscInt *rootdegree;
490   PetscInt       *myrank, *remoterank;
491   PetscInt        pStart, pEnd, p, nedges;
492   PetscMPIInt     rank;
493   PetscErrorCode  ierr;
494 
495   PetscFunctionBegin;
496   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
497   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
498   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
499   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
500   /* Compute number of leaves for each root */
501   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
502   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
503   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
504   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
505   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
506   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
507   /* Gather rank of each leaf to root */
508   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
509   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
510   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
511   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
512   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
513   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
514   ierr = PetscFree(myrank);CHKERRQ(ierr);
515   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
516   /* Distribute remote ranks to leaves */
517   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
518   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
519   PetscFunctionReturn(0);
520 }
521 
522 #undef __FUNCT__
523 #define __FUNCT__ "DMPlexCreateOverlap"
524 /*@C
525   DMPlexCreateOverlap - Compute the points that should be copied to each process to create the requested mesh overlap, expressed as a PetscSF.
526 
527   Collective on DM
528 
529   Input Parameters:
530 + dm          - The DM
531 . rootSection - The number of leaves for a given root point
532 . rootrank    - The rank of each edge into the root point
533 . leafSection - The number of processes sharing a given leaf point
534 - leafrank    - The rank of each process sharing a leaf point
535 
536   Output Parameter:
. overlapSF - The PetscSF mapping each new overlap point to be received by this process to the remote (point, rank) pair that owns it
541 
542   Level: developer
543 
544 .seealso: DMPlexDistributeOwnership()
545 @*/
546 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
547 {
548   MPI_Comm           comm;
549   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
550   PetscSF            sfPoint, sfProc;
551   IS                 valueIS;
552   DMLabel            ovLeafLabel;
553   const PetscSFNode *remote;
554   const PetscInt    *local;
555   const PetscInt    *nrank, *rrank, *neighbors;
556   PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
557   PetscSection       ovRootSection, ovLeafSection;
558   PetscInt          *adj = NULL;
559   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
560   PetscInt           idx, numRemote;
561   PetscMPIInt        rank, numProcs;
562   PetscBool          flg;
563   PetscErrorCode     ierr;
564 
565   PetscFunctionBegin;
566   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
567   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
568   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
569   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
570   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
571   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
572   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
573   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
574   /* Handle leaves: shared with the root point */
575   for (l = 0; l < nleaves; ++l) {
576     PetscInt adjSize = PETSC_DETERMINE, a;
577 
578     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
579     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
580   }
581   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
582   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
583   /* Handle roots */
584   for (p = pStart; p < pEnd; ++p) {
585     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
586 
587     if ((p >= sStart) && (p < sEnd)) {
588       /* Some leaves share a root with other leaves on different processes */
589       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
590       if (neighbors) {
591         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
592         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
593         for (n = 0; n < neighbors; ++n) {
594           const PetscInt remoteRank = nrank[noff+n];
595 
596           if (remoteRank == rank) continue;
597           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
598         }
599       }
600     }
601     /* Roots are shared with leaves */
602     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
603     if (!neighbors) continue;
604     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
605     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
606     for (n = 0; n < neighbors; ++n) {
607       const PetscInt remoteRank = rrank[noff+n];
608 
609       if (remoteRank == rank) continue;
610       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
611     }
612   }
613   ierr = PetscFree(adj);CHKERRQ(ierr);
614   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
615   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
616   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
617   if (flg) {
618     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
619     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
620   }
621   /* Convert to (point, rank) and use actual owners */
622   ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
623   ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
624   ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
625   ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
626   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
627   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
628   for (n = 0; n < numNeighbors; ++n) {
629     PetscInt numPoints;
630 
631     ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
632     ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
633   }
634   ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
635   ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
636   ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
637   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
638   for (n = 0; n < numNeighbors; ++n) {
639     IS              pointIS;
640     const PetscInt *points;
641     PetscInt        off, numPoints, p;
642 
643     ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
644     ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
645     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
646     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
647     for (p = 0; p < numPoints; ++p) {
648       ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
649       if (l >= 0) {ovRootPoints[off+p] = remote[l];}
650       else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
651     }
652     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
653     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
654   }
655   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
656   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
657   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
658   /* Make process SF */
659   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
660   if (flg) {
661     ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
662   }
663   /* Communicate overlap */
664   ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
665   ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
666   ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
667   /* Filter remote contributions (ovLeafPoints) into the overlapSF */
668   ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
669   ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
670   for (p = 0; p < ovSize; p++) {
671     /* Don't import points from yourself */
672     if (ovLeafPoints[p].rank == rank) continue;
673     ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
674   }
675   /* Don't import points already in the pointSF */
676   for (l = 0; l < nleaves; ++l) {
677     ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
678   }
679   for (numRemote = 0, n = 0; n < numProcs; ++n) {
680     PetscInt numPoints;
681     ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
682     numRemote += numPoints;
683   }
684   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
685   for (idx = 0, n = 0; n < numProcs; ++n) {
686     IS remoteRootIS;
687     PetscInt numPoints;
688     const PetscInt *remoteRoots;
689     ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
690     if (numPoints <= 0) continue;
691     ierr = DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);CHKERRQ(ierr);
692     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
693     for (p = 0; p < numPoints; p++) {
694       remotePoints[idx].index = remoteRoots[p];
695       remotePoints[idx].rank = n;
696       idx++;
697     }
698     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
699   }
700   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);CHKERRQ(ierr);
701   ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
702   ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
703   ierr = PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
704   if (flg) {
705     ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
706     ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
707   }
708   /* Clean up */
709   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
710   ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
711   ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
712   ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
713   ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
714   PetscFunctionReturn(0);
715 }
716 
717 #undef __FUNCT__
718 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
719 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
720 {
721   MPI_Comm           comm;
722   PetscMPIInt        rank, numProcs;
723   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
724   PetscInt          *pointDepths, *remoteDepths, *ilocal;
725   PetscInt          *depthRecv, *depthShift, *depthIdx;
726   PetscSFNode       *iremote;
727   PetscSF            pointSF;
728   const PetscInt    *sharedLocal;
729   const PetscSFNode *overlapRemote, *sharedRemote;
730   PetscErrorCode     ierr;
731 
732   PetscFunctionBegin;
733   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
734   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
735   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
736   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
737   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
738 
739   /* Before building the migration SF we need to know the new stratum offsets */
740   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
741   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
742   for (d=0; d<dim+1; d++) {
743     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
744     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
745   }
746   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
747   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
748   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
749 
750   /* Count received points in each stratum and compute the internal strata shift */
751   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
752   for (d=0; d<dim+1; d++) depthRecv[d]=0;
753   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
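  /* depthShift[d] is the offset applied to the existing local points of depth d; the points received into each stratum are then appended immediately after the shifted local stratum */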
754   depthShift[dim] = 0;
755   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
756   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
757   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
758   for (d=0; d<dim+1; d++) {
759     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
760     depthIdx[d] = pStart + depthShift[d];
761   }
762 
763   /* Build an SF that describes the full overlap migration: every local point plus the received overlap points */
764   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
765   newLeaves = pEnd - pStart + nleaves;
766   ierr = PetscMalloc2(newLeaves, &ilocal, newLeaves, &iremote);CHKERRQ(ierr);
767   /* First map local points to themselves */
768   for (d=0; d<dim+1; d++) {
769     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
770     for (p=pStart; p<pEnd; p++) {
771       point = p + depthShift[d];
772       ilocal[point] = point;
773       iremote[point].index = p;
774       iremote[point].rank = rank;
775       depthIdx[d]++;
776     }
777   }
778 
779   /* Add in the remote roots for currently shared points */
780   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
781   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
782   for (d=0; d<dim+1; d++) {
783     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
784     for (p=0; p<numSharedPoints; p++) {
785       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
786         point = sharedLocal[p] + depthShift[d];
787         iremote[point].index = sharedRemote[p].index;
788         iremote[point].rank = sharedRemote[p].rank;
789       }
790     }
791   }
792 
793   /* Now add the incoming overlap points */
794   for (p=0; p<nleaves; p++) {
795     point = depthIdx[remoteDepths[p]];
796     ilocal[point] = point;
797     iremote[point].index = overlapRemote[p].index;
798     iremote[point].rank = overlapRemote[p].rank;
799     depthIdx[remoteDepths[p]]++;
800   }
801 
802   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
803   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
804   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
805   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
806   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
807 
808   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
809   PetscFunctionReturn(0);
810 }
811 
812 #undef __FUNCT__
813 #define __FUNCT__ "DMPlexDistributeField"
814 /*@
815   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
816 
817   Collective on DM
818 
819   Input Parameters:
820 + dm - The DMPlex object
821 . pointSF - The PetscSF describing the communication pattern
822 . originalSection - The PetscSection for existing data layout
823 - originalVec - The existing data
824 
825   Output Parameters:
826 + newSection - The PetscSection describing the new data layout
827 - newVec - The new data
828 
829   Level: developer
830 
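  Sample usage:
  A sketch (patterned on DMPlexDistributeCoordinates() in this file) of pushing local coordinates through a migration SF; migrationSF and dmParallel are assumed to come from a previous distribution step:
$    PetscSection origSec, newSec;
$    Vec          origVec, newVec;
$    ierr = DMGetCoordinateSection(dm, &origSec);CHKERRQ(ierr);
$    ierr = DMGetCoordinatesLocal(dm, &origVec);CHKERRQ(ierr);
$    ierr = DMGetCoordinateSection(dmParallel, &newSec);CHKERRQ(ierr);
$    ierr = VecCreate(PetscObjectComm((PetscObject) dmParallel), &newVec);CHKERRQ(ierr);
$    ierr = DMPlexDistributeField(dm, migrationSF, origSec, origVec, newSec, newVec);CHKERRQ(ierr);
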
831 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
832 @*/
833 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
834 {
835   PetscSF        fieldSF;
836   PetscInt      *remoteOffsets, fieldSize;
837   PetscScalar   *originalValues, *newValues;
838   PetscErrorCode ierr;
839 
840   PetscFunctionBegin;
841   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
842   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
843 
844   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
845   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
846   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
847 
848   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
849   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
850   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
851   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
852   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
853   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
854   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
855   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
856   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
857   PetscFunctionReturn(0);
858 }
859 
860 #undef __FUNCT__
861 #define __FUNCT__ "DMPlexDistributeFieldIS"
862 /*@
863   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
864 
865   Collective on DM
866 
867   Input Parameters:
868 + dm - The DMPlex object
869 . pointSF - The PetscSF describing the communication pattern
870 . originalSection - The PetscSection for existing data layout
871 - originalIS - The existing data
872 
873   Output Parameters:
874 + newSection - The PetscSection describing the new data layout
875 - newIS - The new data
876 
877   Level: developer
878 
879 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
880 @*/
881 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
882 {
883   PetscSF         fieldSF;
884   PetscInt       *newValues, *remoteOffsets, fieldSize;
885   const PetscInt *originalValues;
886   PetscErrorCode  ierr;
887 
888   PetscFunctionBegin;
889   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
890   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
891 
892   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
893   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
894 
895   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
896   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
897   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
898   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
899   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
900   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
901   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
902   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
903   PetscFunctionReturn(0);
904 }
905 
906 #undef __FUNCT__
907 #define __FUNCT__ "DMPlexDistributeData"
908 /*@
909   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
910 
911   Collective on DM
912 
913   Input Parameters:
914 + dm - The DMPlex object
915 . pointSF - The PetscSF describing the communication pattern
916 . originalSection - The PetscSection for existing data layout
917 . datatype - The type of data
918 - originalData - The existing data
919 
920   Output Parameters:
921 + newSection - The PetscSection describing the new data layout
922 - newData - The new data
923 
924   Level: developer
925 
926 .seealso: DMPlexDistribute(), DMPlexDistributeField()
927 @*/
928 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
929 {
930   PetscSF        fieldSF;
931   PetscInt      *remoteOffsets, fieldSize;
932   PetscMPIInt    dataSize;
933   PetscErrorCode ierr;
934 
935   PetscFunctionBegin;
936   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
937   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
938 
939   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
940   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
941   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
942 
943   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
944   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
945   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
946   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
947   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
948   PetscFunctionReturn(0);
949 }
950 
951 #undef __FUNCT__
952 #define __FUNCT__ "DMPlexDistributeCones"
953 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
954 {
955   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
956   MPI_Comm               comm;
957   PetscSF                coneSF;
958   PetscSection           originalConeSection, newConeSection;
959   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
960   PetscBool              flg;
961   PetscErrorCode         ierr;
962 
963   PetscFunctionBegin;
964   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
965   PetscValidPointer(dmParallel,4);
966   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
967 
968   /* Distribute cone section */
969   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
970   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
971   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
972   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
973   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
974   {
975     PetscInt pStart, pEnd, p;
976 
977     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
978     for (p = pStart; p < pEnd; ++p) {
979       PetscInt coneSize;
980       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
981       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
982     }
983   }
984   /* Communicate and renumber cones */
985   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
986   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
987   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
988   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
989   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
990   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
991   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
992   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
993   if (flg) {
994     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
995     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
996     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
997     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
998     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
999   }
1000   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1001   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1002   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1003   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1004   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1005   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1006   /* Create supports and stratify sieve */
1007   {
1008     PetscInt pStart, pEnd;
1009 
1010     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1011     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1012   }
1013   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1014   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1015   PetscFunctionReturn(0);
1016 }
1017 
1018 #undef __FUNCT__
1019 #define __FUNCT__ "DMPlexDistributeCoordinates"
1020 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1021 {
1022   MPI_Comm         comm;
1023   PetscSection     originalCoordSection, newCoordSection;
1024   Vec              originalCoordinates, newCoordinates;
1025   PetscInt         bs;
1026   const char      *name;
1027   const PetscReal *maxCell, *L;
1028   PetscErrorCode   ierr;
1029 
1030   PetscFunctionBegin;
1031   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1032   PetscValidPointer(dmParallel, 3);
1033 
1034   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1035   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1036   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1037   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1038   if (originalCoordinates) {
1039     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
1040     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1041     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1042 
1043     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1044     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1045     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1046     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1047     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1048   }
1049   ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
1050   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 #undef __FUNCT__
1055 #define __FUNCT__ "DMPlexDistributeLabels"
1056 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1057 {
1058   MPI_Comm       comm;
1059   PetscMPIInt    rank;
1060   PetscInt       numLabels, l;
1061   PetscErrorCode ierr;
1062 
1063   PetscFunctionBegin;
1064   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1065   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1066   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1067   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1068   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1069 
1070   /* Bcast number of labels */
1071   ierr = DMPlexGetNumLabels(dm, &numLabels);CHKERRQ(ierr);
1072   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1073   for (l = numLabels-1; l >= 0; --l) {
1074     DMLabel     label = NULL, labelNew = NULL;
1075     PetscBool   isdepth;
1076 
1077     if (!rank) {
1078       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1079       /* Skip "depth" because it is recreated */
1080       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1081     }
1082     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1083     if (isdepth) continue;
1084     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1085     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1086   }
1087   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1088   PetscFunctionReturn(0);
1089 }
1090 
1091 #undef __FUNCT__
1092 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1093 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1094 {
1095   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1096   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1097   MPI_Comm        comm;
1098   const PetscInt *gpoints;
1099   PetscInt        dim, depth, n, d;
1100   PetscErrorCode  ierr;
1101 
1102   PetscFunctionBegin;
1103   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1104   PetscValidPointer(dmParallel, 4);
1105 
1106   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1107   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1108 
1109   /* Setup hybrid structure */
1110   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1111   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1112   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1113   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1114   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1115   for (d = 0; d <= dim; ++d) {
1116     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1117 
1118     if (pmax < 0) continue;
1119     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1120     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1121     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1122     for (p = 0; p < n; ++p) {
1123       const PetscInt point = gpoints[p];
1124 
1125       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1126     }
1127     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1128     else            pmesh->hybridPointMax[d] = -1;
1129   }
1130   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1131   PetscFunctionReturn(0);
1132 }
1133 
1134 #undef __FUNCT__
1135 #define __FUNCT__ "DMPlexDistributeSetupTree"
1136 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1137 {
1138   MPI_Comm        comm;
1139   DM              refTree;
1140   PetscSection    origParentSection, newParentSection;
1141   PetscInt        *origParents, *origChildIDs;
1142   PetscBool       flg;
1143   PetscErrorCode  ierr;
1144 
1145   PetscFunctionBegin;
1146   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1147   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1148   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1149 
1150   /* Set up tree */
1151   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1152   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1153   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1154   if (origParentSection) {
1155     PetscInt        pStart, pEnd;
1156     PetscInt        *newParents, *newChildIDs;
1157     PetscInt        *remoteOffsetsParents, newParentSize;
1158     PetscSF         parentSF;
1159 
1160     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1161     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1162     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1163     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1164     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1165     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1166     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1167     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1168     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1169     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1170     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1171     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1172     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1173     if (flg) {
1174       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1175       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1176       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1177       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1178       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1179     }
1180     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1181     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1182     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1183     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1184   }
1185   PetscFunctionReturn(0);
1186 }
1187 
1188 #undef __FUNCT__
1189 #define __FUNCT__ "DMPlexDistributeSF"
1190 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
1191 {
1192   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1193   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1194   PetscMPIInt            rank, numProcs;
1195   MPI_Comm               comm;
1196   PetscErrorCode         ierr;
1197 
1198   PetscFunctionBegin;
1199   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1200   PetscValidPointer(dmParallel,7);
1201 
1202   /* Create point SF for parallel mesh */
1203   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1204   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1205   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1206   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1207   {
1208     const PetscInt *leaves;
1209     PetscSFNode    *remotePoints, *rowners, *lowners;
1210     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1211     PetscInt        pStart, pEnd;
1212 
1213     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1214     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1215     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1216     for (p=0; p<numRoots; p++) {
1217       rowners[p].rank  = -1;
1218       rowners[p].index = -1;
1219     }
1220     if (origPart) {
1221       /* Make sure points in the original partition are not assigned to other procs */
1222       const PetscInt *origPoints;
1223 
1224       ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
1225       for (p = 0; p < numProcs; ++p) {
1226         PetscInt dof, off, d;
1227 
1228         ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
1229         ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
1230         for (d = off; d < off+dof; ++d) {
1231           rowners[origPoints[d]].rank = p;
1232         }
1233       }
1234       ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
1235     }
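    /* Resolve ownership in three phases: broadcast the current claims from roots to leaves, let each leaf
       either bid for an unclaimed point or withdraw (-2) from one already claimed elsewhere, then reduce
       the bids back to the roots with MAXLOC so that the highest bidding rank wins each point, and finally
       broadcast the winners back to the leaves. */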
1236     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1237     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1238     for (p = 0; p < numLeaves; ++p) {
1239       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1240         lowners[p].rank  = rank;
1241         lowners[p].index = leaves ? leaves[p] : p;
1242       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC ignores us */
1243         lowners[p].rank  = -2;
1244         lowners[p].index = -2;
1245       }
1246     }
1247     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1248       rowners[p].rank  = -3;
1249       rowners[p].index = -3;
1250     }
1251     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1252     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1253     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1254     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1255     for (p = 0; p < numLeaves; ++p) {
1256       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1257       if (lowners[p].rank != rank) ++numGhostPoints;
1258     }
1259     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1260     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1261     for (p = 0, gp = 0; p < numLeaves; ++p) {
1262       if (lowners[p].rank != rank) {
1263         ghostPoints[gp]        = leaves ? leaves[p] : p;
1264         remotePoints[gp].rank  = lowners[p].rank;
1265         remotePoints[gp].index = lowners[p].index;
1266         ++gp;
1267       }
1268     }
1269     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1270     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1271     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1272   }
1273   pmesh->useCone    = mesh->useCone;
1274   pmesh->useClosure = mesh->useClosure;
1275   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1276   PetscFunctionReturn(0);
1277 }
1278 
1279 #undef __FUNCT__
1280 #define __FUNCT__ "DMPlexDistribute"
1281 /*@C
1282   DMPlexDistribute - Distributes the mesh and any associated sections.
1283 
1284   Collective on DM
1285 
1286   Input Parameters:
1287 + dm  - The original DMPlex object
1288 . partitioner - The partitioning package, or NULL for the default
1289 - overlap - The overlap of partitions, 0 is the default
1290 
1291   Output Parameters:
1292 + sf - The PetscSF used for point distribution
1293 - dmParallel - The distributed DMPlex object, or NULL
1294 
1295   Note: If the mesh was not distributed (for example, when run on a single process), *dmParallel is set to NULL.
1296 
1297   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1298   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1299   representation on the mesh.
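
  A minimal usage sketch (error checking with CHKERRQ assumed; the partitioner name "chaco" is only an
  illustration, any available partitioning package or NULL for the default may be passed):
$    DM dmDist = NULL;
$
$    ierr = DMPlexDistribute(dm, "chaco", 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }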
1300 
1301   Level: intermediate
1302 
1303 .keywords: mesh, elements
1304 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1305 @*/
1306 PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
1307 {
1308   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1309   MPI_Comm               comm;
1310   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1311   DM                     dmOverlap;
1312   IS                     cellPart,        part;
1313   PetscSection           cellPartSection, partSection;
1314   PetscSFNode           *remoteRanks, *newRemote;
1315   const PetscSFNode     *oldRemote;
1316   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1317   ISLocalToGlobalMapping renumbering;
1318   PetscBool              flg;
1319   PetscMPIInt            rank, numProcs, p;
1320   PetscErrorCode         ierr;
1321 
1322   PetscFunctionBegin;
1323   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1324   if (sf) PetscValidPointer(sf,4);
1325   PetscValidPointer(dmParallel,5);
1326 
1327   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1328   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1329   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1330   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1331 
1332   *dmParallel = NULL;
1333   if (numProcs == 1) PetscFunctionReturn(0);
1334 
1335   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1336   /* Create cell partition - TODO: rewrite this to use IS and the MatPartitioning infrastructure */
1337   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1338   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1339   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1340   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1341   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1342   if (!rank) numRemoteRanks = numProcs;
1343   else       numRemoteRanks = 0;
1344   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1345   for (p = 0; p < numRemoteRanks; ++p) {
1346     remoteRanks[p].rank  = p;
1347     remoteRanks[p].index = 0;
1348   }
1349   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1350   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
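  /* partSF has one root on every rank (its slot in the serial partition) and, on rank 0 only, one leaf per
     target rank pointing at that root; PetscSFConvertPartition() below uses this graph together with
     partSection and part to build the point migration SF and the local-to-global renumbering. */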
1351   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1352   if (flg) {
1353     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1354     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1355     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1356     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1357   }
1358   /* Close the partition over the mesh */
1359   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1360   /* Create new mesh */
1361   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1362   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1363   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1364   pmesh = (DM_Plex*) (*dmParallel)->data;
1365   pmesh->useAnchors = mesh->useAnchors;
1366 
1367   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1368   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1369   if (flg) {
1370     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1371     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1372     ierr = ISView(part, NULL);CHKERRQ(ierr);
1373     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1374     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1375     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1376   }
1377   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1378 
1379   /* Migrate data to a non-overlapping parallel DM */
1380   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1381   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1382   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1383   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1384   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1385 
1386   /* Build the point SF without overlap */
1387   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1388 
1389   if (overlap > 0) {
1390     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1391     /* Add the partition overlap to the distributed DM */
1392     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1393     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1394     *dmParallel = dmOverlap;
1395     if (flg) {
1396       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1397       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1398     }
1399 
1400     /* Re-map the pointSF to establish the full migration pattern */
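    /* Broadcasting the serial origins of the distributed points (oldRemote) through overlapSF gives every
       point of the overlapping mesh its origin in the original serial mesh, composing the two SFs into a
       single migration SF from the serial mesh to the overlapping parallel mesh. */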
1401     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1402     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1403     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1404     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1405     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1406     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1407     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1408     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1409     pointSF = overlapPointSF;
1410     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1411   }
1412   /* Cleanup Partition */
1413   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1414   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1415   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1416   ierr = ISDestroy(&part);CHKERRQ(ierr);
1417   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1418   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1419   /* Copy BC */
1420   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1421   /* Cleanup */
1422   if (sf) {*sf = pointSF;}
1423   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1424   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1425   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1426   PetscFunctionReturn(0);
1427 }
1428 
1429 #undef __FUNCT__
1430 #define __FUNCT__ "DMPlexDistributeOverlap"
1431 /*@C
1432   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1433 
1434   Collective on DM
1435 
1436   Input Parameters:
1437 + dm  - The non-overlapping distributed DMPlex object
1438 . overlap - The overlap of partitions, 0 is the default
- renumbering - The local-to-global point numbering created when the mesh was distributed (see DMPlexDistribute())
1439 
1440   Output Parameters:
1441 + sf - The PetscSF used for point distribution
1442 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1443 
1444   Note: The input dm must already be distributed and non-overlapping, for instance the output of DMPlexDistribute().
1445 
1446   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1447   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1448   representation on the mesh.
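
  This routine is invoked by DMPlexDistribute() when an overlap is requested; the calling pattern used there is
$    ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
  where renumbering is the point renumbering created when the mesh was first distributed.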
1449 
1450   Level: intermediate
1451 
1452 .keywords: mesh, elements
1453 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1454 @*/
1455 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1456 {
1457   MPI_Comm               comm;
1458   PetscMPIInt            rank;
1459   PetscSection           rootSection, leafSection;
1460   IS                     rootrank, leafrank;
1461   PetscSection           coneSection;
1462   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1463   PetscSFNode           *ghostRemote;
1464   const PetscSFNode     *overlapRemote;
1465   ISLocalToGlobalMapping overlapRenumbering;
1466   const PetscInt        *renumberingArray, *overlapLocal;
1467   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1468   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1469   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1470   PetscErrorCode         ierr;
1471 
1472   PetscFunctionBegin;
1473   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1474   if (sf) PetscValidPointer(sf, 4);
1475   PetscValidPointer(dmOverlap, 5);
1476 
1477   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1478   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1479   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1480 
1481   /* Compute point overlap with neighbouring processes on the distributed DM */
1482   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1483   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1484   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1485   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1486   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1487   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1488 
1489   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1490   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1491 
1492   /* Convert cones to global numbering before migrating them */
1493   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1494   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1495   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1496   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1497 
1498   /* Derive the new local-to-global mapping from the old one */
1499   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1500   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1501   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1502   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1503   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1504   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1505 
1506   /* Build the overlapping DM */
1507   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1508   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1509   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1510   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1511   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1512   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1513   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1514 
1515   /* Build the new point SF by propagating the new local point numbering to generate remote root indices */
1516   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1517   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1518   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1519   numGhostPoints = numSharedPoints + numOverlapPoints;
1520   ierr = PetscMalloc2(numGhostPoints, &ghostLocal, numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1521   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1522   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
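  /* For every point this rank keeps, record its local number in the overlapping mesh (pointIDs), then
     broadcast those numbers along migrationSF so that received copies (recvPointIDs) learn the root index
     to use in the new point SF below. */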
1523   for (p=0; p<overlapLeaves; p++) {
1524     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1525   }
1526   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1527   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1528   for (idx=0, p=0; p<overlapLeaves; p++) {
1529     if (overlapRemote[p].rank != rank) {
1530       ghostLocal[idx] = overlapLocal[p];
1531       ghostRemote[idx].index = recvPointIDs[p];
1532       ghostRemote[idx].rank = overlapRemote[p].rank;
1533       idx++;
1534     }
1535   }
1536   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1537   ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1538   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1539   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1540   /* Cleanup overlap partition */
1541   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1542   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1543   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1544   if (sf) *sf = migrationSF;
1545   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1546   ierr = DMSetFromOptions(*dmOverlap);CHKERRQ(ierr);
1547   PetscFunctionReturn(0);
1548 }
1549