xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 5abbe4fe37ff6162da73f4ab80690d9893bb06b5)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
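/* Usage sketch (illustrative only, not part of this file's logic): selecting the adjacency
   definition documented above before distribution or preallocation. Assumes "dm" is an
   existing DMPlex and "fem" a PetscBool chosen by the caller; the flag combinations follow
   the FEM/FVM table in the manual pages above.

     if (fem) {   FEM adjacency: q in closure(star(p))
       ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
       ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);
     } else {     FVM adjacency: q in support(p + cone(p))
       ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
       ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
     }
*/
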
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
153 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259 {
260   static PetscInt asiz = 0;
261   PetscInt maxAnchors = 1;
262   PetscInt aStart = -1, aEnd = -1;
263   PetscInt maxAdjSize;
264   PetscSection aSec = NULL;
265   IS aIS = NULL;
266   const PetscInt *anchors;
267   PetscErrorCode  ierr;
268 
269   PetscFunctionBeginHot;
270   if (useAnchors) {
271     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
272     if (aSec) {
273       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
274       maxAnchors = PetscMax(1,maxAnchors);
275       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
276       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
277     }
278   }
279   if (!*adj) {
280     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
281 
282     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
283     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
284     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
285     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288     asiz *= maxAnchors;
289     asiz  = PetscMin(asiz,pEnd-pStart);
290     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
291   }
292   if (*adjSize < 0) *adjSize = asiz;
293   maxAdjSize = *adjSize;
294   if (useTransitiveClosure) {
295     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
296   } else if (useCone) {
297     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
298   } else {
299     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
300   }
301   if (useAnchors && aSec) {
302     PetscInt origSize = *adjSize;
303     PetscInt numAdj = origSize;
304     PetscInt i = 0, j;
305     PetscInt *orig = *adj;
306 
307     while (i < origSize) {
308       PetscInt p = orig[i];
309       PetscInt aDof = 0;
310 
311       if (p >= aStart && p < aEnd) {
312         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
313       }
314       if (aDof) {
315         PetscInt aOff;
316         PetscInt s, q;
317 
318         for (j = i + 1; j < numAdj; j++) {
319           orig[j - 1] = orig[j];
320         }
321         origSize--;
322         numAdj--;
323         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
324         for (s = 0; s < aDof; ++s) {
325           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
326             if (anchors[aOff+s] == orig[q]) break;
327           }
328           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
329         }
330       }
331       else {
332         i++;
333       }
334     }
335     *adjSize = numAdj;
336     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must PetscFree the adj array if it was not passed in.
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
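
/* Usage sketch (illustrative only): query the adjacency of a single point, letting
   DMPlexGetAdjacency() allocate the array, which the caller must later PetscFree().
   Assumes "dm" is a valid DMPlex and "p" a point in its chart.

     PetscInt  adjSize = PETSC_DETERMINE;
     PetscInt *adj     = NULL;
     PetscInt  a;

     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
     for (a = 0; a < adjSize; ++a) {
       ierr = PetscPrintf(PETSC_COMM_SELF, "point %D adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);
     }
     ierr = PetscFree(adj);CHKERRQ(ierr);
*/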
374 #undef __FUNCT__
375 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
376 /*@
377   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
378 
379   Collective on DM
380 
381   Input Parameters:
382 + dm      - The DM
383 . sfPoint - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point (see DMPlexDistributeOwnership())
. rootRanks       - The rank of each edge into the root point (see DMPlexDistributeOwnership())
. leafRankSection - The number of processes sharing a given leaf point (see DMPlexDistributeOwnership())
- leafRanks       - The rank of each process sharing a leaf point (see DMPlexDistributeOwnership())
384 
385   Output Parameters:
386 + processRanks - A list of process neighbors, or NULL
387 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
388 
389   Level: developer
390 
391 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
392 @*/
393 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
394 {
395   const PetscSFNode *remotePoints;
396   PetscInt          *localPointsNew;
397   PetscSFNode       *remotePointsNew;
398   const PetscInt    *nranks;
399   PetscInt          *ranksNew;
400   PetscBT            neighbors;
401   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
402   PetscMPIInt        numProcs, proc, rank;
403   PetscErrorCode     ierr;
404 
405   PetscFunctionBegin;
406   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
407   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
408   if (processRanks) {PetscValidPointer(processRanks, 3);}
409   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
410   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
411   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
412   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
413   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
414   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
415   /* Compute root-to-leaf process connectivity */
416   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
417   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
418   for (p = pStart; p < pEnd; ++p) {
419     PetscInt ndof, noff, n;
420 
421     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
422     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
423     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
424   }
425   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
426   /* Compute leaf-to-neighbor process connectivity */
427   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
428   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
429   for (p = pStart; p < pEnd; ++p) {
430     PetscInt ndof, noff, n;
431 
432     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
433     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
434     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
435   }
436   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
437   /* Compute leaf-to-root process connectivity */
438   for (l = 0; l < numLeaves; ++l) {ierr = PetscBTSet(neighbors, remotePoints[l].rank);CHKERRQ(ierr);}
439   /* Calculate edges */
440   ierr = PetscBTClear(neighbors, rank);CHKERRQ(ierr);
441   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
442   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
443   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
445   for(proc = 0, n = 0; proc < numProcs; ++proc) {
446     if (PetscBTLookup(neighbors, proc)) {
447       ranksNew[n]              = proc;
448       localPointsNew[n]        = proc;
449       remotePointsNew[n].index = rank;
450       remotePointsNew[n].rank  = proc;
451       ++n;
452     }
453   }
454   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
455   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
456   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
457   if (sfProcess) {
458     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
459     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
460     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
461     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
462   }
463   PetscFunctionReturn(0);
464 }
465 
466 #undef __FUNCT__
467 #define __FUNCT__ "DMPlexDistributeOwnership"
468 /*@
469   DMPlexDistributeOwnership - Compute owner information for shared points. This basically gathers two-sided (root and leaf) sharing information for the point SF.
470 
471   Collective on DM
472 
473   Input Parameter:
474 . dm - The DM
475 
476   Output Parameters:
477 + rootSection - The number of leaves for a given root point
478 . rootrank    - The rank of each edge into the root point
479 . leafSection - The number of processes sharing a given leaf point
480 - leafrank    - The rank of each process sharing a leaf point
481 
482   Level: developer
483 
484 .seealso: DMPlexCreateOverlap()
485 @*/
486 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
487 {
488   MPI_Comm        comm;
489   PetscSF         sfPoint;
490   const PetscInt *rootdegree;
491   PetscInt       *myrank, *remoterank;
492   PetscInt        pStart, pEnd, p, nedges;
493   PetscMPIInt     rank;
494   PetscErrorCode  ierr;
495 
496   PetscFunctionBegin;
497   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
498   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
499   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
500   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
501   /* Compute number of leaves for each root */
502   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
503   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
504   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
506   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
507   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
508   /* Gather rank of each leaf to root */
509   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
510   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
511   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
512   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
513   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
514   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscFree(myrank);CHKERRQ(ierr);
516   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
517   /* Distribute remote ranks to leaves */
518   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
519   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
520   PetscFunctionReturn(0);
521 }
522 
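/* Usage sketch (illustrative only): the typical pipeline pairs DMPlexDistributeOwnership()
   with DMPlexCreateOverlap() below, turning point-sharing information into an overlap SF.
   Assumes "dm" is a distributed DMPlex with a valid point SF.

     MPI_Comm     comm;
     PetscSection rootSection, leafSection;
     IS           rootrank, leafrank;
     PetscSF      overlapSF;

     ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
     ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
     ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
     ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
     ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
     ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/
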
523 #undef __FUNCT__
524 #define __FUNCT__ "DMPlexCreateOverlap"
525 /*@C
526   DMPlexCreateOverlap - Compute the points lying in the overlap of the parallel partition and create an SF describing them.
527 
528   Collective on DM
529 
530   Input Parameters:
531 + dm          - The DM
532 . rootSection - The number of leaves for a given root point
533 . rootrank    - The rank of each edge into the root point
534 . leafSection - The number of processes sharing a given leaf point
535 - leafrank    - The rank of each process sharing a leaf point
536 
537   Output Parameter:
538 . overlapSF   - The star forest mapping remote overlap points into a contiguous leaf space
539 
540   Level: developer
541 
542 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
543 @*/
544 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
545 {
546   MPI_Comm           comm;
547   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
548   PetscSF            sfPoint, sfProc;
549   DMLabel            ovLeafLabel;
550   const PetscSFNode *remote;
551   const PetscInt    *local;
552   const PetscInt    *nrank, *rrank;
553   PetscInt          *adj = NULL;
554   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
555   PetscMPIInt        rank, numProcs;
556   PetscBool          useCone, useClosure, flg;
557   PetscErrorCode     ierr;
558 
559   PetscFunctionBegin;
560   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
561   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
562   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
563   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
564   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
565   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
566   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
567   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
568   /* Handle leaves: shared with the root point */
569   for (l = 0; l < nleaves; ++l) {
570     PetscInt adjSize = PETSC_DETERMINE, a;
571 
572     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
573     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
574   }
575   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
576   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
577   /* Handle roots */
578   for (p = pStart; p < pEnd; ++p) {
579     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
580 
581     if ((p >= sStart) && (p < sEnd)) {
582       /* Some leaves share a root with other leaves on different processes */
583       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
584       if (neighbors) {
585         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
586         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
587         for (n = 0; n < neighbors; ++n) {
588           const PetscInt remoteRank = nrank[noff+n];
589 
590           if (remoteRank == rank) continue;
591           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
592         }
593       }
594     }
595     /* Roots are shared with leaves */
596     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
597     if (!neighbors) continue;
598     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
599     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
600     for (n = 0; n < neighbors; ++n) {
601       const PetscInt remoteRank = rrank[noff+n];
602 
603       if (remoteRank == rank) continue;
604       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
605     }
606   }
607   ierr = PetscFree(adj);CHKERRQ(ierr);
608   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
609   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
610   /* We require the closure in the overlap */
611   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
612   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
613   if (useCone || !useClosure) {
614     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
615   }
616   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
617   if (flg) {
618     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
619     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
620   }
621   /* Make process SF and invert sender to receiver label */
622   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
623   ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
624   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, ovLeafLabel);CHKERRQ(ierr);
625   /* Don't import points from yourself */
626   ierr = DMLabelClearStratum(ovLeafLabel, rank);CHKERRQ(ierr);
627   /* Don't import points already in the pointSF */
628   for (l = 0; l < nleaves; ++l) {
629     ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
630   }
631   /* Convert receiver label to SF */
632   ierr = DMPlexPartitionLabelCreateSF(dm, ovLeafLabel, overlapSF);CHKERRQ(ierr);
633   ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
634   ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
635   if (flg) {
636     ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
637     ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
638   }
639   /* Clean up */
640   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
641   ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
642   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
643   PetscFunctionReturn(0);
644 }
645 
646 #undef __FUNCT__
647 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
648 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
649 {
650   MPI_Comm           comm;
651   PetscMPIInt        rank, numProcs;
652   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
653   PetscInt          *pointDepths, *remoteDepths, *ilocal;
654   PetscInt          *depthRecv, *depthShift, *depthIdx;
655   PetscSFNode       *iremote;
656   PetscSF            pointSF;
657   const PetscInt    *sharedLocal;
658   const PetscSFNode *overlapRemote, *sharedRemote;
659   PetscErrorCode     ierr;
660 
661   PetscFunctionBegin;
662   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
663   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
664   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
665   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
666   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
667 
668   /* Before building the migration SF we need to know the new stratum offsets */
669   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
670   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
671   for (d=0; d<dim+1; d++) {
672     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
673     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
674   }
675   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
676   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
677   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
678 
679   /* Count received points in each stratum and compute the internal strata shift */
680   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
681   for (d=0; d<dim+1; d++) depthRecv[d]=0;
682   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
683   depthShift[dim] = 0;
684   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
685   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
686   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
687   for (d=0; d<dim+1; d++) {
688     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
689     depthIdx[d] = pStart + depthShift[d];
690   }
691 
692   /* From the overlap SF, build an SF that describes the full overlap migration */
693   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
694   newLeaves = pEnd - pStart + nleaves;
695   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
696   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
697   /* First map local points to themselves */
698   for (d=0; d<dim+1; d++) {
699     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
700     for (p=pStart; p<pEnd; p++) {
701       point = p + depthShift[d];
702       ilocal[point] = point;
703       iremote[point].index = p;
704       iremote[point].rank = rank;
705       depthIdx[d]++;
706     }
707   }
708 
709   /* Add in the remote roots for currently shared points */
710   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
711   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
712   for (d=0; d<dim+1; d++) {
713     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
714     for (p=0; p<numSharedPoints; p++) {
715       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
716         point = sharedLocal[p] + depthShift[d];
717         iremote[point].index = sharedRemote[p].index;
718         iremote[point].rank = sharedRemote[p].rank;
719       }
720     }
721   }
722 
723   /* Now add the incoming overlap points */
724   for (p=0; p<nleaves; p++) {
725     point = depthIdx[remoteDepths[p]];
726     ilocal[point] = point;
727     iremote[point].index = overlapRemote[p].index;
728     iremote[point].rank = overlapRemote[p].rank;
729     depthIdx[remoteDepths[p]]++;
730   }
731   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
732 
733   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
734   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
735   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
736   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
737   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
738 
739   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
740   PetscFunctionReturn(0);
741 }
742 
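/* Usage sketch (illustrative only): once an overlap SF exists (see the sketch after
   DMPlexDistributeOwnership()), DMPlexCreateOverlapMigrationSF() above turns it into a
   migration SF that also maps every local point to itself, suitable for the data
   migration routines below.

     PetscSF migrationSF;

     ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
     (use migrationSF with DMPlexDistributeField(), DMPlexDistributeData(), etc.)
     ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
*/
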
743 #undef __FUNCT__
744 #define __FUNCT__ "DMPlexDistributeField"
745 /*@
746   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
747 
748   Collective on DM
749 
750   Input Parameters:
751 + dm - The DMPlex object
752 . pointSF - The PetscSF describing the communication pattern
753 . originalSection - The PetscSection for existing data layout
754 - originalVec - The existing data
755 
756   Output Parameters:
757 + newSection - The PetscSection describing the new data layout
758 - newVec - The new data
759 
760   Level: developer
761 
762 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
763 @*/
764 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
765 {
766   PetscSF        fieldSF;
767   PetscInt      *remoteOffsets, fieldSize;
768   PetscScalar   *originalValues, *newValues;
769   PetscErrorCode ierr;
770 
771   PetscFunctionBegin;
772   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
773   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
774 
775   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
776   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
777   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
778 
779   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
780   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
781   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
782   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
783   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
784   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
785   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
786   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
787   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
788   PetscFunctionReturn(0);
789 }
790 
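/* Usage sketch (illustrative only): redistribute a Vec laid out by an existing PetscSection
   using a point SF, e.g. the one returned by DMPlexDistribute(). The new Vec must be created
   (but not sized) beforehand, since DMPlexDistributeField() sets its sizes from the new
   section. The names "pointSF", "origSection", and "origVec" are assumed to exist.

     PetscSection newSection;
     Vec          newVec;

     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
     ierr = VecCreate(PetscObjectComm((PetscObject) dm), &newVec);CHKERRQ(ierr);
     ierr = DMPlexDistributeField(dm, pointSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/
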
791 #undef __FUNCT__
792 #define __FUNCT__ "DMPlexDistributeFieldIS"
793 /*@
794   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
795 
796   Collective on DM
797 
798   Input Parameters:
799 + dm - The DMPlex object
800 . pointSF - The PetscSF describing the communication pattern
801 . originalSection - The PetscSection for existing data layout
802 - originalIS - The existing data
803 
804   Output Parameters:
805 + newSection - The PetscSection describing the new data layout
806 - newIS - The new data
807 
808   Level: developer
809 
810 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
811 @*/
812 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
813 {
814   PetscSF         fieldSF;
815   PetscInt       *newValues, *remoteOffsets, fieldSize;
816   const PetscInt *originalValues;
817   PetscErrorCode  ierr;
818 
819   PetscFunctionBegin;
820   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
821   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
822 
823   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
824   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
825 
826   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
827   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
828   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
829   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
830   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
831   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
832   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
833   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
834   PetscFunctionReturn(0);
835 }
836 
837 #undef __FUNCT__
838 #define __FUNCT__ "DMPlexDistributeData"
839 /*@
840   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
841 
842   Collective on DM
843 
844   Input Parameters:
845 + dm - The DMPlex object
846 . pointSF - The PetscSF describing the communication pattern
847 . originalSection - The PetscSection for existing data layout
848 . datatype - The type of data
849 - originalData - The existing data
850 
851   Output Parameters:
852 + newSection - The PetscSection describing the new data layout
853 - newData - The new data
854 
855   Level: developer
856 
857 .seealso: DMPlexDistribute(), DMPlexDistributeField()
858 @*/
859 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
860 {
861   PetscSF        fieldSF;
862   PetscInt      *remoteOffsets, fieldSize;
863   PetscMPIInt    dataSize;
864   PetscErrorCode ierr;
865 
866   PetscFunctionBegin;
867   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
868   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
869 
870   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
871   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
872   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
873 
874   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
875   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
876   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
877   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
878   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
879   PetscFunctionReturn(0);
880 }
881 
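/* Usage sketch (illustrative only): DMPlexDistributeData() is the raw-buffer analogue of
   DMPlexDistributeField(); here integer data attached to mesh points is migrated. The new
   buffer is allocated by the routine and must be freed by the caller. The names "pointSF",
   "origSection", and "origData" are assumed to exist.

     PetscSection newSection;
     PetscInt    *newData = NULL;

     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);CHKERRQ(ierr);
     ierr = DMPlexDistributeData(dm, pointSF, origSection, MPIU_INT, (void *) origData, newSection, (void **) &newData);CHKERRQ(ierr);
     ierr = PetscFree(newData);CHKERRQ(ierr);
*/
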
882 #undef __FUNCT__
883 #define __FUNCT__ "DMPlexDistributeCones"
884 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
885 {
886   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
887   MPI_Comm               comm;
888   PetscSF                coneSF;
889   PetscSection           originalConeSection, newConeSection;
890   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
891   PetscBool              flg;
892   PetscErrorCode         ierr;
893 
894   PetscFunctionBegin;
895   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
896   PetscValidPointer(dmParallel,4);
897   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
898 
899   /* Distribute cone section */
900   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
901   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
902   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
903   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
904   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
905   {
906     PetscInt pStart, pEnd, p;
907 
908     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
909     for (p = pStart; p < pEnd; ++p) {
910       PetscInt coneSize;
911       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
912       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
913     }
914   }
915   /* Communicate and renumber cones */
916   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
917   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
918   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
919   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
920   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
921   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
922   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
923 #if defined(PETSC_USE_DEBUG)
924   {
925     PetscInt  p;
926     PetscBool valid = PETSC_TRUE;
927     for (p = 0; p < newConesSize; ++p) {
928       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
929     }
930     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
931   }
932 #endif
933   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
934   if (flg) {
935     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
936     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
937     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
938     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
939     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
940   }
941   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
942   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
943   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
944   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
945   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
946   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
947   /* Create supports and stratify sieve */
948   {
949     PetscInt pStart, pEnd;
950 
951     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
952     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
953   }
954   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
955   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
956   PetscFunctionReturn(0);
957 }
958 
959 #undef __FUNCT__
960 #define __FUNCT__ "DMPlexDistributeCoordinates"
961 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
962 {
963   MPI_Comm         comm;
964   PetscSection     originalCoordSection, newCoordSection;
965   Vec              originalCoordinates, newCoordinates;
966   PetscInt         bs;
967   const char      *name;
968   const PetscReal *maxCell, *L;
969   PetscErrorCode   ierr;
970 
971   PetscFunctionBegin;
972   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
973   PetscValidPointer(dmParallel, 3);
974 
975   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
976   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
977   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
978   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
979   if (originalCoordinates) {
980     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
981     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
982     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
983 
984     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
985     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
986     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
987     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
988     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
989   }
990   ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
991   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
992   PetscFunctionReturn(0);
993 }
994 
995 #undef __FUNCT__
996 #define __FUNCT__ "DMPlexDistributeLabels"
997 /* Here we are assuming that process 0 always has everything */
998 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
999 {
1000   MPI_Comm       comm;
1001   PetscMPIInt    rank;
1002   PetscInt       numLabels, numLocalLabels, l;
1003   PetscBool      hasLabels = PETSC_FALSE;
1004   PetscErrorCode ierr;
1005 
1006   PetscFunctionBegin;
1007   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1008   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1009   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1010   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1011   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1012 
1013   /* Everyone must have either the same number of labels, or none */
1014   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1015   numLabels = numLocalLabels;
1016   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1017   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1018   for (l = numLabels-1; l >= 0; --l) {
1019     DMLabel     label = NULL, labelNew = NULL;
1020     PetscBool   isdepth;
1021 
1022     if (hasLabels) {
1023       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1024       /* Skip "depth" because it is recreated */
1025       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1026     }
1027     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1028     if (isdepth) continue;
1029     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1030     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1031   }
1032   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1033   PetscFunctionReturn(0);
1034 }
1035 
1036 #undef __FUNCT__
1037 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1038 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1039 {
1040   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1041   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1042   MPI_Comm        comm;
1043   const PetscInt *gpoints;
1044   PetscInt        dim, depth, n, d;
1045   PetscErrorCode  ierr;
1046 
1047   PetscFunctionBegin;
1048   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1049   PetscValidPointer(dmParallel, 4);
1050 
1051   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1052   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1053 
1054   /* Setup hybrid structure */
1055   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1056   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1057   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1058   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1059   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1060   for (d = 0; d <= dim; ++d) {
1061     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1062 
1063     if (pmax < 0) continue;
1064     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1065     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1066     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1067     for (p = 0; p < n; ++p) {
1068       const PetscInt point = gpoints[p];
1069 
1070       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1071     }
1072     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1073     else            pmesh->hybridPointMax[d] = -1;
1074   }
1075   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1076   PetscFunctionReturn(0);
1077 }
1078 
1079 #undef __FUNCT__
1080 #define __FUNCT__ "DMPlexDistributeSetupTree"
1081 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1082 {
1083   MPI_Comm        comm;
1084   DM              refTree;
1085   PetscSection    origParentSection, newParentSection;
1086   PetscInt        *origParents, *origChildIDs;
1087   PetscBool       flg;
1088   PetscErrorCode  ierr;
1089 
1090   PetscFunctionBegin;
1091   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1092   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1093   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1094 
1095   /* Set up tree */
1096   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1097   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1098   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1099   if (origParentSection) {
1100     PetscInt        pStart, pEnd;
1101     PetscInt        *newParents, *newChildIDs;
1102     PetscInt        *remoteOffsetsParents, newParentSize;
1103     PetscSF         parentSF;
1104 
1105     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1106     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1107     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1108     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1109     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1110     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1111     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1112     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1113     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1114     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1115     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1116     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1117     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1118     if (flg) {
1119       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1120       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1121       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1122       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1123       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1124     }
1125     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1126     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1127     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1128     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1129   }
1130   PetscFunctionReturn(0);
1131 }
1132 
1133 #undef __FUNCT__
1134 #define __FUNCT__ "DMPlexDistributeSF"
1135 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
1136 {
1137   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1138   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1139   PetscMPIInt            rank, numProcs;
1140   MPI_Comm               comm;
1141   PetscErrorCode         ierr;
1142 
1143   PetscFunctionBegin;
1144   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1145   PetscValidPointer(dmParallel,7);
1146 
1147   /* Create point SF for parallel mesh */
1148   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1149   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1150   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1151   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1152   {
1153     const PetscInt *leaves;
1154     PetscSFNode    *remotePoints, *rowners, *lowners;
1155     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1156     PetscInt        pStart, pEnd;
1157 
1158     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1159     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1160     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1161     for (p=0; p<numRoots; p++) {
1162       rowners[p].rank  = -1;
1163       rowners[p].index = -1;
1164     }
1165     if (origPart) {
1166       /* Make sure points in the original partition are not assigned to other procs */
1167       const PetscInt *origPoints;
1168 
1169       ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
1170       for (p = 0; p < numProcs; ++p) {
1171         PetscInt dof, off, d;
1172 
1173         ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
1174         ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
1175         for (d = off; d < off+dof; ++d) {
1176           rowners[origPoints[d]].rank = p;
1177         }
1178       }
1179       ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
1180     }
1181     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1182     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1183     for (p = 0; p < numLeaves; ++p) {
1184       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1185         lowners[p].rank  = rank;
1186         lowners[p].index = leaves ? leaves[p] : p;
1187       } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1188         lowners[p].rank  = -2;
1189         lowners[p].index = -2;
1190       }
1191     }
1192     for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use it */
1193       rowners[p].rank  = -3;
1194       rowners[p].index = -3;
1195     }
1196     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1197     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1198     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1199     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1200     for (p = 0; p < numLeaves; ++p) {
1201       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1202       if (lowners[p].rank != rank) ++numGhostPoints;
1203     }
1204     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1205     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1206     for (p = 0, gp = 0; p < numLeaves; ++p) {
1207       if (lowners[p].rank != rank) {
1208         ghostPoints[gp]        = leaves ? leaves[p] : p;
1209         remotePoints[gp].rank  = lowners[p].rank;
1210         remotePoints[gp].index = lowners[p].index;
1211         ++gp;
1212       }
1213     }
1214     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1215     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1216     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1217   }
1218   pmesh->useCone    = mesh->useCone;
1219   pmesh->useClosure = mesh->useClosure;
1220   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1221   PetscFunctionReturn(0);
1222 }
1223 
1224 #undef __FUNCT__
1225 #define __FUNCT__ "DMPlexDistribute"
1226 /*@C
1227   DMPlexDistribute - Distributes the mesh and any associated sections.
1228 
1229   Collective on DM
1230 
1231   Input Parameters:
1232 + dm  - The original DMPlex object
1233 - overlap - The overlap of partitions, 0 is the default
1234 
1235   Output Parameters:
1236 + sf - The PetscSF used for point distribution
1237 - dmParallel - The distributed DMPlex object, or NULL
1238 
1239   Note: If the mesh was not distributed (for example, when run on a single process), dmParallel is set to NULL.
1240
1241   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1242   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1243   representation on the mesh.
1244 
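  Example usage (a minimal sketch; dm is assumed to already hold a serial DMPlex, error checking is abbreviated,
  and the optional migration SF output is not requested):
$    DM dmDist = NULL;
$
$    ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) { /* NULL when the mesh was not distributed, e.g. on a single process */
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }
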
1245   Level: intermediate
1246 
1247 .keywords: mesh, elements
1248 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1249 @*/
1250 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1251 {
1252   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1253   MPI_Comm               comm;
1254   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1255   DM                     dmOverlap;
1256   IS                     cellPart,        part;
1257   PetscSection           cellPartSection, partSection;
1258   PetscSFNode           *remoteRanks, *newRemote;
1259   const PetscSFNode     *oldRemote;
1260   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1261   ISLocalToGlobalMapping renumbering;
1262   PetscBool              flg;
1263   PetscMPIInt            rank, numProcs, p;
1264   PetscErrorCode         ierr;
1265 
1266   PetscFunctionBegin;
1267   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1268   if (sf) PetscValidPointer(sf,3);
1269   PetscValidPointer(dmParallel,4);
1270 
1271   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1272   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1273   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1274   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1275 
1276   *dmParallel = NULL;
1277   if (numProcs == 1) PetscFunctionReturn(0);
1278 
1279   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1280   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1281   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1282   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1283   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1284   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1285   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1286   if (!rank) numRemoteRanks = numProcs;
1287   else       numRemoteRanks = 0;
1288   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1289   for (p = 0; p < numRemoteRanks; ++p) {
1290     remoteRanks[p].rank  = p;
1291     remoteRanks[p].index = 0;
1292   }
1293   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1294   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
1295   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1296   if (flg) {
1297     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1298     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1299     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1300     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1301   }
1302   /* Close the partition over the mesh */
1303   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1304   /* Create new mesh */
1305   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1306   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1307   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1308   pmesh = (DM_Plex*) (*dmParallel)->data;
1309   pmesh->useAnchors = mesh->useAnchors;
1310 
1311   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1312   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1313   if (flg) {
1314     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1315     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1316     ierr = ISView(part, NULL);CHKERRQ(ierr);
1317     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1318     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1319     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1320   }
1321   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1322 
1323   /* Migrate data to a non-overlapping parallel DM */
1324   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1325   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1326   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1327   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1328   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1329 
1330   /* Build the point SF without overlap */
1331   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1332   if (flg) {
1333     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1334     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1335   }
1336 
1337   if (overlap > 0) {
1338     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1339     /* Add the partition overlap to the distributed DM */
1340     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1341     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1342     *dmParallel = dmOverlap;
1343     if (flg) {
1344       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1345       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1346     }
1347 
1348     /* Re-map the pointSF to establish the full migration pattern */
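    /* The broadcast below pushes each point's original root location (stored in pointSF) through overlapSF,
       so the resulting SF maps points of the original serial mesh directly to points of the overlapping mesh. */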
1349     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1350     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1351     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1352     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1353     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1354     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1355     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1356     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1357     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1358     pointSF = overlapPointSF;
1359     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1360   }
1361   /* Cleanup Partition */
1362   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1363   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1364   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1365   ierr = ISDestroy(&part);CHKERRQ(ierr);
1366   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1367   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1368   /* Copy BC */
1369   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1370   /* Cleanup */
1371   if (sf) {*sf = pointSF;}
1372   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1373   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1374   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1375   PetscFunctionReturn(0);
1376 }
1377 
1378 #undef __FUNCT__
1379 #define __FUNCT__ "DMPlexDistributeOverlap"
1380 /*@C
1381   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1382 
1383   Collective on DM
1384 
1385   Input Parameters:
1386 + dm  - The non-overlapping distributed DMPlex object
1387 . overlap - The overlap of partitions, 0 is the default
     - renumbering - The point renumbering (local-to-global mapping) created when the mesh was originally distributed
1388 
1389   Output Parameters:
1390 + sf - The PetscSF used for point distribution
1391 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1392 
1393   Note: The input dm must already be a distributed, non-overlapping DMPlex, for example one produced by DMPlexDistribute().
1394 
1395   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1396   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1397   representation on the mesh.
1398 
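  Example usage (a sketch only; dmDist and renumbering are assumed to come from an earlier distribution step such
  as the one performed inside DMPlexDistribute(), and error checking is abbreviated):
$    PetscSF migrationSF;
$    DM      dmOverlap;
$
$    ierr = DMPlexDistributeOverlap(dmDist, 1, renumbering, &migrationSF, &dmOverlap);CHKERRQ(ierr);
$    ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
$    ierr = DMDestroy(&dmDist);CHKERRQ(ierr);
$    dmDist = dmOverlap;
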
1399   Level: intermediate
1400 
1401 .keywords: mesh, elements
1402 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1403 @*/
1404 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1405 {
1406   MPI_Comm               comm;
1407   PetscMPIInt            rank;
1408   PetscSection           rootSection, leafSection;
1409   IS                     rootrank, leafrank;
1410   PetscSection           coneSection;
1411   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1412   PetscSFNode           *ghostRemote;
1413   const PetscSFNode     *overlapRemote;
1414   ISLocalToGlobalMapping overlapRenumbering;
1415   const PetscInt        *renumberingArray, *overlapLocal;
1416   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1417   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1418   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1419   PetscErrorCode         ierr;
1420 
1421   PetscFunctionBegin;
1422   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1423   if (sf) PetscValidPointer(sf, 4);
1424   PetscValidPointer(dmOverlap, 5);
1425 
1426   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1427   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1428   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1429 
1430   /* Compute point overlap with neighbouring processes on the distributed DM */
1431   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1432   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1433   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1434   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1435   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1436   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1437   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1438   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1439   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1440   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1441 
1442   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1443   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1444 
1445   /* Convert cones to global numbering before migrating them */
1446   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1447   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1448   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1449   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1450 
1451   /* Derive the new local-to-global mapping from the old one */
1452   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1453   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1454   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1455   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1456   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1457   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1458 
1459   /* Build the overlapping DM */
1460   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1461   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1462   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1463   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1464   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1465   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1466   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1467 
1468   /* Build the new point SF by propagating the new local point numbers to generate the remote root indices */
1469   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1470   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1471   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1472   numGhostPoints = numSharedPoints + numOverlapPoints;
1473   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1474   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1475   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1476   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
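  /* For each migrated point whose pre-overlap owner is this rank, record its new local number at the old
     root index; the broadcast below then tells every receiving rank which local index the owner uses for
     each ghost point, providing the remote root indices of the new point SF. */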
1477   for (p=0; p<overlapLeaves; p++) {
1478     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1479   }
1480   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1481   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1482   for (idx=0, p=0; p<overlapLeaves; p++) {
1483     if (overlapRemote[p].rank != rank) {
1484       ghostLocal[idx] = overlapLocal[p];
1485       ghostRemote[idx].index = recvPointIDs[p];
1486       ghostRemote[idx].rank = overlapRemote[p].rank;
1487       idx++;
1488     }
1489   }
1490   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1491   ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1492   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1493   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1494   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1495   /* Cleanup overlap partition */
1496   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1497   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1498   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1499   if (sf) *sf = migrationSF;
1500   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1501   PetscFunctionReturn(0);
1502 }
1503