/* xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision e540f424d93a01896fc8be44e39d689b2a649fd9) */
#include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseCone"
/*@
  DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  Input Parameters:
+ dm      - The DM object
- useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
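
  For example, selecting the FVM adjacency pattern before distribution (a minimal sketch; dm is an assumed valid DMPlex):
$     ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
$     ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);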

.seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useCone = useCone;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseCone"
/*@
  DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useCone, 2);
  *useCone = mesh->useCone;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
/*@
  DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

  Input Parameters:
+ dm      - The DM object
- useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useClosure = useClosure;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
/*@
  DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)), useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in star(cone(p)),    useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)), useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useClosure, 2);
  *useClosure = mesh->useClosure;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
/*@
  DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

  Input Parameters:
+ dm      - The DM object
- useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useAnchors = useAnchors;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
/*@
  DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useAnchors, 2);
  *useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  for (c = 0; c < coneSize; ++c) {
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    ierr = DMPlexGetSupportSize(dm, cone[c], &supportSize);CHKERRQ(ierr);
    ierr = DMPlexGetSupport(dm, cone[c], &support);CHKERRQ(ierr);
    for (s = 0; s < supportSize; ++s) {
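      /* Linear-scan insert: append support[s] to adj[] only if it is not already there; the
         comma expression in the loop condition appends the point and yields 0 (false) exactly
         when the scan reaches numAdj without finding a match. The same idiom recurs below. */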
      for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
        if (support[s] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
  for (s = 0; s < supportSize; ++s) {
    const PetscInt *cone = NULL;
    PetscInt        coneSize, c, q;

    ierr = DMPlexGetConeSize(dm, support[s], &coneSize);CHKERRQ(ierr);
    ierr = DMPlexGetCone(dm, support[s], &cone);CHKERRQ(ierr);
    for (c = 0; c < coneSize; ++c) {
      for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt      *star = NULL;
  PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  for (s = 0; s < starSize*2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) {
      for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
    }
    ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Internal"
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

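    /* Worst-case bound on the adjacency size: coneSeries and supportSeries are geometric series
       bounding the number of points in a closure (resp. star) over `depth` levels, so the two
       products below bound star(closure(p)) and closure(star(p)); the estimate is then scaled
       by the maximum number of anchors per point and capped at the total chart size */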
    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

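    /* Substitute anchors for constrained points: remove each anchored point from adj[] by
       shifting the remaining entries down, then append its anchor points if not already present */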
    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency"
/*@
  DMPlexGetAdjacency - Return all points adjacent to the given point

  Input Parameters:
+ dm - The DM object
. p  - The point
. adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
- adj - Either NULL so that the array is allocated, or an existing array with size adjSize

  Output Parameters:
+ adjSize - The number of adjacent points
- adj - The adjacent points

  Level: advanced

  Notes: The user must PetscFree the adj array if it was not passed in.
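
  For example (a minimal sketch; dm and p are assumed valid):
$    PetscInt  adjSize = PETSC_DETERMINE;
$    PetscInt *adj     = NULL;
$    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
$    ... use adj[0], ..., adj[adjSize-1] ...
$    ierr = PetscFree(adj);CHKERRQ(ierr);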

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(adjSize,3);
  PetscValidPointer(adj,4);
  ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

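  Sample usage (a minimal sketch, mirroring the call made in DMPlexCreateOverlap() below):
$    ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
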
  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        numProcs, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 7);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) {ierr = PetscBTSet(neighbors, remotePoints[l].rank);CHKERRQ(ierr);}
  /* Calculate edges */
  ierr = PetscBTClear(neighbors, rank);CHKERRQ(ierr);
  for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  for (proc = 0, n = 0; proc < numProcs; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOwnership"
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This turns the one-sided information in the point SF into a two-sided view: each root learns the ranks of its leaves, and each leaf learns the ranks of the other processes sharing its point.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

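  Sample usage (a minimal sketch; the caller creates the two sections, and overlapSF is an illustrative name):
$    ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
$    ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
$    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
$    ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
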
  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlap"
/*@C
  DMPlexCreateOverlap - Compute the points which must be added to each local mesh to form its overlap (ghost) region, using the two-sided ownership information from DMPlexDistributeOwnership(), and return them as an SF

  Collective on DM

  Input Parameters:
+ dm          - The DM
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. overlapSF - An SF whose leaves are the new local overlap points and whose roots are the owning points on neighboring processes

  Level: developer

.seealso: DMPlexDistributeOwnership()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  IS                 valueIS;
  DMLabel            ovLeafLabel;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank, *neighbors;
  PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
  PetscSection       ovRootSection, ovLeafSection;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
  PetscInt           idx, numRemote;
  PetscMPIInt        rank, numProcs;
  PetscBool          flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Convert to (point, rank) and use actual owners */
  ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
      if (l >= 0) {ovRootPoints[off+p] = remote[l];}
      else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  /* Make process SF */
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
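  /* Each overlap point is a PetscSFNode, i.e. an (index, rank) pair, so it travels as MPIU_2INT */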
  ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
  for (p = 0; p < ovSize; p++) {
    /* Don't import points from yourself */
    if (ovLeafPoints[p].rank == rank) continue;
    ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
  }
  /* Don't import points already in the pointSF */
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  for (numRemote = 0, n = 0; n < numProcs; ++n) {
    PetscInt numPoints;
    ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  for (idx = 0, n = 0; n < numProcs; ++n) {
    IS remoteRootIS;
    PetscInt numPoints;
    const PetscInt *remoteRoots;
    ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0) continue;
    ierr = DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
    ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
  ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeField"
/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

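  Sample usage (a minimal sketch mirroring DMPlexDistributeCoordinates() in this file; the caller creates newSection and newVec beforehand):
$    ierr = VecCreate(comm, &newVec);CHKERRQ(ierr);
$    ierr = DMPlexDistributeField(dm, migrationSF, originalSection, originalVec, newSection, newVec);CHKERRQ(ierr);
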
  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeFieldIS"
/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);

  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeData"
/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype - The type of data
- originalData - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCones"
PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,4);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);

  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
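  /* Map the communicated cone points from the original global numbering back to the new local
     numbering; with IS_GTOLM_MASK, any point without a local counterpart is masked with -1 */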
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCoordinates"
PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *L;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
  if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeLabels"
PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex       *mesh      = (DM_Plex*) dm->data;
  DM_Plex       *pmesh     = (DM_Plex*) (dmParallel)->data;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  DMLabel        next      = mesh->labels, newNext = pmesh->labels;
  PetscInt       numLabels = 0, l;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 6);
  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* Bcast number of labels */
  while (next) {++numLabels; next = next->next;}
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  next = mesh->labels;
  for (l = 0; l < numLabels; ++l) {
    DMLabel   labelNew;
    PetscBool isdepth;

    /* Skip "depth" because it is recreated */
    if (!rank) {ierr = PetscStrcmp(next->name, "depth", &isdepth);CHKERRQ(ierr);}
    ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isdepth) {if (!rank) next = next->next; continue;}
    ierr = DMLabelDistribute(next, partSection, part, renumbering, &labelNew);CHKERRQ(ierr);
    /* Insert into list */
    if (newNext) newNext->next = labelNew;
    else         pmesh->labels = labelNew;
    newNext = labelNew;
    if (!rank) next = next->next;
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupHybrid"
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
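  /* For each stratum, count the received points that were hybrid (point >= pmax) in the original
     numbering; hybrid points come after the normal points in a stratum, so the new threshold is
     pEnd - newmax */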
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    if (pmax < 0) continue;
    ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
    ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
    ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
    for (p = 0; p < n; ++p) {
      const PetscInt point = gpoints[p];

      if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
    }
    if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
    else            pmesh->hybridPointMax[d] = -1;
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupTree"
PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);

  /* Set up tree */
  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
    ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
    }
    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSF"
PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
{
  DM_Plex               *mesh  = (DM_Plex*) dm->data;
  DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
  PetscMPIInt            rank, numProcs;
  MPI_Comm               comm;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,7);

  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
    }
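    /* Ownership claim in two sweeps: broadcast any pre-assigned owners to the leaves, let each
       unclaimed leaf bid with its own rank, reduce the bids with MAXLOC so a unique winner emerges
       at every root, and broadcast the winners back to the leaves */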
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC ignores us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints,    &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
  }
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistribute"
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm  - The original DMPlex object
. partitioner - The partitioning package, or NULL for the default
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, dmParallel is set to NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
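
  Sample usage (a minimal sketch; dmDist is an illustrative name):
$    ierr = DMPlexDistribute(dm, NULL, 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {ierr = DMDestroy(&dm);CHKERRQ(ierr); dm = dmDist;}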

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
  MPI_Comm               comm;
  PetscInt               dim, numRemoteRanks;
  IS                     origCellPart=NULL,        origPart=NULL,        cellPart,        part;
  PetscSection           origCellPartSection=NULL, origPartSection=NULL, cellPartSection, partSection;
  PetscSFNode           *remoteRanks;
  PetscSF                partSF, pointSF;
  ISLocalToGlobalMapping renumbering;
  PetscBool              flg;
  PetscMPIInt            rank, numProcs, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,4);
  PetscValidPointer(dmParallel,5);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);

  *dmParallel = NULL;
  if (numProcs == 1) PetscFunctionReturn(0);

  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
  ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
  if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
  ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(comm, &origCellPartSection);CHKERRQ(ierr);
  ierr = PetscPartitionerPartition(mesh->partitioner, dm, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, cellPartSection, &cellPart, origCellPartSection, &origCellPart);CHKERRQ(ierr);
  /* Create SF assuming a serial partition for all processes: Could check for IS length here */
  if (!rank) numRemoteRanks = numProcs;
  else       numRemoteRanks = 0;
  ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
  for (p = 0; p < numRemoteRanks; ++p) {
    remoteRanks[p].rank  = p;
    remoteRanks[p].index = 0;
  }
  ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
    if (origCellPart) {
      ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
      ierr = PetscSectionView(origCellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = ISView(origCellPart, NULL);CHKERRQ(ierr);
    }
    ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
  }
  /* Close the partition over the mesh */
  ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
  /* Create new mesh */
  ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
  ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  pmesh = (DM_Plex*) (*dmParallel)->data;
  pmesh->useAnchors = mesh->useAnchors;

  /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
  ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(part, NULL);CHKERRQ(ierr);
    ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);

  ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
  ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
  ierr = DMPlexDistributeLabels(dm, pointSF, partSection, part, renumbering, *dmParallel);CHKERRQ(ierr);
  ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
  ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
  if (origCellPart) {
    ierr = DMPlexCreatePartitionClosure(dm, origCellPartSection, origCellPart, &origPartSection, &origPart);CHKERRQ(ierr);
  }
  ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, origPartSection, origPart, *dmParallel);CHKERRQ(ierr);

  /* Cleanup Partition */
  ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  if (origCellPart) {
    ierr = PetscSectionDestroy(&origPartSection);CHKERRQ(ierr);
    ierr = ISDestroy(&origPart);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&origCellPartSection);CHKERRQ(ierr);
    ierr = ISDestroy(&origCellPart);CHKERRQ(ierr);
  }
  /* Copy BC */
  ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Cleanup */
  if (sf) {*sf = pointSF;}
  else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
  ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}