/* xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision d995df53d0d748e171cf58e67a73de119140b661) */
#include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseCone"
/*@
  DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first

  Input Parameters:
+ dm      - The DM object
- useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useCone = useCone;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseCone"
/*@
  DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useCone - Flag to use the cone first

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useCone, 2);
  *useCone = mesh->useCone;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
/*@
  DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure

  Input Parameters:
+ dm      - The DM object
- useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useClosure = useClosure;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
/*@
  DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useClosure - Flag to use the closure

  Level: intermediate

  Notes:
$     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
$     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
$     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE

.seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useClosure, 2);
  *useClosure = mesh->useClosure;
  PetscFunctionReturn(0);
}

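/* A minimal usage sketch (illustrative only, not part of the library source): choosing
   the adjacency pattern before distribution or matrix preallocation. The flag settings
   follow the FEM/FVM table in the manpages above; `dm' is assumed to be a valid DMPlex
   and error checking is elided.

     PetscBool useCone, useClosure;

     ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);    // FEM: q \in closure(star(p))
     ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);

     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);
     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);
*/
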
#undef __FUNCT__
#define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
/*@
  DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.

  Input Parameters:
+ dm      - The DM object
- useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexGetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  mesh->useAnchors = useAnchors;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
/*@
  DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.

  Input Parameter:
. dm      - The DM object

  Output Parameter:
. useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.

  Level: intermediate

.seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
@*/
PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
{
  DM_Plex *mesh = (DM_Plex *) dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidIntPointer(useAnchors, 2);
  *useAnchors = mesh->useAnchors;
  PetscFunctionReturn(0);
}

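/* A minimal usage sketch (illustrative only): substituting anchors for constrained
   points in adjacency computations. It assumes anchors were registered beforehand
   with DMPlexSetAnchors(); error checking is elided.

     PetscBool useAnchors;

     ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);
     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);
*/
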
#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *cone = NULL;
  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  /* Loop over p itself (c == 0) and every cone point of p */
  for (c = 0; c <= coneSize; ++c) {
    const PetscInt  point   = !c ? p : cone[c-1];
    const PetscInt *support = NULL;
    PetscInt        supportSize, s, q;

    ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
    ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
    for (s = 0; s < supportSize; ++s) {
      /* Linear search; the comma expression appends support[s] only if it was not already found */
      for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
        if (support[s] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: adjacency allocation (%D) exceeded", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
{
  const PetscInt *support = NULL;
  PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
  /* Loop over p itself (s == 0) and every support point of p */
  for (s = 0; s <= supportSize; ++s) {
    const PetscInt  point = !s ? p : support[s-1];
    const PetscInt *cone  = NULL;
    PetscInt        coneSize, c, q;

    ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
    ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
    for (c = 0; c < coneSize; ++c) {
      /* Linear search; the comma expression appends cone[c] only if it was not already found */
      for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
        if (cone[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: adjacency allocation (%D) exceeded", maxAdjSize);
    }
  }
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
{
  PetscInt      *star = NULL;
  PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  for (s = 0; s < starSize*2; s += 2) {
    const PetscInt *closure = NULL;
    PetscInt        closureSize, c, q;

    ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
    for (c = 0; c < closureSize*2; c += 2) {
      /* Linear search; the comma expression appends closure[c] only if it was not already found */
      for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
        if (closure[c] == adj[q]) break;
      }
      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: adjacency allocation (%D) exceeded", maxAdjSize);
    }
    ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
  }
  ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
  *adjSize = numAdj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency_Internal"
PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
{
  static PetscInt asiz = 0;
  PetscInt maxAnchors = 1;
  PetscInt aStart = -1, aEnd = -1;
  PetscInt maxAdjSize;
  PetscSection aSec = NULL;
  IS aIS = NULL;
  const PetscInt *anchors;
  PetscErrorCode  ierr;

  PetscFunctionBeginHot;
  if (useAnchors) {
    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
    if (aSec) {
      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
      maxAnchors = PetscMax(1,maxAnchors);
      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
    }
  }
  if (!*adj) {
    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;

    ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
    ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
    /* Upper bound on adjacency size from a geometric series over the maximum cone/support sizes */
    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
    asiz *= maxAnchors;
    asiz  = PetscMin(asiz,pEnd-pStart);
    ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
  }
  if (*adjSize < 0) *adjSize = asiz;
  maxAdjSize = *adjSize;
  if (useTransitiveClosure) {
    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
  } else if (useCone) {
    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  } else {
    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
  }
  if (useAnchors && aSec) {
    /* Replace every anchored (constrained) point in adj[] by its anchor points, compacting the array as we go */
    PetscInt origSize = *adjSize;
    PetscInt numAdj = origSize;
    PetscInt i = 0, j;
    PetscInt *orig = *adj;

    while (i < origSize) {
      PetscInt p = orig[i];
      PetscInt aDof = 0;

      if (p >= aStart && p < aEnd) {
        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
      }
      if (aDof) {
        PetscInt aOff;
        PetscInt s, q;

        /* Remove the constrained point itself */
        for (j = i + 1; j < numAdj; j++) {
          orig[j - 1] = orig[j];
        }
        origSize--;
        numAdj--;
        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
        /* Append its anchors, avoiding duplicates */
        for (s = 0; s < aDof; ++s) {
          for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
            if (anchors[aOff+s] == orig[q]) break;
          }
          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh: adjacency allocation (%D) exceeded", maxAdjSize);
        }
      }
      else {
        i++;
      }
    }
    *adjSize = numAdj;
    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexGetAdjacency"
/*@
  DMPlexGetAdjacency - Return all points adjacent to the given point

  Input Parameters:
+ dm - The DM object
. p  - The point
. adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
- adj - Either NULL so that the array is allocated, or an existing array with size adjSize

  Output Parameters:
+ adjSize - The number of adjacent points
- adj - The adjacent points

  Level: advanced

  Notes: The user must PetscFree the adj array if it was not passed in.

.seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
@*/
PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
{
  DM_Plex       *mesh = (DM_Plex *) dm->data;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(adjSize,3);
  PetscValidPointer(adj,4);
  ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
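
/* A minimal usage sketch (illustrative only): querying the adjacency of every point
   in the chart. Passing adj = NULL makes the first call allocate the work array,
   which is then reused and must be freed by the caller, per the manpage above.
   Error checking is elided.

     PetscInt *adj = NULL;
     PetscInt  pStart, pEnd, p, a;

     ierr = DMPlexGetChart(dm, &pStart, &pEnd);
     for (p = pStart; p < pEnd; ++p) {
       PetscInt adjSize = PETSC_DETERMINE;

       ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);
       for (a = 0; a < adjSize; ++a) {
         ierr = PetscPrintf(PETSC_COMM_SELF, "%D adj %D\n", p, adj[a]);
       }
     }
     ierr = PetscFree(adj);
*/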
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
/*@
  DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity

  Collective on DM

  Input Parameters:
+ dm              - The DM
. sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point

  Output Parameters:
+ processRanks - A list of process neighbors, or NULL
- sfProcess    - An SF encoding the two-sided process connectivity, or NULL

  Level: developer

.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
@*/
PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
{
  const PetscSFNode *remotePoints;
  PetscInt          *localPointsNew;
  PetscSFNode       *remotePointsNew;
  const PetscInt    *nranks;
  PetscInt          *ranksNew;
  PetscBT            neighbors;
  PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
  PetscMPIInt        numProcs, proc, rank;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
  if (processRanks) {PetscValidPointer(processRanks, 7);}
  if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
  ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
  ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
  /* Compute root-to-leaf process connectivity */
  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-neighbor process connectivity */
  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt ndof, noff, n;

    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
  }
  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
  /* Compute leaf-to-root process connectivity */
  for (l = 0; l < numLeaves; ++l) {ierr = PetscBTSet(neighbors, remotePoints[l].rank);CHKERRQ(ierr);}
  /* Calculate edges */
  ierr = PetscBTClear(neighbors, rank);CHKERRQ(ierr);
  for (proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
  for (proc = 0, n = 0; proc < numProcs; ++proc) {
    if (PetscBTLookup(neighbors, proc)) {
      ranksNew[n]              = proc;
      localPointsNew[n]        = proc;
      remotePointsNew[n].index = rank;
      remotePointsNew[n].rank  = proc;
      ++n;
    }
  }
  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
  if (sfProcess) {
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeOwnership"
/*@
  DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided (root and leaf) information from the one-sided point SF.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameters:
+ rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Level: developer

.seealso: DMPlexCreateOverlap()
@*/
PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
{
  MPI_Comm        comm;
  PetscSF         sfPoint;
  const PetscInt *rootdegree;
  PetscInt       *myrank, *remoterank;
  PetscInt        pStart, pEnd, p, nedges;
  PetscMPIInt     rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Compute number of leaves for each root */
  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  /* Gather rank of each leaf to root */
  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
  ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
  ierr = PetscFree(myrank);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
  /* Distribute remote ranks to leaves */
  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

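/* A minimal usage sketch (illustrative only): the caller creates the two Sections,
   and the routine fills them in and creates the two ISs; error checking is elided.

     PetscSection rootSection, leafSection;
     IS           rootrank, leafrank;

     ierr = PetscSectionCreate(comm, &rootSection);
     ierr = PetscSectionCreate(comm, &leafSection);
     ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);
*/
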
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlap"
/*@C
  DMPlexCreateOverlap - Compute an SF describing the overlap (ghost) points to be received from neighboring processes, using the two-sided ownership information from DMPlexDistributeOwnership().

  Collective on DM

  Input Parameters:
+ dm          - The DM
. rootSection - The number of leaves for a given root point
. rootrank    - The rank of each edge into the root point
. leafSection - The number of processes sharing a given leaf point
- leafrank    - The rank of each process sharing a leaf point

  Output Parameter:
. overlapSF - An SF mapping the new overlap points to their owner points on neighboring processes

  Level: developer

.seealso: DMPlexDistributeOwnership()
@*/
PetscErrorCode DMPlexCreateOverlap(DM dm, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
{
  MPI_Comm           comm;
  DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
  PetscSF            sfPoint, sfProc;
  IS                 valueIS;
  DMLabel            ovLeafLabel;
  const PetscSFNode *remote;
  const PetscInt    *local;
  const PetscInt    *nrank, *rrank, *neighbors;
  PetscSFNode       *ovRootPoints, *ovLeafPoints, *remotePoints;
  PetscSection       ovRootSection, ovLeafSection;
  PetscInt          *adj = NULL;
  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l, numNeighbors, n, ovSize;
  PetscInt           idx, numRemote;
  PetscMPIInt        rank, numProcs;
  PetscBool          flg;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
  /* Handle leaves: shared with the root point */
  for (l = 0; l < nleaves; ++l) {
    PetscInt adjSize = PETSC_DETERMINE, a;

    ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
  }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Convert to (point, rank) and use actual owners */
  ierr = PetscSectionCreate(comm, &ovRootSection);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) ovRootSection, "Overlap Root Section");CHKERRQ(ierr);
  ierr = PetscSectionSetChart(ovRootSection, 0, numProcs);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(ovAdjByRank, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(ovAdjByRank, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(ovRootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(ovRootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(ovRootSection, &ovSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ovSize, &ovRootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(ovRootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(ovAdjByRank, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);
      if (l >= 0) {ovRootPoints[off+p] = remote[l];}
      else        {ovRootPoints[off+p].index = points[p]; ovRootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  /* Make process SF */
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscSFView(sfProc, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Communicate overlap */
  ierr = PetscSectionCreate(comm, &ovLeafSection);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) ovLeafSection, "Overlap Leaf Section");CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, sfProc, ovRootSection, MPIU_2INT, ovRootPoints, ovLeafSection, (void**) &ovLeafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(ovLeafSection, &ovSize);CHKERRQ(ierr);
  for (p = 0; p < ovSize; p++) {
    /* Don't import points from yourself */
    if (ovLeafPoints[p].rank == rank) continue;
    ierr = DMLabelSetValue(ovLeafLabel, ovLeafPoints[p].index, ovLeafPoints[p].rank);CHKERRQ(ierr);
  }
  /* Don't import points already in the pointSF */
  for (l = 0; l < nleaves; ++l) {
    ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  for (numRemote = 0, n = 0; n < numProcs; ++n) {
    PetscInt numPoints;
    ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  for (idx = 0, n = 0; n < numProcs; ++n) {
    IS remoteRootIS;
    PetscInt numPoints;
    const PetscInt *remoteRoots;
    ierr = DMLabelGetStratumSize(ovLeafLabel, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0) continue;
    ierr = DMLabelGetStratumIS(ovLeafLabel, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), overlapSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*overlapSF, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
    ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
  ierr = PetscFree(ovRootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&ovRootSection);CHKERRQ(ierr);
  ierr = PetscFree(ovLeafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&ovLeafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

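/* Continuing the ownership sketch above (illustrative only, error checking elided):
   the two Sections and ISs feed directly into the overlap computation, and the
   resulting SF is what DMPlexCreateOverlapMigrationSF() below consumes.

     PetscSF overlapSF;

     ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);
     ierr = ISDestroy(&rootrank);
     ierr = ISDestroy(&leafrank);
     ierr = PetscSectionDestroy(&rootSection);
     ierr = PetscSectionDestroy(&leafSection);
*/
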
#undef __FUNCT__
#define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, numProcs;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

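/* Continuing the sketch above (illustrative only): the overlap SF is turned into a
   migration SF that maps every local point, plus the incoming overlap points, back
   to its original owner; error checking is elided.

     PetscSF migrationSF;

     ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);
*/
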
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeField"
/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

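/* A minimal usage sketch (illustrative only), modeled on DMPlexDistributeCoordinates()
   below: the caller creates the output Section and Vec, and this routine sets the
   Vec's size and type and fills both. Error checking is elided; `migrationSF' is
   assumed to come from mesh distribution.

     PetscSection newSection;
     Vec          newVec;

     ierr = PetscSectionCreate(comm, &newSection);
     ierr = VecCreate(comm, &newVec);
     ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);
*/
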
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeFieldIS"
/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);

  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeData"
/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype - The type of data
- originalData - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

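/* A minimal usage sketch (illustrative only), mirroring the call in
   DMPlexCreateOverlap() above: raw (index, rank) pairs are moved as MPIU_2INT, the
   output Section is created by the caller, and the output array is allocated inside
   this routine and must be freed by the caller. Error checking is elided; `sf',
   `rootSec', and `rootData' are assumed to exist.

     PetscSection leafSec;
     PetscSFNode *leafData;

     ierr = PetscSectionCreate(comm, &leafSec);
     ierr = DMPlexDistributeData(dm, sf, rootSec, MPIU_2INT, rootData, leafSec, (void**) &leafData);
     // ... use leafData ...
     ierr = PetscFree(leafData);
     ierr = PetscSectionDestroy(&leafSec);
*/
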
#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCones"
PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
  MPI_Comm               comm;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
  PetscBool              flg;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel,4);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);

  /* Distribute cone section */
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    PetscInt  p;
    PetscBool valid = PETSC_TRUE;
    for (p = 0; p < newConesSize; ++p) {
      if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
    }
    if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
  }
#endif
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeCoordinates"
PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm         comm;
  PetscSection     originalCoordSection, newCoordSection;
  Vec              originalCoordinates, newCoordinates;
  PetscInt         bs;
  const char      *name;
  const PetscReal *maxCell, *L;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 3);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
  if (originalCoordinates) {
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
  if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeLabels"
/* Here we are assuming that process 0 always has everything */
PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt       numLabels, numLocalLabels, l;
  PetscBool      hasLabels = PETSC_FALSE;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);

  /* Everyone must have either the same number of labels, or none */
  ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
  numLabels = numLocalLabels;
  if (numLocalLabels > 1) hasLabels = PETSC_TRUE;
  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  if (hasLabels && (numLabels != numLocalLabels)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Invalid number of labels %D != %D", numLocalLabels, numLabels);
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel     label = NULL, labelNew = NULL;
    PetscBool   isdepth = PETSC_FALSE;

    if (hasLabels) {
      ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
      /* Skip "depth" because it is recreated */
      ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
    }
    ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
    if (isdepth) continue;
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
    ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupHybrid"
PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *mesh  = (DM_Plex*) dm->data;
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  const PetscInt *gpoints;
  PetscInt        dim, depth, n, d;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmParallel, 4);

  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Setup hybrid structure */
  for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
  ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
  for (d = 0; d <= dim; ++d) {
    PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

    if (pmax < 0) continue;
    ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
    ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
    ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
    for (p = 0; p < n; ++p) {
      const PetscInt point = gpoints[p];

      if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
    }
    if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
    else            pmesh->hybridPointMax[d] = -1;
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPlexDistributeSetupTree"
PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  MPI_Comm        comm;
  DM              refTree;
  PetscSection    origParentSection, newParentSection;
  PetscInt        *origParents, *origChildIDs;
  PetscBool       flg;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);

  /* Set up tree */
  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
  if (origParentSection) {
    PetscInt        pStart, pEnd;
    PetscInt        *newParents, *newChildIDs;
    PetscInt        *remoteOffsetsParents, newParentSize;
    PetscSF         parentSF;

    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
    ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
    }
    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

1209 #undef __FUNCT__
1210 #define __FUNCT__ "DMPlexDistributeSF"
1211 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, PetscSection partSection, IS part, PetscSection origPartSection, IS origPart, DM dmParallel)
1212 {
1213   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1214   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1215   PetscMPIInt            rank, numProcs;
1216   MPI_Comm               comm;
1217   PetscErrorCode         ierr;
1218 
1219   PetscFunctionBegin;
1220   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1221   PetscValidPointer(dmParallel,7);
1222 
1223   /* Create point SF for parallel mesh */
1224   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1225   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1226   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1227   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1228   {
1229     const PetscInt *leaves;
1230     PetscSFNode    *remotePoints, *rowners, *lowners;
1231     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1232     PetscInt        pStart, pEnd;
1233 
1234     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1235     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1236     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1237     for (p=0; p<numRoots; p++) {
1238       rowners[p].rank  = -1;
1239       rowners[p].index = -1;
1240     }
1241     if (origPart) {
1242       /* Make sure points in the original partition are not assigned to other procs */
1243       const PetscInt *origPoints;
1244 
1245       ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
1246       for (p = 0; p < numProcs; ++p) {
1247         PetscInt dof, off, d;
1248 
1249         ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
1250         ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
1251         for (d = off; d < off+dof; ++d) {
1252           rowners[origPoints[d]].rank = p;
1253         }
1254       }
1255       ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
1256     }
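    /* Ownership is settled by a two-phase handshake: current claims are broadcast to the
       leaves, every unclaimed leaf bids (rank, leaf index), and an MPI_MAXLOC reduction
       picks a unique winner for each root; entries flagged negative lose every comparison. */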
1257     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1258     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1259     for (p = 0; p < numLeaves; ++p) {
1260       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1261         lowners[p].rank  = rank;
1262         lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC ignores us */
1264         lowners[p].rank  = -2;
1265         lowners[p].index = -2;
1266       }
1267     }
    for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction, so flag them so that MAXLOC ignores their values */
1269       rowners[p].rank  = -3;
1270       rowners[p].index = -3;
1271     }
1272     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1273     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1274     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1275     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1276     for (p = 0; p < numLeaves; ++p) {
1277       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1278       if (lowners[p].rank != rank) ++numGhostPoints;
1279     }
1280     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1281     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1282     for (p = 0, gp = 0; p < numLeaves; ++p) {
1283       if (lowners[p].rank != rank) {
1284         ghostPoints[gp]        = leaves ? leaves[p] : p;
1285         remotePoints[gp].rank  = lowners[p].rank;
1286         remotePoints[gp].index = lowners[p].index;
1287         ++gp;
1288       }
1289     }
1290     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1291     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1292     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1293   }
1294   pmesh->useCone    = mesh->useCone;
1295   pmesh->useClosure = mesh->useClosure;
1296   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1297   PetscFunctionReturn(0);
1298 }
1299 
1300 #undef __FUNCT__
1301 #define __FUNCT__ "DMPlexDistribute"
1302 /*@C
1303   DMPlexDistribute - Distributes the mesh and any associated sections.
1304 
  Collective on DM
1306 
  Input Parameters:
1308 + dm  - The original DMPlex object
1309 - overlap - The overlap of partitions, 0 is the default
1310 
  Output Parameters:
+ sf - The PetscSF used for point distribution (pass NULL if it is not needed)
- dmParallel - The distributed DMPlex object, or NULL
1314 
  Note: If the mesh was not distributed, e.g. when run on a single process, dmParallel is set to NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh, as in the sketch below.
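
  A minimal sketch of typical usage (it assumes dm already holds the serial mesh; the adjacency
  calls pick the FEM-style definition from the manual page of DMPlexSetAdjacencyUseCone()):
.vb
  DM dmDist = NULL;

  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
  if (dmDist) { /* NULL when nothing was distributed, e.g. on a single process */
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }
.ve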
1320 
1321   Level: intermediate
1322 
1323 .keywords: mesh, elements
1324 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1325 @*/
1326 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1327 {
1328   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1329   MPI_Comm               comm;
1330   PetscInt               dim, numRemoteRanks, nroots, nleaves;
1331   DM                     dmOverlap;
1332   IS                     cellPart,        part;
1333   PetscSection           cellPartSection, partSection;
1334   PetscSFNode           *remoteRanks, *newRemote;
1335   const PetscSFNode     *oldRemote;
1336   PetscSF                partSF, pointSF, overlapPointSF, overlapSF;
1337   ISLocalToGlobalMapping renumbering;
1338   PetscBool              flg;
1339   PetscMPIInt            rank, numProcs, p;
1340   PetscErrorCode         ierr;
1341 
1342   PetscFunctionBegin;
1343   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,3);
  PetscValidPointer(dmParallel,4);
1346 
1347   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1348   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1349   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1350   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1351 
1352   *dmParallel = NULL;
1353   if (numProcs == 1) PetscFunctionReturn(0);
1354 
1355   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1356   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1357   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1358   if (overlap > 1) SETERRQ(comm, PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
1359   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1360   ierr = PetscPartitionerPartition(mesh->partitioner, dm, PETSC_FALSE, cellPartSection, &cellPart, NULL, NULL);CHKERRQ(ierr);
1361   /* Create SF assuming a serial partition for all processes: Could check for IS length here */
1362   if (!rank) numRemoteRanks = numProcs;
1363   else       numRemoteRanks = 0;
1364   ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
1365   for (p = 0; p < numRemoteRanks; ++p) {
1366     remoteRanks[p].rank  = p;
1367     remoteRanks[p].index = 0;
1368   }
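  /* The graph is a star: rank 0 carries one leaf per process, each pointing at root 0 on the
     corresponding rank, so the serial partition data can be pushed from rank 0 to everyone. */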
1369   ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
1370   ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
1371   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1372   if (flg) {
1373     ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
1374     ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1375     ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
1376     ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
1377   }
1378   /* Close the partition over the mesh */
1379   ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
1380   /* Create new mesh */
1381   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1382   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1383   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1384   pmesh = (DM_Plex*) (*dmParallel)->data;
1385   pmesh->useAnchors = mesh->useAnchors;
1386 
1387   /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
1388   ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
1389   if (flg) {
1390     ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
1391     ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1392     ierr = ISView(part, NULL);CHKERRQ(ierr);
1393     ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
1394     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1395     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1396   }
1397   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1398 
1399   /* Migrate data to a non-overlapping parallel DM */
1400   ierr = DMPlexDistributeCones(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1401   ierr = DMPlexDistributeCoordinates(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1402   ierr = DMPlexDistributeLabels(dm, pointSF, *dmParallel);CHKERRQ(ierr);
1403   ierr = DMPlexDistributeSetupHybrid(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1404   ierr = DMPlexDistributeSetupTree(dm, pointSF, renumbering, *dmParallel);CHKERRQ(ierr);
1405 
1406   /* Build the point SF without overlap */
1407   ierr = DMPlexDistributeSF(dm, pointSF, partSection, part, NULL, NULL, *dmParallel);CHKERRQ(ierr);
1408   if (flg) {
1409     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1410     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1411   }
1412 
1413   if (overlap > 0) {
1414     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1415     /* Add the partition overlap to the distributed DM */
1416     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1417     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1418     *dmParallel = dmOverlap;
1419     if (flg) {
1420       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1421       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1422     }
1423 
1424     /* Re-map the pointSF to establish the full migration pattern */
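    /* Broadcasting the old remote roots through overlapSF composes the two SFs: the result
       maps each point of the overlapping mesh directly back to its origin in the serial mesh. */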
1425     ierr = PetscSFGetGraph(pointSF, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1426     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1427     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1428     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1429     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1430     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1431     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1432     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1433     ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);
1434     pointSF = overlapPointSF;
1435     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1436   }
1437   /* Cleanup Partition */
1438   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1439   ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
1440   ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
1441   ierr = ISDestroy(&part);CHKERRQ(ierr);
1442   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1443   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1444   /* Copy BC */
1445   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1446   /* Cleanup */
1447   if (sf) {*sf = pointSF;}
1448   else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
1449   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1450   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1451   PetscFunctionReturn(0);
1452 }
1453 
1454 #undef __FUNCT__
1455 #define __FUNCT__ "DMPlexDistributeOverlap"
1456 /*@C
  DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1458 
  Collective on DM
1460 
  Input Parameters:
+ dm  - The non-overlapping distributed DMPlex object
. overlap - The overlap of partitions
- renumbering - The point renumbering from the original distribution of dm
1464 
  Output Parameters:
+ sf - The PetscSF used for point migration (pass NULL if it is not needed)
- dmOverlap - The overlapping distributed DMPlex object
1468 
  Note: This routine is normally invoked from within DMPlexDistribute() when a nonzero overlap
  is requested; the sketch below shows that calling pattern.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.
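
  A minimal sketch of the calling pattern (dmDist and renumbering are assumed to come from the
  initial, non-overlapping distribution step):
.vb
  DM      dmOverlap = NULL;
  PetscSF overlapSF;

  ierr = DMPlexDistributeOverlap(dmDist, 1, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
  ierr = DMDestroy(&dmDist);CHKERRQ(ierr);
  dmDist = dmOverlap;
  ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
.ve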
1474 
1475   Level: intermediate
1476 
1477 .keywords: mesh, elements
1478 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1479 @*/
1480 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1481 {
1482   MPI_Comm               comm;
1483   PetscMPIInt            rank;
1484   PetscSection           rootSection, leafSection;
1485   IS                     rootrank, leafrank;
1486   PetscSection           coneSection;
1487   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1488   PetscSFNode           *ghostRemote;
1489   const PetscSFNode     *overlapRemote;
1490   ISLocalToGlobalMapping overlapRenumbering;
1491   const PetscInt        *renumberingArray, *overlapLocal;
1492   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1493   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1494   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1495   PetscErrorCode         ierr;
1496 
1497   PetscFunctionBegin;
1498   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf, 4);
  PetscValidPointer(dmOverlap, 5);
1501 
1502   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1503   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1504   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1505 
1506   /* Compute point overlap with neighbouring processes on the distributed DM */
1507   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1508   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1509   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1510   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1511   ierr = DMPlexCreateOverlap(dm, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1512   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1513   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1514   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1515   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1516   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1517 
1518   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1519   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1520 
1521   /* Convert cones to global numbering before migrating them */
1522   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1523   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1524   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1525   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1526 
1527   /* Derive the new local-to-global mapping from the old one */
1528   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1529   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1530   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1531   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1532   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1533   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1534 
1535   /* Build the overlapping DM */
1536   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1537   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1538   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1539   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1540   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1541   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1542   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1543 
  /* Build the new point SF by broadcasting the new local point numbers to generate remote root indices */
1545   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1546   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1547   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1548   numGhostPoints = numSharedPoints + numOverlapPoints;
1549   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1550   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1551   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1552   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
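  /* For each migrated point whose root lives on this rank, record its new local number; the
     broadcast below then tells every receiving rank the owner-side index of each ghost point. */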
1553   for (p=0; p<overlapLeaves; p++) {
1554     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1555   }
1556   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1557   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1558   for (idx=0, p=0; p<overlapLeaves; p++) {
1559     if (overlapRemote[p].rank != rank) {
1560       ghostLocal[idx] = overlapLocal[p];
1561       ghostRemote[idx].index = recvPointIDs[p];
1562       ghostRemote[idx].rank = overlapRemote[p].rank;
1563       idx++;
1564     }
1565   }
1566   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1568   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1569   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1570   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1571   /* Cleanup overlap partition */
1572   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1573   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1574   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1575   if (sf) *sf = migrationSF;
1576   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1577   ierr = DMSetFromOptions(*dmOverlap);CHKERRQ(ierr);
1578   PetscFunctionReturn(0);
1579 }
1580