xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 1cf84007adf5c108639c0d32cd590b65feee66a2)
1 #include <petsc/private/dmpleximpl.h>    /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/dmlabelimpl.h>   /*I      "petscdmlabel.h"  I*/
3 
4 /*@C
5   DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6 
7   Input Parameters:
8 + dm      - The DM object
9 . user    - The user callback, may be NULL (to clear the callback)
10 - ctx     - context for callback evaluation, may be NULL
11 
12   Level: advanced
13 
14   Notes:
15      The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16 
17      Any setting here overrides other configuration of DMPlex adjacency determination.
18 
19 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22 {
23   DM_Plex *mesh = (DM_Plex *)dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useradjacency = user;
28   mesh->useradjacencyctx = ctx;
29   PetscFunctionReturn(0);
30 }
31 
32 /*@C
33   DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34 
35   Input Parameter:
36 . dm      - The DM object
37 
38   Output Parameters:
39 + user    - The user callback
40 - ctx     - context for callback evaluation
41 
42   Level: advanced
43 
44 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45 @*/
46 PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47 {
48   DM_Plex *mesh = (DM_Plex *)dm->data;
49 
50   PetscFunctionBegin;
51   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52   if (user) *user = mesh->useradjacency;
53   if (ctx) *ctx = mesh->useradjacencyctx;
54   PetscFunctionReturn(0);
55 }
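/* Example (illustrative sketch, not part of the original source): a hypothetical user
   adjacency callback that reports only the cone points of p, registered with
   DMPlexSetAdjacencyUser().  The names UserConeAdjacency and userdm are placeholders. */
#if 0
static PetscErrorCode UserConeAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
{
  const PetscInt *cone;
  PetscInt        coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBeginUser;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  /* On entry *adjSize is the capacity of adj[]; on exit it is the number of entries used */
  for (c = 0; c < coneSize && c < *adjSize; ++c) adj[c] = cone[c];
  *adjSize = c;
  PetscFunctionReturn(0);
}

/* Registration, done before preallocation or distribution: */
ierr = DMPlexSetAdjacencyUser(userdm, UserConeAdjacency, NULL);CHKERRQ(ierr);
#endif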
56 
57 /*@
58   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
59 
60   Input Parameters:
61 + dm      - The DM object
62 - useCone - Flag to use the cone first
63 
64   Level: intermediate
65 
66   Notes:
67 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
68 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
69 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
70 
71 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
72 @*/
73 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
74 {
75   PetscDS        prob;
76   PetscBool      useClosure;
77   PetscInt       Nf;
78   PetscErrorCode ierr;
79 
80   PetscFunctionBegin;
81   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
82   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
83   if (Nf < 1) {ierr = DMSetNumFields(dm, 1);CHKERRQ(ierr);}
84   ierr = PetscDSGetAdjacency(prob, 0, NULL, &useClosure);CHKERRQ(ierr);
85   ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
86   PetscFunctionReturn(0);
87 }
88 
89 /*@
90   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
91 
92   Input Parameter:
93 . dm      - The DM object
94 
95   Output Parameter:
96 . useCone - Flag to use the cone first
97 
98   Level: intermediate
99 
100   Notes:
101 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
102 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
103 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
104 
105 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
106 @*/
107 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
108 {
109   PetscDS        prob;
110   PetscInt       Nf;
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
115   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
116   if (Nf < 1) {ierr = DMSetNumFields(dm, 1);CHKERRQ(ierr);}
117   ierr = PetscDSGetAdjacency(prob, 0, useCone, NULL);CHKERRQ(ierr);
118   PetscFunctionReturn(0);
119 }
120 
121 /*@
122   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
123 
124   Input Parameters:
125 + dm      - The DM object
126 - useClosure - Flag to use the closure
127 
128   Level: intermediate
129 
130   Notes:
131 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
132 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
133 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
134 
135 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
136 @*/
137 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
138 {
139   PetscDS        prob;
140   PetscBool      useCone;
141   PetscInt       Nf;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
146   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
147   if (Nf < 1) {ierr = DMSetNumFields(dm, 1);CHKERRQ(ierr);}
148   ierr = PetscDSGetAdjacency(prob, 0, &useCone, NULL);CHKERRQ(ierr);
149   ierr = PetscDSSetAdjacency(prob, 0, useCone, useClosure);CHKERRQ(ierr);
150   PetscFunctionReturn(0);
151 }
152 
153 /*@
154   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
155 
156   Input Parameter:
157 . dm      - The DM object
158 
159   Output Parameter:
160 . useClosure - Flag to use the closure
161 
162   Level: intermediate
163 
164   Notes:
165 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
166 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
167 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
168 
169 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
170 @*/
171 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
172 {
173   PetscDS        prob;
174   PetscInt       Nf;
175   PetscErrorCode ierr;
176 
177   PetscFunctionBegin;
178   ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
179   ierr = PetscDSGetNumFields(prob, &Nf);CHKERRQ(ierr);
180   if (Nf < 1) {ierr = DMSetNumFields(dm, 1);CHKERRQ(ierr);}
181   ierr = PetscDSGetAdjacency(prob, 0, NULL, useClosure);CHKERRQ(ierr);
182   PetscFunctionReturn(0);
183 }
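/* Example (illustrative sketch, not part of the original source): selecting the adjacency
   pattern from the table above before distribution or preallocation; userdm is a placeholder. */
#if 0
/* FVM-style adjacency: q \in support(p + cone(p)) */
ierr = DMPlexSetAdjacencyUseCone(userdm, PETSC_TRUE);CHKERRQ(ierr);
ierr = DMPlexSetAdjacencyUseClosure(userdm, PETSC_FALSE);CHKERRQ(ierr);
/* FEM-style adjacency (the default): q \in closure(star(p)) */
ierr = DMPlexSetAdjacencyUseCone(userdm, PETSC_FALSE);CHKERRQ(ierr);
ierr = DMPlexSetAdjacencyUseClosure(userdm, PETSC_TRUE);CHKERRQ(ierr);
#endif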
184 
185 /*@
186   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
187 
188   Input Parameters:
189 + dm      - The DM object
190 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
191 
192   Level: intermediate
193 
194 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
195 @*/
196 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
197 {
198   DM_Plex *mesh = (DM_Plex *) dm->data;
199 
200   PetscFunctionBegin;
201   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
202   mesh->useAnchors = useAnchors;
203   PetscFunctionReturn(0);
204 }
205 
206 /*@
207   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
208 
209   Input Parameter:
210 . dm      - The DM object
211 
212   Output Parameter:
213 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
214 
215   Level: intermediate
216 
217 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
218 @*/
219 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
220 {
221   DM_Plex *mesh = (DM_Plex *) dm->data;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
225   PetscValidIntPointer(useAnchors, 2);
226   *useAnchors = mesh->useAnchors;
227   PetscFunctionReturn(0);
228 }
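/* Example (illustrative sketch, not part of the original source): replacing constrained
   points by their anchors in adjacency computations; userdm is a placeholder. */
#if 0
ierr = DMPlexSetAdjacencyUseAnchors(userdm, PETSC_TRUE);CHKERRQ(ierr);
#endif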
229 
230 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
231 {
232   const PetscInt *cone = NULL;
233   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
234   PetscErrorCode  ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
238   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
239   for (c = 0; c <= coneSize; ++c) {
240     const PetscInt  point   = !c ? p : cone[c-1];
241     const PetscInt *support = NULL;
242     PetscInt        supportSize, s, q;
243 
244     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
245     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
246     for (s = 0; s < supportSize; ++s) {
247       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
248         if (support[s] == adj[q]) break;
249       }
250       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
251     }
252   }
253   *adjSize = numAdj;
254   PetscFunctionReturn(0);
255 }
256 
257 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
258 {
259   const PetscInt *support = NULL;
260   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
261   PetscErrorCode  ierr;
262 
263   PetscFunctionBeginHot;
264   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
265   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
266   for (s = 0; s <= supportSize; ++s) {
267     const PetscInt  point = !s ? p : support[s-1];
268     const PetscInt *cone  = NULL;
269     PetscInt        coneSize, c, q;
270 
271     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
272     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
273     for (c = 0; c < coneSize; ++c) {
274       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
275         if (cone[c] == adj[q]) break;
276       }
277       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
278     }
279   }
280   *adjSize = numAdj;
281   PetscFunctionReturn(0);
282 }
283 
284 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
285 {
286   PetscInt      *star = NULL;
287   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
288   PetscErrorCode ierr;
289 
290   PetscFunctionBeginHot;
291   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
292   for (s = 0; s < starSize*2; s += 2) {
293     const PetscInt *closure = NULL;
294     PetscInt        closureSize, c, q;
295 
296     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
297     for (c = 0; c < closureSize*2; c += 2) {
298       for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
299         if (closure[c] == adj[q]) break;
300       }
301       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
302     }
303     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
304   }
305   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
306   *adjSize = numAdj;
307   PetscFunctionReturn(0);
308 }
309 
310 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
311 {
312   static PetscInt asiz = 0;
313   PetscInt maxAnchors = 1;
314   PetscInt aStart = -1, aEnd = -1;
315   PetscInt maxAdjSize;
316   PetscSection aSec = NULL;
317   IS aIS = NULL;
318   const PetscInt *anchors;
319   DM_Plex *mesh = (DM_Plex *)dm->data;
320   PetscErrorCode  ierr;
321 
322   PetscFunctionBeginHot;
323   if (useAnchors) {
324     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
325     if (aSec) {
326       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
327       maxAnchors = PetscMax(1,maxAnchors);
328       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
329       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
330     }
331   }
332   if (!*adj) {
333     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
334 
335     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
336     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
337     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
338     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
339     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
340     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
341     asiz *= maxAnchors;
342     asiz  = PetscMin(asiz,pEnd-pStart);
343     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
344   }
345   if (*adjSize < 0) *adjSize = asiz;
346   maxAdjSize = *adjSize;
347   if (mesh->useradjacency) {
348     ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
349   } else if (useTransitiveClosure) {
350     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
351   } else if (useCone) {
352     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
353   } else {
354     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
355   }
356   if (useAnchors && aSec) {
357     PetscInt origSize = *adjSize;
358     PetscInt numAdj = origSize;
359     PetscInt i = 0, j;
360     PetscInt *orig = *adj;
361 
362     while (i < origSize) {
363       PetscInt p = orig[i];
364       PetscInt aDof = 0;
365 
366       if (p >= aStart && p < aEnd) {
367         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
368       }
369       if (aDof) {
370         PetscInt aOff;
371         PetscInt s, q;
372 
373         for (j = i + 1; j < numAdj; j++) {
374           orig[j - 1] = orig[j];
375         }
376         origSize--;
377         numAdj--;
378         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
379         for (s = 0; s < aDof; ++s) {
380           for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
381             if (anchors[aOff+s] == orig[q]) break;
382           }
383           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
384         }
385       }
386       else {
387         i++;
388       }
389     }
390     *adjSize = numAdj;
391     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
392   }
393   PetscFunctionReturn(0);
394 }
395 
396 /*@
397   DMPlexGetAdjacency - Return all points adjacent to the given point
398 
399   Input Parameters:
400 + dm - The DM object
401 . p  - The point
402 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
403 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
404 
405   Output Parameters:
406 + adjSize - The number of adjacent points
407 - adj - The adjacent points
408 
409   Level: advanced
410 
411   Notes: The user must PetscFree the adj array if it was not passed in.
412 
413 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
414 @*/
415 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
416 {
417   PetscBool      useCone, useClosure, useAnchors;
418   PetscErrorCode ierr;
419 
420   PetscFunctionBeginHot;
421   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
422   PetscValidPointer(adjSize,3);
423   PetscValidPointer(adj,4);
424   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
425   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
426   ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
427   ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
428   PetscFunctionReturn(0);
429 }
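/* Example (illustrative sketch, not part of the original source): querying the adjacency
   of a point p and letting the routine allocate the array; userdm and p are placeholders. */
#if 0
PetscInt  adjSize = PETSC_DETERMINE, a;
PetscInt *adj     = NULL;                /* NULL, so the array is allocated for us */

ierr = DMPlexGetAdjacency(userdm, p, &adjSize, &adj);CHKERRQ(ierr);
for (a = 0; a < adjSize; ++a) {
  /* use adj[a] */
}
ierr = PetscFree(adj);CHKERRQ(ierr);     /* required because adj was not passed in */
#endif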
430 
431 /*@
432   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
433 
434   Collective on DM
435 
436   Input Parameters:
437 + dm              - The DM
438 . sfPoint         - The PetscSF which encodes point connectivity
    . rootRankSection - The number of leaves for a given root point
    . rootRanks       - The rank of each edge into the root point
    . leafRankSection - The number of processes sharing a given leaf point
    - leafRanks       - The rank of each process sharing a leaf point
439 
440   Output Parameters:
441 + processRanks - A list of process neighbors, or NULL
442 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
443 
444   Level: developer
445 
446 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
447 @*/
448 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
449 {
450   const PetscSFNode *remotePoints;
451   PetscInt          *localPointsNew;
452   PetscSFNode       *remotePointsNew;
453   const PetscInt    *nranks;
454   PetscInt          *ranksNew;
455   PetscBT            neighbors;
456   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
457   PetscMPIInt        size, proc, rank;
458   PetscErrorCode     ierr;
459 
460   PetscFunctionBegin;
461   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
462   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
463   if (processRanks) {PetscValidPointer(processRanks, 7);}
464   if (sfProcess)    {PetscValidPointer(sfProcess, 8);}
465   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
466   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
467   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
468   ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
469   ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
470   /* Compute root-to-leaf process connectivity */
471   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
472   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
473   for (p = pStart; p < pEnd; ++p) {
474     PetscInt ndof, noff, n;
475 
476     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
477     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
478     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
479   }
480   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
481   /* Compute leaf-to-neighbor process connectivity */
482   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
483   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
484   for (p = pStart; p < pEnd; ++p) {
485     PetscInt ndof, noff, n;
486 
487     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
488     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
489     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
490   }
491   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
492   /* Compute leaf-to-root process connectivity */
493   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
494   /* Calculate edges */
495   PetscBTClear(neighbors, rank);
496   for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
497   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
498   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
499   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
500   for(proc = 0, n = 0; proc < size; ++proc) {
501     if (PetscBTLookup(neighbors, proc)) {
502       ranksNew[n]              = proc;
503       localPointsNew[n]        = proc;
504       remotePointsNew[n].index = rank;
505       remotePointsNew[n].rank  = proc;
506       ++n;
507     }
508   }
509   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
510   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
511   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
512   if (sfProcess) {
513     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
514     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
515     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
516     ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
517   }
518   PetscFunctionReturn(0);
519 }
520 
521 /*@
522   DMPlexDistributeOwnership - Compute owner information for shared points. This produces the two-sided (root and leaf) ownership information for the point SF.
523 
524   Collective on DM
525 
526   Input Parameter:
527 . dm - The DM
528 
529   Output Parameters:
530 + rootSection - The number of leaves for a given root point
531 . rootrank    - The rank of each edge into the root point
532 . leafSection - The number of processes sharing a given leaf point
533 - leafrank    - The rank of each process sharing a leaf point
534 
535   Level: developer
536 
537 .seealso: DMPlexCreateOverlap()
538 @*/
539 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
540 {
541   MPI_Comm        comm;
542   PetscSF         sfPoint;
543   const PetscInt *rootdegree;
544   PetscInt       *myrank, *remoterank;
545   PetscInt        pStart, pEnd, p, nedges;
546   PetscMPIInt     rank;
547   PetscErrorCode  ierr;
548 
549   PetscFunctionBegin;
550   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
551   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
552   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
553   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
554   /* Compute number of leaves for each root */
555   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
556   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
557   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
558   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
559   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
560   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
561   /* Gather rank of each leaf to root */
562   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
563   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
564   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
565   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
566   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
567   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
568   ierr = PetscFree(myrank);CHKERRQ(ierr);
569   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
570   /* Distribute remote ranks to leaves */
571   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
572   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
573   PetscFunctionReturn(0);
574 }
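/* Example (illustrative sketch, not part of the original source): computing two-sided
   ownership information for the shared points of userdm (a placeholder name). */
#if 0
PetscSection rootSection, leafSection;
IS           rootrank, leafrank;

ierr = PetscSectionCreate(PetscObjectComm((PetscObject) userdm), &rootSection);CHKERRQ(ierr);
ierr = PetscSectionCreate(PetscObjectComm((PetscObject) userdm), &leafSection);CHKERRQ(ierr);
ierr = DMPlexDistributeOwnership(userdm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
/* ... use the sections and ISs, e.g. as input to DMPlexCreateOverlap() ... */
ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
#endif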
575 
576 /*@C
577   DMPlexCreateOverlap - Compute the partition overlap, expressed as a DMLabel pairing each overlap point with the remote rank that should receive it
578 
579   Collective on DM
580 
581   Input Parameters:
582 + dm          - The DM
583 . levels      - Number of overlap levels
584 . rootSection - The number of leaves for a given root point
585 . rootrank    - The rank of each edge into the root point
586 . leafSection - The number of processes sharing a given leaf point
587 - leafrank    - The rank of each process sharing a leaf point
588 
589   Output Parameter:
590 . ovLabel     - DMLabel containing remote overlap contributions as point/rank pairings
591 
592   Level: developer
593 
594 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
595 @*/
596 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
597 {
598   MPI_Comm           comm;
599   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
600   PetscSF            sfPoint, sfProc;
601   const PetscSFNode *remote;
602   const PetscInt    *local;
603   const PetscInt    *nrank, *rrank;
604   PetscInt          *adj = NULL;
605   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
606   PetscMPIInt        rank, size;
607   PetscBool          useCone, useClosure, flg;
608   PetscErrorCode     ierr;
609 
610   PetscFunctionBegin;
611   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
612   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
613   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
614   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
615   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
616   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
617   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
618   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
619   /* Handle leaves: shared with the root point */
620   for (l = 0; l < nleaves; ++l) {
621     PetscInt adjSize = PETSC_DETERMINE, a;
622 
623     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
624     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
625   }
626   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
627   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
628   /* Handle roots */
629   for (p = pStart; p < pEnd; ++p) {
630     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
631 
632     if ((p >= sStart) && (p < sEnd)) {
633       /* Some leaves share a root with other leaves on different processes */
634       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
635       if (neighbors) {
636         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
637         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
638         for (n = 0; n < neighbors; ++n) {
639           const PetscInt remoteRank = nrank[noff+n];
640 
641           if (remoteRank == rank) continue;
642           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
643         }
644       }
645     }
646     /* Roots are shared with leaves */
647     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
648     if (!neighbors) continue;
649     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
650     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
651     for (n = 0; n < neighbors; ++n) {
652       const PetscInt remoteRank = rrank[noff+n];
653 
654       if (remoteRank == rank) continue;
655       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
656     }
657   }
658   ierr = PetscFree(adj);CHKERRQ(ierr);
659   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
660   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
661   /* Add additional overlap levels */
662   for (l = 1; l < levels; l++) {
663     /* Propagate point donations over SF to capture remote connections */
664     ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
665     /* Add next level of point donations to the label */
666     ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
667   }
668   /* We require the closure in the overlap */
669   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
670   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
671   if (useCone || !useClosure) {
672     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
673   }
674   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
675   if (flg) {
676     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
677   }
678   /* Make global process SF and invert sender to receiver label */
679   {
680     /* Build a global process SF */
681     PetscSFNode *remoteProc;
682     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
683     for (p = 0; p < size; ++p) {
684       remoteProc[p].rank  = p;
685       remoteProc[p].index = rank;
686     }
687     ierr = PetscSFCreate(comm, &sfProc);CHKERRQ(ierr);
688     ierr = PetscObjectSetName((PetscObject) sfProc, "Process SF");CHKERRQ(ierr);
689     ierr = PetscSFSetGraph(sfProc, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
690   }
691   ierr = DMLabelCreate("Overlap label", ovLabel);CHKERRQ(ierr);
692   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, *ovLabel);CHKERRQ(ierr);
693   /* Add owned points, except for shared local points */
694   for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
695   for (l = 0; l < nleaves; ++l) {
696     ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr);
697     ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
698   }
699   /* Clean up */
700   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
701   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
702   PetscFunctionReturn(0);
703 }
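/* Example (illustrative sketch, not part of the original source): building a one-level
   overlap label from the ownership data produced by DMPlexDistributeOwnership();
   userdm and the section/IS names are placeholders. */
#if 0
DMLabel ovLabel;

ierr = DMPlexCreateOverlap(userdm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel);CHKERRQ(ierr);
/* ovLabel now pairs each overlap point with the remote rank that should receive it */
ierr = DMLabelDestroy(&ovLabel);CHKERRQ(ierr);
#endif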
704 
705 /*@C
706   DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution that includes the overlap described by the input SF
707 
708   Collective on DM
709 
710   Input Parameters:
711 + dm          - The DM
712 - overlapSF   - The SF mapping ghost points in overlap to owner points on other processes
713 
714   Output Parameter:
715 . migrationSF - An SF that maps original points in old locations to points in new locations
716 
717   Level: developer
718 
719 .seealso: DMPlexCreateOverlap(), DMPlexDistribute()
720 @*/
721 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
722 {
723   MPI_Comm           comm;
724   PetscMPIInt        rank, size;
725   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
726   PetscInt          *pointDepths, *remoteDepths, *ilocal;
727   PetscInt          *depthRecv, *depthShift, *depthIdx;
728   PetscSFNode       *iremote;
729   PetscSF            pointSF;
730   const PetscInt    *sharedLocal;
731   const PetscSFNode *overlapRemote, *sharedRemote;
732   PetscErrorCode     ierr;
733 
734   PetscFunctionBegin;
735   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
736   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
737   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
738   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
739   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
740 
741   /* Before building the migration SF we need to know the new stratum offsets */
742   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
743   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
744   for (d=0; d<dim+1; d++) {
745     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
746     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
747   }
748   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
749   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
750   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
751 
752   /* Count received points in each stratum and compute the internal strata shift */
753   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
754   for (d=0; d<dim+1; d++) depthRecv[d]=0;
755   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
756   depthShift[dim] = 0;
757   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
758   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
759   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
760   for (d=0; d<dim+1; d++) {
761     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
762     depthIdx[d] = pStart + depthShift[d];
763   }
764 
765   /* Build an SF that describes the full overlap migration */
766   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
767   newLeaves = pEnd - pStart + nleaves;
768   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
769   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
770   /* First map local points to themselves */
771   for (d=0; d<dim+1; d++) {
772     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
773     for (p=pStart; p<pEnd; p++) {
774       point = p + depthShift[d];
775       ilocal[point] = point;
776       iremote[point].index = p;
777       iremote[point].rank = rank;
778       depthIdx[d]++;
779     }
780   }
781 
782   /* Add in the remote roots for currently shared points */
783   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
784   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
785   for (d=0; d<dim+1; d++) {
786     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
787     for (p=0; p<numSharedPoints; p++) {
788       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
789         point = sharedLocal[p] + depthShift[d];
790         iremote[point].index = sharedRemote[p].index;
791         iremote[point].rank = sharedRemote[p].rank;
792       }
793     }
794   }
795 
796   /* Now add the incoming overlap points */
797   for (p=0; p<nleaves; p++) {
798     point = depthIdx[remoteDepths[p]];
799     ilocal[point] = point;
800     iremote[point].index = overlapRemote[p].index;
801     iremote[point].rank = overlapRemote[p].rank;
802     depthIdx[remoteDepths[p]]++;
803   }
804   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
805 
806   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
807   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
808   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
809   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
810   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
811 
812   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
813   PetscFunctionReturn(0);
814 }
815 
816 /*@
817   DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.
818 
819   Input Parameters:
820 + dm          - The DM
821 - sf          - A star forest with non-ordered leaves, usually defining a DM point migration
822 
823   Output Parameter:
824 . migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
825 
826   Level: developer
827 
828 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
829 @*/
830 PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
831 {
832   MPI_Comm           comm;
833   PetscMPIInt        rank, size;
834   PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
835   PetscInt          *pointDepths, *remoteDepths, *ilocal;
836   PetscInt          *depthRecv, *depthShift, *depthIdx;
837   PetscInt           hybEnd[4];
838   const PetscSFNode *iremote;
839   PetscErrorCode     ierr;
840 
841   PetscFunctionBegin;
842   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
843   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
844   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
845   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
846   ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
847   ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
848   if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
849 
850   /* Before building the migration SF we need to know the new stratum offsets */
851   ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
852   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
853   ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[depth-1],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
854   for (d = 0; d < depth+1; ++d) {
855     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
856     for (p = pStart; p < pEnd; ++p) {
857       if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
858         pointDepths[p] = 2 * d;
859       } else {
860         pointDepths[p] = 2 * d + 1;
861       }
862     }
863   }
864   for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
865   ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
866   ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
867   /* Count received points in each stratum and compute the internal strata shift */
868   ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
869   for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
870   for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
871   depthShift[2*depth+1] = 0;
872   for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
873   for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
874   depthShift[0] += depthRecv[1];
875   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
876   for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
877   for (d = 2 * depth-1; d > 2; --d) {
878     PetscInt e;
879 
880     for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
881   }
882   for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
883   /* Derive a new local permutation based on stratified indices */
884   ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
885   for (p = 0; p < nleaves; ++p) {
886     const PetscInt dep = remoteDepths[p];
887 
888     ilocal[p] = depthShift[dep] + depthIdx[dep];
889     depthIdx[dep]++;
890   }
891   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
892   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
893   ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
894   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
895   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
896   PetscFunctionReturn(0);
897 }
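/* Example (illustrative sketch, not part of the original source): replacing a raw point
   migration SF by its stratified equivalent; userdm and sfMigration are placeholders. */
#if 0
PetscSF sfStratified;

ierr = DMPlexStratifyMigrationSF(userdm, sfMigration, &sfStratified);CHKERRQ(ierr);
ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
sfMigration = sfStratified;  /* use the stratified SF for the actual data migration */
#endif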
898 
899 /*@
900   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
901 
902   Collective on DM
903 
904   Input Parameters:
905 + dm - The DMPlex object
906 . pointSF - The PetscSF describing the communication pattern
907 . originalSection - The PetscSection for existing data layout
908 - originalVec - The existing data
909 
910   Output Parameters:
911 + newSection - The PetscSF describing the new data layout
912 - newVec - The new data
913 
914   Level: developer
915 
916 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
917 @*/
918 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
919 {
920   PetscSF        fieldSF;
921   PetscInt      *remoteOffsets, fieldSize;
922   PetscScalar   *originalValues, *newValues;
923   PetscErrorCode ierr;
924 
925   PetscFunctionBegin;
926   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
927   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
928 
929   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
930   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
931   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
932 
933   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
934   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
935   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
936   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
937   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
938   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
939   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
940   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
941   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
942   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
943   PetscFunctionReturn(0);
944 }
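/* Example (illustrative sketch, not part of the original source): redistributing a Vec
   laid out by origSection according to a migration SF; userdm, migrationSF, origSection
   and origVec are placeholder names. */
#if 0
PetscSection newSection;
Vec          newVec;

ierr = PetscSectionCreate(PETSC_COMM_SELF, &newSection);CHKERRQ(ierr);
ierr = VecCreate(PETSC_COMM_SELF, &newVec);CHKERRQ(ierr);
ierr = DMPlexDistributeField(userdm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
/* newSection and newVec now hold the redistributed layout and values */
#endif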
945 
946 /*@
947   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
948 
949   Collective on DM
950 
951   Input Parameters:
952 + dm - The DMPlex object
953 . pointSF - The PetscSF describing the communication pattern
954 . originalSection - The PetscSection for existing data layout
955 - originalIS - The existing data
956 
957   Output Parameters:
958 + newSection - The PetscSection describing the new data layout
959 - newIS - The new data
960 
961   Level: developer
962 
963 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
964 @*/
965 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
966 {
967   PetscSF         fieldSF;
968   PetscInt       *newValues, *remoteOffsets, fieldSize;
969   const PetscInt *originalValues;
970   PetscErrorCode  ierr;
971 
972   PetscFunctionBegin;
973   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
974   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
975 
976   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
977   ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);
978 
979   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
980   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
981   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
982   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
983   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
984   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
985   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
986   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
987   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
988   PetscFunctionReturn(0);
989 }
990 
991 /*@
992   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
993 
994   Collective on DM
995 
996   Input Parameters:
997 + dm - The DMPlex object
998 . pointSF - The PetscSF describing the communication pattern
999 . originalSection - The PetscSection for existing data layout
1000 . datatype - The type of data
1001 - originalData - The existing data
1002 
1003   Output Parameters:
1004 + newSection - The PetscSection describing the new data layout
1005 - newData - The new data
1006 
1007   Level: developer
1008 
1009 .seealso: DMPlexDistribute(), DMPlexDistributeField()
1010 @*/
1011 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
1012 {
1013   PetscSF        fieldSF;
1014   PetscInt      *remoteOffsets, fieldSize;
1015   PetscMPIInt    dataSize;
1016   PetscErrorCode ierr;
1017 
1018   PetscFunctionBegin;
1019   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1020   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
1021 
1022   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
1023   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
1024   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
1025 
1026   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
1027   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1028   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1029   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
1030   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
1031   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
1032   PetscFunctionReturn(0);
1033 }
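/* Example (illustrative sketch, not part of the original source): redistributing raw
   integer data with an explicit MPI datatype; userdm, migrationSF, origSection and
   origData are placeholder names. */
#if 0
PetscSection newSection;
PetscInt    *newData;

ierr = PetscSectionCreate(PETSC_COMM_SELF, &newSection);CHKERRQ(ierr);
ierr = DMPlexDistributeData(userdm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
/* newData is allocated by the call and must eventually be freed with PetscFree() */
ierr = PetscFree(newData);CHKERRQ(ierr);
#endif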
1034 
1035 static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1036 {
1037   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1038   MPI_Comm               comm;
1039   PetscSF                coneSF;
1040   PetscSection           originalConeSection, newConeSection;
1041   PetscInt              *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
1042   PetscBool              flg;
1043   PetscErrorCode         ierr;
1044 
1045   PetscFunctionBegin;
1046   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1047   PetscValidPointer(dmParallel,5);
1048   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1049 
1050   /* Distribute cone section */
1051   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1052   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
1053   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
1054   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
1055   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
1056   {
1057     PetscInt pStart, pEnd, p;
1058 
1059     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
1060     for (p = pStart; p < pEnd; ++p) {
1061       PetscInt coneSize;
1062       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
1063       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
1064     }
1065   }
1066   /* Communicate and renumber cones */
1067   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
1068   ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
1069   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1070   if (original) {
1071     PetscInt numCones;
1072 
1073     ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr); ierr = PetscMalloc1(numCones,&globCones);CHKERRQ(ierr);
1074     ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr);
1075   }
1076   else {
1077     globCones = cones;
1078   }
1079   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
1080   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1081   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, globCones, newCones);CHKERRQ(ierr);
1082   if (original) {
1083     ierr = PetscFree(globCones);CHKERRQ(ierr);
1084   }
1085   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
1086   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
1087 #if PETSC_USE_DEBUG
1088   {
1089     PetscInt  p;
1090     PetscBool valid = PETSC_TRUE;
1091     for (p = 0; p < newConesSize; ++p) {
1092       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1093     }
1094     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1095   }
1096 #endif
1097   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
1098   if (flg) {
1099     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
1100     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1101     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
1102     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1103     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
1104   }
1105   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
1106   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
1107   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1108   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
1109   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
1110   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
1111   /* Create supports and stratify DMPlex */
1112   {
1113     PetscInt pStart, pEnd;
1114 
1115     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
1116     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
1117   }
1118   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
1119   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
1120   {
1121     PetscBool useCone, useClosure, useAnchors;
1122 
1123     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1124     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1125     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1126     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1127     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1128     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1129   }
1130   PetscFunctionReturn(0);
1131 }
1132 
1133 static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
1134 {
1135   MPI_Comm         comm;
1136   PetscSection     originalCoordSection, newCoordSection;
1137   Vec              originalCoordinates, newCoordinates;
1138   PetscInt         bs;
1139   PetscBool        isper;
1140   const char      *name;
1141   const PetscReal *maxCell, *L;
1142   const DMBoundaryType *bd;
1143   PetscErrorCode   ierr;
1144 
1145   PetscFunctionBegin;
1146   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1147   PetscValidPointer(dmParallel, 3);
1148 
1149   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1150   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1151   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1152   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1153   if (originalCoordinates) {
1154     ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1155     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1156     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1157 
1158     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1159     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1160     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1161     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1162     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1163   }
1164   ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1165   ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1166   PetscFunctionReturn(0);
1167 }
1168 
1169 /* Here we are assuming that process 0 always has everything */
1170 static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1171 {
1172   DM_Plex         *mesh = (DM_Plex*) dm->data;
1173   MPI_Comm         comm;
1174   DMLabel          depthLabel;
1175   PetscMPIInt      rank;
1176   PetscInt         depth, d, numLabels, numLocalLabels, l;
1177   PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1178   PetscObjectState depthState = -1;
1179   PetscErrorCode   ierr;
1180 
1181   PetscFunctionBegin;
1182   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1183   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1184   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1185   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1186   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1187 
1188   /* If the user has changed the depth label, communicate it instead */
1189   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1190   ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1191   if (depthLabel) {ierr = DMLabelGetState(depthLabel, &depthState);CHKERRQ(ierr);}
1192   lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1193   ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1194   if (sendDepth) {
1195     ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1196     ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1197   }
1198   /* Everyone must have either the same number of labels, or none */
1199   ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1200   numLabels = numLocalLabels;
1201   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1202   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1203   for (l = numLabels-1; l >= 0; --l) {
1204     DMLabel     label = NULL, labelNew = NULL;
1205     PetscBool   isdepth;
1206 
1207     if (hasLabels) {
1208       ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1209       /* Skip "depth" because it is recreated */
1210       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1211     }
1212     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1213     if (isdepth && !sendDepth) continue;
1214     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1215     if (isdepth) {
1216       /* Put in any missing strata which can occur if users are managing the depth label themselves */
1217       PetscInt gdepth;
1218 
1219       ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1220       if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1221       for (d = 0; d <= gdepth; ++d) {
1222         PetscBool has;
1223 
1224         ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1225         if (!has) ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);
1226       }
1227     }
1228     ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1229   }
1230   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1231   PetscFunctionReturn(0);
1232 }
1233 
1234 static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1235 {
1236   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1237   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1238   PetscBool      *isHybrid, *isHybridParallel;
1239   PetscInt        dim, depth, d;
1240   PetscInt        pStart, pEnd, pStartP, pEndP;
1241   PetscErrorCode  ierr;
1242 
1243   PetscFunctionBegin;
1244   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1245   PetscValidPointer(dmParallel, 4);
1246 
1247   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1248   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1249   ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1250   ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1251   ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
1252   for (d = 0; d <= depth; d++) {
1253     PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1254 
1255     if (hybridMax >= 0) {
1256       PetscInt sStart, sEnd, p;
1257 
1258       ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1259       for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1260     }
1261   }
1262   ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1263   ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1264   for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1265   for (d = 0; d <= depth; d++) {
1266     PetscInt sStart, sEnd, p, dd;
1267 
1268     ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1269     dd = (depth == 1 && d == 1) ? dim : d;
1270     for (p = sStart; p < sEnd; p++) {
1271       if (isHybridParallel[p-pStartP]) {
1272         pmesh->hybridPointMax[dd] = p;
1273         break;
1274       }
1275     }
1276   }
1277   ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1278   PetscFunctionReturn(0);
1279 }
1280 
1281 static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1282 {
1283   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1284   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1285   MPI_Comm        comm;
1286   DM              refTree;
1287   PetscSection    origParentSection, newParentSection;
1288   PetscInt        *origParents, *origChildIDs;
1289   PetscBool       flg;
1290   PetscErrorCode  ierr;
1291 
1292   PetscFunctionBegin;
1293   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1294   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1295   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1296 
1297   /* Set up tree */
1298   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1299   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1300   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1301   if (origParentSection) {
1302     PetscInt        pStart, pEnd;
1303     PetscInt        *newParents, *newChildIDs, *globParents;
1304     PetscInt        *remoteOffsetsParents, newParentSize;
1305     PetscSF         parentSF;
1306 
1307     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1308     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1309     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1310     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1311     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1312     ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1313     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1314     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1315     if (original) {
1316       PetscInt numParents;
1317 
1318       ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1319       ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1320       ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1321     }
1322     else {
1323       globParents = origParents;
1324     }
1325     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1326     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1327     if (original) {
1328       ierr = PetscFree(globParents);CHKERRQ(ierr);
1329     }
1330     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1331     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1332     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1333 #if defined(PETSC_USE_DEBUG)
1334     {
1335       PetscInt  p;
1336       PetscBool valid = PETSC_TRUE;
1337       for (p = 0; p < newParentSize; ++p) {
1338         if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
1339       }
1340       if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1341     }
1342 #endif
1343     ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1344     if (flg) {
1345       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1346       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1347       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1348       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1349       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1350     }
1351     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1352     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1353     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1354     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1355   }
1356   pmesh->useAnchors = mesh->useAnchors;
1357   PetscFunctionReturn(0);
1358 }
1359 
1360 PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1361 {
1362   PetscMPIInt            rank, size;
1363   MPI_Comm               comm;
1364   PetscErrorCode         ierr;
1365 
1366   PetscFunctionBegin;
1367   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1368   PetscValidPointer(dmParallel,3);
1369 
1370   /* Create point SF for parallel mesh */
1371   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1372   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1373   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1374   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1375   {
1376     const PetscInt *leaves;
1377     PetscSFNode    *remotePoints, *rowners, *lowners;
1378     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1379     PetscInt        pStart, pEnd;
1380 
1381     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1382     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1383     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1384     for (p=0; p<numRoots; p++) {
1385       rowners[p].rank  = -1;
1386       rowners[p].index = -1;
1387     }
1388     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1389     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1390     for (p = 0; p < numLeaves; ++p) {
1391       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1392         lowners[p].rank  = rank;
1393         lowners[p].index = leaves ? leaves[p] : p;
1394       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1395         lowners[p].rank  = -2;
1396         lowners[p].index = -2;
1397       }
1398     }
1399     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC ignores them */
1400       rowners[p].rank  = -3;
1401       rowners[p].index = -3;
1402     }
1403     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1404     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1405     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1406     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1407     for (p = 0; p < numLeaves; ++p) {
1408       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1409       if (lowners[p].rank != rank) ++numGhostPoints;
1410     }
1411     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1412     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1413     for (p = 0, gp = 0; p < numLeaves; ++p) {
1414       if (lowners[p].rank != rank) {
1415         ghostPoints[gp]        = leaves ? leaves[p] : p;
1416         remotePoints[gp].rank  = lowners[p].rank;
1417         remotePoints[gp].index = lowners[p].index;
1418         ++gp;
1419       }
1420     }
1421     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1422     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1423     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1424   }
1425   {
1426     PetscBool useCone, useClosure, useAnchors;
1427 
1428     ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
1429     ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
1430     ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
1431     ierr = DMPlexSetAdjacencyUseCone(dmParallel, useCone);CHKERRQ(ierr);
1432     ierr = DMPlexSetAdjacencyUseClosure(dmParallel, useClosure);CHKERRQ(ierr);
1433     ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr);
1434   }
1435   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1436   PetscFunctionReturn(0);
1437 }
1438 
1439 /*@C
1440   DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1441 
1442   Input Parameters:
1443 + dm          - The source DMPlex object
1444 . migrationSF - The star forest that describes the parallel point remapping
1445 - ownership   - Flag causing a vote to determine point ownership
1446 
1447   Output Parameter:
1448 . pointSF     - The star forest describing the point overlap in the remapped DM
1449 
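  The sketch below illustrates how this routine is used inside DMPlexDistribute(): the migration SF that moved
  the points becomes the point SF of the redistributed mesh (variable names are illustrative):
$    ierr = DMPlexCreatePointSF(dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
$    ierr = DMSetPointSF(dmParallel, sfPoint);CHKERRQ(ierr);
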
1450   Level: developer
1451 
1452 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1453 @*/
1454 PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1455 {
1456   PetscMPIInt        rank;
1457   PetscInt           p, nroots, nleaves, idx, npointLeaves;
1458   PetscInt          *pointLocal;
1459   const PetscInt    *leaves;
1460   const PetscSFNode *roots;
1461   PetscSFNode       *rootNodes, *leafNodes, *pointRemote;
1462   PetscErrorCode     ierr;
1463 
1464   PetscFunctionBegin;
1465   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1466   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
1467 
1468   ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr);
1469   ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes);CHKERRQ(ierr);
1470   if (ownership) {
1471     /* Point ownership vote: the process with the highest rank owns shared points */
1472     for (p = 0; p < nleaves; ++p) {
1473       /* Either put in a bid or we know we own it */
1474       leafNodes[p].rank  = rank;
1475       leafNodes[p].index = p;
1476     }
1477     for (p = 0; p < nroots; p++) {
1478       /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1479       rootNodes[p].rank  = -3;
1480       rootNodes[p].index = -3;
1481     }
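    /* The MPI_MAXLOC reduction over (rank, index) pairs keeps, for every root, the bid with the largest rank
       together with its local index, implementing the highest-rank-wins vote */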
1482     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1483     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, leafNodes, rootNodes, MPI_MAXLOC);CHKERRQ(ierr);
1484   } else {
1485     for (p = 0; p < nroots; p++) {
1486       rootNodes[p].index = -1;
1487       rootNodes[p].rank = rank;
1488     }
1489     for (p = 0; p < nleaves; p++) {
1490       /* Write new local id into old location */
1491       if (roots[p].rank == rank) {
1492         rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1493       }
1494     }
1495   }
1496   ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1497   ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rootNodes, leafNodes);CHKERRQ(ierr);
1498 
1499   for (npointLeaves = 0, p = 0; p < nleaves; p++) {if (leafNodes[p].rank != rank) npointLeaves++;}
1500   ierr = PetscMalloc1(npointLeaves, &pointLocal);CHKERRQ(ierr);
1501   ierr = PetscMalloc1(npointLeaves, &pointRemote);CHKERRQ(ierr);
1502   for (idx = 0, p = 0; p < nleaves; p++) {
1503     if (leafNodes[p].rank != rank) {
1504       pointLocal[idx] = p;
1505       pointRemote[idx] = leafNodes[p];
1506       idx++;
1507     }
1508   }
1509   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr);
1510   ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr);
1511   ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1512   ierr = PetscFree2(rootNodes, leafNodes);CHKERRQ(ierr);
1513   PetscFunctionReturn(0);
1514 }
1515 
1516 /*@C
1517   DMPlexMigrate - Migrates internal DM data over the supplied star forest
1518 
1519   Input Parameters:
1520 + dm       - The source DMPlex object
1521 - sf       - The star forest communication context describing the migration pattern
1522 
1523   Output Parameter:
1524 . targetDM - The target DMPlex object
1525 
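  A minimal sketch of typical use (assuming comm is the communicator of dm and sf describes where every local
  point should end up; dmTarget is illustrative):
$    ierr = DMPlexCreate(comm, &dmTarget);CHKERRQ(ierr);
$    ierr = DMPlexMigrate(dm, sf, dmTarget);CHKERRQ(ierr);
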
1526   Level: intermediate
1527 
1528 .seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1529 @*/
1530 PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1531 {
1532   MPI_Comm               comm;
1533   PetscInt               dim, nroots;
1534   PetscSF                sfPoint;
1535   ISLocalToGlobalMapping ltogMigration;
1536   ISLocalToGlobalMapping ltogOriginal = NULL;
1537   PetscBool              flg;
1538   PetscErrorCode         ierr;
1539 
1540   PetscFunctionBegin;
1541   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1542   ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1543   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
1544   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1545   ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr);
1546 
1547   /* Check for a one-to-all distribution pattern */
1548   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
1549   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
1550   if (nroots >= 0) {
1551     IS                     isOriginal;
1552     PetscInt               n, size, nleaves;
1553     PetscInt              *numbering_orig, *numbering_new;
1554     /* Get the original point numbering */
1555     ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr);
1556     ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr);
1557     ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr);
1558     ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1559     /* Convert to positive global numbers */
1560     for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1561     /* Derive the new local-to-global mapping from the old one */
1562     ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1563     ierr = PetscMalloc1(nleaves, &numbering_new);CHKERRQ(ierr);
1564     ierr = PetscSFBcastBegin(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1565     ierr = PetscSFBcastEnd(sf, MPIU_INT, (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr);
1566     ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr);
1567     ierr = ISLocalToGlobalMappingRestoreBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr);
1568     ierr = ISDestroy(&isOriginal);CHKERRQ(ierr);
1569   } else {
1570     /* One-to-all distribution pattern: We can derive LToG from SF */
1571     ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr);
1572   }
1573   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1574   if (flg) {
1575     ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr);
1576     ierr = ISLocalToGlobalMappingView(ltogMigration, NULL);CHKERRQ(ierr);
1577   }
1578   /* Migrate DM data to target DM */
1579   ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1580   ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr);
1581   ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr);
1582   ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr);
1583   ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr);
1584   ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr);
1585   ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr);
1586   ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0);CHKERRQ(ierr);
1587   PetscFunctionReturn(0);
1588 }
1589 
1590 /*@C
1591   DMPlexDistribute - Distributes the mesh and any associated sections.
1592 
1593   Collective on dm
1594 
1595   Input Parameters:
1596 + dm      - The original DMPlex object
1597 - overlap - The overlap of partitions, 0 is the default
1598 
1599   Output Parameters:
1600 + sf         - The PetscSF used for point distribution
1601 - dmParallel - The distributed DMPlex object, or NULL
1602 
1603   Note: If the mesh was not distributed, dmParallel is set to NULL.
1604 
1605   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1606   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1607   representation on the mesh.
1608 
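  A typical call sequence is sketched below (dmDist is illustrative and standard CHKERRQ error checking is
  assumed); the serial mesh is replaced by the distributed one only when distribution actually took place:
$    DM dmDist = NULL;
$    ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }
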
1609   Level: intermediate
1610 
1611 .keywords: mesh, elements
1612 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1613 @*/
1614 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1615 {
1616   MPI_Comm               comm;
1617   PetscPartitioner       partitioner;
1618   IS                     cellPart;
1619   PetscSection           cellPartSection;
1620   DM                     dmCoord;
1621   DMLabel                lblPartition, lblMigration;
1622   PetscSF                sfProcess, sfMigration, sfStratified, sfPoint;
1623   PetscBool              flg;
1624   PetscMPIInt            rank, size, p;
1625   PetscErrorCode         ierr;
1626 
1627   PetscFunctionBegin;
1628   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1629   if (sf) PetscValidPointer(sf,3);
1630   PetscValidPointer(dmParallel,4);
1631 
1632   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1633   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1634   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1635   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1636 
1637   if (sf) *sf = NULL;
1638   *dmParallel = NULL;
1639   if (size == 1) {
1640     ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1641     PetscFunctionReturn(0);
1642   }
1643 
1644   /* Create cell partition */
1645   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1646   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1647   ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr);
1648   ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1649   {
1650     /* Convert partition to DMLabel */
1651     PetscInt proc, pStart, pEnd, npoints, poffset, point;
1652     const PetscInt *points;
1653     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1654     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1655     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1656     for (proc = pStart; proc < pEnd; proc++) {
1657       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1658       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1659       for (point = poffset; point < poffset+npoints; point++) {
1660         ierr = DMLabelSetValue(lblPartition, points[point], proc);CHKERRQ(ierr);
1661       }
1662     }
1663     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1664   }
1665   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1666   {
1667     /* Build a global process SF */
1668     PetscSFNode *remoteProc;
1669     ierr = PetscMalloc1(size, &remoteProc);CHKERRQ(ierr);
1670     for (p = 0; p < size; ++p) {
1671       remoteProc[p].rank  = p;
1672       remoteProc[p].index = rank;
1673     }
1674     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1675     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1676     ierr = PetscSFSetGraph(sfProcess, size, size, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1677   }
1678   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1679   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1680   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1681   /* Stratify the SF in case we are migrating an already parallel plex */
1682   ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1683   ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1684   sfMigration = sfStratified;
1685   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1686   ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1687   if (flg) {
1688     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1689     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1690   }
1691 
1692   /* Create non-overlapping parallel DM and migrate internal data */
1693   ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1694   ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1695   ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1696 
1697   /* Build the point SF without overlap */
1698   ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1699   ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1700   ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1701   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1702   if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
1703 
1704   if (overlap > 0) {
1705     DM                 dmOverlap;
1706     PetscInt           nroots, nleaves;
1707     PetscSFNode       *newRemote;
1708     const PetscSFNode *oldRemote;
1709     PetscSF            sfOverlap, sfOverlapPoint;
1710     /* Add the partition overlap to the distributed DM */
1711     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1712     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1713     *dmParallel = dmOverlap;
1714     if (flg) {
1715       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1716       ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1717     }
1718 
1719     /* Re-map the migration SF to establish the full migration pattern */
1720     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1721     ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1722     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1723     ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1724     ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1725     ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1726     ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1727     ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1728     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1729     sfMigration = sfOverlapPoint;
1730   }
1731   /* Cleanup Partition */
1732   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1733   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1734   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1735   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1736   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1737   /* Copy BC */
1738   ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1739   /* Create sfNatural */
1740   if (dm->useNatural) {
1741     PetscSection section;
1742 
1743     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1744     ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1745   }
1746   /* Cleanup */
1747   if (sf) {*sf = sfMigration;}
1748   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1749   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1750   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1751   PetscFunctionReturn(0);
1752 }
1753 
1754 /*@C
1755   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1756 
1757   Collective on dm
1758 
1759   Input Parameters:
1760 + dm      - The non-overlapping distributed DMPlex object
1761 - overlap - The overlap of partitions, 0 is the default
1762 
1763   Output Parameters:
1764 + sf        - The PetscSF used for point distribution
1765 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1766 
1767   Note: If the mesh was not distributed, dmOverlap is set to NULL.
1768 
1769   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1770   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1771   representation on the mesh.
1772 
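  A minimal sketch of adding one level of overlap to an already distributed mesh (dmOver is illustrative and
  standard CHKERRQ error checking is assumed):
$    DM dmOver = NULL;
$    ierr = DMPlexDistributeOverlap(dm, 1, NULL, &dmOver);CHKERRQ(ierr);
$    if (dmOver) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmOver;
$    }
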
1773   Level: intermediate
1774 
1775 .keywords: mesh, elements
1776 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1777 @*/
1778 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1779 {
1780   MPI_Comm               comm;
1781   PetscMPIInt            size, rank;
1782   PetscSection           rootSection, leafSection;
1783   IS                     rootrank, leafrank;
1784   DM                     dmCoord;
1785   DMLabel                lblOverlap;
1786   PetscSF                sfOverlap, sfStratified, sfPoint;
1787   PetscErrorCode         ierr;
1788 
1789   PetscFunctionBegin;
1790   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1791   if (sf) PetscValidPointer(sf, 3);
1792   PetscValidPointer(dmOverlap, 4);
1793 
1794   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1795   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1796   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1797   if (size == 1) {*dmOverlap = NULL; PetscFunctionReturn(0);}
1798   ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1799 
1800   /* Compute point overlap with neighbouring processes on the distributed DM */
1801   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1802   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1803   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1804   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1805   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1806   /* Convert overlap label to stratified migration SF */
1807   ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1808   ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1809   ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1810   sfOverlap = sfStratified;
1811   ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1812   ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1813 
1814   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1815   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1816   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1817   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1818   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1819 
1820   /* Build the overlapping DM */
1821   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1822   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1823   ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1824   /* Build the new point SF */
1825   ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1826   ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1827   ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1828   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1829   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1830   /* Cleanup overlap partition */
1831   ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1832   if (sf) *sf = sfOverlap;
1833   else    {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1834   ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1835   PetscFunctionReturn(0);
1836 }
1837 
1838 /*@C
1839   DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1840   root process of the original's communicator.
1841 
1842   Input Parameter:
1843 . dm - the original DMPlex object
1844 
1845   Output Parameter:
1846 . gatherMesh - the gathered DM object, or NULL
1847 
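  A minimal usage sketch (on a single process gatherMesh remains NULL; the caller destroys the returned DM):
$    DM gatherMesh = NULL;
$    ierr = DMPlexGetGatherDM(dm, &gatherMesh);CHKERRQ(ierr);
$    if (gatherMesh) {ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);}
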
1848   Level: intermediate
1849 
1850 .keywords: mesh
1851 .seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1852 @*/
1853 PetscErrorCode DMPlexGetGatherDM(DM dm, DM * gatherMesh)
1854 {
1855   MPI_Comm       comm;
1856   PetscMPIInt    size;
1857   PetscPartitioner oldPart, gatherPart;
1858   PetscErrorCode ierr;
1859 
1860   PetscFunctionBegin;
1861   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1862   comm = PetscObjectComm((PetscObject)dm);
1863   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1864   *gatherMesh = NULL;
1865   if (size == 1) PetscFunctionReturn(0);
1866   ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1867   ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1868   ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1869   ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1870   ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1871   ierr = DMPlexDistribute(dm,0,NULL,gatherMesh);CHKERRQ(ierr);
1872   ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1873   ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1874   ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1875   PetscFunctionReturn(0);
1876 }
1877 
1878 /*@C
1879   DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1880 
1881   Input Parameter:
1882 . dm - the original DMPlex object
1883 
1884   Output Parameter:
1885 . redundantMesh - the redundant DM object, or NULL
1886 
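  A minimal usage sketch (on a single process the original DM is returned with an extra reference, so the
  caller always destroys redundantMesh when done):
$    DM redundantMesh = NULL;
$    ierr = DMPlexGetRedundantDM(dm, &redundantMesh);CHKERRQ(ierr);
$    ierr = DMDestroy(&redundantMesh);CHKERRQ(ierr);
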
1887   Level: intermediate
1888 
1889 .keywords: mesh
1890 .seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1891 @*/
1892 PetscErrorCode DMPlexGetRedundantDM(DM dm, DM * redundantMesh)
1893 {
1894   MPI_Comm       comm;
1895   PetscMPIInt    size, rank;
1896   PetscInt       pStart, pEnd, p;
1897   PetscInt       numPoints = -1;
1898   PetscSF        migrationSF, sfPoint;
1899   DM             gatherDM, dmCoord;
1900   PetscSFNode    *points;
1901   PetscErrorCode ierr;
1902 
1903   PetscFunctionBegin;
1904   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1905   comm = PetscObjectComm((PetscObject)dm);
1906   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1907   *redundantMesh = NULL;
1908   if (size == 1) {
1909     ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1910     *redundantMesh = dm;
1911     PetscFunctionReturn(0);
1912   }
1913   ierr = DMPlexGetGatherDM(dm,&gatherDM);CHKERRQ(ierr);
1914   if (!gatherDM) PetscFunctionReturn(0);
1915   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1916   ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1917   numPoints = pEnd - pStart;
1918   ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1919   ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1920   ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1921   for (p = 0; p < numPoints; p++) {
1922     points[p].index = p;
1923     points[p].rank  = 0;
1924   }
1925   ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1926   ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1927   ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1928   ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1929   ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1930   ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1931   ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1932   if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1933   ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1934   ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1935   ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1936   PetscFunctionReturn(0);
1937 }
1938