xref: /petsc/src/dm/impls/plex/plexdistribute.c (revision 0457e1093cde4ec74f84e6bf0913aac613a9552a)
1 #include <petsc-private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 
3 #undef __FUNCT__
4 #define __FUNCT__ "DMPlexSetAdjacencyUseCone"
5 /*@
6   DMPlexSetAdjacencyUseCone - Define adjacency in the mesh using either the cone or the support first
7 
8   Input Parameters:
9 + dm      - The DM object
10 - useCone - Flag to use the cone first
11 
12   Level: intermediate
13 
14   Notes:
15 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
16 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
17 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
18 
19 .seealso: DMPlexGetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
20 @*/
21 PetscErrorCode DMPlexSetAdjacencyUseCone(DM dm, PetscBool useCone)
22 {
23   DM_Plex *mesh = (DM_Plex *) dm->data;
24 
25   PetscFunctionBegin;
26   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27   mesh->useCone = useCone;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "DMPlexGetAdjacencyUseCone"
33 /*@
34   DMPlexGetAdjacencyUseCone - Query whether adjacency in the mesh uses the cone or the support first
35 
36   Input Parameter:
37 . dm      - The DM object
38 
39   Output Parameter:
40 . useCone - Flag to use the cone first
41 
42   Level: intermediate
43 
44   Notes:
45 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
46 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
47 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
48 
49 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexGetAdjacencyUseClosure(), DMPlexDistribute(), DMPlexPreallocateOperator()
50 @*/
51 PetscErrorCode DMPlexGetAdjacencyUseCone(DM dm, PetscBool *useCone)
52 {
53   DM_Plex *mesh = (DM_Plex *) dm->data;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
57   PetscValidIntPointer(useCone, 2);
58   *useCone = mesh->useCone;
59   PetscFunctionReturn(0);
60 }
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMPlexSetAdjacencyUseClosure"
64 /*@
65   DMPlexSetAdjacencyUseClosure - Define adjacency in the mesh using the transitive closure
66 
67   Input Parameters:
68 + dm      - The DM object
69 - useClosure - Flag to use the closure
70 
71   Level: intermediate
72 
73   Notes:
74 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
75 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
76 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
77 
78 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
79 @*/
80 PetscErrorCode DMPlexSetAdjacencyUseClosure(DM dm, PetscBool useClosure)
81 {
82   DM_Plex *mesh = (DM_Plex *) dm->data;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
86   mesh->useClosure = useClosure;
87   PetscFunctionReturn(0);
88 }
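/*
  Usage sketch (assumes a DMPlex "dm" has already been created): select the adjacency
  relation before distribution or matrix preallocation, following the FEM/FVM table in
  the manual pages above.

    FEM-style adjacency, q \in closure(star(p)):
      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_FALSE);CHKERRQ(ierr);
      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_TRUE);CHKERRQ(ierr);
    FVM-style adjacency, q \in support(p+cone(p)):
      ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
      ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);

  The chosen flags are consulted by DMPlexDistribute() and DMPlexPreallocateOperator().
*/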
89 
90 #undef __FUNCT__
91 #define __FUNCT__ "DMPlexGetAdjacencyUseClosure"
92 /*@
93   DMPlexGetAdjacencyUseClosure - Query whether adjacency in the mesh uses the transitive closure
94 
95   Input Parameter:
96 . dm      - The DM object
97 
98   Output Parameter:
99 . useClosure - Flag to use the closure
100 
101   Level: intermediate
102 
103   Notes:
104 $     FEM:   Two points p and q are adjacent if q \in closure(star(p)),   useCone = PETSC_FALSE, useClosure = PETSC_TRUE
105 $     FVM:   Two points p and q are adjacent if q \in support(p+cone(p)), useCone = PETSC_TRUE,  useClosure = PETSC_FALSE
106 $     FVM++: Two points p and q are adjacent if q \in star(closure(p)),   useCone = PETSC_TRUE,  useClosure = PETSC_TRUE
107 
108 .seealso: DMPlexSetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator()
109 @*/
110 PetscErrorCode DMPlexGetAdjacencyUseClosure(DM dm, PetscBool *useClosure)
111 {
112   DM_Plex *mesh = (DM_Plex *) dm->data;
113 
114   PetscFunctionBegin;
115   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
116   PetscValidIntPointer(useClosure, 2);
117   *useClosure = mesh->useClosure;
118   PetscFunctionReturn(0);
119 }
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "DMPlexSetAdjacencyUseAnchors"
123 /*@
124   DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
125 
126   Input Parameters:
127 + dm      - The DM object
128 - useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
129 
130   Level: intermediate
131 
132 .seealso: DMPlexGetAdjacencyUseClosure(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
133 @*/
134 PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
135 {
136   DM_Plex *mesh = (DM_Plex *) dm->data;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
140   mesh->useAnchors = useAnchors;
141   PetscFunctionReturn(0);
142 }
143 
144 #undef __FUNCT__
145 #define __FUNCT__ "DMPlexGetAdjacencyUseAnchors"
146 /*@
147   DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
148 
149   Input Parameter:
150 . dm      - The DM object
151 
152   Output Parameter:
153 . useAnchors - Flag to use the constraints.  If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
154 
155   Level: intermediate
156 
157 .seealso: DMPlexSetAdjacencyUseAnchors(), DMPlexSetAdjacencyUseCone(), DMPlexGetAdjacencyUseCone(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
158 @*/
159 PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
160 {
161   DM_Plex *mesh = (DM_Plex *) dm->data;
162 
163   PetscFunctionBegin;
164   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
165   PetscValidIntPointer(useAnchors, 2);
166   *useAnchors = mesh->useAnchors;
167   PetscFunctionReturn(0);
168 }
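/*
  Usage sketch (assumes anchors have already been registered on "dm" with
  DMPlexSetAnchors(), and "A" is a Mat): make the adjacency, and hence the matrix
  preallocation, anchor-aware so that constrained points are replaced by their anchors.

    ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMCreateMatrix(dm, &A);CHKERRQ(ierr);
*/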
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMPlexGetAdjacency_Cone_Internal"
172 static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
173 {
174   const PetscInt *cone = NULL;
175   PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
176   PetscErrorCode  ierr;
177 
178   PetscFunctionBeginHot;
179   ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
180   ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
181   for (c = 0; c <= coneSize; ++c) {
182     const PetscInt  point   = !c ? p : cone[c-1];
183     const PetscInt *support = NULL;
184     PetscInt        supportSize, s, q;
185 
186     ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
187     ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
188     for (s = 0; s < supportSize; ++s) {
189       for (q = 0; q < numAdj || (adj[numAdj++] = support[s],0); ++q) {
190         if (support[s] == adj[q]) break;
191       }
192       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
193     }
194   }
195   *adjSize = numAdj;
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "DMPlexGetAdjacency_Support_Internal"
201 static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
202 {
203   const PetscInt *support = NULL;
204   PetscInt        numAdj   = 0, maxAdjSize = *adjSize, supportSize, s;
205   PetscErrorCode  ierr;
206 
207   PetscFunctionBeginHot;
208   ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
209   ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
210   for (s = 0; s <= supportSize; ++s) {
211     const PetscInt  point = !s ? p : support[s-1];
212     const PetscInt *cone  = NULL;
213     PetscInt        coneSize, c, q;
214 
215     ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
216     ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
217     for (c = 0; c < coneSize; ++c) {
218       for (q = 0; q < numAdj || (adj[numAdj++] = cone[c],0); ++q) {
219         if (cone[c] == adj[q]) break;
220       }
221       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
222     }
223   }
224   *adjSize = numAdj;
225   PetscFunctionReturn(0);
226 }
227 
228 #undef __FUNCT__
229 #define __FUNCT__ "DMPlexGetAdjacency_Transitive_Internal"
230 static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
231 {
232   PetscInt      *star = NULL;
233   PetscInt       numAdj = 0, maxAdjSize = *adjSize, starSize, s;
234   PetscErrorCode ierr;
235 
236   PetscFunctionBeginHot;
237   ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
238   for (s = 0; s < starSize*2; s += 2) {
239     const PetscInt *closure = NULL;
240     PetscInt        closureSize, c, q;
241 
242     ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
243     for (c = 0; c < closureSize*2; c += 2) {
244       for (q = 0; q < numAdj || (adj[numAdj++] = closure[c],0); ++q) {
245         if (closure[c] == adj[q]) break;
246       }
247       if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
248     }
249     ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
250   }
251   ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
252   *adjSize = numAdj;
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMPlexGetAdjacency_Internal"
258 PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
259 {
260   static PetscInt asiz = 0;
261   PetscInt maxAnchors = 1;
262   PetscInt aStart = -1, aEnd = -1;
263   PetscInt maxAdjSize;
264   PetscSection aSec = NULL;
265   IS aIS = NULL;
266   const PetscInt *anchors;
267   PetscErrorCode  ierr;
268 
269   PetscFunctionBeginHot;
270   if (useAnchors) {
271     ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
272     if (aSec) {
273       ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
274       maxAnchors = PetscMax(1,maxAnchors);
275       ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
276       ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
277     }
278   }
279   if (!*adj) {
280     PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
281 
282     ierr  = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
283     ierr  = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
284     ierr  = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
285     coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
286     supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
287     asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
288     asiz *= maxAnchors;
289     asiz  = PetscMin(asiz,pEnd-pStart);
290     ierr  = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
291   }
292   if (*adjSize < 0) *adjSize = asiz;
293   maxAdjSize = *adjSize;
294   if (useTransitiveClosure) {
295     ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
296   } else if (useCone) {
297     ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
298   } else {
299     ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
300   }
301   if (useAnchors && aSec) {
302     PetscInt origSize = *adjSize;
303     PetscInt numAdj = origSize;
304     PetscInt i = 0, j;
305     PetscInt *orig = *adj;
306 
307     while (i < origSize) {
308       PetscInt p = orig[i];
309       PetscInt aDof = 0;
310 
311       if (p >= aStart && p < aEnd) {
312         ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
313       }
314       if (aDof) {
315         PetscInt aOff;
316         PetscInt s, q;
317 
318         for (j = i + 1; j < numAdj; j++) {
319           orig[j - 1] = orig[j];
320         }
321         origSize--;
322         numAdj--;
323         ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
324         for (s = 0; s < aDof; ++s) {
325           for (q = 0; q < numAdj || (orig[numAdj++] = anchors[aOff+s],0); ++q) {
326             if (anchors[aOff+s] == orig[q]) break;
327           }
328           if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
329         }
330       }
331       else {
332         i++;
333       }
334     }
335     *adjSize = numAdj;
336     ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
337   }
338   PetscFunctionReturn(0);
339 }
340 
341 #undef __FUNCT__
342 #define __FUNCT__ "DMPlexGetAdjacency"
343 /*@
344   DMPlexGetAdjacency - Return all points adjacent to the given point
345 
346   Input Parameters:
347 + dm - The DM object
348 . p  - The point
349 . adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
350 - adj - Either NULL so that the array is allocated, or an existing array with size adjSize
351 
352   Output Parameters:
353 + adjSize - The number of adjacent points
354 - adj - The adjacent points
355 
356   Level: advanced
357 
358   Notes: The user must PetscFree the adj array if it was not passed in.
359 
360 .seealso: DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
361 @*/
362 PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
363 {
364   DM_Plex       *mesh = (DM_Plex *) dm->data;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBeginHot;
368   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
369   PetscValidPointer(adjSize,3);
370   PetscValidPointer(adj,4);
371   ierr = DMPlexGetAdjacency_Internal(dm, p, mesh->useCone, mesh->useClosure, mesh->useAnchors, adjSize, adj);CHKERRQ(ierr);
372   PetscFunctionReturn(0);
373 }
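/*
  Usage sketch (assumes a DMPlex "dm" and a point "p" in its chart): query the points
  adjacent to p and release the array afterwards, as required by the Notes above.

    PetscInt  adjSize = PETSC_DETERMINE;
    PetscInt *adj     = NULL;
    PetscInt  a;

    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      ierr = PetscPrintf(PETSC_COMM_SELF, "point %D is adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);
    }
    ierr = PetscFree(adj);CHKERRQ(ierr);
*/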
374 #undef __FUNCT__
375 #define __FUNCT__ "DMPlexCreateTwoSidedProcessSF"
376 /*@
377   DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
378 
379   Collective on DM
380 
381   Input Parameters:
382 + dm              - The DM
383 . sfPoint         - The PetscSF which encodes point connectivity
. rootRankSection - The number of leaves for a given root point
. rootRanks       - The rank of each edge into the root point
. leafRankSection - The number of processes sharing a given leaf point
- leafRanks       - The rank of each process sharing a leaf point
384 
385   Output Parameters:
386 + processRanks - A list of process neighbors, or NULL
387 - sfProcess    - An SF encoding the two-sided process connectivity, or NULL
388 
389   Level: developer
390 
391 .seealso: PetscSFCreate(), DMPlexCreateProcessSF()
392 @*/
393 PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
394 {
395   const PetscSFNode *remotePoints;
396   PetscInt          *localPointsNew;
397   PetscSFNode       *remotePointsNew;
398   const PetscInt    *nranks;
399   PetscInt          *ranksNew;
400   PetscBT            neighbors;
401   PetscInt           pStart, pEnd, p, numLeaves, l, numNeighbors, n;
402   PetscMPIInt        numProcs, proc, rank;
403   PetscErrorCode     ierr;
404 
405   PetscFunctionBegin;
406   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
407   PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
408   if (processRanks) {PetscValidPointer(processRanks, 3);}
409   if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
410   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
411   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
412   ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
413   ierr = PetscBTCreate(numProcs, &neighbors);CHKERRQ(ierr);
414   ierr = PetscBTMemzero(numProcs, neighbors);CHKERRQ(ierr);
415   /* Compute root-to-leaf process connectivity */
416   ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
417   ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
418   for (p = pStart; p < pEnd; ++p) {
419     PetscInt ndof, noff, n;
420 
421     ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
422     ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
423     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
424   }
425   ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
426   /* Compute leaf-to-neighbor process connectivity */
427   ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
428   ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
429   for (p = pStart; p < pEnd; ++p) {
430     PetscInt ndof, noff, n;
431 
432     ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
433     ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
434     for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);}
435   }
436   ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
437   /* Compute leaf-to-root process connectivity */
438   for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
439   /* Calculate edges */
440   PetscBTClear(neighbors, rank);
441   for(proc = 0, numNeighbors = 0; proc < numProcs; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
442   ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
443   ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
444   ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
445   for(proc = 0, n = 0; proc < numProcs; ++proc) {
446     if (PetscBTLookup(neighbors, proc)) {
447       ranksNew[n]              = proc;
448       localPointsNew[n]        = proc;
449       remotePointsNew[n].index = rank;
450       remotePointsNew[n].rank  = proc;
451       ++n;
452     }
453   }
454   ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
455   if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
456   else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
457   if (sfProcess) {
458     ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
459     ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
460     ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
461     ierr = PetscSFSetGraph(*sfProcess, numProcs, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
462   }
463   PetscFunctionReturn(0);
464 }
465 
466 #undef __FUNCT__
467 #define __FUNCT__ "DMPlexDistributeOwnership"
468 /*@
469   DMPlexDistributeOwnership - Compute owner information for shared points, effectively making the one-sided point SF two-sided.
470 
471   Collective on DM
472 
473   Input Parameter:
474 . dm - The DM
475 
476   Output Parameters:
477 + rootSection - The number of leaves for a given root point
478 . rootrank    - The rank of each edge into the root point
479 . leafSection - The number of processes sharing a given leaf point
480 - leafrank    - The rank of each process sharing a leaf point
481 
482   Level: developer
483 
484 .seealso: DMPlexCreateOverlap()
485 @*/
486 PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
487 {
488   MPI_Comm        comm;
489   PetscSF         sfPoint;
490   const PetscInt *rootdegree;
491   PetscInt       *myrank, *remoterank;
492   PetscInt        pStart, pEnd, p, nedges;
493   PetscMPIInt     rank;
494   PetscErrorCode  ierr;
495 
496   PetscFunctionBegin;
497   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
498   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
499   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
500   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
501   /* Compute number of leaves for each root */
502   ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
503   ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
504   ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
505   ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
506   for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
507   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
508   /* Gather rank of each leaf to root */
509   ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
510   ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
511   ierr = PetscMalloc1(nedges,  &remoterank);CHKERRQ(ierr);
512   for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
513   ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
514   ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
515   ierr = PetscFree(myrank);CHKERRQ(ierr);
516   ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
517   /* Distribute remote ranks to leaves */
518   ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
519   ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
520   PetscFunctionReturn(0);
521 }
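/*
  Usage sketch (assumes a DMPlex "dm" on communicator "comm"): compute the two-sided
  ownership information; the resulting sections and index sets are typically passed on
  to DMPlexCreateOverlap() below.

    PetscSection rootSection, leafSection;
    IS           rootrank, leafrank;

    ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
    ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
*/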
522 
523 #undef __FUNCT__
524 #define __FUNCT__ "DMPlexCreateOverlap"
525 /*@C
526   DMPlexCreateOverlap - Compute an SF encoding the points that must be received from other ranks to form the requested overlap.
527 
528   Collective on DM
529 
530   Input Parameters:
531 + dm          - The DM
532 . levels      - Number of overlap levels
533 . rootSection - The number of leaves for a given root point
534 . rootrank    - The rank of each edge into the root point
535 . leafSection - The number of processes sharing a given leaf point
536 - leafrank    - The rank of each process sharing a leaf point
537 
538   Output Parameter:
539 . overlapSF   - The star forest mapping remote overlap points into a contiguous leaf space
540 
541   Level: developer
542 
543 .seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
544 @*/
545 PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, PetscSF *overlapSF)
546 {
547   MPI_Comm           comm;
548   DMLabel            ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
549   PetscSF            sfPoint, sfProc;
550   DMLabel            ovLeafLabel;
551   const PetscSFNode *remote;
552   const PetscInt    *local;
553   const PetscInt    *nrank, *rrank;
554   PetscInt          *adj = NULL;
555   PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
556   PetscMPIInt        rank, numProcs;
557   PetscBool          useCone, useClosure, flg;
558   PetscErrorCode     ierr;
559 
560   PetscFunctionBegin;
561   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
562   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
563   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
564   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
565   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
566   ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
567   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
568   ierr = DMLabelCreate("Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
569   /* Handle leaves: shared with the root point */
570   for (l = 0; l < nleaves; ++l) {
571     PetscInt adjSize = PETSC_DETERMINE, a;
572 
573     ierr = DMPlexGetAdjacency(dm, local[l], &adjSize, &adj);CHKERRQ(ierr);
574     for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
575   }
576   ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
577   ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
578   /* Handle roots */
579   for (p = pStart; p < pEnd; ++p) {
580     PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
581 
582     if ((p >= sStart) && (p < sEnd)) {
583       /* Some leaves share a root with other leaves on different processes */
584       ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
585       if (neighbors) {
586         ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
587         ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
588         for (n = 0; n < neighbors; ++n) {
589           const PetscInt remoteRank = nrank[noff+n];
590 
591           if (remoteRank == rank) continue;
592           for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
593         }
594       }
595     }
596     /* Roots are shared with leaves */
597     ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
598     if (!neighbors) continue;
599     ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
600     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
601     for (n = 0; n < neighbors; ++n) {
602       const PetscInt remoteRank = rrank[noff+n];
603 
604       if (remoteRank == rank) continue;
605       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
606     }
607   }
608   ierr = PetscFree(adj);CHKERRQ(ierr);
609   ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
610   ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
611   /* Add additional overlap levels */
612   for (l = 1; l < levels; l++) {ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);}
613   /* We require the closure in the overlap */
614   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
615   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
616   if (useCone || !useClosure) {
617     ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
618   }
619   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
620   if (flg) {
621     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
622     ierr = DMLabelView(ovAdjByRank, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
623   }
624   /* Make process SF and invert sender to receiver label */
625   ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, &sfProc);CHKERRQ(ierr);
626   ierr = DMLabelCreate("ovLeafs", &ovLeafLabel);CHKERRQ(ierr);
627   ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, sfProc, ovLeafLabel);CHKERRQ(ierr);
628   /* Don't import points from yourself */
629   ierr = DMLabelClearStratum(ovLeafLabel, rank);CHKERRQ(ierr);
630   /* Don't import points already in the pointSF */
631   for (l = 0; l < nleaves; ++l) {
632     ierr = DMLabelClearValue(ovLeafLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
633   }
634   /* Convert receiver label to SF */
635   ierr = DMPlexPartitionLabelCreateSF(dm, ovLeafLabel, overlapSF);CHKERRQ(ierr);
636   ierr = PetscObjectSetName((PetscObject) *overlapSF, "Overlap SF");CHKERRQ(ierr);
637   ierr = PetscSFSetFromOptions(*overlapSF);CHKERRQ(ierr);
638   if (flg) {
639     ierr = PetscPrintf(comm, "Overlap SF\n");CHKERRQ(ierr);
640     ierr = PetscSFView(*overlapSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
641   }
642   /* Clean up */
643   ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
644   ierr = DMLabelDestroy(&ovLeafLabel);CHKERRQ(ierr);
645   ierr = PetscSFDestroy(&sfProc);CHKERRQ(ierr);
646   PetscFunctionReturn(0);
647 }
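/*
  Usage sketch (continuing the DMPlexDistributeOwnership() example above): build the
  overlap SF for one level of overlap, convert it into a migration SF, then destroy
  the intermediate objects.

    PetscSF overlapSF, migrationSF;

    ierr = DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
    ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
    ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
    ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
*/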
648 
649 #undef __FUNCT__
650 #define __FUNCT__ "DMPlexCreateOverlapMigrationSF"
651 PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
652 {
653   MPI_Comm           comm;
654   PetscMPIInt        rank, numProcs;
655   PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
656   PetscInt          *pointDepths, *remoteDepths, *ilocal;
657   PetscInt          *depthRecv, *depthShift, *depthIdx;
658   PetscSFNode       *iremote;
659   PetscSF            pointSF;
660   const PetscInt    *sharedLocal;
661   const PetscSFNode *overlapRemote, *sharedRemote;
662   PetscErrorCode     ierr;
663 
664   PetscFunctionBegin;
665   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
666   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
667   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
668   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
669   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
670 
671   /* Before building the migration SF we need to know the new stratum offsets */
672   ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
673   ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
674   for (d=0; d<dim+1; d++) {
675     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
676     for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
677   }
678   for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
679   ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
680   ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
681 
682   /* Count received points in each stratum and compute the internal strata shift */
683   ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
684   for (d=0; d<dim+1; d++) depthRecv[d]=0;
685   for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
686   depthShift[dim] = 0;
687   for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
688   for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
689   for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
690   for (d=0; d<dim+1; d++) {
691     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
692     depthIdx[d] = pStart + depthShift[d];
693   }
694 
695   /* Build an SF that describes the full overlap migration */
696   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
697   newLeaves = pEnd - pStart + nleaves;
698   ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
699   ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
700   /* First map local points to themselves */
701   for (d=0; d<dim+1; d++) {
702     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
703     for (p=pStart; p<pEnd; p++) {
704       point = p + depthShift[d];
705       ilocal[point] = point;
706       iremote[point].index = p;
707       iremote[point].rank = rank;
708       depthIdx[d]++;
709     }
710   }
711 
712   /* Add in the remote roots for currently shared points */
713   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
714   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
715   for (d=0; d<dim+1; d++) {
716     ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
717     for (p=0; p<numSharedPoints; p++) {
718       if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
719         point = sharedLocal[p] + depthShift[d];
720         iremote[point].index = sharedRemote[p].index;
721         iremote[point].rank = sharedRemote[p].rank;
722       }
723     }
724   }
725 
726   /* Now add the incoming overlap points */
727   for (p=0; p<nleaves; p++) {
728     point = depthIdx[remoteDepths[p]];
729     ilocal[point] = point;
730     iremote[point].index = overlapRemote[p].index;
731     iremote[point].rank = overlapRemote[p].rank;
732     depthIdx[remoteDepths[p]]++;
733   }
734   ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
735 
736   ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
737   ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
738   ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
739   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
740   ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
741 
742   ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
743   PetscFunctionReturn(0);
744 }
745 
746 #undef __FUNCT__
747 #define __FUNCT__ "DMPlexDistributeField"
748 /*@
749   DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
750 
751   Collective on DM
752 
753   Input Parameters:
754 + dm - The DMPlex object
755 . pointSF - The PetscSF describing the communication pattern
756 . originalSection - The PetscSection for existing data layout
757 - originalVec - The existing data
758 
759   Output Parameters:
760 + newSection - The PetscSection describing the new data layout
761 - newVec - The new data
762 
763   Level: developer
764 
765 .seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
766 @*/
767 PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
768 {
769   PetscSF        fieldSF;
770   PetscInt      *remoteOffsets, fieldSize;
771   PetscScalar   *originalValues, *newValues;
772   PetscErrorCode ierr;
773 
774   PetscFunctionBegin;
775   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
776   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
777 
778   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
779   ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
780   ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);
781 
782   ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
783   ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
784   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
785   ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
786   ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
787   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
788   ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
789   ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
790   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
791   PetscFunctionReturn(0);
792 }
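/*
  Usage sketch (assumes "migrationSF" maps original points to their new owners, e.g.
  the SF returned by DMPlexDistribute(), and "origSection"/"origVec" hold the existing
  layout and data): the new section and vector are created empty and filled here.

    PetscSection newSection;
    Vec          newVec;

    ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
    ierr = VecCreate(comm, &newVec);CHKERRQ(ierr);
    ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, newSection, newVec);CHKERRQ(ierr);
*/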
793 
794 #undef __FUNCT__
795 #define __FUNCT__ "DMPlexDistributeFieldIS"
796 /*@
797   DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
798 
799   Collective on DM
800 
801   Input Parameters:
802 + dm - The DMPlex object
803 . pointSF - The PetscSF describing the communication pattern
804 . originalSection - The PetscSection for existing data layout
805 - originalIS - The existing data
806 
807   Output Parameters:
808 + newSection - The PetscSection describing the new data layout
809 - newIS - The new data
810 
811   Level: developer
812 
813 .seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
814 @*/
815 PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
816 {
817   PetscSF         fieldSF;
818   PetscInt       *newValues, *remoteOffsets, fieldSize;
819   const PetscInt *originalValues;
820   PetscErrorCode  ierr;
821 
822   PetscFunctionBegin;
823   ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
824   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
825 
826   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
827   ierr = PetscMalloc(fieldSize * sizeof(PetscInt), &newValues);CHKERRQ(ierr);
828 
829   ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
830   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
831   ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
832   ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
833   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
834   ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
835   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
836   ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
837   PetscFunctionReturn(0);
838 }
839 
840 #undef __FUNCT__
841 #define __FUNCT__ "DMPlexDistributeData"
842 /*@
843   DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
844 
845   Collective on DM
846 
847   Input Parameters:
848 + dm - The DMPlex object
849 . pointSF - The PetscSF describing the communication pattern
850 . originalSection - The PetscSection for existing data layout
851 . datatype - The type of data
852 - originalData - The existing data
853 
854   Output Parameters:
855 + newSection - The PetscSection describing the new data layout
856 - newData - The new data
857 
858   Level: developer
859 
860 .seealso: DMPlexDistribute(), DMPlexDistributeField()
861 @*/
862 PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
863 {
864   PetscSF        fieldSF;
865   PetscInt      *remoteOffsets, fieldSize;
866   PetscMPIInt    dataSize;
867   PetscErrorCode ierr;
868 
869   PetscFunctionBegin;
870   ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
871   ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);
872 
873   ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
874   ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
875   ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);
876 
877   ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
878   ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
879   ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
880   ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
881   ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
882   PetscFunctionReturn(0);
883 }
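/*
  Usage sketch (assumes "origSection" and the integer array "origData" describe the
  existing layout and data, and "migrationSF" is the distribution SF): newData is
  allocated by the routine and must be freed with PetscFree() by the caller.

    PetscSection newSection;
    PetscInt    *newData = NULL;

    ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, origData, newSection, (void **) &newData);CHKERRQ(ierr);
*/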
884 
885 #undef __FUNCT__
886 #define __FUNCT__ "DMPlexDistributeCones"
887 PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
888 {
889   DM_Plex               *pmesh   = (DM_Plex*) (dmParallel)->data;
890   MPI_Comm               comm;
891   PetscSF                coneSF;
892   PetscSection           originalConeSection, newConeSection;
893   PetscInt              *remoteOffsets, *cones, *newCones, newConesSize;
894   PetscBool              flg;
895   PetscErrorCode         ierr;
896 
897   PetscFunctionBegin;
898   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
899   PetscValidPointer(dmParallel,4);
900   ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
901 
902   /* Distribute cone section */
903   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
904   ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
905   ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr);
906   ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
907   ierr = DMSetUp(dmParallel);CHKERRQ(ierr);
908   {
909     PetscInt pStart, pEnd, p;
910 
911     ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
912     for (p = pStart; p < pEnd; ++p) {
913       PetscInt coneSize;
914       ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
915       pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
916     }
917   }
918   /* Communicate and renumber cones */
919   ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
920   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
921   ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr);
922   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
923   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
924   ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
925   ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
926 #if PETSC_USE_DEBUG
927   {
928     PetscInt  p;
929     PetscBool valid = PETSC_TRUE;
930     for (p = 0; p < newConesSize; ++p) {
931       if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %D not in overlap SF\n", p);CHKERRQ(ierr);}
932     }
933     if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
934   }
935 #endif
936   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
937   if (flg) {
938     ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
939     ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
940     ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
941     ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
942     ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
943   }
944   ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
945   ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr);
946   ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
947   ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
948   ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
949   ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
950   /* Create supports and stratify sieve */
951   {
952     PetscInt pStart, pEnd;
953 
954     ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
955     ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
956   }
957   ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr);
958   ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr);
959   PetscFunctionReturn(0);
960 }
961 
962 #undef __FUNCT__
963 #define __FUNCT__ "DMPlexDistributeCoordinates"
964 PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
965 {
966   MPI_Comm         comm;
967   PetscSection     originalCoordSection, newCoordSection;
968   Vec              originalCoordinates, newCoordinates;
969   PetscInt         bs;
970   const char      *name;
971   const PetscReal *maxCell, *L;
972   PetscErrorCode   ierr;
973 
974   PetscFunctionBegin;
975   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
976   PetscValidPointer(dmParallel, 3);
977 
978   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
979   ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
980   ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
981   ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
982   if (originalCoordinates) {
983     ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
984     ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
985     ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
986 
987     ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
988     ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
989     ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
990     ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
991     ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
992   }
993   ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
994   if (L) {ierr = DMSetPeriodicity(dmParallel, maxCell, L);CHKERRQ(ierr);}
995   PetscFunctionReturn(0);
996 }
997 
998 #undef __FUNCT__
999 #define __FUNCT__ "DMPlexDistributeLabels"
1000 /* Here we are assuming that process 0 always has everything */
1001 PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1002 {
1003   MPI_Comm       comm;
1004   PetscMPIInt    rank;
1005   PetscInt       numLabels, numLocalLabels, l;
1006   PetscBool      hasLabels = PETSC_FALSE;
1007   PetscErrorCode ierr;
1008 
1009   PetscFunctionBegin;
1010   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1011   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1012   ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1013   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1014   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1015 
1016   /* Everyone must have either the same number of labels, or none */
1017   ierr = DMPlexGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1018   numLabels = numLocalLabels;
1019   ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1020   if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1021   for (l = numLabels-1; l >= 0; --l) {
1022     DMLabel     label = NULL, labelNew = NULL;
1023     PetscBool   isdepth;
1024 
1025     if (hasLabels) {
1026       ierr = DMPlexGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1027       /* Skip "depth" because it is recreated */
1028       ierr = PetscStrcmp(label->name, "depth", &isdepth);CHKERRQ(ierr);
1029     }
1030     ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1031     if (isdepth) continue;
1032     ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1033     ierr = DMPlexAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1034   }
1035   ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1036   PetscFunctionReturn(0);
1037 }
1038 
1039 #undef __FUNCT__
1040 #define __FUNCT__ "DMPlexDistributeSetupHybrid"
1041 PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1042 {
1043   DM_Plex        *mesh  = (DM_Plex*) dm->data;
1044   DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
1045   MPI_Comm        comm;
1046   const PetscInt *gpoints;
1047   PetscInt        dim, depth, n, d;
1048   PetscErrorCode  ierr;
1049 
1050   PetscFunctionBegin;
1051   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1052   PetscValidPointer(dmParallel, 4);
1053 
1054   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1055   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1056 
1057   /* Setup hybrid structure */
1058   for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
1059   ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1060   ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
1061   ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
1062   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1063   for (d = 0; d <= dim; ++d) {
1064     PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;
1065 
1066     if (pmax < 0) continue;
1067     ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
1068     ierr = DMPlexGetDepthStratum(dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
1069     ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
1070     for (p = 0; p < n; ++p) {
1071       const PetscInt point = gpoints[p];
1072 
1073       if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
1074     }
1075     if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
1076     else            pmesh->hybridPointMax[d] = -1;
1077   }
1078   ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
1079   PetscFunctionReturn(0);
1080 }
1081 
1082 #undef __FUNCT__
1083 #define __FUNCT__ "DMPlexDistributeSetupTree"
1084 PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1085 {
1086   MPI_Comm        comm;
1087   DM              refTree;
1088   PetscSection    origParentSection, newParentSection;
1089   PetscInt        *origParents, *origChildIDs;
1090   PetscBool       flg;
1091   PetscErrorCode  ierr;
1092 
1093   PetscFunctionBegin;
1094   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1095   PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 4);
1096   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1097 
1098   /* Set up tree */
1099   ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1100   ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1101   ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1102   if (origParentSection) {
1103     PetscInt        pStart, pEnd;
1104     PetscInt        *newParents, *newChildIDs;
1105     PetscInt        *remoteOffsetsParents, newParentSize;
1106     PetscSF         parentSF;
1107 
1108     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1109     ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1110     ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1111     ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1112     ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1113     ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1114     ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1115     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1116     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origParents, newParents);CHKERRQ(ierr);
1117     ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1118     ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1119     ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1120     ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1121     if (flg) {
1122       ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1123       ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1124       ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1125       ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1126       ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1127     }
1128     ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1129     ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1130     ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1131     ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1132   }
1133   PetscFunctionReturn(0);
1134 }
1135 
1136 #undef __FUNCT__
1137 #define __FUNCT__ "DMPlexDistributeSF"
1138 PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1139 {
1140   DM_Plex               *mesh  = (DM_Plex*) dm->data;
1141   DM_Plex               *pmesh = (DM_Plex*) (dmParallel)->data;
1142   PetscMPIInt            rank, numProcs;
1143   MPI_Comm               comm;
1144   PetscErrorCode         ierr;
1145 
1146   PetscFunctionBegin;
1147   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1148   PetscValidPointer(dmParallel,3);
1149 
1150   /* Create point SF for parallel mesh */
1151   ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1152   ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1153   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1154   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1155   {
1156     const PetscInt *leaves;
1157     PetscSFNode    *remotePoints, *rowners, *lowners;
1158     PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1159     PetscInt        pStart, pEnd;
1160 
1161     ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1162     ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
1163     ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
1164     for (p=0; p<numRoots; p++) {
1165       rowners[p].rank  = -1;
1166       rowners[p].index = -1;
1167     }
1168     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1169     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1170     for (p = 0; p < numLeaves; ++p) {
1171       if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1172         lowners[p].rank  = rank;
1173         lowners[p].index = leaves ? leaves[p] : p;
1174       } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC ignores us */
1175         lowners[p].rank  = -2;
1176         lowners[p].index = -2;
1177       }
1178     }
1179     for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction, so flag them so that MAXLOC ignores them */
1180       rowners[p].rank  = -3;
1181       rowners[p].index = -3;
1182     }
1183     ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1184     ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
1185     ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1186     ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
1187     for (p = 0; p < numLeaves; ++p) {
1188       if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
1189       if (lowners[p].rank != rank) ++numGhostPoints;
1190     }
1191     ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
1192     ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
1193     for (p = 0, gp = 0; p < numLeaves; ++p) {
1194       if (lowners[p].rank != rank) {
1195         ghostPoints[gp]        = leaves ? leaves[p] : p;
1196         remotePoints[gp].rank  = lowners[p].rank;
1197         remotePoints[gp].index = lowners[p].index;
1198         ++gp;
1199       }
1200     }
1201     ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
1202     ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
1203     ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr);
1204   }
1205   pmesh->useCone    = mesh->useCone;
1206   pmesh->useClosure = mesh->useClosure;
1207   ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1208   PetscFunctionReturn(0);
1209 }
1210 
1211 #undef __FUNCT__
1212 #define __FUNCT__ "DMPlexDistribute"
1213 /*@C
1214   DMPlexDistribute - Distributes the mesh and any associated sections.
1215 
1216   Not Collective
1217   Collective on DM
1218   Input Parameter:
1219   Input Parameters:
1220 - overlap - The overlap of partitions, 0 is the default
1221 
1222   Output Parameter:
1223   Output Parameters:
1224 + sf - The PetscSF used for point distribution, or NULL if not needed
1225 - dmParallel - The distributed DMPlex object, or NULL
1226   Note: If the mesh was not distributed, the return value is NULL.
1227 
1228   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1229   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1230   representation on the mesh.
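
  A typical calling sequence (a minimal sketch; dmDist is a placeholder name for the redistributed mesh) is
$    ierr = DMPlexDistribute(dm, 0, NULL, &dmDist);CHKERRQ(ierr);
$    if (dmDist) {
$      ierr = DMDestroy(&dm);CHKERRQ(ierr);
$      dm   = dmDist;
$    }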
1231 
1232   Level: intermediate
1233 
1234 .keywords: mesh, elements
1235 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1236 @*/
1237 PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1238 {
1239   DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
1240   MPI_Comm               comm;
1241   PetscInt               dim, nroots, nleaves;
1242   DM                     dmOverlap;
1243   IS                     cellPart;
1244   PetscSection           cellPartSection;
1245   DMLabel                lblPartition, lblMigration;
1246   PetscSFNode           *newRemote;
1247   const PetscSFNode     *oldRemote;
1248   PetscSF                sfProcess, sfMigration, overlapPointSF, overlapSF;
1249   ISLocalToGlobalMapping renumbering;
1250   PetscBool              flg;
1251   PetscMPIInt            rank, numProcs, p;
1252   PetscErrorCode         ierr;
1253 
1254   PetscFunctionBegin;
1255   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1256   if (sf) PetscValidPointer(sf,4);
1257   PetscValidPointer(dmParallel,5);
1258 
1259   ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1260   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1261   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1262   ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
1263 
1264   *dmParallel = NULL;
1265   if (numProcs == 1) PetscFunctionReturn(0);
1266 
1267   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1268   /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
1269   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1270   ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr);
1271   ierr = PetscPartitionerPartition(mesh->partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr);
1272   {
1273     /* Convert partition to DMLabel */
1274     PetscInt proc, pStart, pEnd, npoints, poffset;
1275     const PetscInt *points;
1276     ierr = DMLabelCreate("Point Partition", &lblPartition);CHKERRQ(ierr);
1277     ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1278     ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1279     for (proc = pStart; proc < pEnd; proc++) {
1280       ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1281       ierr = PetscSectionGetOffset(cellPartSection, proc, &poffset);CHKERRQ(ierr);
1282       for (p = poffset; p < poffset+npoints; p++) {
1283         ierr = DMLabelSetValue(lblPartition, points[p], proc);CHKERRQ(ierr);
1284       }
1285     }
1286     ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1287   }
1288   ierr = DMPlexPartitionLabelClosure(dm, lblPartition);CHKERRQ(ierr);
1289   {
1290     /* Build a global process SF */
1291     PetscSFNode *remoteProc;
1292     ierr = PetscMalloc1(numProcs, &remoteProc);CHKERRQ(ierr);
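    /* One leaf per process: leaf p references root 'rank' on process p, so every rank can reach every other rank; this SF is used below to invert the partition label */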
1293     for (p = 0; p < numProcs; ++p) {
1294       remoteProc[p].rank  = p;
1295       remoteProc[p].index = rank;
1296     }
1297     ierr = PetscSFCreate(comm, &sfProcess);CHKERRQ(ierr);
1298     ierr = PetscObjectSetName((PetscObject) sfProcess, "Process SF");CHKERRQ(ierr);
1299     ierr = PetscSFSetGraph(sfProcess, numProcs, numProcs, NULL, PETSC_OWN_POINTER, remoteProc, PETSC_OWN_POINTER);CHKERRQ(ierr);
1300   }
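  /* Invert the partition label (point -> destination rank) into a migration label (point -> origin rank), then derive the migration SF and the point renumbering from it */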
1301   ierr = DMLabelCreate("Point migration", &lblMigration);CHKERRQ(ierr);
1302   ierr = DMPlexPartitionLabelInvert(dm, lblPartition, sfProcess, lblMigration);CHKERRQ(ierr);
1303   ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1304   ierr = ISLocalToGlobalMappingCreateSF(sfMigration, 0, &renumbering);CHKERRQ(ierr);
1305 
1306   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1307   ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1308   if (flg) {
1309     ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD, PETSC_TRUE);CHKERRQ(ierr);
1310     ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1311     ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
1312     ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
1313     ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
1314   }
1315 
1316   /* Create new mesh */
1317   ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1318   ierr  = DMSetDimension(*dmParallel, dim);CHKERRQ(ierr);
1319   ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1320   pmesh = (DM_Plex*) (*dmParallel)->data;
1321   pmesh->useAnchors = mesh->useAnchors;
1322   /* Migrate data to a non-overlapping parallel DM */
1323   ierr = DMPlexDistributeCones(dm, sfMigration, renumbering, *dmParallel);CHKERRQ(ierr);
1324   ierr = DMPlexDistributeCoordinates(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1325   ierr = DMPlexDistributeLabels(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1326   ierr = DMPlexDistributeSetupHybrid(dm, sfMigration, renumbering, *dmParallel);CHKERRQ(ierr);
1327   ierr = DMPlexDistributeSetupTree(dm, sfMigration, renumbering, *dmParallel);CHKERRQ(ierr);
1328   ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1329 
1330   /* Build the point SF without overlap */
1331   ierr = DMPlexDistributeSF(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1332   if (flg) {
1333     ierr = PetscPrintf(comm, "Point SF:\n");CHKERRQ(ierr);
1334     ierr = PetscSFView((*dmParallel)->sf, NULL);CHKERRQ(ierr);
1335   }
1336 
1337   if (overlap > 0) {
1338     ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1339     /* Add the partition overlap to the distributed DM */
1340     ierr = DMPlexDistributeOverlap(*dmParallel, overlap, renumbering, &overlapSF, &dmOverlap);CHKERRQ(ierr);
1341     ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1342     *dmParallel = dmOverlap;
1343     if (flg) {
1344       ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1345       ierr = PetscSFView(overlapSF, NULL);CHKERRQ(ierr);
1346     }
1347 
1348     /* Re-map the migration SF to establish the full migration pattern */
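    /* Broadcasting the old remote roots over the overlap SF composes the two migrations: each point of the overlapping DM then references its origin point in the original mesh */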
1349     ierr = PetscSFGetGraph(sfMigration, &nroots, NULL, NULL, &oldRemote);CHKERRQ(ierr);
1350     ierr = PetscSFGetGraph(overlapSF, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1351     ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1352     ierr = PetscSFBcastBegin(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1353     ierr = PetscSFBcastEnd(overlapSF, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1354     ierr = PetscSFCreate(comm, &overlapPointSF);CHKERRQ(ierr);
1355     ierr = PetscSFSetGraph(overlapPointSF, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1356     ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1357     ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1358     sfMigration = overlapPointSF;
1359     ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap,dm,0,0,0);CHKERRQ(ierr);
1360   }
1361   /* Cleanup Partition */
1362   ierr = PetscSFDestroy(&sfProcess);CHKERRQ(ierr);
1363   ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1364   ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1365   ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
1366   ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1367   ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1368   if (sf) {*sf = sfMigration;}
1369   else    {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1370   ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
1371   ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1372   PetscFunctionReturn(0);
1373 }
1374 
1375 #undef __FUNCT__
1376 #define __FUNCT__ "DMPlexDistributeOverlap"
1377 /*@C
1378   DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1379 
1380   Collective on dm
1381 
1382   Input Parameters:
1383 + dm          - The non-overlapping distributed DMPlex object
1384 . overlap     - The overlap of partitions (0 is the default)
- renumbering - The point renumbering (local-to-global mapping) produced when the mesh was originally distributed
1385 
1386   Output Parameters:
1387 + sf - The PetscSF used for point distribution
1388 - dmOverlap - The overlapping distributed DMPlex object, or NULL
1389 
1390   Note: If the mesh was not distributed, dmOverlap is set to NULL.
1391 
1392   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
1393   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
1394   representation on the mesh.
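
  This routine is normally driven by passing overlap > 0 to DMPlexDistribute(); a direct call (a minimal sketch, where renumbering comes from the original distribution and sfOverlap/dmOverlap are placeholder names) looks like
$    ierr = DMPlexDistributeOverlap(dm, 1, renumbering, &sfOverlap, &dmOverlap);CHKERRQ(ierr);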
1395 
1396   Level: intermediate
1397 
1398 .keywords: mesh, elements
1399 .seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
1400 @*/
1401 PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, ISLocalToGlobalMapping renumbering, PetscSF *sf, DM *dmOverlap)
1402 {
1403   MPI_Comm               comm;
1404   PetscMPIInt            rank;
1405   PetscSection           rootSection, leafSection;
1406   IS                     rootrank, leafrank;
1407   PetscSection           coneSection;
1408   PetscSF                overlapSF, migrationSF, pointSF, newPointSF;
1409   PetscSFNode           *ghostRemote;
1410   const PetscSFNode     *overlapRemote;
1411   ISLocalToGlobalMapping overlapRenumbering;
1412   const PetscInt        *renumberingArray, *overlapLocal;
1413   PetscInt               dim, p, pStart, pEnd, conesSize, idx;
1414   PetscInt               numGhostPoints, numOverlapPoints, numSharedPoints, overlapLeaves;
1415   PetscInt              *cones, *ghostLocal, *overlapRenumberingArray, *pointIDs, *recvPointIDs;
1416   PetscErrorCode         ierr;
1417 
1418   PetscFunctionBegin;
1419   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1420   if (sf) PetscValidPointer(sf, 3);
1421   PetscValidPointer(dmOverlap, 4);
1422 
1423   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1424   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1425   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1426 
1427   /* Compute point overlap with neighbouring processes on the distributed DM */
1428   ierr = PetscLogEventBegin(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1429   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1430   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1431   ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1432   ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &overlapSF);CHKERRQ(ierr);
1433   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1434   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1435   ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1436   ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1437   ierr = PetscLogEventEnd(PETSCPARTITIONER_Partition,dm,0,0,0);CHKERRQ(ierr);
1438 
1439   /* Build dense migration SF that maps the non-overlapping partition to the overlapping one */
1440   ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);CHKERRQ(ierr);
1441 
1442   /* Convert cones to global numbering before migrating them */
1443   ierr = DMPlexGetConeSection(dm, &coneSection);CHKERRQ(ierr);
1444   ierr = PetscSectionGetStorageSize(coneSection, &conesSize);CHKERRQ(ierr);
1445   ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
1446   ierr = ISLocalToGlobalMappingApplyBlock(renumbering, conesSize, cones, cones);CHKERRQ(ierr);
1447 
1448   /* Derive the new local-to-global mapping from the old one */
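  /* Each point of the overlapping DM receives the global number of its origin point, so the global numbering of the original distribution is preserved */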
1449   ierr = PetscSFGetGraph(migrationSF, NULL, &overlapLeaves, &overlapLocal, &overlapRemote);CHKERRQ(ierr);
1450   ierr = PetscMalloc1(overlapLeaves, &overlapRenumberingArray);CHKERRQ(ierr);
1451   ierr = ISLocalToGlobalMappingGetBlockIndices(renumbering, &renumberingArray);CHKERRQ(ierr);
1452   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1453   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, (PetscInt *) renumberingArray, overlapRenumberingArray);CHKERRQ(ierr);
1454   ierr = ISLocalToGlobalMappingCreate(comm, 1, overlapLeaves, (const PetscInt*) overlapRenumberingArray, PETSC_OWN_POINTER, &overlapRenumbering);CHKERRQ(ierr);
1455 
1456   /* Build the overlapping DM */
1457   ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1458   ierr = DMSetDimension(*dmOverlap, dim);CHKERRQ(ierr);
1459   ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1460   ierr = DMPlexDistributeCones(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1461   ierr = DMPlexDistributeCoordinates(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1462   ierr = DMPlexDistributeLabels(dm, migrationSF, *dmOverlap);CHKERRQ(ierr);
1463   ierr = DMPlexDistributeSetupHybrid(dm, migrationSF, overlapRenumbering, *dmOverlap);CHKERRQ(ierr);
1464 
1465   /* Build the new point SF by propagating owner-local point indices to generate the remote root indices */
1466   ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
1467   ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, NULL, NULL);CHKERRQ(ierr);
1468   ierr = PetscSFGetGraph(overlapSF, NULL, &numOverlapPoints, NULL, NULL);CHKERRQ(ierr);
1469   numGhostPoints = numSharedPoints + numOverlapPoints;
1470   ierr = PetscMalloc1(numGhostPoints, &ghostLocal);CHKERRQ(ierr);
1471   ierr = PetscMalloc1(numGhostPoints, &ghostRemote);CHKERRQ(ierr);
1472   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
1473   ierr = PetscMalloc2(pEnd-pStart, &pointIDs, overlapLeaves, &recvPointIDs);CHKERRQ(ierr);
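  /* Record, for each point that stayed on this rank, its local index in the overlapping DM; broadcasting these over the migration SF tells every received copy the owner-local index of its root */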
1474   for (p=0; p<overlapLeaves; p++) {
1475     if (overlapRemote[p].rank == rank) pointIDs[overlapRemote[p].index] = overlapLocal[p];
1476   }
1477   ierr = PetscSFBcastBegin(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
1478   ierr = PetscSFBcastEnd(migrationSF, MPIU_INT, pointIDs, recvPointIDs);CHKERRQ(ierr);
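  /* Points received from another rank become leaves of the new point SF, rooted at the owner-local index obtained above */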
1479   for (idx=0, p=0; p<overlapLeaves; p++) {
1480     if (overlapRemote[p].rank != rank) {
1481       ghostLocal[idx] = overlapLocal[p];
1482       ghostRemote[idx].index = recvPointIDs[p];
1483       ghostRemote[idx].rank = overlapRemote[p].rank;
1484       idx++;
1485     }
1486   }
1487   ierr = DMPlexGetChart(*dmOverlap, &pStart, &pEnd);CHKERRQ(ierr);
1488   ierr = PetscSFCreate(comm, &newPointSF);CHKERRQ(ierr);
1489   ierr = PetscSFSetGraph(newPointSF, pEnd - pStart, numGhostPoints, ghostLocal, PETSC_OWN_POINTER, ghostRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1490   ierr = DMSetPointSF(*dmOverlap, newPointSF);CHKERRQ(ierr);
1491   ierr = PetscSFDestroy(&newPointSF);CHKERRQ(ierr);
1492   /* Cleanup overlap partition */
1493   ierr = ISLocalToGlobalMappingDestroy(&overlapRenumbering);CHKERRQ(ierr);
1494   ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
1495   ierr = PetscFree2(pointIDs, recvPointIDs);CHKERRQ(ierr);
1496   if (sf) *sf = migrationSF;
1497   else    {ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);}
1498   PetscFunctionReturn(0);
1499 }
1500