xref: /petsc/src/dm/impls/plex/plexpartition.c (revision ffc59708705c40e64a02c41a11771b01110e63f5)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id for the PetscPartitioner object, assigned during package initialization */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Registry of PetscPartitioner implementations (name -> constructor), filled by PetscPartitionerRegister() */
PetscFunctionList PetscPartitionerList              = NULL;
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;

/* One-shot flag for the Chaco citation; presumably passed to PetscCitationsRegister() by the Chaco backend — confirm at the call site */
PetscBool ChacoPartitionercite = PETSC_FALSE;
/* BibTeX entry emitted when the Chaco partitioner is used (via -citations).
   Every physical line of the entry must end in \n so the emitted BibTeX is
   one field per line; the booktitle line was previously missing its \n. */
const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
                               "  author    = {Bruce Hendrickson and Robert Leland},\n"
                               "  title     = {A multilevel algorithm for partitioning graphs},\n"
                               "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
                               "  isbn      = {0-89791-816-9},\n"
                               "  pages     = {28},\n"
                               "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
                               "  publisher = {ACM Press},\n"
                               "  address   = {New York},\n"
                               "  year      = {1995}\n}\n";
20 
/* One-shot flag for the ParMETIS citation; presumably passed to PetscCitationsRegister() by the ParMETIS backend — confirm at the call site */
PetscBool ParMetisPartitionercite = PETSC_FALSE;
/* BibTeX entry emitted when the ParMETIS partitioner is used (via -citations) */
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                               "  author  = {George Karypis and Vipin Kumar},\n"
                               "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                               "  journal = {Journal of Parallel and Distributed Computing},\n"
                               "  volume  = {48},\n"
                               "  pages   = {71--85},\n"
                               "  year    = {1998}\n}\n";
29 
30 /*@C
31   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
32 
33   Input Parameters:
+ dm      - The mesh DM
35 - height  - Height of the strata from which to construct the graph
36 
37   Output Parameter:
38 + numVertices     - Number of vertices in the graph
39 . offsets         - Point offsets in the graph
40 . adjacency       - Point connectivity in the graph
41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
{
  PetscInt       p, pStart, pEnd, a, adjSize, idx, size;
  PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
  IS             cellNumbering;
  const PetscInt *cellNum;
  PetscBool      useCone, useClosure;
  PetscSection   section;     /* counts adjacency dofs per cell; its offsets become the CSR row starts */
  PetscSegBuffer adjBuffer;   /* accumulates adjacency entries; extracted into the CSR column array */
  PetscSF        sfPoint;
  PetscInt       *adjCells = NULL, *remoteCells = NULL;
  const PetscInt *local;
  PetscInt       nroots, nleaves, l;
  PetscMPIInt    rank;        /* NOTE(review): computed below but never used — candidate for removal */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* nroots < 0 means the SF has no graph, i.e. a serial run; re-queried from dm->sf below */
  ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* Build adjacency graph via a section/segbuffer */
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
  ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
  /* Always use FVM adjacency to create partitioner graph: save the user's adjacency
     settings, force cone-based/no-closure adjacency, and restore them afterwards */
  ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
  /* Global cell numbering: negative entries mark cells owned by another process */
  ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
  if (globalNumbering) {
    /* Caller receives a reference; our own reference is destroyed at the end */
    ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
    *globalNumbering = cellNumbering;
  }
  ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  /* For all boundary faces (including faces adjacent to a ghost cell), record the local cell in adjCells
     Broadcast adjCells to remoteCells (to get cells from roots) and Reduce adjCells to remoteCells (to get cells from leaves)
   */
  ierr = PetscSFGetGraph(dm->sf, &nroots, &nleaves, &local, NULL);CHKERRQ(ierr);
  if (nroots >= 0) {
    PetscInt fStart, fEnd, f;

    ierr = PetscCalloc2(nroots, &adjCells, nroots, &remoteCells);CHKERRQ(ierr);
    ierr = DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);CHKERRQ(ierr);
    /* -3 marks points carrying no cell information; chosen below any valid cellNum entry
       so MPI_MAX in the reduce cannot pick it over real data */
    for (l = 0; l < nroots; ++l) adjCells[l] = -3;
    for (f = fStart; f < fEnd; ++f) {
      const PetscInt *support;
      PetscInt        supportSize;

      ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
      ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
      /* supportSize 1: a true boundary face, record its single cell.
         supportSize 2: if one side is a leaf (shared) point, record the other side,
         so the owning rank learns our cell across the partition boundary. */
      if (supportSize == 1) adjCells[f] = cellNum[support[0]];
      else if (supportSize == 2) {
        ierr = PetscFindInt(support[0], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[1]];
        ierr = PetscFindInt(support[1], nleaves, local, &p);CHKERRQ(ierr);
        if (p >= 0) adjCells[f] = cellNum[support[0]];
      }
    }
    for (l = 0; l < nroots; ++l) remoteCells[l] = -1;
    /* Two-way exchange: bcast pushes root data to leaves, reduce (MAX) pulls leaf data to roots */
    ierr = PetscSFBcastBegin(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(dm->sf, MPIU_INT, adjCells, remoteCells);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(dm->sf, MPIU_INT, adjCells, remoteCells, MPI_MAX);CHKERRQ(ierr);
  }
  /* Combine local and global adjacencies */
  for (*numVertices = 0, p = pStart; p < pEnd; p++) {
    const PetscInt *cone;
    PetscInt        coneSize, c;

    /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    /* Add remote cells: any cone point whose remoteCells entry was filled above
       names a neighboring cell on another process */
    if (remoteCells) {
      ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
      ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
      for (c = 0; c < coneSize; ++c) {
        if (remoteCells[cone[c]] != -1) {
          PetscInt *PETSC_RESTRICT pBuf;

          ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
          ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
          *pBuf = remoteCells[cone[c]];
        }
      }
    }
    /* Add local cells */
    adjSize = PETSC_DETERMINE;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      const PetscInt point = adj[a];
      /* Keep only other cells in this stratum, stored by their global number */
      if (point != p && pStart <= point && point < pEnd) {
        PetscInt *PETSC_RESTRICT pBuf;
        ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
        ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
        *pBuf = cellNum[point];
      }
    }
    (*numVertices)++;
  }
  ierr = PetscFree2(adjCells, remoteCells);CHKERRQ(ierr);
  /* Restore the adjacency settings saved at the top */
  ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
  ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
  /* Derive CSR graph from section/segbuffer */
  ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
  ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
  /* Offsets are compressed over owned cells only, mirroring the skip logic above */
  for (idx = 0, p = pStart; p < pEnd; p++) {
    if (nroots > 0) {if (cellNum[p] < 0) continue;}
    ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
  }
  vOffsets[*numVertices] = size;
  if (offsets) *offsets = vOffsets;
  ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
  ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
  ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
  if (adjacency) *adjacency = graph;
  /* Clean up */
  ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
  ierr = PetscFree(adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
175 
176 /*@C
177   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
178 
179   Collective
180 
181   Input Arguments:
182 + dm - The DMPlex
183 - cellHeight - The height of mesh points to treat as cells (default should be 0)
184 
185   Output Arguments:
186 + numVertices - The number of local vertices in the graph, or cells in the mesh.
187 . offsets     - The offset to the adjacency list for each cell
188 - adjacency   - The adjacency list for all cells
189 
190   Note: This is suitable for input to a mesh partitioner like ParMetis.
191 
192   Level: advanced
193 
194 .seealso: DMPlexCreate()
195 @*/
196 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
197 {
198   const PetscInt maxFaceCases = 30;
199   PetscInt       numFaceCases = 0;
200   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
201   PetscInt      *off, *adj;
202   PetscInt      *neighborCells = NULL;
203   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
204   PetscErrorCode ierr;
205 
206   PetscFunctionBegin;
207   /* For parallel partitioning, I think you have to communicate supports */
208   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
209   cellDim = dim - cellHeight;
210   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
211   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
212   if (cEnd - cStart == 0) {
213     if (numVertices) *numVertices = 0;
214     if (offsets)   *offsets   = NULL;
215     if (adjacency) *adjacency = NULL;
216     PetscFunctionReturn(0);
217   }
218   numCells  = cEnd - cStart;
219   faceDepth = depth - cellHeight;
220   if (dim == depth) {
221     PetscInt f, fStart, fEnd;
222 
223     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
224     /* Count neighboring cells */
225     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
226     for (f = fStart; f < fEnd; ++f) {
227       const PetscInt *support;
228       PetscInt        supportSize;
229       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
230       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
231       if (supportSize == 2) {
232         PetscInt numChildren;
233 
234         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
235         if (!numChildren) {
236           ++off[support[0]-cStart+1];
237           ++off[support[1]-cStart+1];
238         }
239       }
240     }
241     /* Prefix sum */
242     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
243     if (adjacency) {
244       PetscInt *tmp;
245 
246       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
247       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
248       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
249       /* Get neighboring cells */
250       for (f = fStart; f < fEnd; ++f) {
251         const PetscInt *support;
252         PetscInt        supportSize;
253         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
254         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
255         if (supportSize == 2) {
256           PetscInt numChildren;
257 
258           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
259           if (!numChildren) {
260             adj[tmp[support[0]-cStart]++] = support[1];
261             adj[tmp[support[1]-cStart]++] = support[0];
262           }
263         }
264       }
265 #if defined(PETSC_USE_DEBUG)
266       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
267 #endif
268       ierr = PetscFree(tmp);CHKERRQ(ierr);
269     }
270     if (numVertices) *numVertices = numCells;
271     if (offsets)   *offsets   = off;
272     if (adjacency) *adjacency = adj;
273     PetscFunctionReturn(0);
274   }
275   /* Setup face recognition */
276   if (faceDepth == 1) {
277     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
278 
279     for (c = cStart; c < cEnd; ++c) {
280       PetscInt corners;
281 
282       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
283       if (!cornersSeen[corners]) {
284         PetscInt nFV;
285 
286         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
287         cornersSeen[corners] = 1;
288 
289         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
290 
291         numFaceVertices[numFaceCases++] = nFV;
292       }
293     }
294   }
295   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
296   /* Count neighboring cells */
297   for (cell = cStart; cell < cEnd; ++cell) {
298     PetscInt numNeighbors = PETSC_DETERMINE, n;
299 
300     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
301     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
302     for (n = 0; n < numNeighbors; ++n) {
303       PetscInt        cellPair[2];
304       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
305       PetscInt        meetSize = 0;
306       const PetscInt *meet    = NULL;
307 
308       cellPair[0] = cell; cellPair[1] = neighborCells[n];
309       if (cellPair[0] == cellPair[1]) continue;
310       if (!found) {
311         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
312         if (meetSize) {
313           PetscInt f;
314 
315           for (f = 0; f < numFaceCases; ++f) {
316             if (numFaceVertices[f] == meetSize) {
317               found = PETSC_TRUE;
318               break;
319             }
320           }
321         }
322         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
323       }
324       if (found) ++off[cell-cStart+1];
325     }
326   }
327   /* Prefix sum */
328   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
329 
330   if (adjacency) {
331     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
332     /* Get neighboring cells */
333     for (cell = cStart; cell < cEnd; ++cell) {
334       PetscInt numNeighbors = PETSC_DETERMINE, n;
335       PetscInt cellOffset   = 0;
336 
337       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
338       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
339       for (n = 0; n < numNeighbors; ++n) {
340         PetscInt        cellPair[2];
341         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
342         PetscInt        meetSize = 0;
343         const PetscInt *meet    = NULL;
344 
345         cellPair[0] = cell; cellPair[1] = neighborCells[n];
346         if (cellPair[0] == cellPair[1]) continue;
347         if (!found) {
348           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
349           if (meetSize) {
350             PetscInt f;
351 
352             for (f = 0; f < numFaceCases; ++f) {
353               if (numFaceVertices[f] == meetSize) {
354                 found = PETSC_TRUE;
355                 break;
356               }
357             }
358           }
359           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
360         }
361         if (found) {
362           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
363           ++cellOffset;
364         }
365       }
366     }
367   }
368   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
369   if (numVertices) *numVertices = numCells;
370   if (offsets)   *offsets   = off;
371   if (adjacency) *adjacency = adj;
372   PetscFunctionReturn(0);
373 }
374 
375 /*@C
376   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
377 
378   Not Collective
379 
380   Input Parameters:
381 + name        - The name of a new user-defined creation routine
382 - create_func - The creation routine itself
383 
384   Notes:
385   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
386 
387   Sample usage:
388 .vb
389     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
390 .ve
391 
392   Then, your PetscPartitioner type can be chosen with the procedural interface via
393 .vb
394     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
395     PetscPartitionerSetType(PetscPartitioner, "my_part");
396 .ve
397    or at runtime via the option
398 .vb
399     -petscpartitioner_type my_part
400 .ve
401 
402   Level: advanced
403 
404 .keywords: PetscPartitioner, register
405 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
406 
407 @*/
408 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
409 {
410   PetscErrorCode ierr;
411 
412   PetscFunctionBegin;
413   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
414   PetscFunctionReturn(0);
415 }
416 
417 /*@C
418   PetscPartitionerSetType - Builds a particular PetscPartitioner
419 
420   Collective on PetscPartitioner
421 
422   Input Parameters:
423 + part - The PetscPartitioner object
424 - name - The kind of partitioner
425 
426   Options Database Key:
427 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
428 
429   Level: intermediate
430 
431 .keywords: PetscPartitioner, set, type
432 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
433 @*/
434 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
435 {
436   PetscErrorCode (*r)(PetscPartitioner);
437   PetscBool      match;
438   PetscErrorCode ierr;
439 
440   PetscFunctionBegin;
441   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
442   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
443   if (match) PetscFunctionReturn(0);
444 
445   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
446   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
447   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
448 
449   if (part->ops->destroy) {
450     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
451   }
452   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
453   ierr = (*r)(part);CHKERRQ(ierr);
454   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
455   PetscFunctionReturn(0);
456 }
457 
458 /*@C
459   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
460 
461   Not Collective
462 
463   Input Parameter:
464 . part - The PetscPartitioner
465 
466   Output Parameter:
467 . name - The PetscPartitioner type name
468 
469   Level: intermediate
470 
471 .keywords: PetscPartitioner, get, type, name
472 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
473 @*/
474 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
475 {
476   PetscErrorCode ierr;
477 
478   PetscFunctionBegin;
479   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
480   PetscValidPointer(name, 2);
481   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
482   *name = ((PetscObject) part)->type_name;
483   PetscFunctionReturn(0);
484 }
485 
486 /*@C
487   PetscPartitionerView - Views a PetscPartitioner
488 
489   Collective on PetscPartitioner
490 
491   Input Parameter:
492 + part - the PetscPartitioner object to view
493 - v    - the viewer
494 
495   Level: developer
496 
497 .seealso: PetscPartitionerDestroy()
498 @*/
499 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
500 {
501   PetscMPIInt    size;
502   PetscBool      isascii;
503   PetscErrorCode ierr;
504 
505   PetscFunctionBegin;
506   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
507   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
508   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
509   if (isascii) {
510     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
511     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
512     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
513     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
514     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
515     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
516     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
517   }
518   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
519   PetscFunctionReturn(0);
520 }
521 
522 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
523 {
524   PetscFunctionBegin;
525   if (!currentType) {
526 #if defined(PETSC_HAVE_CHACO)
527     *defaultType = PETSCPARTITIONERCHACO;
528 #elif defined(PETSC_HAVE_PARMETIS)
529     *defaultType = PETSCPARTITIONERPARMETIS;
530 #elif defined(PETSC_HAVE_PTSCOTCH)
531     *defaultType = PETSCPARTITIONERPTSCOTCH;
532 #else
533     *defaultType = PETSCPARTITIONERSIMPLE;
534 #endif
535   } else {
536     *defaultType = currentType;
537   }
538   PetscFunctionReturn(0);
539 }
540 
541 /*@
542   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
543 
544   Collective on PetscPartitioner
545 
546   Input Parameter:
547 . part - the PetscPartitioner object to set options for
548 
549   Level: developer
550 
551 .seealso: PetscPartitionerView()
552 @*/
553 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
554 {
555   const char    *defaultType;
556   char           name[256];
557   PetscBool      flg;
558   PetscErrorCode ierr;
559 
560   PetscFunctionBegin;
561   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
562   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
563   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
564   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
565   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
566   if (flg) {
567     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
568   } else if (!((PetscObject) part)->type_name) {
569     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
570   }
571   if (part->ops->setfromoptions) {
572     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
573   }
574   /* process any options handlers added with PetscObjectAddOptionsHandler() */
575   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
576   ierr = PetscOptionsEnd();CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 /*@C
581   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
582 
583   Collective on PetscPartitioner
584 
585   Input Parameter:
586 . part - the PetscPartitioner object to setup
587 
588   Level: developer
589 
590 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
591 @*/
592 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
593 {
594   PetscErrorCode ierr;
595 
596   PetscFunctionBegin;
597   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
598   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
599   PetscFunctionReturn(0);
600 }
601 
602 /*@
603   PetscPartitionerDestroy - Destroys a PetscPartitioner object
604 
605   Collective on PetscPartitioner
606 
607   Input Parameter:
608 . part - the PetscPartitioner object to destroy
609 
610   Level: developer
611 
612 .seealso: PetscPartitionerView()
613 @*/
614 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
615 {
616   PetscErrorCode ierr;
617 
618   PetscFunctionBegin;
619   if (!*part) PetscFunctionReturn(0);
620   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
621 
622   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
623   ((PetscObject) (*part))->refct = 0;
624 
625   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
626   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
627   PetscFunctionReturn(0);
628 }
629 
630 /*@
631   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
632 
633   Collective on MPI_Comm
634 
635   Input Parameter:
636 . comm - The communicator for the PetscPartitioner object
637 
638   Output Parameter:
639 . part - The PetscPartitioner object
640 
641   Level: beginner
642 
643 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
644 @*/
645 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
646 {
647   PetscPartitioner p;
648   const char       *partitionerType = NULL;
649   PetscErrorCode   ierr;
650 
651   PetscFunctionBegin;
652   PetscValidPointer(part, 2);
653   *part = NULL;
654   ierr = DMInitializePackage();CHKERRQ(ierr);
655 
656   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
657   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
658   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
659 
660   p->edgeCut = 0;
661   p->balance = 0.0;
662 
663   *part = p;
664   PetscFunctionReturn(0);
665 }
666 
667 /*@
668   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
669 
670   Collective on DM
671 
672   Input Parameters:
673 + part    - The PetscPartitioner
674 - dm      - The mesh DM
675 
676   Output Parameters:
677 + partSection     - The PetscSection giving the division of points by partition
678 - partition       - The list of points by partition
679 
680   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
681 
682   Level: developer
683 
684 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
685 @*/
686 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
687 {
688   PetscMPIInt    size;
689   PetscErrorCode ierr;
690 
691   PetscFunctionBegin;
692   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
693   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
694   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
695   PetscValidPointer(partition, 5);
696   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
697   if (size == 1) {
698     PetscInt *points;
699     PetscInt  cStart, cEnd, c;
700 
701     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
702     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
703     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
704     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
705     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
706     for (c = cStart; c < cEnd; ++c) points[c] = c;
707     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
708     PetscFunctionReturn(0);
709   }
710   if (part->height == 0) {
711     PetscInt numVertices;
712     PetscInt *start     = NULL;
713     PetscInt *adjacency = NULL;
714     IS       globalNumbering;
715 
716     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
717     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
718     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
719     ierr = PetscFree(start);CHKERRQ(ierr);
720     ierr = PetscFree(adjacency);CHKERRQ(ierr);
721     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
722       const PetscInt *globalNum;
723       const PetscInt *partIdx;
724       PetscInt *map, cStart, cEnd;
725       PetscInt *adjusted, i, localSize, offset;
726       IS    newPartition;
727 
728       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
729       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
730       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
731       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
732       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
733       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
734       for (i = cStart, offset = 0; i < cEnd; i++) {
735         if (globalNum[i - cStart] >= 0) map[offset++] = i;
736       }
737       for (i = 0; i < localSize; i++) {
738         adjusted[i] = map[partIdx[i]];
739       }
740       ierr = PetscFree(map);CHKERRQ(ierr);
741       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
742       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
743       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
744       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
745       ierr = ISDestroy(partition);CHKERRQ(ierr);
746       *partition = newPartition;
747     }
748   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
749   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
750   PetscFunctionReturn(0);
751 }
752 
753 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
754 {
755   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
756   PetscErrorCode          ierr;
757 
758   PetscFunctionBegin;
759   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
760   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
761   ierr = PetscFree(p);CHKERRQ(ierr);
762   PetscFunctionReturn(0);
763 }
764 
765 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
766 {
767   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
768   PetscErrorCode          ierr;
769 
770   PetscFunctionBegin;
771   if (p->random) {
772     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
773     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
774     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
775   }
776   PetscFunctionReturn(0);
777 }
778 
779 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
780 {
781   PetscBool      iascii;
782   PetscErrorCode ierr;
783 
784   PetscFunctionBegin;
785   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
786   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
787   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
788   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
789   PetscFunctionReturn(0);
790 }
791 
792 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
793 {
794   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
795   PetscErrorCode          ierr;
796 
797   PetscFunctionBegin;
798   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
799   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
800   ierr = PetscOptionsTail();CHKERRQ(ierr);
801   PetscFunctionReturn(0);
802 }
803 
804 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
805 {
806   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
807   PetscInt                np;
808   PetscErrorCode          ierr;
809 
810   PetscFunctionBegin;
811   if (p->random) {
812     PetscRandom r;
813     PetscInt   *sizes, *points, v, p;
814     PetscMPIInt rank;
815 
816     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
817     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
818     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
819     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
820     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
821     for (v = 0; v < numVertices; ++v) {points[v] = v;}
822     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
823     for (v = numVertices-1; v > 0; --v) {
824       PetscReal val;
825       PetscInt  w, tmp;
826 
827       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
828       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
829       w    = PetscFloorReal(val);
830       tmp       = points[v];
831       points[v] = points[w];
832       points[w] = tmp;
833     }
834     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
835     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
836     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
837   }
838   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
839   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
840   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
841   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
842   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
843   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
844   *partition = p->partition;
845   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
846   PetscFunctionReturn(0);
847 }
848 
849 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
850 {
851   PetscFunctionBegin;
852   part->ops->view           = PetscPartitionerView_Shell;
853   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
854   part->ops->destroy        = PetscPartitionerDestroy_Shell;
855   part->ops->partition      = PetscPartitionerPartition_Shell;
856   PetscFunctionReturn(0);
857 }
858 
859 /*MC
860   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
861 
862   Level: intermediate
863 
864 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
865 M*/
866 
867 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
868 {
869   PetscPartitioner_Shell *p;
870   PetscErrorCode          ierr;
871 
872   PetscFunctionBegin;
873   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
874   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
875   part->data = p;
876 
877   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
878   p->random = PETSC_FALSE;
879   PetscFunctionReturn(0);
880 }
881 
882 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
884 
885   Collective on Part
886 
887   Input Parameters:
+ part   - The PetscPartitioner
. size   - The number of partitions
. sizes  - array of length size (or NULL) providing the number of points in each partition
- points - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
892 
893   Level: developer
894 
895   Notes:
896 
897     It is safe to free the sizes and points arrays after use in this routine.
898 
899 .seealso DMPlexDistribute(), PetscPartitionerCreate()
900 @*/
901 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
902 {
903   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
904   PetscInt                proc, numPoints;
905   PetscErrorCode          ierr;
906 
907   PetscFunctionBegin;
908   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
909   if (sizes)  {PetscValidPointer(sizes, 3);}
910   if (points) {PetscValidPointer(points, 4);}
911   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
912   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
913   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
914   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
915   if (sizes) {
916     for (proc = 0; proc < size; ++proc) {
917       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
918     }
919   }
920   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
921   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
922   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
923   PetscFunctionReturn(0);
924 }
925 
926 /*@
927   PetscPartitionerShellSetRandom - Set the flag to use a random partition
928 
929   Collective on Part
930 
931   Input Parameters:
932 + part   - The PetscPartitioner
933 - random - The flag to use a random partition
934 
935   Level: intermediate
936 
937 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
938 @*/
939 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
940 {
941   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
942 
943   PetscFunctionBegin;
944   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
945   p->random = random;
946   PetscFunctionReturn(0);
947 }
948 
949 /*@
950   PetscPartitionerShellGetRandom - get the flag to use a random partition
951 
952   Collective on Part
953 
954   Input Parameter:
955 . part   - The PetscPartitioner
956 
  Output Parameter:
958 . random - The flag to use a random partition
959 
960   Level: intermediate
961 
962 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
963 @*/
964 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
965 {
966   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
967 
968   PetscFunctionBegin;
969   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
970   PetscValidPointer(random, 2);
971   *random = p->random;
972   PetscFunctionReturn(0);
973 }
974 
975 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
976 {
977   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
978   PetscErrorCode          ierr;
979 
980   PetscFunctionBegin;
981   ierr = PetscFree(p);CHKERRQ(ierr);
982   PetscFunctionReturn(0);
983 }
984 
985 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
986 {
987   PetscFunctionBegin;
988   PetscFunctionReturn(0);
989 }
990 
991 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
992 {
993   PetscBool      iascii;
994   PetscErrorCode ierr;
995 
996   PetscFunctionBegin;
997   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
998   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
999   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1000   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
1001   PetscFunctionReturn(0);
1002 }
1003 
/* Partition by slicing the global vertex numbering into nparts contiguous, nearly equal
   blocks.  Each rank owns a contiguous range [myFirst, myLast] of the global numbering
   and records, for every target part overlapping that range, how many of its local
   vertices fall in that part.  The returned IS is the identity on local vertices. */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The partition IS is always the identity; partSection carries the sizes */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial case: part np gets numVertices/nparts vertices, plus one if np < remainder */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Prefix-sum the local vertex counts to get each rank's global offset */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      /* Parts come in two sizes: the first rem parts hold pBig vertices, the rest pSmall */
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;
      PetscInt pSmall = nvGlobal/nparts;
      PetscInt pBig = nvGlobal/nparts + 1;


      if (rem) {
        /* Locate the first/last part overlapping this rank's global range, accounting
           for the boundary between the "big" parts and the "small" parts */
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* nvGlobal divides evenly: all parts have the same size */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* For each overlapping part, the dof is the size of the intersection of the
         part's global range with this rank's [myFirst, myLast] range */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1077 
1078 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1079 {
1080   PetscFunctionBegin;
1081   part->ops->view      = PetscPartitionerView_Simple;
1082   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1083   part->ops->partition = PetscPartitionerPartition_Simple;
1084   PetscFunctionReturn(0);
1085 }
1086 
1087 /*MC
1088   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1089 
1090   Level: intermediate
1091 
1092 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1093 M*/
1094 
1095 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1096 {
1097   PetscPartitioner_Simple *p;
1098   PetscErrorCode           ierr;
1099 
1100   PetscFunctionBegin;
1101   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1102   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1103   part->data = p;
1104 
1105   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1106   PetscFunctionReturn(0);
1107 }
1108 
1109 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1110 {
1111   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1112   PetscErrorCode          ierr;
1113 
1114   PetscFunctionBegin;
1115   ierr = PetscFree(p);CHKERRQ(ierr);
1116   PetscFunctionReturn(0);
1117 }
1118 
1119 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1120 {
1121   PetscFunctionBegin;
1122   PetscFunctionReturn(0);
1123 }
1124 
1125 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1126 {
1127   PetscBool      iascii;
1128   PetscErrorCode ierr;
1129 
1130   PetscFunctionBegin;
1131   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1132   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1133   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1134   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1135   PetscFunctionReturn(0);
1136 }
1137 
1138 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1139 {
1140   PetscInt       np;
1141   PetscErrorCode ierr;
1142 
1143   PetscFunctionBegin;
1144   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1145   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1146   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1147   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1148   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1149   PetscFunctionReturn(0);
1150 }
1151 
1152 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1153 {
1154   PetscFunctionBegin;
1155   part->ops->view      = PetscPartitionerView_Gather;
1156   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1157   part->ops->partition = PetscPartitionerPartition_Gather;
1158   PetscFunctionReturn(0);
1159 }
1160 
1161 /*MC
1162   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1163 
1164   Level: intermediate
1165 
1166 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1167 M*/
1168 
1169 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1170 {
1171   PetscPartitioner_Gather *p;
1172   PetscErrorCode           ierr;
1173 
1174   PetscFunctionBegin;
1175   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1176   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1177   part->data = p;
1178 
1179   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1180   PetscFunctionReturn(0);
1181 }
1182 
1183 
1184 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1185 {
1186   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1187   PetscErrorCode          ierr;
1188 
1189   PetscFunctionBegin;
1190   ierr = PetscFree(p);CHKERRQ(ierr);
1191   PetscFunctionReturn(0);
1192 }
1193 
1194 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1195 {
1196   PetscFunctionBegin;
1197   PetscFunctionReturn(0);
1198 }
1199 
1200 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1201 {
1202   PetscBool      iascii;
1203   PetscErrorCode ierr;
1204 
1205   PetscFunctionBegin;
1206   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1207   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1208   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1209   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1210   PetscFunctionReturn(0);
1211 }
1212 
1213 #if defined(PETSC_HAVE_CHACO)
1214 #if defined(PETSC_HAVE_UNISTD_H)
1215 #include <unistd.h>
1216 #endif
1217 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1218 #include <chaco.h>
1219 #else
1220 /* Older versions of Chaco do not have an include file */
1221 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1222                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1223                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1224                        int mesh_dims[3], double *goal, int global_method, int local_method,
1225                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1226 #endif
1227 extern int FREE_GRAPH;
1228 #endif
1229 
/* Partition the serial graph (start/adjacency in CSR form) with the Chaco library.
   Chaco only runs on one rank, writes its diagnostics to stdout (captured through a
   pipe below), and expects 1-based vertex numbering, which is applied and undone in
   place on the caller's adjacency array. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Sanity check (debug builds only): Chaco is serial, so at most one rank may hold vertices */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* Nothing local to partition: return an empty section and IS */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbers; shift in place and undo before returning */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Restore stdout and capture whatever Chaco printed so it can be reported on error */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the vertices of each part contiguously, in part order */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Undo the 1-based shift applied above */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1350 
1351 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1352 {
1353   PetscFunctionBegin;
1354   part->ops->view      = PetscPartitionerView_Chaco;
1355   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1356   part->ops->partition = PetscPartitionerPartition_Chaco;
1357   PetscFunctionReturn(0);
1358 }
1359 
1360 /*MC
1361   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1362 
1363   Level: intermediate
1364 
1365 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1366 M*/
1367 
1368 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1369 {
1370   PetscPartitioner_Chaco *p;
1371   PetscErrorCode          ierr;
1372 
1373   PetscFunctionBegin;
1374   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1375   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1376   part->data = p;
1377 
1378   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1379   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1380   PetscFunctionReturn(0);
1381 }
1382 
1383 static const char *ptypes[] = {"kway", "rb"};
1384 
1385 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1386 {
1387   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1388   PetscErrorCode             ierr;
1389 
1390   PetscFunctionBegin;
1391   ierr = PetscFree(p);CHKERRQ(ierr);
1392   PetscFunctionReturn(0);
1393 }
1394 
1395 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1396 {
1397   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1398   PetscErrorCode             ierr;
1399 
1400   PetscFunctionBegin;
1401   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1402   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1403   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1404   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1405   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1406   PetscFunctionReturn(0);
1407 }
1408 
1409 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1410 {
1411   PetscBool      iascii;
1412   PetscErrorCode ierr;
1413 
1414   PetscFunctionBegin;
1415   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1416   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1417   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1418   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1419   PetscFunctionReturn(0);
1420 }
1421 
1422 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1423 {
1424   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1425   PetscErrorCode            ierr;
1426 
1427   PetscFunctionBegin;
1428   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1429   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1430   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1431   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1432   ierr = PetscOptionsTail();CHKERRQ(ierr);
1433   PetscFunctionReturn(0);
1434 }
1435 
1436 #if defined(PETSC_HAVE_PARMETIS)
1437 #include <parmetis.h>
1438 #endif
1439 
1440 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1441 {
1442 #if defined(PETSC_HAVE_PARMETIS)
1443   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1444   MPI_Comm       comm;
1445   PetscSection   section;
1446   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1447   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1448   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1449   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1450   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1451   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1452   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1453   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1454   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1455   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1456   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1457   real_t        *ubvec;                     /* The balance intolerance for vertex weights */
1458   PetscInt       options[64];               /* Options */
1459   /* Outputs */
1460   PetscInt       v, i, *assignment, *points;
1461   PetscMPIInt    size, rank, p;
1462   PetscErrorCode ierr;
1463 
1464   PetscFunctionBegin;
1465   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1466   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1467   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1468   /* Calculate vertex distribution */
1469   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1470   vtxdist[0] = 0;
1471   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1472   for (p = 2; p <= size; ++p) {
1473     vtxdist[p] += vtxdist[p-1];
1474   }
1475   /* Calculate partition weights */
1476   for (p = 0; p < nparts; ++p) {
1477     tpwgts[p] = 1.0/nparts;
1478   }
1479   ubvec[0] = pm->imbalanceRatio;
1480   /* Weight cells by dofs on cell by default */
1481   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1482   if (section) {
1483     PetscInt cStart, cEnd, dof;
1484 
1485     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1486     for (v = cStart; v < cEnd; ++v) {
1487       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1488       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1489       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1490       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1491     }
1492   } else {
1493     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1494   }
1495   wgtflag |= 2; /* have weights on graph vertices */
1496 
1497   if (nparts == 1) {
1498     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1499   } else {
1500     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1501     if (vtxdist[p+1] == vtxdist[size]) {
1502       if (rank == p) {
1503         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1504         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1505         if (metis_ptype == 1) {
1506           PetscStackPush("METIS_PartGraphRecursive");
1507           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1508           PetscStackPop;
1509           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1510         } else {
1511           /*
1512            It would be nice to activate the two options below, but they would need some actual testing.
1513            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1514            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1515           */
1516           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1517           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1518           PetscStackPush("METIS_PartGraphKway");
1519           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1520           PetscStackPop;
1521           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1522         }
1523       }
1524     } else {
1525       options[0] = 1;
1526       options[1] = pm->debugFlag;
1527       PetscStackPush("ParMETIS_V3_PartKway");
1528       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1529       PetscStackPop;
1530       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1531     }
1532   }
1533   /* Convert to PetscSection+IS */
1534   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1535   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1536   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1537   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1538   for (p = 0, i = 0; p < nparts; ++p) {
1539     for (v = 0; v < nvtxs; ++v) {
1540       if (assignment[v] == p) points[i++] = v;
1541     }
1542   }
1543   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1544   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1545   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1546   PetscFunctionReturn(0);
1547 #else
1548   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1549 #endif
1550 }
1551 
1552 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1553 {
1554   PetscFunctionBegin;
1555   part->ops->view           = PetscPartitionerView_ParMetis;
1556   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1557   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1558   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1559   PetscFunctionReturn(0);
1560 }
1561 
1562 /*MC
1563   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1564 
1565   Level: intermediate
1566 
1567 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1568 M*/
1569 
1570 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1571 {
1572   PetscPartitioner_ParMetis *p;
1573   PetscErrorCode          ierr;
1574 
1575   PetscFunctionBegin;
1576   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1577   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1578   part->data = p;
1579 
1580   p->ptype          = 0;
1581   p->imbalanceRatio = 1.05;
1582   p->debugFlag      = 0;
1583 
1584   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1585   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1586   PetscFunctionReturn(0);
1587 }
1588 
1589 
1590 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1591 const char PTScotchPartitionerCitation[] =
1592   "@article{PTSCOTCH,\n"
1593   "  author  = {C. Chevalier and F. Pellegrini},\n"
1594   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1595   "  journal = {Parallel Computing},\n"
1596   "  volume  = {34},\n"
1597   "  number  = {6},\n"
1598   "  pages   = {318--331},\n"
1599   "  year    = {2008},\n"
1600   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1601   "}\n";
1602 
/* Private data for the PT-Scotch partitioner */
typedef struct {
  PetscInt  strategy;  /* index into PTScotchStrategyList; translated by PTScotch_Strategy() */
  PetscReal imbalance; /* permitted load imbalance ratio passed to SCOTCH strategy construction */
} PetscPartitioner_PTScotch;
1607 
/* Human-readable names for the supported PT-Scotch strategies; the array order
   must match the index-to-flag mapping in PTScotch_Strategy() below. */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1619 
#if defined(PETSC_HAVE_PTSCOTCH)

EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END

/* Convert a nonzero PT-Scotch return code into a PETSc library error */
#define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1627 
1628 static int PTScotch_Strategy(PetscInt strategy)
1629 {
1630   switch (strategy) {
1631   case  0: return SCOTCH_STRATDEFAULT;
1632   case  1: return SCOTCH_STRATQUALITY;
1633   case  2: return SCOTCH_STRATSPEED;
1634   case  3: return SCOTCH_STRATBALANCE;
1635   case  4: return SCOTCH_STRATSAFETY;
1636   case  5: return SCOTCH_STRATSCALABILITY;
1637   case  6: return SCOTCH_STRATRECURSIVE;
1638   case  7: return SCOTCH_STRATREMAP;
1639   default: return SCOTCH_STRATDEFAULT;
1640   }
1641 }
1642 
1643 
1644 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1645                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1646 {
1647   SCOTCH_Graph   grafdat;
1648   SCOTCH_Strat   stradat;
1649   SCOTCH_Num     vertnbr = n;
1650   SCOTCH_Num     edgenbr = xadj[n];
1651   SCOTCH_Num*    velotab = vtxwgt;
1652   SCOTCH_Num*    edlotab = adjwgt;
1653   SCOTCH_Num     flagval = strategy;
1654   double         kbalval = imbalance;
1655   PetscErrorCode ierr;
1656 
1657   PetscFunctionBegin;
1658   {
1659     PetscBool flg = PETSC_TRUE;
1660     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1661     if (!flg) velotab = NULL;
1662   }
1663   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1664   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1665   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1666   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1667 #if defined(PETSC_USE_DEBUG)
1668   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1669 #endif
1670   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1671   SCOTCH_stratExit(&stradat);
1672   SCOTCH_graphExit(&grafdat);
1673   PetscFunctionReturn(0);
1674 }
1675 
1676 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1677                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1678 {
1679   PetscMPIInt     procglbnbr;
1680   PetscMPIInt     proclocnum;
1681   SCOTCH_Arch     archdat;
1682   SCOTCH_Dgraph   grafdat;
1683   SCOTCH_Dmapping mappdat;
1684   SCOTCH_Strat    stradat;
1685   SCOTCH_Num      vertlocnbr;
1686   SCOTCH_Num      edgelocnbr;
1687   SCOTCH_Num*     veloloctab = vtxwgt;
1688   SCOTCH_Num*     edloloctab = adjwgt;
1689   SCOTCH_Num      flagval = strategy;
1690   double          kbalval = imbalance;
1691   PetscErrorCode  ierr;
1692 
1693   PetscFunctionBegin;
1694   {
1695     PetscBool flg = PETSC_TRUE;
1696     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1697     if (!flg) veloloctab = NULL;
1698   }
1699   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1700   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1701   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1702   edgelocnbr = xadj[vertlocnbr];
1703 
1704   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1705   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1706 #if defined(PETSC_USE_DEBUG)
1707   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1708 #endif
1709   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1710   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1711   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1712   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1713   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1714   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1715   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1716   SCOTCH_archExit(&archdat);
1717   SCOTCH_stratExit(&stradat);
1718   SCOTCH_dgraphExit(&grafdat);
1719   PetscFunctionReturn(0);
1720 }
1721 
1722 #endif /* PETSC_HAVE_PTSCOTCH */
1723 
1724 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1725 {
1726   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1727   PetscErrorCode             ierr;
1728 
1729   PetscFunctionBegin;
1730   ierr = PetscFree(p);CHKERRQ(ierr);
1731   PetscFunctionReturn(0);
1732 }
1733 
1734 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1735 {
1736   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1737   PetscErrorCode            ierr;
1738 
1739   PetscFunctionBegin;
1740   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1741   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1742   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1743   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1744   PetscFunctionReturn(0);
1745 }
1746 
1747 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1748 {
1749   PetscBool      iascii;
1750   PetscErrorCode ierr;
1751 
1752   PetscFunctionBegin;
1753   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1754   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1755   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1756   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1757   PetscFunctionReturn(0);
1758 }
1759 
1760 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1761 {
1762   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1763   const char *const         *slist = PTScotchStrategyList;
1764   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1765   PetscBool                 flag;
1766   PetscErrorCode            ierr;
1767 
1768   PetscFunctionBegin;
1769   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1770   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1771   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1772   ierr = PetscOptionsTail();CHKERRQ(ierr);
1773   PetscFunctionReturn(0);
1774 }
1775 
1776 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1777 {
1778 #if defined(PETSC_HAVE_PTSCOTCH)
1779   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1780   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1781   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1782   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1783   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1784   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1785   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1786   PetscInt       v, i, *assignment, *points;
1787   PetscMPIInt    size, rank, p;
1788   PetscErrorCode ierr;
1789 
1790   PetscFunctionBegin;
1791   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1792   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1793   ierr = PetscMalloc2(nparts+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1794 
1795   /* Calculate vertex distribution */
1796   vtxdist[0] = 0;
1797   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1798   for (p = 2; p <= size; ++p) {
1799     vtxdist[p] += vtxdist[p-1];
1800   }
1801 
1802   if (nparts == 1) {
1803     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1804   } else {
1805     PetscSection section;
1806     /* Weight cells by dofs on cell by default */
1807     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1808     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1809     if (section) {
1810       PetscInt vStart, vEnd, dof;
1811       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1812       for (v = vStart; v < vEnd; ++v) {
1813         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1814         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1815         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1816         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1817       }
1818     } else {
1819       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1820     }
1821     {
1822       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1823       int                       strat = PTScotch_Strategy(pts->strategy);
1824       double                    imbal = (double)pts->imbalance;
1825 
1826       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1827       if (vtxdist[p+1] == vtxdist[size]) {
1828         if (rank == p) {
1829           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1830         }
1831       } else {
1832         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1833       }
1834     }
1835     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1836   }
1837 
1838   /* Convert to PetscSection+IS */
1839   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1840   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1841   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1842   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1843   for (p = 0, i = 0; p < nparts; ++p) {
1844     for (v = 0; v < nvtxs; ++v) {
1845       if (assignment[v] == p) points[i++] = v;
1846     }
1847   }
1848   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1849   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1850 
1851   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1852   PetscFunctionReturn(0);
1853 #else
1854   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1855 #endif
1856 }
1857 
1858 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1859 {
1860   PetscFunctionBegin;
1861   part->ops->view           = PetscPartitionerView_PTScotch;
1862   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1863   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1864   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1865   PetscFunctionReturn(0);
1866 }
1867 
1868 /*MC
1869   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1870 
1871   Level: intermediate
1872 
1873 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1874 M*/
1875 
1876 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1877 {
1878   PetscPartitioner_PTScotch *p;
1879   PetscErrorCode          ierr;
1880 
1881   PetscFunctionBegin;
1882   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1883   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1884   part->data = p;
1885 
1886   p->strategy  = 0;
1887   p->imbalance = 0.01;
1888 
1889   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1890   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1891   PetscFunctionReturn(0);
1892 }
1893 
1894 
1895 /*@
1896   DMPlexGetPartitioner - Get the mesh partitioner
1897 
1898   Not collective
1899 
1900   Input Parameter:
1901 . dm - The DM
1902 
1903   Output Parameter:
1904 . part - The PetscPartitioner
1905 
1906   Level: developer
1907 
1908   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1909 
.seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1911 @*/
1912 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1913 {
1914   DM_Plex       *mesh = (DM_Plex *) dm->data;
1915 
1916   PetscFunctionBegin;
1917   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1918   PetscValidPointer(part, 2);
1919   *part = mesh->partitioner;
1920   PetscFunctionReturn(0);
1921 }
1922 
1923 /*@
1924   DMPlexSetPartitioner - Set the mesh partitioner
1925 
1926   logically collective on dm and part
1927 
1928   Input Parameters:
1929 + dm - The DM
1930 - part - The partitioner
1931 
1932   Level: developer
1933 
1934   Note: Any existing PetscPartitioner will be destroyed.
1935 
.seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1937 @*/
1938 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1939 {
1940   DM_Plex       *mesh = (DM_Plex *) dm->data;
1941   PetscErrorCode ierr;
1942 
1943   PetscFunctionBegin;
1944   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1945   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1946   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1947   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1948   mesh->partitioner = part;
1949   PetscFunctionReturn(0);
1950 }
1951 
/*
  DMPlexAddClosure_Tree - Add tree-related closure points of 'point' to the hash set ht.

  When 'up' is set, the transitive closure of the point's tree parent is added and each
  of those points is recursively expanded upward only; when 'down' is set, the point's
  tree children are added and recursively expanded downward only.  Restricting each
  recursive call to a single direction guarantees termination.
*/
static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) { /* parent == point signals that the point has no tree parent */
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure entries are (point, orientation) pairs */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* continue upward only */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* continue downward only */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1992 
1993 /*@
1994   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1995 
1996   Input Parameters:
1997 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1999 
2000   Level: developer
2001 
2002 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2003 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  DM_Plex        *mesh    = (DM_Plex *)dm->data;
  /* Tree (non-conforming mesh) closures are only needed when the mesh has tree structure */
  PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* For each rank stratum in the label, replace the stratum by the union of the
     transitive closures of its points (plus tree-related points when applicable) */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);

  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];
    PetscHSetI     ht;
    PetscInt       nelems, *elems, off = 0;

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    /* Collect closure points into a hash set to deduplicate them */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr); /* heuristic: ~16 closure points per point */
    for (p = 0; p < numPoints; ++p) {
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) { /* closure entries are (point, orientation) pairs */
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    /* Flatten the hash set into a sorted IS and install it as the new stratum */
    ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
    ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* NOTE(review): restore is called with point 0; presumably the point argument is
     ignored when returning the scratch closure array - confirm against DMPlex docs */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2053 
2054 /*@
2055   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2056 
2057   Input Parameters:
2058 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2060 
2061   Level: developer
2062 
2063 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2064 @*/
2065 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2066 {
2067   IS              rankIS,   pointIS;
2068   const PetscInt *ranks,   *points;
2069   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2070   PetscInt       *adj = NULL;
2071   PetscErrorCode  ierr;
2072 
2073   PetscFunctionBegin;
2074   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2075   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2076   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2077   for (r = 0; r < numRanks; ++r) {
2078     const PetscInt rank = ranks[r];
2079 
2080     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2081     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2082     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2083     for (p = 0; p < numPoints; ++p) {
2084       adjSize = PETSC_DETERMINE;
2085       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2086       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2087     }
2088     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2089     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2090   }
2091   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2092   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2093   ierr = PetscFree(adj);CHKERRQ(ierr);
2094   PetscFunctionReturn(0);
2095 }
2096 
2097 /*@
2098   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2099 
2100   Input Parameters:
2101 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2103 
2104   Level: developer
2105 
2106   Note: This is required when generating multi-level overlaps to capture
2107   overlap points from non-neighbouring partitions.
2108 
2109 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2110 @*/
PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
{
  MPI_Comm        comm;
  PetscMPIInt     rank;
  PetscSF         sfPoint;
  DMLabel         lblRoots, lblLeaves;
  IS              rankIS, pointIS;
  const PetscInt *ranks;
  PetscInt        numRanks, r;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Two-phase exchange over the point SF: first gather leaf contributions to roots,
     then distribute root contributions back out to leaves */
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  /* Pull point contributions from remote leaves into local roots */
  ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    if (remoteRank == rank) continue; /* our own contributions are already in 'label' */
    ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
  /* Push point contributions from roots into remote leaves */
  ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
  for (r = 0; r < numRanks; ++r) {
    const PetscInt remoteRank = ranks[r];
    if (remoteRank == rank) continue; /* skip our own rank, as above */
    ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2158 
2159 /*@
2160   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2161 
2162   Input Parameters:
+ dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
- processSF - A star forest mapping into the local index on each remote rank

  Output Parameter:
. leafLabel - DMLabel assigning ranks to remote roots
2169 
2170   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2171   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2172 
2173   Level: developer
2174 
2175 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2176 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners */
  /* rootSection is chartered over ranks: dof(r) = number of points destined for rank r */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its true (remote) owner;
         otherwise this rank owns it */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap */
  /* Each (point, rank) pair is sent over processSF as an MPIU_2INT */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2246 
2247 /*@
2248   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2249 
2250   Input Parameters:
2251 + dm    - The DM
2252 - label - DMLabel assigning ranks to remote roots
2253 
2254   Output Parameter:
2255 . sf    - The star forest communication context encapsulating the defined mapping
2256 
2257   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2258 
2259   Level: developer
2260 
2261 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2262 @*/
2263 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2264 {
2265   PetscMPIInt     rank, size;
2266   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2267   PetscSFNode    *remotePoints;
2268   IS              remoteRootIS;
2269   const PetscInt *remoteRoots;
2270   PetscErrorCode ierr;
2271 
2272   PetscFunctionBegin;
2273   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2274   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2275 
2276   for (numRemote = 0, n = 0; n < size; ++n) {
2277     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2278     numRemote += numPoints;
2279   }
2280   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2281   /* Put owned points first */
2282   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2283   if (numPoints > 0) {
2284     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2285     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2286     for (p = 0; p < numPoints; p++) {
2287       remotePoints[idx].index = remoteRoots[p];
2288       remotePoints[idx].rank = rank;
2289       idx++;
2290     }
2291     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2292     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2293   }
2294   /* Now add remote points */
2295   for (n = 0; n < size; ++n) {
2296     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2297     if (numPoints <= 0 || n == rank) continue;
2298     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2299     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2300     for (p = 0; p < numPoints; p++) {
2301       remotePoints[idx].index = remoteRoots[p];
2302       remotePoints[idx].rank = n;
2303       idx++;
2304     }
2305     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2306     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2307   }
2308   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2309   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2310   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2311   PetscFunctionReturn(0);
2312 }
2313