xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 72379da4d1fcc050cd4fc0cc70d7d63296789101)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
/* Class id identifying PetscPartitioner objects (registered during package initialization elsewhere) */
PetscClassId PETSCPARTITIONER_CLASSID = 0;

/* Global registry mapping partitioner type names to their creation routines */
PetscFunctionList PetscPartitionerList              = NULL;
/* Guards PetscPartitionerRegisterAll() so the built-in types are registered only once */
PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
21 PetscBool ParMetisPartitionercite = PETSC_FALSE;
22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
23                                "  author  = {George Karypis and Vipin Kumar},\n"
24                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
25                                "  journal = {Journal of Parallel and Distributed Computing},\n"
26                                "  volume  = {48},\n"
27                                "  pages   = {71--85},\n"
28                                "  year    = {1998}\n}\n";
29 
30 /*@C
31   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
32 
33   Input Parameters:
34 + dm      - The mesh DM dm
35 - height  - Height of the strata from which to construct the graph
36 
37   Output Parameter:
38 + numVertices     - Number of vertices in the graph
39 . offsets         - Point offsets in the graph
40 . adjacency       - Point connectivity in the graph
41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
42 
43   The user can control the definition of adjacency for the mesh using DMPlexGetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
79   if (globalNumbering) {
80     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
81     *globalNumbering = cellNumbering;
82   }
83   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
84   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
85     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
86     if (nroots > 0) {if (cellNum[p] < 0) continue;}
87     adjSize = PETSC_DETERMINE;
88     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
89     for (a = 0; a < adjSize; ++a) {
90       const PetscInt point = adj[a];
91       if (point != p && pStart <= point && point < pEnd) {
92         PetscInt *PETSC_RESTRICT pBuf;
93         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
94         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
95         *pBuf = point;
96       }
97     }
98     (*numVertices)++;
99   }
100   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
101   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
102   /* Derive CSR graph from section/segbuffer */
103   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
104   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
105   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
106   for (idx = 0, p = pStart; p < pEnd; p++) {
107     if (nroots > 0) {if (cellNum[p] < 0) continue;}
108     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
109   }
110   vOffsets[*numVertices] = size;
111   if (offsets) *offsets = vOffsets;
112   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
113   {
114     ISLocalToGlobalMapping ltogCells;
115     PetscInt n, size, *cells_arr;
116     /* In parallel, apply a global cell numbering to the graph */
117     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
121     /* Convert to positive global cell numbers */
122     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
123     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
126     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
127   }
128   if (adjacency) *adjacency = graph;
129   /* Clean up */
130   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
131   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
132   ierr = PetscFree(adj);CHKERRQ(ierr);
133   PetscFunctionReturn(0);
134 }
135 
136 /*@C
137   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
138 
139   Collective
140 
141   Input Arguments:
142 + dm - The DMPlex
143 - cellHeight - The height of mesh points to treat as cells (default should be 0)
144 
145   Output Arguments:
146 + numVertices - The number of local vertices in the graph, or cells in the mesh.
147 . offsets     - The offset to the adjacency list for each cell
148 - adjacency   - The adjacency list for all cells
149 
150   Note: This is suitable for input to a mesh partitioner like ParMetis.
151 
152   Level: advanced
153 
154 .seealso: DMPlexCreate()
155 @*/
156 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
157 {
158   const PetscInt maxFaceCases = 30;
159   PetscInt       numFaceCases = 0;
160   PetscInt       numFaceVertices[30]; /* maxFaceCases, C89 sucks sucks sucks */
161   PetscInt      *off, *adj;
162   PetscInt      *neighborCells = NULL;
163   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
164   PetscErrorCode ierr;
165 
166   PetscFunctionBegin;
167   /* For parallel partitioning, I think you have to communicate supports */
168   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
169   cellDim = dim - cellHeight;
170   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
171   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
172   if (cEnd - cStart == 0) {
173     if (numVertices) *numVertices = 0;
174     if (offsets)   *offsets   = NULL;
175     if (adjacency) *adjacency = NULL;
176     PetscFunctionReturn(0);
177   }
178   numCells  = cEnd - cStart;
179   faceDepth = depth - cellHeight;
180   if (dim == depth) {
181     PetscInt f, fStart, fEnd;
182 
183     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
184     /* Count neighboring cells */
185     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
186     for (f = fStart; f < fEnd; ++f) {
187       const PetscInt *support;
188       PetscInt        supportSize;
189       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
190       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
191       if (supportSize == 2) {
192         PetscInt numChildren;
193 
194         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
195         if (!numChildren) {
196           ++off[support[0]-cStart+1];
197           ++off[support[1]-cStart+1];
198         }
199       }
200     }
201     /* Prefix sum */
202     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
203     if (adjacency) {
204       PetscInt *tmp;
205 
206       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
207       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
208       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
209       /* Get neighboring cells */
210       for (f = fStart; f < fEnd; ++f) {
211         const PetscInt *support;
212         PetscInt        supportSize;
213         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
214         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
215         if (supportSize == 2) {
216           PetscInt numChildren;
217 
218           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
219           if (!numChildren) {
220             adj[tmp[support[0]-cStart]++] = support[1];
221             adj[tmp[support[1]-cStart]++] = support[0];
222           }
223         }
224       }
225 #if defined(PETSC_USE_DEBUG)
226       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c], c+cStart);
227 #endif
228       ierr = PetscFree(tmp);CHKERRQ(ierr);
229     }
230     if (numVertices) *numVertices = numCells;
231     if (offsets)   *offsets   = off;
232     if (adjacency) *adjacency = adj;
233     PetscFunctionReturn(0);
234   }
235   /* Setup face recognition */
236   if (faceDepth == 1) {
237     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
238 
239     for (c = cStart; c < cEnd; ++c) {
240       PetscInt corners;
241 
242       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
243       if (!cornersSeen[corners]) {
244         PetscInt nFV;
245 
246         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
247         cornersSeen[corners] = 1;
248 
249         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
250 
251         numFaceVertices[numFaceCases++] = nFV;
252       }
253     }
254   }
255   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
256   /* Count neighboring cells */
257   for (cell = cStart; cell < cEnd; ++cell) {
258     PetscInt numNeighbors = PETSC_DETERMINE, n;
259 
260     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
261     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
262     for (n = 0; n < numNeighbors; ++n) {
263       PetscInt        cellPair[2];
264       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
265       PetscInt        meetSize = 0;
266       const PetscInt *meet    = NULL;
267 
268       cellPair[0] = cell; cellPair[1] = neighborCells[n];
269       if (cellPair[0] == cellPair[1]) continue;
270       if (!found) {
271         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
272         if (meetSize) {
273           PetscInt f;
274 
275           for (f = 0; f < numFaceCases; ++f) {
276             if (numFaceVertices[f] == meetSize) {
277               found = PETSC_TRUE;
278               break;
279             }
280           }
281         }
282         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
283       }
284       if (found) ++off[cell-cStart+1];
285     }
286   }
287   /* Prefix sum */
288   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
289 
290   if (adjacency) {
291     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
292     /* Get neighboring cells */
293     for (cell = cStart; cell < cEnd; ++cell) {
294       PetscInt numNeighbors = PETSC_DETERMINE, n;
295       PetscInt cellOffset   = 0;
296 
297       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
298       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
299       for (n = 0; n < numNeighbors; ++n) {
300         PetscInt        cellPair[2];
301         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
302         PetscInt        meetSize = 0;
303         const PetscInt *meet    = NULL;
304 
305         cellPair[0] = cell; cellPair[1] = neighborCells[n];
306         if (cellPair[0] == cellPair[1]) continue;
307         if (!found) {
308           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
309           if (meetSize) {
310             PetscInt f;
311 
312             for (f = 0; f < numFaceCases; ++f) {
313               if (numFaceVertices[f] == meetSize) {
314                 found = PETSC_TRUE;
315                 break;
316               }
317             }
318           }
319           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
320         }
321         if (found) {
322           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
323           ++cellOffset;
324         }
325       }
326     }
327   }
328   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
329   if (numVertices) *numVertices = numCells;
330   if (offsets)   *offsets   = off;
331   if (adjacency) *adjacency = adj;
332   PetscFunctionReturn(0);
333 }
334 
/*@C
  PetscPartitionerRegister - Adds a new PetscPartitioner implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners

  Sample usage:
.vb
    PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
.ve

  Then, your PetscPartitioner type can be chosen with the procedural interface via
.vb
    PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
    PetscPartitionerSetType(PetscPartitioner, "my_part");
.ve
   or at runtime via the option
.vb
    -petscpartitioner_type my_part
.ve

  Level: advanced

.keywords: PetscPartitioner, register
.seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()

@*/
PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under the given name in the global type registry */
  ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
376 
377 /*@C
378   PetscPartitionerSetType - Builds a particular PetscPartitioner
379 
380   Collective on PetscPartitioner
381 
382   Input Parameters:
383 + part - The PetscPartitioner object
384 - name - The kind of partitioner
385 
386   Options Database Key:
387 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
388 
389   Level: intermediate
390 
391 .keywords: PetscPartitioner, set, type
392 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
393 @*/
394 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
395 {
396   PetscErrorCode (*r)(PetscPartitioner);
397   PetscBool      match;
398   PetscErrorCode ierr;
399 
400   PetscFunctionBegin;
401   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
402   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
403   if (match) PetscFunctionReturn(0);
404 
405   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
406   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
407   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
408 
409   if (part->ops->destroy) {
410     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
411   }
412   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
413   ierr = (*r)(part);CHKERRQ(ierr);
414   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
415   PetscFunctionReturn(0);
416 }
417 
/*@C
  PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.

  Not Collective

  Input Parameter:
. part - The PetscPartitioner

  Output Parameter:
. name - The PetscPartitioner type name

  Level: intermediate

.keywords: PetscPartitioner, get, type, name
.seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
@*/
PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* Ensure the built-in types are registered before reporting a type name */
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Returns a pointer to the object's internal type string; the caller does not own it */
  *name = ((PetscObject) part)->type_name;
  PetscFunctionReturn(0);
}
445 
446 /*@C
447   PetscPartitionerView - Views a PetscPartitioner
448 
449   Collective on PetscPartitioner
450 
451   Input Parameter:
452 + part - the PetscPartitioner object to view
453 - v    - the viewer
454 
455   Level: developer
456 
457 .seealso: PetscPartitionerDestroy()
458 @*/
459 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
460 {
461   PetscErrorCode ierr;
462 
463   PetscFunctionBegin;
464   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
465   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
466   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
467   PetscFunctionReturn(0);
468 }
469 
470 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
471 {
472   PetscFunctionBegin;
473   if (!currentType) {
474 #if defined(PETSC_HAVE_CHACO)
475     *defaultType = PETSCPARTITIONERCHACO;
476 #elif defined(PETSC_HAVE_PARMETIS)
477     *defaultType = PETSCPARTITIONERPARMETIS;
478 #elif defined(PETSC_HAVE_PTSCOTCH)
479     *defaultType = PETSCPARTITIONERPTSCOTCH;
480 #else
481     *defaultType = PETSCPARTITIONERSIMPLE;
482 #endif
483   } else {
484     *defaultType = currentType;
485   }
486   PetscFunctionReturn(0);
487 }
488 
/*@
  PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to set options for

  Level: developer

.seealso: PetscPartitionerView()
@*/
PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
{
  const char    *defaultType;
  char           name[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
  /* Offer the current type (if set) as the default in the option listing */
  ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
  /* PetscOptionsObject used below is introduced by the PetscObjectOptionsBegin macro */
  ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
  } else if (!((PetscObject) part)->type_name) {
    /* No command-line choice and no type set yet: install the default */
    ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
  }
  /* Let the concrete implementation handle its own options */
  if (part->ops->setfromoptions) {
    ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
528 
/*@C
  PetscPartitionerSetUp - Construct data structures for the PetscPartitioner

  Collective on PetscPartitioner

  Input Parameter:
. part - the PetscPartitioner object to setup

  Level: developer

.seealso: PetscPartitionerView(), PetscPartitionerDestroy()
@*/
PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  /* Delegate to the implementation; types without a setup hook need no work here */
  if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
550 
551 /*@
552   PetscPartitionerDestroy - Destroys a PetscPartitioner object
553 
554   Collective on PetscPartitioner
555 
556   Input Parameter:
557 . part - the PetscPartitioner object to destroy
558 
559   Level: developer
560 
561 .seealso: PetscPartitionerView()
562 @*/
563 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
564 {
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   if (!*part) PetscFunctionReturn(0);
569   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
570 
571   if (--((PetscObject)(*part))->refct > 0) {*part = 0; PetscFunctionReturn(0);}
572   ((PetscObject) (*part))->refct = 0;
573 
574   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
575   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
576   PetscFunctionReturn(0);
577 }
578 
/*@
  PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscPartitioner object

  Output Parameter:
. part - The PetscPartitioner object

  Level: beginner

.seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
@*/
PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
{
  PetscPartitioner p;
  const char       *partitionerType = NULL;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  PetscValidPointer(part, 2);
  *part = NULL;
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
  /* Install the build-time default implementation; callers may override with PetscPartitionerSetType() */
  ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);

  p->edgeCut = 0;
  p->balance = 0.0;

  *part = p;
  PetscFunctionReturn(0);
}
615 
616 /*@
617   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
618 
619   Collective on DM
620 
621   Input Parameters:
622 + part    - The PetscPartitioner
623 - dm      - The mesh DM
624 
625   Output Parameters:
626 + partSection     - The PetscSection giving the division of points by partition
627 - partition       - The list of points by partition
628 
629   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
630 
631   Level: developer
632 
633 .seealso DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
634 @*/
635 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
636 {
637   PetscMPIInt    size;
638   PetscErrorCode ierr;
639 
640   PetscFunctionBegin;
641   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
642   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
643   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 4);
644   PetscValidPointer(partition, 5);
645   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
646   if (size == 1) {
647     PetscInt *points;
648     PetscInt  cStart, cEnd, c;
649 
650     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
651     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
652     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
653     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
654     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
655     for (c = cStart; c < cEnd; ++c) points[c] = c;
656     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
657     PetscFunctionReturn(0);
658   }
659   if (part->height == 0) {
660     PetscInt numVertices;
661     PetscInt *start     = NULL;
662     PetscInt *adjacency = NULL;
663     IS       globalNumbering;
664 
665     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
666     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
667     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
668     ierr = PetscFree(start);CHKERRQ(ierr);
669     ierr = PetscFree(adjacency);CHKERRQ(ierr);
670     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
671       const PetscInt *globalNum;
672       const PetscInt *partIdx;
673       PetscInt *map, cStart, cEnd;
674       PetscInt *adjusted, i, localSize, offset;
675       IS    newPartition;
676 
677       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
678       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
679       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
680       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
681       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
682       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
683       for (i = cStart, offset = 0; i < cEnd; i++) {
684         if (globalNum[i - cStart] >= 0) map[offset++] = i;
685       }
686       for (i = 0; i < localSize; i++) {
687         adjusted[i] = map[partIdx[i]];
688       }
689       ierr = PetscFree(map);CHKERRQ(ierr);
690       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
691       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
692       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
693       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
694       ierr = ISDestroy(partition);CHKERRQ(ierr);
695       *partition = newPartition;
696     }
697   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
698   PetscFunctionReturn(0);
699 
700 }
701 
702 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
703 {
704   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
705   PetscErrorCode          ierr;
706 
707   PetscFunctionBegin;
708   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
709   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
710   ierr = PetscFree(p);CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
715 {
716   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
717   PetscViewerFormat       format;
718   PetscErrorCode          ierr;
719 
720   PetscFunctionBegin;
721   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
722   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
723   if (p->random) {
724     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
725     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
726     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
727   }
728   PetscFunctionReturn(0);
729 }
730 
731 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
732 {
733   PetscBool      iascii;
734   PetscErrorCode ierr;
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
738   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
739   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
740   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
741   PetscFunctionReturn(0);
742 }
743 
744 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
745 {
746   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
747   PetscErrorCode          ierr;
748 
749   PetscFunctionBegin;
750   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
751   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
752   ierr = PetscOptionsTail();CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
756 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
757 {
758   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
759   PetscInt                np;
760   PetscErrorCode          ierr;
761 
762   PetscFunctionBegin;
763   if (p->random) {
764     PetscRandom r;
765     PetscInt   *sizes, *points, v, p;
766     PetscMPIInt rank;
767 
768     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
769     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
770     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
771     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
772     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
773     for (v = 0; v < numVertices; ++v) {points[v] = v;}
774     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
775     for (v = numVertices-1; v > 0; --v) {
776       PetscReal val;
777       PetscInt  w, tmp;
778 
779       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
780       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
781       w    = PetscFloorReal(val);
782       tmp       = points[v];
783       points[v] = points[w];
784       points[w] = tmp;
785     }
786     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
787     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
788     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
789   }
790   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
791   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
792   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %d != configured partitions %d", nparts, np);
793   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
794   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %d != configured vertices %d", numVertices, np);
795   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
796   *partition = p->partition;
797   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
798   PetscFunctionReturn(0);
799 }
800 
801 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
802 {
803   PetscFunctionBegin;
804   part->ops->view           = PetscPartitionerView_Shell;
805   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
806   part->ops->destroy        = PetscPartitionerDestroy_Shell;
807   part->ops->partition      = PetscPartitionerPartition_Shell;
808   PetscFunctionReturn(0);
809 }
810 
811 /*MC
812   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
813 
814   Level: intermediate
815 
816 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
817 M*/
818 
819 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
820 {
821   PetscPartitioner_Shell *p;
822   PetscErrorCode          ierr;
823 
824   PetscFunctionBegin;
825   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
826   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
827   part->data = p;
828 
829   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
830   p->random = PETSC_FALSE;
831   PetscFunctionReturn(0);
832 }
833 
834 /*@C
  PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
836 
837   Collective on Part
838 
839   Input Parameters:
840 + part     - The PetscPartitioner
841 . size - The number of partitions
842 . sizes    - array of size size (or NULL) providing the number of points in each partition
843 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
844 
845   Level: developer
846 
847   Notes:
848 
849     It is safe to free the sizes and points arrays after use in this routine.
850 
851 .seealso DMPlexDistribute(), PetscPartitionerCreate()
852 @*/
853 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
854 {
855   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
856   PetscInt                proc, numPoints;
857   PetscErrorCode          ierr;
858 
859   PetscFunctionBegin;
860   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
861   if (sizes)  {PetscValidPointer(sizes, 3);}
862   if (points) {PetscValidPointer(points, 4);}
863   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
864   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
865   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
866   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
867   if (sizes) {
868     for (proc = 0; proc < size; ++proc) {
869       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
870     }
871   }
872   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
873   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
874   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
875   PetscFunctionReturn(0);
876 }
877 
878 /*@
879   PetscPartitionerShellSetRandom - Set the flag to use a random partition
880 
881   Collective on Part
882 
883   Input Parameters:
884 + part   - The PetscPartitioner
885 - random - The flag to use a random partition
886 
887   Level: intermediate
888 
889 .seealso PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
890 @*/
891 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
892 {
893   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
894 
895   PetscFunctionBegin;
896   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
897   p->random = random;
898   PetscFunctionReturn(0);
899 }
900 
901 /*@
  PetscPartitionerShellGetRandom - Get the flag to use a random partition
903 
904   Collective on Part
905 
906   Input Parameter:
907 . part   - The PetscPartitioner
908 
  Output Parameter:
910 . random - The flag to use a random partition
911 
912   Level: intermediate
913 
914 .seealso PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
915 @*/
916 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
917 {
918   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
919 
920   PetscFunctionBegin;
921   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
922   PetscValidPointer(random, 2);
923   *random = p->random;
924   PetscFunctionReturn(0);
925 }
926 
927 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
928 {
929   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
930   PetscErrorCode          ierr;
931 
932   PetscFunctionBegin;
933   ierr = PetscFree(p);CHKERRQ(ierr);
934   PetscFunctionReturn(0);
935 }
936 
937 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
938 {
939   PetscViewerFormat format;
940   PetscErrorCode    ierr;
941 
942   PetscFunctionBegin;
943   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
944   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
945   PetscFunctionReturn(0);
946 }
947 
948 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
949 {
950   PetscBool      iascii;
951   PetscErrorCode ierr;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
955   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
956   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
957   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
958   PetscFunctionReturn(0);
959 }
960 
/* Partition vertices into contiguous, nearly equal-sized blocks in global order.
   Serially each partition gets numVertices/nparts points (plus one from the
   remainder for the first partitions). In parallel, each process determines
   which global partitions intersect its contiguous range of vertices and
   records the overlap sizes in partSection. The returned IS is always the
   identity (vertices keep their local order). */
static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
  MPI_Comm       comm;
  PetscInt       np;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)dm);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  /* The partition IS is the identity: points are not reordered */
  ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
  if (size == 1) {
    /* Serial: partition np gets the base size plus one extra point while remainder lasts */
    for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
  }
  else {
    PetscMPIInt rank;
    PetscInt nvGlobal, *offsets, myFirst, myLast;

    /* Compute this process's global vertex range [myFirst, myLast] via a prefix sum of local counts */
    ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
    offsets[0] = 0;
    ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
    for (np = 2; np <= size; np++) {
      offsets[np] += offsets[np-1];
    }
    nvGlobal = offsets[size];
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    myFirst = offsets[rank];
    myLast  = offsets[rank + 1] - 1;
    ierr = PetscFree(offsets);CHKERRQ(ierr);
    if (numVertices) {
      PetscInt firstPart = 0, firstLargePart = 0;
      PetscInt lastPart = 0, lastLargePart = 0;
      PetscInt rem = nvGlobal % nparts;   /* number of "large" partitions that hold one extra vertex */
      PetscInt pSmall = nvGlobal/nparts;  /* size of a small partition */
      PetscInt pBig = nvGlobal/nparts + 1; /* size of a large partition */


      /* Find the first and last global partitions overlapping [myFirst, myLast].
         The first 'rem' partitions have size pBig; the remaining ones have size pSmall. */
      if (rem) {
        firstLargePart = myFirst / pBig;
        lastLargePart  = myLast  / pBig;

        if (firstLargePart < rem) {
          firstPart = firstLargePart;
        }
        else {
          /* myFirst lies past the large partitions; index into the small ones.
             Note: pSmall > 0 here, since pSmall == 0 forces rem == nvGlobal and
             the branch above is always taken. */
          firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
        }
        if (lastLargePart < rem) {
          lastPart = lastLargePart;
        }
        else {
          lastPart = rem + (myLast - (rem * pBig)) / pSmall;
        }
      }
      else {
        /* Even division: all partitions have size nvGlobal/nparts */
        firstPart = myFirst / (nvGlobal/nparts);
        lastPart  = myLast  / (nvGlobal/nparts);
      }

      /* Record how many of this process's vertices fall in each overlapping partition */
      for (np = firstPart; np <= lastPart; np++) {
        PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
        PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);

        /* Clip the partition's global range to the local range before measuring it */
        PartStart = PetscMax(PartStart,myFirst);
        PartEnd   = PetscMin(PartEnd,myLast+1);
        ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
      }
    }
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1034 
1035 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1036 {
1037   PetscFunctionBegin;
1038   part->ops->view      = PetscPartitionerView_Simple;
1039   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1040   part->ops->partition = PetscPartitionerPartition_Simple;
1041   PetscFunctionReturn(0);
1042 }
1043 
1044 /*MC
1045   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1046 
1047   Level: intermediate
1048 
1049 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1050 M*/
1051 
1052 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1053 {
1054   PetscPartitioner_Simple *p;
1055   PetscErrorCode           ierr;
1056 
1057   PetscFunctionBegin;
1058   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1059   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1060   part->data = p;
1061 
1062   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1063   PetscFunctionReturn(0);
1064 }
1065 
1066 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1067 {
1068   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1069   PetscErrorCode          ierr;
1070 
1071   PetscFunctionBegin;
1072   ierr = PetscFree(p);CHKERRQ(ierr);
1073   PetscFunctionReturn(0);
1074 }
1075 
1076 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1077 {
1078   PetscViewerFormat format;
1079   PetscErrorCode    ierr;
1080 
1081   PetscFunctionBegin;
1082   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1083   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1084   PetscFunctionReturn(0);
1085 }
1086 
1087 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1088 {
1089   PetscBool      iascii;
1090   PetscErrorCode ierr;
1091 
1092   PetscFunctionBegin;
1093   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1094   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1095   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1096   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1097   PetscFunctionReturn(0);
1098 }
1099 
1100 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1101 {
1102   PetscInt       np;
1103   PetscErrorCode ierr;
1104 
1105   PetscFunctionBegin;
1106   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1107   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1108   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1109   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1110   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1111   PetscFunctionReturn(0);
1112 }
1113 
1114 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1115 {
1116   PetscFunctionBegin;
1117   part->ops->view      = PetscPartitionerView_Gather;
1118   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1119   part->ops->partition = PetscPartitionerPartition_Gather;
1120   PetscFunctionReturn(0);
1121 }
1122 
1123 /*MC
1124   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1125 
1126   Level: intermediate
1127 
1128 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1129 M*/
1130 
1131 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1132 {
1133   PetscPartitioner_Gather *p;
1134   PetscErrorCode           ierr;
1135 
1136   PetscFunctionBegin;
1137   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1138   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1139   part->data = p;
1140 
1141   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 
1146 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1147 {
1148   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1149   PetscErrorCode          ierr;
1150 
1151   PetscFunctionBegin;
1152   ierr = PetscFree(p);CHKERRQ(ierr);
1153   PetscFunctionReturn(0);
1154 }
1155 
1156 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1157 {
1158   PetscViewerFormat format;
1159   PetscErrorCode    ierr;
1160 
1161   PetscFunctionBegin;
1162   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1163   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1164   PetscFunctionReturn(0);
1165 }
1166 
1167 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1168 {
1169   PetscBool      iascii;
1170   PetscErrorCode ierr;
1171 
1172   PetscFunctionBegin;
1173   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1174   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1175   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1176   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1177   PetscFunctionReturn(0);
1178 }
1179 
1180 #if defined(PETSC_HAVE_CHACO)
1181 #if defined(PETSC_HAVE_UNISTD_H)
1182 #include <unistd.h>
1183 #endif
1184 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1185 #include <chaco.h>
1186 #else
1187 /* Older versions of Chaco do not have an include file */
1188 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1189                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1190                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1191                        int mesh_dims[3], double *goal, int global_method, int local_method,
1192                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1193 #endif
1194 extern int FREE_GRAPH;
1195 #endif
1196 
/* Partition the graph with the (serial) Chaco library. The graph must live
   entirely on one process; vertices with no local presence produce an empty
   partition. Chaco prints progress to stdout, so stdout is temporarily
   redirected into a pipe and drained into a buffer for error reporting. */
static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_CHACO)
  enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
  MPI_Comm       comm;
  int            nvtxs          = numVertices; /* number of vertices in full graph */
  int           *vwgts          = NULL;   /* weights for all vertices */
  float         *ewgts          = NULL;   /* weights for all edges */
  float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
  char          *outassignname  = NULL;   /*  name of assignment output file */
  char          *outfilename    = NULL;   /* output file name */
  int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
  int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
  int            mesh_dims[3];            /* dimensions of mesh of processors */
  double        *goal          = NULL;    /* desired set sizes for each set */
  int            global_method = 1;       /* global partitioning algorithm */
  int            local_method  = 1;       /* local partitioning algorithm */
  int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
  int            vmax          = 200;     /* how many vertices to coarsen down to? */
  int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
  double         eigtol        = 0.001;   /* tolerance on eigenvectors */
  long           seed          = 123636512; /* for random graph mutations */
#if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
  int           *assignment;              /* Output partition */
#else
  short int     *assignment;              /* Output partition */
#endif
  int            fd_stdout, fd_pipe[2];
  PetscInt      *points;
  int            i, v, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
#if defined (PETSC_USE_DEBUG)
  {
    /* Chaco is serial: error out if more than one process holds vertices */
    int ival,isum;
    PetscBool distributed;

    ival = (numVertices > 0);
    ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
    distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
    if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
  }
#endif
  if (!numVertices) {
    /* No local vertices: emit an empty partition but still set up the section */
    ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
    ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
  /* Chaco expects 1-based vertex numbering; shift adjacency up (restored at the end) */
  for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];

  if (global_method == INERTIAL_METHOD) {
    /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
    SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
  }
  /* 1-D processor mesh with nparts slots */
  mesh_dims[0] = nparts;
  mesh_dims[1] = 1;
  mesh_dims[2] = 1;
  ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
  /* Chaco outputs to stdout. We redirect this to a buffer. */
  /* TODO: check error codes for UNIX calls */
#if defined(PETSC_HAVE_UNISTD_H)
  {
    int piperet;
    piperet = pipe(fd_pipe);
    if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
    fd_stdout = dup(1);
    close(1);
    dup2(fd_pipe[1], 1);
  }
#endif
  ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
                   assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
                   vmax, ndims, eigtol, seed);
#if defined(PETSC_HAVE_UNISTD_H)
  {
    /* Drain Chaco's captured stdout into msgLog and restore the original stdout */
    char msgLog[10000];
    int  count;

    fflush(stdout);
    count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
    if (count < 0) count = 0;
    msgLog[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
    if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
  }
#else
  if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
#endif
  /* Convert to PetscSection+IS */
  ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
  for (v = 0; v < nvtxs; ++v) {
    ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
  /* Group the vertices of each partition contiguously in the output IS */
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  if (global_method == INERTIAL_METHOD) {
    /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
  }
  ierr = PetscFree(assignment);CHKERRQ(ierr);
  /* Restore the caller's 0-based adjacency numbering */
  for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
#endif
}
1317 
1318 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1319 {
1320   PetscFunctionBegin;
1321   part->ops->view      = PetscPartitionerView_Chaco;
1322   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1323   part->ops->partition = PetscPartitionerPartition_Chaco;
1324   PetscFunctionReturn(0);
1325 }
1326 
1327 /*MC
1328   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1329 
1330   Level: intermediate
1331 
1332 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1333 M*/
1334 
1335 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1336 {
1337   PetscPartitioner_Chaco *p;
1338   PetscErrorCode          ierr;
1339 
1340   PetscFunctionBegin;
1341   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1342   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1343   part->data = p;
1344 
1345   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1346   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1347   PetscFunctionReturn(0);
1348 }
1349 
1350 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1351 {
1352   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1353   PetscErrorCode             ierr;
1354 
1355   PetscFunctionBegin;
1356   ierr = PetscFree(p);CHKERRQ(ierr);
1357   PetscFunctionReturn(0);
1358 }
1359 
1360 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1361 {
1362   PetscViewerFormat format;
1363   PetscErrorCode    ierr;
1364 
1365   PetscFunctionBegin;
1366   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1367   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1368   PetscFunctionReturn(0);
1369 }
1370 
1371 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1372 {
1373   PetscBool      iascii;
1374   PetscErrorCode ierr;
1375 
1376   PetscFunctionBegin;
1377   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1378   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1379   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1380   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1381   PetscFunctionReturn(0);
1382 }
1383 
1384 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1385 {
1386   static const char         *ptypes[] = {"kway", "rb"};
1387   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1388   PetscErrorCode            ierr;
1389 
1390   PetscFunctionBegin;
1391   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1392   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1393   ierr = PetscOptionsTail();CHKERRQ(ierr);
1394   PetscFunctionReturn(0);
1395 }
1396 
1397 #if defined(PETSC_HAVE_PARMETIS)
1398 #include <parmetis.h>
1399 #endif
1400 
1401 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1402 {
1403 #if defined(PETSC_HAVE_PARMETIS)
1404   MPI_Comm       comm;
1405   PetscSection   section;
1406   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1407   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1408   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1409   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1410   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1411   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1412   PetscInt       wgtflag    = 0;           /* Indicates which weights are present */
1413   PetscInt       numflag    = 0;           /* Indicates initial offset (0 or 1) */
1414   PetscInt       ncon       = 1;           /* The number of weights per vertex */
1415   real_t        *tpwgts;                   /* The fraction of vertex weights assigned to each partition */
1416   real_t        *ubvec;                    /* The balance intolerance for vertex weights */
1417   PetscInt       options[64];              /* Options */
1418   /* Outputs */
1419   PetscInt       v, i, *assignment, *points;
1420   PetscMPIInt    size, rank, p;
1421   PetscInt       metis_ptype = ((PetscPartitioner_ParMetis *) part->data)->ptype;
1422   PetscErrorCode ierr;
1423 
1424   PetscFunctionBegin;
1425   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1426   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1427   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1428   /* Calculate vertex distribution */
1429   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1430   vtxdist[0] = 0;
1431   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1432   for (p = 2; p <= size; ++p) {
1433     vtxdist[p] += vtxdist[p-1];
1434   }
1435   /* Calculate partition weights */
1436   for (p = 0; p < nparts; ++p) {
1437     tpwgts[p] = 1.0/nparts;
1438   }
1439   ubvec[0] = 1.05;
1440   /* Weight cells by dofs on cell by default */
1441   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1442   if (section) {
1443     PetscInt cStart, cEnd, dof;
1444 
1445     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1446     for (v = cStart; v < cEnd; ++v) {
1447       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1448       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1449       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1450       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1451     }
1452   } else {
1453     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1454   }
1455   wgtflag |= 2; /* have weights on graph vertices */
1456 
1457   if (nparts == 1) {
1458     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1459   } else {
1460     for (p = 0; !vtxdist[p+1] && p < size; ++p);
1461     if (vtxdist[p+1] == vtxdist[size]) {
1462       if (rank == p) {
1463         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1464         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1465         if (metis_ptype == 1) {
1466           PetscStackPush("METIS_PartGraphRecursive");
1467           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1468           PetscStackPop;
1469           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1470         } else {
1471           /*
1472            It would be nice to activate the two options below, but they would need some actual testing.
1473            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1474            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1475           */
1476           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1477           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1478           PetscStackPush("METIS_PartGraphKway");
1479           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1480           PetscStackPop;
1481           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1482         }
1483       }
1484     } else {
1485       options[0] = 0; /* use all defaults */
1486       PetscStackPush("ParMETIS_V3_PartKway");
1487       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1488       PetscStackPop;
1489       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1490     }
1491   }
1492   /* Convert to PetscSection+IS */
1493   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1494   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1495   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1496   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1497   for (p = 0, i = 0; p < nparts; ++p) {
1498     for (v = 0; v < nvtxs; ++v) {
1499       if (assignment[v] == p) points[i++] = v;
1500     }
1501   }
1502   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1503   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1504   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1505   PetscFunctionReturn(0);
1506 #else
1507   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1508 #endif
1509 }
1510 
1511 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1512 {
1513   PetscFunctionBegin;
1514   part->ops->view           = PetscPartitionerView_ParMetis;
1515   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1516   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1517   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1518   PetscFunctionReturn(0);
1519 }
1520 
1521 /*MC
1522   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1523 
1524   Level: intermediate
1525 
1526 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1527 M*/
1528 
1529 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1530 {
1531   PetscPartitioner_ParMetis *p;
1532   PetscErrorCode          ierr;
1533 
1534   PetscFunctionBegin;
1535   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1536   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1537   part->data = p;
1538 
1539   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1540   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1541   PetscFunctionReturn(0);
1542 }
1543 
1544 
/* Citation for the PT-Scotch library; registered via PetscCitationsRegister() in PetscPartitionerCreate_PTScotch() */
PetscBool PTScotchPartitionercite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
1557 
/* Implementation-specific context stored in part->data for the "ptscotch" partitioner */
typedef struct {
  PetscInt  strategy;  /* index into PTScotchStrategyList; converted to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* load imbalance ratio passed to the SCOTCH strategy builders */
} PetscPartitioner_PTScotch;
1562 
/* Names accepted by -petscpartitioner_ptscotch_strategy; the order must stay in
   sync with the index-to-flag mapping in PTScotch_Strategy() */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
1574 
1575 #if defined(PETSC_HAVE_PTSCOTCH)
1576 
1577 EXTERN_C_BEGIN
1578 #include <ptscotch.h>
1579 EXTERN_C_END
1580 
1581 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1582 
1583 static int PTScotch_Strategy(PetscInt strategy)
1584 {
1585   switch (strategy) {
1586   case  0: return SCOTCH_STRATDEFAULT;
1587   case  1: return SCOTCH_STRATQUALITY;
1588   case  2: return SCOTCH_STRATSPEED;
1589   case  3: return SCOTCH_STRATBALANCE;
1590   case  4: return SCOTCH_STRATSAFETY;
1591   case  5: return SCOTCH_STRATSCALABILITY;
1592   case  6: return SCOTCH_STRATRECURSIVE;
1593   case  7: return SCOTCH_STRATREMAP;
1594   default: return SCOTCH_STRATDEFAULT;
1595   }
1596 }
1597 
1598 
1599 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1600                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1601 {
1602   SCOTCH_Graph   grafdat;
1603   SCOTCH_Strat   stradat;
1604   SCOTCH_Num     vertnbr = n;
1605   SCOTCH_Num     edgenbr = xadj[n];
1606   SCOTCH_Num*    velotab = vtxwgt;
1607   SCOTCH_Num*    edlotab = adjwgt;
1608   SCOTCH_Num     flagval = strategy;
1609   double         kbalval = imbalance;
1610   PetscErrorCode ierr;
1611 
1612   PetscFunctionBegin;
1613   {
1614     PetscBool flg = PETSC_TRUE;
1615     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1616     if (!flg) velotab = NULL;
1617   }
1618   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1619   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1620   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1621   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1622 #if defined(PETSC_USE_DEBUG)
1623   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1624 #endif
1625   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1626   SCOTCH_stratExit(&stradat);
1627   SCOTCH_graphExit(&grafdat);
1628   PetscFunctionReturn(0);
1629 }
1630 
1631 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1632                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1633 {
1634   PetscMPIInt     procglbnbr;
1635   PetscMPIInt     proclocnum;
1636   SCOTCH_Arch     archdat;
1637   SCOTCH_Dgraph   grafdat;
1638   SCOTCH_Dmapping mappdat;
1639   SCOTCH_Strat    stradat;
1640   SCOTCH_Num      vertlocnbr;
1641   SCOTCH_Num      edgelocnbr;
1642   SCOTCH_Num*     veloloctab = vtxwgt;
1643   SCOTCH_Num*     edloloctab = adjwgt;
1644   SCOTCH_Num      flagval = strategy;
1645   double          kbalval = imbalance;
1646   PetscErrorCode  ierr;
1647 
1648   PetscFunctionBegin;
1649   {
1650     PetscBool flg = PETSC_TRUE;
1651     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1652     if (!flg) veloloctab = NULL;
1653   }
1654   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1655   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1656   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1657   edgelocnbr = xadj[vertlocnbr];
1658 
1659   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1660   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1661 #if defined(PETSC_USE_DEBUG)
1662   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1663 #endif
1664   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1665   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRQ(ierr);
1666   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1667   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1668   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1669   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1670   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1671   SCOTCH_archExit(&archdat);
1672   SCOTCH_stratExit(&stradat);
1673   SCOTCH_dgraphExit(&grafdat);
1674   PetscFunctionReturn(0);
1675 }
1676 
1677 #endif /* PETSC_HAVE_PTSCOTCH */
1678 
1679 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1680 {
1681   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1682   PetscErrorCode             ierr;
1683 
1684   PetscFunctionBegin;
1685   ierr = PetscFree(p);CHKERRQ(ierr);
1686   PetscFunctionReturn(0);
1687 }
1688 
1689 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1690 {
1691   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1692   PetscErrorCode            ierr;
1693 
1694   PetscFunctionBegin;
1695   ierr = PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");CHKERRQ(ierr);
1696   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1697   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1698   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1699   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1700   PetscFunctionReturn(0);
1701 }
1702 
1703 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1704 {
1705   PetscBool      iascii;
1706   PetscErrorCode ierr;
1707 
1708   PetscFunctionBegin;
1709   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1710   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1711   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1712   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1713   PetscFunctionReturn(0);
1714 }
1715 
/* Process the PTScotch-specific options (-petscpartitioner_ptscotch_strategy and
   -petscpartitioner_ptscotch_imbalance), updating the context in part->data.
   Must run inside the caller's options processing, hence the Head/Tail bracketing. */
static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
  const char *const         *slist = PTScotchStrategyList;
  PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
  PetscBool                 flag;
  PetscErrorCode            ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
  /* Current value is passed as the default so an unset option leaves p->strategy unchanged */
  ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1731 
1732 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1733 {
1734 #if defined(PETSC_HAVE_PTSCOTCH)
1735   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1736   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1737   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1738   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1739   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1740   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1741   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1742   PetscInt       v, i, *assignment, *points;
1743   PetscMPIInt    size, rank, p;
1744   PetscErrorCode ierr;
1745 
1746   PetscFunctionBegin;
1747   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1748   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1749   ierr = PetscMalloc2(nparts+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr);
1750 
1751   /* Calculate vertex distribution */
1752   vtxdist[0] = 0;
1753   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1754   for (p = 2; p <= size; ++p) {
1755     vtxdist[p] += vtxdist[p-1];
1756   }
1757 
1758   if (nparts == 1) {
1759     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1760   } else {
1761     PetscSection section;
1762     /* Weight cells by dofs on cell by default */
1763     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1764     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1765     if (section) {
1766       PetscInt vStart, vEnd, dof;
1767       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1768       for (v = vStart; v < vEnd; ++v) {
1769         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1770         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1771         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1772         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1773       }
1774     } else {
1775       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1776     }
1777     {
1778       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1779       int                       strat = PTScotch_Strategy(pts->strategy);
1780       double                    imbal = (double)pts->imbalance;
1781 
1782       for (p = 0; !vtxdist[p+1] && p < size; ++p);
1783       if (vtxdist[p+1] == vtxdist[size]) {
1784         if (rank == p) {
1785           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1786         }
1787       } else {
1788         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1789       }
1790     }
1791     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1792   }
1793 
1794   /* Convert to PetscSection+IS */
1795   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1796   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1797   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1798   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1799   for (p = 0, i = 0; p < nparts; ++p) {
1800     for (v = 0; v < nvtxs; ++v) {
1801       if (assignment[v] == p) points[i++] = v;
1802     }
1803   }
1804   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1805   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1806 
1807   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1808   PetscFunctionReturn(0);
1809 #else
1810   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1811 #endif
1812 }
1813 
1814 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1815 {
1816   PetscFunctionBegin;
1817   part->ops->view           = PetscPartitionerView_PTScotch;
1818   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1819   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1820   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1821   PetscFunctionReturn(0);
1822 }
1823 
1824 /*MC
1825   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1826 
1827   Level: intermediate
1828 
1829 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1830 M*/
1831 
1832 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1833 {
1834   PetscPartitioner_PTScotch *p;
1835   PetscErrorCode          ierr;
1836 
1837   PetscFunctionBegin;
1838   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1839   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1840   part->data = p;
1841 
1842   p->strategy  = 0;
1843   p->imbalance = 0.01;
1844 
1845   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1846   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1847   PetscFunctionReturn(0);
1848 }
1849 
1850 
1851 /*@
1852   DMPlexGetPartitioner - Get the mesh partitioner
1853 
1854   Not collective
1855 
1856   Input Parameter:
1857 . dm - The DM
1858 
1859   Output Parameter:
1860 . part - The PetscPartitioner
1861 
1862   Level: developer
1863 
1864   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
1865 
1866 .seealso DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1867 @*/
1868 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1869 {
1870   DM_Plex       *mesh = (DM_Plex *) dm->data;
1871 
1872   PetscFunctionBegin;
1873   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1874   PetscValidPointer(part, 2);
1875   *part = mesh->partitioner;
1876   PetscFunctionReturn(0);
1877 }
1878 
1879 /*@
1880   DMPlexSetPartitioner - Set the mesh partitioner
1881 
1882   logically collective on dm and part
1883 
1884   Input Parameters:
1885 + dm - The DM
1886 - part - The partitioner
1887 
1888   Level: developer
1889 
1890   Note: Any existing PetscPartitioner will be destroyed.
1891 
1892 .seealso DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1893 @*/
1894 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1895 {
1896   DM_Plex       *mesh = (DM_Plex *) dm->data;
1897   PetscErrorCode ierr;
1898 
1899   PetscFunctionBegin;
1900   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1901   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1902   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1903   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1904   mesh->partitioner = part;
1905   PetscFunctionReturn(0);
1906 }
1907 
/* Add to the hash set ht every point related to 'point' through the mesh tree
   (used for non-conforming meshes): with 'up' set, add the full closure of the
   tree parent and recurse upward; with 'down' set, add all tree children and
   recurse downward. Recursion terminates because a point with no parent reports
   itself as its parent, and leaves have no children. */
static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (up) {
    PetscInt parent;

    ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
    if (parent != point) {
      PetscInt closureSize, *closure = NULL, i;

      ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
      for (i = 0; i < closureSize; i++) {
        PetscInt cpoint = closure[2*i]; /* closure stores (point, orientation) pairs */

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* recurse upward only from the parent's closure points */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
      }
      ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
    }
  }
  if (down) {
    PetscInt numChildren;
    const PetscInt *children;

    ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
    if (numChildren) {
      PetscInt i;

      for (i = 0; i < numChildren; i++) {
        PetscInt cpoint = children[i];

        ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
        /* recurse downward only through the children */
        ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
1948 
1949 /*@
1950   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1951 
1952   Input Parameters:
1953 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
1955 
1956   Level: developer
1957 
1958 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
1959 @*/
PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
{
  IS              rankIS,   pointIS;
  const PetscInt *ranks,   *points;
  PetscInt        numRanks, numPoints, r, p, c, closureSize;
  PetscInt       *closure = NULL;
  DM_Plex        *mesh    = (DM_Plex *)dm->data;
  /* Tree (non-conforming) meshes need extra parent/child points added to each closure */
  PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* Iterate over every rank value currently present in the label */
  ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
  ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);

  for (r = 0; r < numRanks; ++r) {
    const PetscInt rank = ranks[r];
    PetscHSetI     ht;
    PetscInt       nelems, *elems, off = 0;

    ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    /* Collect the union of the closures in a hash set to deduplicate shared points;
       16 is a size hint per point, not a hard bound */
    ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
    ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* closure is reused (and reallocated as needed) across iterations */
      ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
      for (c = 0; c < closureSize*2; c += 2) { /* closure stores (point, orientation) pairs */
        ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
        if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
      }
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
    /* Flatten the hash set into a sorted IS and replace the stratum for this rank */
    ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
    ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
    ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
    ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
    ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
    ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }

  /* Return the reused closure work array (point argument is irrelevant here) */
  if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
  ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
  ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2009 
2010 /*@
2011   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2012 
2013   Input Parameters:
2014 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2016 
2017   Level: developer
2018 
2019 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2020 @*/
2021 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2022 {
2023   IS              rankIS,   pointIS;
2024   const PetscInt *ranks,   *points;
2025   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2026   PetscInt       *adj = NULL;
2027   PetscErrorCode  ierr;
2028 
2029   PetscFunctionBegin;
2030   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2031   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2032   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2033   for (r = 0; r < numRanks; ++r) {
2034     const PetscInt rank = ranks[r];
2035 
2036     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2037     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2038     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2039     for (p = 0; p < numPoints; ++p) {
2040       adjSize = PETSC_DETERMINE;
2041       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2042       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2043     }
2044     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2045     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2046   }
2047   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2048   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2049   ierr = PetscFree(adj);CHKERRQ(ierr);
2050   PetscFunctionReturn(0);
2051 }
2052 
2053 /*@
2054   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2055 
2056   Input Parameters:
2057 + dm     - The DM
- label  - DMLabel assigning ranks to remote roots
2059 
2060   Level: developer
2061 
2062   Note: This is required when generating multi-level overlaps to capture
2063   overlap points from non-neighbouring partitions.
2064 
2065 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2066 @*/
2067 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2068 {
2069   MPI_Comm        comm;
2070   PetscMPIInt     rank;
2071   PetscSF         sfPoint;
2072   DMLabel         lblRoots, lblLeaves;
2073   IS              rankIS, pointIS;
2074   const PetscInt *ranks;
2075   PetscInt        numRanks, r;
2076   PetscErrorCode  ierr;
2077 
2078   PetscFunctionBegin;
2079   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2080   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2081   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2082   /* Pull point contributions from remote leaves into local roots */
2083   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2084   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2085   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2086   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2087   for (r = 0; r < numRanks; ++r) {
2088     const PetscInt remoteRank = ranks[r];
2089     if (remoteRank == rank) continue;
2090     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2091     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2092     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2093   }
2094   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2095   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2096   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2097   /* Push point contributions from roots into remote leaves */
2098   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2099   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2100   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2101   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2102   for (r = 0; r < numRanks; ++r) {
2103     const PetscInt remoteRank = ranks[r];
2104     if (remoteRank == rank) continue;
2105     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2106     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2107     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2108   }
2109   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2110   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2111   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2112   PetscFunctionReturn(0);
2113 }
2114 
2115 /*@
2116   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2117 
2118   Input Parameters:
2119 + dm        - The DM
. rootLabel - DMLabel assigning ranks to local roots
2121 . processSF - A star forest mapping into the local index on each remote rank
2122 
2123   Output Parameter:
- leafLabel - DMLabel assigning ranks to remote roots
2125 
2126   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2127   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2128 
2129   Level: developer
2130 
2131 .seealso: DMPlexPartitionLabelCreateSF, DMPlexDistribute(), DMPlexCreateOverlap
2132 @*/
PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           p, n, numNeighbors, ssize, l, nleaves;
  PetscSF            sfPoint;
  PetscSFNode       *rootPoints, *leafPoints;
  PetscSection       rootSection, leafSection;
  const PetscSFNode *remote;
  const PetscInt    *local, *neighbors;
  IS                 valueIS;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);

  /* Convert to (point, rank) and use actual owners:
     rootSection counts, per target rank, the points this process will send */
  ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
  ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
  ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
  ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    PetscInt numPoints;

    ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
    ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
  ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
  for (n = 0; n < numNeighbors; ++n) {
    IS              pointIS;
    const PetscInt *points;
    PetscInt        off, numPoints, p;

    ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
    ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
    ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
    for (p = 0; p < numPoints; ++p) {
      /* If the point is a leaf in the point SF, record its true (remote) owner;
         otherwise this process owns it. local == NULL means no leaves here. */
      if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
      else       {l = -1;}
      if (l >= 0) {rootPoints[off+p] = remote[l];}
      else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
    }
    ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
    ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
  ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
  /* Communicate overlap: PetscSFNode pairs travel as MPIU_2INT */
  ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
  /* Filter remote contributions (ovLeafPoints) into the overlapSF */
  ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
  for (p = 0; p < ssize; p++) {
    ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
  }
  ierr = PetscFree(rootPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscFree(leafPoints);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2202 
2203 /*@
2204   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2205 
2206   Input Parameters:
2207 + dm    - The DM
. label - DMLabel assigning ranks to remote roots
2209 
2210   Output Parameter:
2211 - sf    - The star forest communication context encapsulating the defined mapping
2212 
2213   Note: The incoming label is a receiver mapping of remote points to their parent rank.
2214 
2215   Level: developer
2216 
2217 .seealso: DMPlexDistribute(), DMPlexCreateOverlap
2218 @*/
PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
{
  PetscMPIInt     rank, size;
  PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
  PetscSFNode    *remotePoints;
  IS              remoteRootIS;
  const PetscInt *remoteRoots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);

  /* Count the total number of labeled points (over all ranks, including our own) */
  for (numRemote = 0, n = 0; n < size; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    numRemote += numPoints;
  }
  ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
  /* Put owned points first */
  ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
  if (numPoints > 0) {
    ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = rank;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Now add remote points (skipping our own stratum, already handled above) */
  for (n = 0; n < size; ++n) {
    ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
    if (numPoints <= 0 || n == rank) continue;
    ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
    ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    for (p = 0; p < numPoints; p++) {
      remotePoints[idx].index = remoteRoots[p];
      remotePoints[idx].rank = n;
      idx++;
    }
    ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
    ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
  }
  /* Build the SF over the whole chart; NULL local indices mean the leaves are
     the contiguous range [0, numRemote). The SF takes ownership of remotePoints. */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2269