xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 5b440754c970ee174272a365895ca861449cade0)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
4 PetscClassId PETSCPARTITIONER_CLASSID = 0;
5 
6 PetscFunctionList PetscPartitionerList              = NULL;
7 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
21 PetscBool ParMetisPartitionercite = PETSC_FALSE;
22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
23                                "  author  = {George Karypis and Vipin Kumar},\n"
24                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
25                                "  journal = {Journal of Parallel and Distributed Computing},\n"
26                                "  volume  = {48},\n"
27                                "  pages   = {71--85},\n"
28                                "  year    = {1998}\n}\n";
29 
30 /*@C
31   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
32 
33   Input Parameters:
34 + dm      - The mesh DM
35 - height  - Height of the strata from which to construct the graph
36 
37   Output Parameters:
38 + numVertices     - Number of vertices in the graph
39 . offsets         - Point offsets in the graph
40 . adjacency       - Point connectivity in the graph
41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
42 
43   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
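  Sample usage (a minimal sketch; error checking is omitted and the mesh dm is assumed to exist):
.vb
    PetscInt numVertices, *offsets, *adjacency;
    IS       globalNumbering;

    DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
    /* ... hand the CSR pair (offsets, adjacency) to a partitioner ... */
    PetscFree(offsets);
    PetscFree(adjacency);
    ISDestroy(&globalNumbering);
.ve
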
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
79   if (globalNumbering) {
80     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
81     *globalNumbering = cellNumbering;
82   }
83   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
84   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
85     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
86     if (nroots > 0) {if (cellNum[p] < 0) continue;}
87     adjSize = PETSC_DETERMINE;
88     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
89     for (a = 0; a < adjSize; ++a) {
90       const PetscInt point = adj[a];
91       if (point != p && pStart <= point && point < pEnd) {
92         PetscInt *PETSC_RESTRICT pBuf;
93         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
94         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
95         *pBuf = point;
96       }
97     }
98     (*numVertices)++;
99   }
100   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
101   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
102   /* Derive CSR graph from section/segbuffer */
103   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
104   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
105   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
106   for (idx = 0, p = pStart; p < pEnd; p++) {
107     if (nroots > 0) {if (cellNum[p] < 0) continue;}
108     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
109   }
110   vOffsets[*numVertices] = size;
111   if (offsets) *offsets = vOffsets;
112   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
113   {
114     ISLocalToGlobalMapping ltogCells;
115     PetscInt n, size, *cells_arr;
116     /* In parallel, apply a global cell numbering to the graph */
117     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
121     /* Convert to positive global cell numbers */
122     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
123     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
126     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
127   }
128   if (adjacency) *adjacency = graph;
129   /* Clean up */
130   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
131   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
132   ierr = PetscFree(adj);CHKERRQ(ierr);
133   PetscFunctionReturn(0);
134 }
135 
136 /*@C
137   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
138 
139   Collective
140 
141   Input Arguments:
142 + dm - The DMPlex
143 - cellHeight - The height of mesh points to treat as cells (default should be 0)
144 
145   Output Arguments:
146 + numVertices - The number of local vertices in the graph, or cells in the mesh.
147 . offsets     - The offset to the adjacency list for each cell
148 - adjacency   - The adjacency list for all cells
149 
150   Note: This is suitable for input to a mesh partitioner like ParMetis.
151 
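  Sample usage (a minimal sketch; error checking is omitted and the mesh dm is assumed to exist):
.vb
    PetscInt numVertices, *offsets, *adjacency;

    DMPlexCreateNeighborCSR(dm, 0, &numVertices, &offsets, &adjacency);
    /* ... pass the CSR arrays to the partitioner, then free them ... */
    PetscFree(offsets);
    PetscFree(adjacency);
.ve
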
152   Level: advanced
153 
154 .seealso: DMPlexCreate()
155 @*/
156 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
157 {
158   const PetscInt maxFaceCases = 30;
159   PetscInt       numFaceCases = 0;
160   PetscInt       numFaceVertices[30]; /* = maxFaceCases; C89 requires a constant array bound */
161   PetscInt      *off, *adj;
162   PetscInt      *neighborCells = NULL;
163   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
164   PetscErrorCode ierr;
165 
166   PetscFunctionBegin;
167   /* For parallel partitioning, I think you have to communicate supports */
168   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
169   cellDim = dim - cellHeight;
170   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
171   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
172   if (cEnd - cStart == 0) {
173     if (numVertices) *numVertices = 0;
174     if (offsets)   *offsets   = NULL;
175     if (adjacency) *adjacency = NULL;
176     PetscFunctionReturn(0);
177   }
178   numCells  = cEnd - cStart;
179   faceDepth = depth - cellHeight;
180   if (dim == depth) {
181     PetscInt f, fStart, fEnd;
182 
183     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
184     /* Count neighboring cells */
185     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
186     for (f = fStart; f < fEnd; ++f) {
187       const PetscInt *support;
188       PetscInt        supportSize;
189       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
190       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
191       if (supportSize == 2) {
192         PetscInt numChildren;
193 
194         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
195         if (!numChildren) {
196           ++off[support[0]-cStart+1];
197           ++off[support[1]-cStart+1];
198         }
199       }
200     }
201     /* Prefix sum */
202     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
203     if (adjacency) {
204       PetscInt *tmp;
205 
206       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
207       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
208       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
209       /* Get neighboring cells */
210       for (f = fStart; f < fEnd; ++f) {
211         const PetscInt *support;
212         PetscInt        supportSize;
213         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
214         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
215         if (supportSize == 2) {
216           PetscInt numChildren;
217 
218           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
219           if (!numChildren) {
220             adj[tmp[support[0]-cStart]++] = support[1];
221             adj[tmp[support[1]-cStart]++] = support[0];
222           }
223         }
224       }
225 #if defined(PETSC_USE_DEBUG)
226       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %D != %D for cell %D", tmp[c], off[c+1], c+cStart);
227 #endif
228       ierr = PetscFree(tmp);CHKERRQ(ierr);
229     }
230     if (numVertices) *numVertices = numCells;
231     if (offsets)   *offsets   = off;
232     if (adjacency) *adjacency = adj;
233     PetscFunctionReturn(0);
234   }
235   /* Setup face recognition */
236   if (faceDepth == 1) {
237     PetscInt cornersSeen[30] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; /* Could use PetscBT */
238 
239     for (c = cStart; c < cEnd; ++c) {
240       PetscInt corners;
241 
242       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
243       if (!cornersSeen[corners]) {
244         PetscInt nFV;
245 
246         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
247         cornersSeen[corners] = 1;
248 
249         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
250 
251         numFaceVertices[numFaceCases++] = nFV;
252       }
253     }
254   }
255   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
256   /* Count neighboring cells */
257   for (cell = cStart; cell < cEnd; ++cell) {
258     PetscInt numNeighbors = PETSC_DETERMINE, n;
259 
260     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
261     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
262     for (n = 0; n < numNeighbors; ++n) {
263       PetscInt        cellPair[2];
264       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
265       PetscInt        meetSize = 0;
266       const PetscInt *meet    = NULL;
267 
268       cellPair[0] = cell; cellPair[1] = neighborCells[n];
269       if (cellPair[0] == cellPair[1]) continue;
270       if (!found) {
271         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
272         if (meetSize) {
273           PetscInt f;
274 
275           for (f = 0; f < numFaceCases; ++f) {
276             if (numFaceVertices[f] == meetSize) {
277               found = PETSC_TRUE;
278               break;
279             }
280           }
281         }
282         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
283       }
284       if (found) ++off[cell-cStart+1];
285     }
286   }
287   /* Prefix sum */
288   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
289 
290   if (adjacency) {
291     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
292     /* Get neighboring cells */
293     for (cell = cStart; cell < cEnd; ++cell) {
294       PetscInt numNeighbors = PETSC_DETERMINE, n;
295       PetscInt cellOffset   = 0;
296 
297       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
298       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
299       for (n = 0; n < numNeighbors; ++n) {
300         PetscInt        cellPair[2];
301         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
302         PetscInt        meetSize = 0;
303         const PetscInt *meet    = NULL;
304 
305         cellPair[0] = cell; cellPair[1] = neighborCells[n];
306         if (cellPair[0] == cellPair[1]) continue;
307         if (!found) {
308           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
309           if (meetSize) {
310             PetscInt f;
311 
312             for (f = 0; f < numFaceCases; ++f) {
313               if (numFaceVertices[f] == meetSize) {
314                 found = PETSC_TRUE;
315                 break;
316               }
317             }
318           }
319           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
320         }
321         if (found) {
322           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
323           ++cellOffset;
324         }
325       }
326     }
327   }
328   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
329   if (numVertices) *numVertices = numCells;
330   if (offsets)   *offsets   = off;
331   if (adjacency) *adjacency = adj;
332   PetscFunctionReturn(0);
333 }
334 
335 /*@C
336   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
337 
338   Not Collective
339 
340   Input Parameters:
341 + name        - The name of a new user-defined creation routine
342 - create_func - The creation routine itself
343 
344   Notes:
345   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
346 
347   Sample usage:
348 .vb
349     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
350 .ve
351 
352   Then, your PetscPartitioner type can be chosen with the procedural interface via
353 .vb
354     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
355     PetscPartitionerSetType(PetscPartitioner, "my_part");
356 .ve
357    or at runtime via the option
358 .vb
359     -petscpartitioner_type my_part
360 .ve
361 
362   Level: advanced
363 
364 .keywords: PetscPartitioner, register
365 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
366 
367 @*/
368 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
369 {
370   PetscErrorCode ierr;
371 
372   PetscFunctionBegin;
373   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
374   PetscFunctionReturn(0);
375 }
376 
377 /*@C
378   PetscPartitionerSetType - Builds a particular PetscPartitioner
379 
380   Collective on PetscPartitioner
381 
382   Input Parameters:
383 + part - The PetscPartitioner object
384 - name - The kind of partitioner
385 
386   Options Database Key:
387 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
388 
389   Level: intermediate
390 
391 .keywords: PetscPartitioner, set, type
392 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
393 @*/
394 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
395 {
396   PetscErrorCode (*r)(PetscPartitioner);
397   PetscBool      match;
398   PetscErrorCode ierr;
399 
400   PetscFunctionBegin;
401   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
402   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
403   if (match) PetscFunctionReturn(0);
404 
405   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
406   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
407   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
408 
409   if (part->ops->destroy) {
410     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
411   }
412   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
413   ierr = (*r)(part);CHKERRQ(ierr);
414   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
415   PetscFunctionReturn(0);
416 }
417 
418 /*@C
419   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
420 
421   Not Collective
422 
423   Input Parameter:
424 . part - The PetscPartitioner
425 
426   Output Parameter:
427 . name - The PetscPartitioner type name
428 
429   Level: intermediate
430 
431 .keywords: PetscPartitioner, get, type, name
432 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
433 @*/
434 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
435 {
436   PetscErrorCode ierr;
437 
438   PetscFunctionBegin;
439   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
440   PetscValidPointer(name, 2);
441   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
442   *name = ((PetscObject) part)->type_name;
443   PetscFunctionReturn(0);
444 }
445 
446 /*@C
447   PetscPartitionerView - Views a PetscPartitioner
448 
449   Collective on PetscPartitioner
450 
451   Input Parameters:
452 + part - the PetscPartitioner object to view
453 - v    - the viewer
454 
455   Level: developer
456 
457 .seealso: PetscPartitionerDestroy()
458 @*/
459 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
460 {
461   PetscErrorCode ierr;
462 
463   PetscFunctionBegin;
464   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
465   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
466   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
467   PetscFunctionReturn(0);
468 }
469 
470 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
471 {
472   PetscFunctionBegin;
473   if (!currentType) {
474 #if defined(PETSC_HAVE_CHACO)
475     *defaultType = PETSCPARTITIONERCHACO;
476 #elif defined(PETSC_HAVE_PARMETIS)
477     *defaultType = PETSCPARTITIONERPARMETIS;
478 #elif defined(PETSC_HAVE_PTSCOTCH)
479     *defaultType = PETSCPARTITIONERPTSCOTCH;
480 #else
481     *defaultType = PETSCPARTITIONERSIMPLE;
482 #endif
483   } else {
484     *defaultType = currentType;
485   }
486   PetscFunctionReturn(0);
487 }
488 
489 /*@
490   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
491 
492   Collective on PetscPartitioner
493 
494   Input Parameter:
495 . part - the PetscPartitioner object to set options for
496 
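  Options Database Keys:
+ -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
- -petscpartitioner_view - Views the partitioner object
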
497   Level: developer
498 
499 .seealso: PetscPartitionerView()
500 @*/
501 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
502 {
503   const char    *defaultType;
504   char           name[256];
505   PetscBool      flg;
506   PetscErrorCode ierr;
507 
508   PetscFunctionBegin;
509   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
510   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
511   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
512   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
513   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
514   if (flg) {
515     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
516   } else if (!((PetscObject) part)->type_name) {
517     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
518   }
519   if (part->ops->setfromoptions) {
520     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
521   }
522   /* process any options handlers added with PetscObjectAddOptionsHandler() */
523   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
524   ierr = PetscOptionsEnd();CHKERRQ(ierr);
525   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
526   PetscFunctionReturn(0);
527 }
528 
529 /*@C
530   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
531 
532   Collective on PetscPartitioner
533 
534   Input Parameter:
535 . part - the PetscPartitioner object to set up
536 
537   Level: developer
538 
539 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
540 @*/
541 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
542 {
543   PetscErrorCode ierr;
544 
545   PetscFunctionBegin;
546   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
547   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
548   PetscFunctionReturn(0);
549 }
550 
551 /*@
552   PetscPartitionerDestroy - Destroys a PetscPartitioner object
553 
554   Collective on PetscPartitioner
555 
556   Input Parameter:
557 . part - the PetscPartitioner object to destroy
558 
559   Level: developer
560 
561 .seealso: PetscPartitionerView()
562 @*/
563 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
564 {
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   if (!*part) PetscFunctionReturn(0);
569   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
570 
571   if (--((PetscObject)(*part))->refct > 0) {*part = NULL; PetscFunctionReturn(0);}
572   ((PetscObject) (*part))->refct = 0;
573 
574   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
575   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
576   PetscFunctionReturn(0);
577 }
578 
579 /*@
580   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
581 
582   Collective on MPI_Comm
583 
584   Input Parameter:
585 . comm - The communicator for the PetscPartitioner object
586 
587   Output Parameter:
588 . part - The PetscPartitioner object
589 
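  Sample usage (a minimal sketch):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    /* ... partition a mesh with PetscPartitionerPartition() ... */
    PetscPartitionerDestroy(&part);
.ve
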
590   Level: beginner
591 
592 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
593 @*/
594 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
595 {
596   PetscPartitioner p;
597   const char       *partitionerType = NULL;
598   PetscErrorCode   ierr;
599 
600   PetscFunctionBegin;
601   PetscValidPointer(part, 2);
602   *part = NULL;
603   ierr = DMInitializePackage();CHKERRQ(ierr);
604 
605   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
606   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
607   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
608 
609   p->edgeCut = 0;
610   p->balance = 0.0;
611 
612   *part = p;
613   PetscFunctionReturn(0);
614 }
615 
616 /*@
617   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
618 
619   Collective on DM
620 
621   Input Parameters:
622 + part    - The PetscPartitioner
623 - dm      - The mesh DM
624 
625   Output Parameters:
626 + partSection     - The PetscSection giving the division of points by partition
627 - partition       - The list of points by partition
628 
629   Note: Instead of cells, points at a given height can be partitioned by calling PetscPartitionerSetPointHeight()
630 
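  Sample usage (a minimal sketch; error checking is omitted and part and dm are assumed to exist):
.vb
    PetscSection partSection;
    IS           partition;

    PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    /* ... distribute the mesh using partSection and partition ... */
    ISDestroy(&partition);
    PetscSectionDestroy(&partSection);
.ve
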
631   Level: developer
632 
633 .seealso: DMPlexDistribute(), PetscPartitionerSetPointHeight(), PetscPartitionerCreate()
634 @*/
635 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
636 {
637   PetscMPIInt    size;
638   PetscErrorCode ierr;
639 
640   PetscFunctionBegin;
641   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
642   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
643   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 3);
644   PetscValidPointer(partition, 4);
645   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
646   if (size == 1) {
647     PetscInt *points;
648     PetscInt  cStart, cEnd, c;
649 
650     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
651     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
652     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
653     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
654     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
655     for (c = cStart; c < cEnd; ++c) points[c-cStart] = c;
656     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
657     PetscFunctionReturn(0);
658   }
659   if (part->height == 0) {
660     PetscInt numVertices;
661     PetscInt *start     = NULL;
662     PetscInt *adjacency = NULL;
663     IS       globalNumbering;
664 
665     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
666     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
667     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
668     ierr = PetscFree(start);CHKERRQ(ierr);
669     ierr = PetscFree(adjacency);CHKERRQ(ierr);
670     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
671       const PetscInt *globalNum;
672       const PetscInt *partIdx;
673       PetscInt *map, cStart, cEnd;
674       PetscInt *adjusted, i, localSize, offset;
675       IS    newPartition;
676 
677       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
678       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
679       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
680       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
681       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
682       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
683       for (i = cStart, offset = 0; i < cEnd; i++) {
684         if (globalNum[i - cStart] >= 0) map[offset++] = i;
685       }
686       for (i = 0; i < localSize; i++) {
687         adjusted[i] = map[partIdx[i]];
688       }
689       ierr = PetscFree(map);CHKERRQ(ierr);
690       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
691       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
692       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
693       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
694       ierr = ISDestroy(partition);CHKERRQ(ierr);
695       *partition = newPartition;
696     }
697   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
698   PetscFunctionReturn(0);
699 
700 }
701 
702 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
703 {
704   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
705   PetscErrorCode          ierr;
706 
707   PetscFunctionBegin;
708   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
709   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
710   ierr = PetscFree(p);CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
715 {
716   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
717   PetscViewerFormat       format;
718   PetscErrorCode          ierr;
719 
720   PetscFunctionBegin;
721   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
722   ierr = PetscViewerASCIIPrintf(viewer, "Shell Graph Partitioner:\n");CHKERRQ(ierr);
723   if (p->random) {
724     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
725     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
726     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
727   }
728   PetscFunctionReturn(0);
729 }
730 
731 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
732 {
733   PetscBool      iascii;
734   PetscErrorCode ierr;
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
738   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
739   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
740   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
741   PetscFunctionReturn(0);
742 }
743 
744 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
745 {
746   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
747   PetscErrorCode          ierr;
748 
749   PetscFunctionBegin;
750   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
751   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
752   ierr = PetscOptionsTail();CHKERRQ(ierr);
753   PetscFunctionReturn(0);
754 }
755 
756 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
757 {
758   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
759   PetscInt                np;
760   PetscErrorCode          ierr;
761 
762   PetscFunctionBegin;
763   if (p->random) {
764     PetscRandom r;
765     PetscInt   *sizes, *points, v, p;
766     PetscMPIInt rank;
767 
768     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
769     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
770     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
771     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
772     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
773     for (v = 0; v < numVertices; ++v) {points[v] = v;}
774     for (p = 0; p < nparts; ++p) {sizes[p] = numVertices/nparts + (PetscInt) (p < numVertices % nparts);}
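    /* Fisher-Yates shuffle: at step v, swap points[v] with a uniformly chosen points[w], w <= v */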
775     for (v = numVertices-1; v > 0; --v) {
776       PetscReal val;
777       PetscInt  w, tmp;
778 
779       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
780       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
781       w    = PetscFloorReal(val);
782       tmp       = points[v];
783       points[v] = points[w];
784       points[w] = tmp;
785     }
786     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
787     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
788     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
789   }
790   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
791   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
792   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %D != configured partitions %D", nparts, np);
793   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
794   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %D != configured vertices %D", numVertices, np);
795   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
796   *partition = p->partition;
797   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
798   PetscFunctionReturn(0);
799 }
800 
801 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
802 {
803   PetscFunctionBegin;
804   part->ops->view           = PetscPartitionerView_Shell;
805   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
806   part->ops->destroy        = PetscPartitionerDestroy_Shell;
807   part->ops->partition      = PetscPartitionerPartition_Shell;
808   PetscFunctionReturn(0);
809 }
810 
811 /*MC
812   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
813 
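  Options Database Key:
. -petscpartitioner_shell_random - Use a random partition (see PetscPartitionerShellSetRandom())
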
814   Level: intermediate
815 
816 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
817 M*/
818 
819 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
820 {
821   PetscPartitioner_Shell *p;
822   PetscErrorCode          ierr;
823 
824   PetscFunctionBegin;
825   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
826   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
827   part->data = p;
828 
829   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
830   p->random = PETSC_FALSE;
831   PetscFunctionReturn(0);
832 }
833 
834 /*@C
835   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
836 
837   Collective on Part
838 
839   Input Parameters:
840 + part     - The PetscPartitioner
841 . size     - The number of partitions
842 . sizes    - array of size size (or NULL) providing the number of points in each partition
843 - points   - array of size sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
844 
845   Level: developer
846 
847   Notes:
848 
849     It is safe to free the sizes and points arrays after use in this routine.
850 
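  Sample usage (a minimal sketch placing four points into two partitions):
.vb
    const PetscInt sizes[2]  = {2, 2};
    const PetscInt points[4] = {0, 2, 1, 3}; /* partition 0 gets points 0 and 2; partition 1 gets points 1 and 3 */

    PetscPartitionerShellSetPartition(part, 2, sizes, points);
.ve
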
851 .seealso: DMPlexDistribute(), PetscPartitionerCreate()
852 @*/
853 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
854 {
855   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
856   PetscInt                proc, numPoints;
857   PetscErrorCode          ierr;
858 
859   PetscFunctionBegin;
860   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
861   if (sizes)  {PetscValidPointer(sizes, 3);}
862   if (points) {PetscValidPointer(points, 4);}
863   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
864   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
865   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
866   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
867   if (sizes) {
868     for (proc = 0; proc < size; ++proc) {
869       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
870     }
871   }
872   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
873   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
874   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
875   PetscFunctionReturn(0);
876 }
877 
878 /*@
879   PetscPartitionerShellSetRandom - Set the flag to use a random partition
880 
881   Collective on Part
882 
883   Input Parameters:
884 + part   - The PetscPartitioner
885 - random - The flag to use a random partition
886 
887   Level: intermediate
888 
889 .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
890 @*/
891 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
892 {
893   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
894 
895   PetscFunctionBegin;
896   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
897   p->random = random;
898   PetscFunctionReturn(0);
899 }
900 
901 /*@
902   PetscPartitionerShellGetRandom - Get the flag to use a random partition
903 
904   Not Collective
905 
906   Input Parameter:
907 . part   - The PetscPartitioner
908 
909   Output Parameter:
910 . random - The flag to use a random partition
911 
912   Level: intermediate
913 
914 .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
915 @*/
916 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
917 {
918   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
919 
920   PetscFunctionBegin;
921   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
922   PetscValidPointer(random, 2);
923   *random = p->random;
924   PetscFunctionReturn(0);
925 }
926 
927 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
928 {
929   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
930   PetscErrorCode          ierr;
931 
932   PetscFunctionBegin;
933   ierr = PetscFree(p);CHKERRQ(ierr);
934   PetscFunctionReturn(0);
935 }
936 
937 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
938 {
939   PetscViewerFormat format;
940   PetscErrorCode    ierr;
941 
942   PetscFunctionBegin;
943   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
944   ierr = PetscViewerASCIIPrintf(viewer, "Simple Graph Partitioner:\n");CHKERRQ(ierr);
945   PetscFunctionReturn(0);
946 }
947 
948 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
949 {
950   PetscBool      iascii;
951   PetscErrorCode ierr;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
955   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
956   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
957   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
958   PetscFunctionReturn(0);
959 }
960 
961 static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
962 {
963   MPI_Comm       comm;
964   PetscInt       np;
965   PetscMPIInt    size;
966   PetscErrorCode ierr;
967 
968   PetscFunctionBegin;
969   comm = PetscObjectComm((PetscObject)dm);
970   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
971   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
972   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
973   if (size == 1) {
974     for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
975   }
976   else {
977     PetscMPIInt rank;
978     PetscInt nvGlobal, *offsets, myFirst, myLast;
979 
980     ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
981     offsets[0] = 0;
982     ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
983     for (np = 2; np <= size; np++) {
984       offsets[np] += offsets[np-1];
985     }
986     nvGlobal = offsets[size];
987     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
988     myFirst = offsets[rank];
989     myLast  = offsets[rank + 1] - 1;
990     ierr = PetscFree(offsets);CHKERRQ(ierr);
991     if (numVertices) {
992       PetscInt firstPart = 0, firstLargePart = 0;
993       PetscInt lastPart = 0, lastLargePart = 0;
994       PetscInt rem = nvGlobal % nparts;
995       PetscInt pSmall = nvGlobal/nparts;
996       PetscInt pBig = nvGlobal/nparts + 1;
998 
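      /* With rem = nvGlobal % nparts leftover vertices, parts 0..rem-1 receive pBig vertices each and the rest receive pSmall */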
999       if (rem) {
1000         firstLargePart = myFirst / pBig;
1001         lastLargePart  = myLast  / pBig;
1002 
1003         if (firstLargePart < rem) {
1004           firstPart = firstLargePart;
1005         }
1006         else {
1007           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1008         }
1009         if (lastLargePart < rem) {
1010           lastPart = lastLargePart;
1011         }
1012         else {
1013           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1014         }
1015       }
1016       else {
1017         firstPart = myFirst / (nvGlobal/nparts);
1018         lastPart  = myLast  / (nvGlobal/nparts);
1019       }
1020 
1021       for (np = firstPart; np <= lastPart; np++) {
1022         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1023         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
1024 
1025         PartStart = PetscMax(PartStart,myFirst);
1026         PartEnd   = PetscMin(PartEnd,myLast+1);
1027         ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
1028       }
1029     }
1030   }
1031   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1032   PetscFunctionReturn(0);
1033 }
1034 
1035 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1036 {
1037   PetscFunctionBegin;
1038   part->ops->view      = PetscPartitionerView_Simple;
1039   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1040   part->ops->partition = PetscPartitionerPartition_Simple;
1041   PetscFunctionReturn(0);
1042 }
1043 
1044 /*MC
1045   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1046 
1047   Level: intermediate
1048 
1049 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1050 M*/
1051 
1052 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1053 {
1054   PetscPartitioner_Simple *p;
1055   PetscErrorCode           ierr;
1056 
1057   PetscFunctionBegin;
1058   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1059   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1060   part->data = p;
1061 
1062   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1063   PetscFunctionReturn(0);
1064 }
1065 
1066 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1067 {
1068   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1069   PetscErrorCode          ierr;
1070 
1071   PetscFunctionBegin;
1072   ierr = PetscFree(p);CHKERRQ(ierr);
1073   PetscFunctionReturn(0);
1074 }
1075 
1076 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1077 {
1078   PetscViewerFormat format;
1079   PetscErrorCode    ierr;
1080 
1081   PetscFunctionBegin;
1082   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1083   ierr = PetscViewerASCIIPrintf(viewer, "Gather Graph Partitioner:\n");CHKERRQ(ierr);
1084   PetscFunctionReturn(0);
1085 }
1086 
1087 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1088 {
1089   PetscBool      iascii;
1090   PetscErrorCode ierr;
1091 
1092   PetscFunctionBegin;
1093   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1094   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1095   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1096   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1097   PetscFunctionReturn(0);
1098 }
1099 
1100 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1101 {
1102   PetscInt       np;
1103   PetscErrorCode ierr;
1104 
1105   PetscFunctionBegin;
1106   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1107   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1108   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1109   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1110   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1111   PetscFunctionReturn(0);
1112 }
1113 
1114 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1115 {
1116   PetscFunctionBegin;
1117   part->ops->view      = PetscPartitionerView_Gather;
1118   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1119   part->ops->partition = PetscPartitionerPartition_Gather;
1120   PetscFunctionReturn(0);
1121 }
1122 
1123 /*MC
1124   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1125 
1126   Level: intermediate
1127 
1128 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1129 M*/
1130 
1131 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1132 {
1133   PetscPartitioner_Gather *p;
1134   PetscErrorCode           ierr;
1135 
1136   PetscFunctionBegin;
1137   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1138   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1139   part->data = p;
1140 
1141   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 
1146 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1147 {
1148   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1149   PetscErrorCode          ierr;
1150 
1151   PetscFunctionBegin;
1152   ierr = PetscFree(p);CHKERRQ(ierr);
1153   PetscFunctionReturn(0);
1154 }
1155 
1156 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1157 {
1158   PetscViewerFormat format;
1159   PetscErrorCode    ierr;
1160 
1161   PetscFunctionBegin;
1162   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1163   ierr = PetscViewerASCIIPrintf(viewer, "Chaco Graph Partitioner:\n");CHKERRQ(ierr);
1164   PetscFunctionReturn(0);
1165 }
1166 
1167 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1168 {
1169   PetscBool      iascii;
1170   PetscErrorCode ierr;
1171 
1172   PetscFunctionBegin;
1173   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1174   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1175   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1176   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1177   PetscFunctionReturn(0);
1178 }
1179 
1180 #if defined(PETSC_HAVE_CHACO)
1181 #if defined(PETSC_HAVE_UNISTD_H)
1182 #include <unistd.h>
1183 #endif
1184 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1185 #include <chaco.h>
1186 #else
1187 /* Older versions of Chaco do not have an include file */
1188 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1189                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1190                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1191                        int mesh_dims[3], double *goal, int global_method, int local_method,
1192                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1193 #endif
1194 extern int FREE_GRAPH;
1195 #endif
1196 
1197 static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1198 {
1199 #if defined(PETSC_HAVE_CHACO)
1200   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1201   MPI_Comm       comm;
1202   int            nvtxs          = numVertices; /* number of vertices in full graph */
1203   int           *vwgts          = NULL;   /* weights for all vertices */
1204   float         *ewgts          = NULL;   /* weights for all edges */
1205   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1206   char          *outassignname  = NULL;   /*  name of assignment output file */
1207   char          *outfilename    = NULL;   /* output file name */
1208   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1209   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1210   int            mesh_dims[3];            /* dimensions of mesh of processors */
1211   double        *goal          = NULL;    /* desired set sizes for each set */
1212   int            global_method = 1;       /* global partitioning algorithm */
1213   int            local_method  = 1;       /* local partitioning algorithm */
1214   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1215   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1216   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1217   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1218   long           seed          = 123636512; /* for random graph mutations */
1219 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1220   int           *assignment;              /* Output partition */
1221 #else
1222   short int     *assignment;              /* Output partition */
1223 #endif
1224   int            fd_stdout, fd_pipe[2];
1225   PetscInt      *points;
1226   int            i, v, p;
1227   PetscErrorCode ierr;
1228 
1229   PetscFunctionBegin;
1230   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1231 #if defined (PETSC_USE_DEBUG)
1232   {
1233     int ival,isum;
1234     PetscBool distributed;
1235 
1236     ival = (numVertices > 0);
1237     ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
1238     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1239     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1240   }
1241 #endif
1242   if (!numVertices) {
1243     ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1244     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1245     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1246     PetscFunctionReturn(0);
1247   }
1248   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
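  /* Chaco expects 1-based vertex numbers, so shift the adjacency entries up by one (shifted back below) */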
1249   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];
1250 
1251   if (global_method == INERTIAL_METHOD) {
1252     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1253     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1254   }
1255   mesh_dims[0] = nparts;
1256   mesh_dims[1] = 1;
1257   mesh_dims[2] = 1;
1258   ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
1259   /* Chaco outputs to stdout. We redirect this to a buffer. */
1260   /* TODO: check error codes for UNIX calls */
1261 #if defined(PETSC_HAVE_UNISTD_H)
1262   {
1263     int piperet;
1264     piperet = pipe(fd_pipe);
1265     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1266     fd_stdout = dup(1);
1267     close(1);
1268     dup2(fd_pipe[1], 1);
1269   }
1270 #endif
1271   ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1272                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1273                    vmax, ndims, eigtol, seed);
1274 #if defined(PETSC_HAVE_UNISTD_H)
1275   {
1276     char msgLog[10000];
1277     int  count;
1278 
1279     fflush(stdout);
1280     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1281     if (count < 0) count = 0;
1282     msgLog[count] = 0;
1283     close(1);
1284     dup2(fd_stdout, 1);
1285     close(fd_stdout);
1286     close(fd_pipe[0]);
1287     close(fd_pipe[1]);
1288     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1289   }
1290 #else
1291   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1292 #endif
1293   /* Convert to PetscSection+IS */
1294   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1295   for (v = 0; v < nvtxs; ++v) {
1296     ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
1297   }
1298   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1299   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1300   for (p = 0, i = 0; p < nparts; ++p) {
1301     for (v = 0; v < nvtxs; ++v) {
1302       if (assignment[v] == p) points[i++] = v;
1303     }
1304   }
1305   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1306   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1307   if (global_method == INERTIAL_METHOD) {
1308     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1309   }
1310   ierr = PetscFree(assignment);CHKERRQ(ierr);
1311   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1312   PetscFunctionReturn(0);
1313 #else
1314   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1315 #endif
1316 }
1317 
1318 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1319 {
1320   PetscFunctionBegin;
1321   part->ops->view      = PetscPartitionerView_Chaco;
1322   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1323   part->ops->partition = PetscPartitionerPartition_Chaco;
1324   PetscFunctionReturn(0);
1325 }
1326 
1327 /*MC
1328   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1329 
1330   Level: intermediate
1331 
1332 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1333 M*/
1334 
1335 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1336 {
1337   PetscPartitioner_Chaco *p;
1338   PetscErrorCode          ierr;
1339 
1340   PetscFunctionBegin;
1341   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1342   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1343   part->data = p;
1344 
1345   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1346   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1347   PetscFunctionReturn(0);
1348 }
1349 
1350 static const char *ptypes[] = {"kway", "rb"};
1351 
1352 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1353 {
1354   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1355   PetscErrorCode             ierr;
1356 
1357   PetscFunctionBegin;
1358   ierr = PetscFree(p);CHKERRQ(ierr);
1359   PetscFunctionReturn(0);
1360 }
1361 
1362 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1363 {
1364   PetscViewerFormat format;
1365   PetscErrorCode    ierr;
1366 
1367   PetscFunctionBegin;
1368   ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
1369   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis Graph Partitioner:\n");CHKERRQ(ierr);
1370   PetscFunctionReturn(0);
1371 }
1372 
1373 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1374 {
1375   PetscBool      iascii;
1376   PetscErrorCode ierr;
1377 
1378   PetscFunctionBegin;
1379   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1380   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1381   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1382   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1383   PetscFunctionReturn(0);
1384 }
1385 
1386 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1387 {
1388   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1389   PetscErrorCode            ierr;
1390 
1391   PetscFunctionBegin;
1392   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1393   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1394   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1395   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1396   ierr = PetscOptionsTail();CHKERRQ(ierr);
1397   PetscFunctionReturn(0);
1398 }
1399 
1400 #if defined(PETSC_HAVE_PARMETIS)
1401 #include <parmetis.h>
1402 #endif
1403 
1404 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1405 {
1406 #if defined(PETSC_HAVE_PARMETIS)
1407   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1408   MPI_Comm       comm;
1409   PetscSection   section;
1410   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1411   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1412   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1413   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1414   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1415   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1416   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1417   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1418   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1419   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1420   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1421   real_t        *ubvec;                     /* The imbalance tolerance for each vertex weight */
1422   PetscInt       options[64];               /* Options */
1423   /* Outputs */
1424   PetscInt       v, i, *assignment, *points;
1425   PetscMPIInt    size, rank, p;
1426   PetscErrorCode ierr;
1427 
1428   PetscFunctionBegin;
1429   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1430   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1431   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1432   /* Calculate vertex distribution */
1433   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1434   vtxdist[0] = 0;
1435   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
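  /* Prefix sum: vtxdist[p] becomes the global index of the first vertex owned by process p */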
1436   for (p = 2; p <= size; ++p) {
1437     vtxdist[p] += vtxdist[p-1];
1438   }
1439   /* Calculate partition weights */
1440   for (p = 0; p < nparts; ++p) {
1441     tpwgts[p] = 1.0/nparts;
1442   }
1443   ubvec[0] = pm->imbalanceRatio;
1444   /* Weight cells by dofs on cell by default */
1445   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1446   if (section) {
1447     PetscInt cStart, cEnd, dof;
1448 
1449     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1450     for (v = cStart; v < cEnd; ++v) {
1451       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1452       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1453       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1454       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1455     }
1456   } else {
1457     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1458   }
1459   wgtflag |= 2; /* have weights on graph vertices */
1460 
1461   if (nparts == 1) {
1462     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1463   } else {
1464     for (p = 0; p < size && !vtxdist[p+1]; ++p); /* test p < size before reading vtxdist[p+1] */
1465     if (p == size || vtxdist[p+1] == vtxdist[size]) { /* all vertices live on a single process */
1466       if (rank == p) {
1467         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1468         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1469         if (metis_ptype == 1) {
1470           PetscStackPush("METIS_PartGraphRecursive");
1471           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1472           PetscStackPop;
1473           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1474         } else {
1475           /*
1476            It would be nice to activate the two options below, but they would need some actual testing.
1477            - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
1478            - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
1479           */
1480           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1481           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1482           PetscStackPush("METIS_PartGraphKway");
1483           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1484           PetscStackPop;
1485           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1486         }
1487       }
1488     } else {
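      /* options[0] == 1 tells ParMETIS to use the user-supplied values that follow instead of its defaults */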
1489       options[0] = 1;
1490       options[1] = pm->debugFlag;
1491       PetscStackPush("ParMETIS_V3_PartKway");
1492       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1493       PetscStackPop;
1494       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1495     }
1496   }
1497   /* Convert to PetscSection+IS */
1498   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1499   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1500   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1501   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1502   for (p = 0, i = 0; p < nparts; ++p) {
1503     for (v = 0; v < nvtxs; ++v) {
1504       if (assignment[v] == p) points[i++] = v;
1505     }
1506   }
1507   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1508   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1509   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1510   PetscFunctionReturn(0);
1511 #else
1512   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1513 #endif
1514 }
1515 
1516 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1517 {
1518   PetscFunctionBegin;
1519   part->ops->view           = PetscPartitionerView_ParMetis;
1520   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1521   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1522   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1523   PetscFunctionReturn(0);
1524 }
1525 
1526 /*MC
1527   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1528 
1529   Level: intermediate
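
  Options Database Keys (as registered in PetscPartitionerSetFromOptions_ParMetis() above):
+ -petscpartitioner_parmetis_type <kway,rb> - Partitioning method
. -petscpartitioner_parmetis_imbalance_ratio <ratio> - Load imbalance ratio limit
- -petscpartitioner_parmetis_debug <flag> - Debugging flag

  Example usage (a minimal sketch; part is assumed to be a PetscPartitioner):
.vb
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
  ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
.ve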
1530 
1531 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1532 M*/
1533 
1534 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1535 {
1536   PetscPartitioner_ParMetis *p;
1537   PetscErrorCode          ierr;
1538 
1539   PetscFunctionBegin;
1540   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1541   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1542   part->data = p;
1543 
1544   p->ptype          = 0;
1545   p->imbalanceRatio = 1.05;
1546   p->debugFlag      = 0;
1547 
1548   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1549   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1550   PetscFunctionReturn(0);
1551 }
1552 
1553 
1554 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1555 const char PTScotchPartitionerCitation[] =
1556   "@article{PTSCOTCH,\n"
1557   "  author  = {C. Chevalier and F. Pellegrini},\n"
1558   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1559   "  journal = {Parallel Computing},\n"
1560   "  volume  = {34},\n"
1561   "  number  = {6},\n"
1562   "  pages   = {318--331},\n"
1563   "  year    = {2008},\n"
1564   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1565   "}\n";
1566 
1567 typedef struct {
1568   PetscInt  strategy;
1569   PetscReal imbalance;
1570 } PetscPartitioner_PTScotch;
1571 
1572 static const char *const
1573 PTScotchStrategyList[] = {
1574   "DEFAULT",
1575   "QUALITY",
1576   "SPEED",
1577   "BALANCE",
1578   "SAFETY",
1579   "SCALABILITY",
1580   "RECURSIVE",
1581   "REMAP"
1582 };
1583 
1584 #if defined(PETSC_HAVE_PTSCOTCH)
1585 
1586 EXTERN_C_BEGIN
1587 #include <ptscotch.h>
1588 EXTERN_C_END
1589 
1590 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1591 
1592 static int PTScotch_Strategy(PetscInt strategy)
1593 {
1594   switch (strategy) {
1595   case  0: return SCOTCH_STRATDEFAULT;
1596   case  1: return SCOTCH_STRATQUALITY;
1597   case  2: return SCOTCH_STRATSPEED;
1598   case  3: return SCOTCH_STRATBALANCE;
1599   case  4: return SCOTCH_STRATSAFETY;
1600   case  5: return SCOTCH_STRATSCALABILITY;
1601   case  6: return SCOTCH_STRATRECURSIVE;
1602   case  7: return SCOTCH_STRATREMAP;
1603   default: return SCOTCH_STRATDEFAULT;
1604   }
1605 }
1606 
1607 
1608 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1609                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1610 {
1611   SCOTCH_Graph   grafdat;
1612   SCOTCH_Strat   stradat;
1613   SCOTCH_Num     vertnbr = n;
1614   SCOTCH_Num     edgenbr = xadj[n];
1615   SCOTCH_Num*    velotab = vtxwgt;
1616   SCOTCH_Num*    edlotab = adjwgt;
1617   SCOTCH_Num     flagval = strategy;
1618   double         kbalval = imbalance;
1619   PetscErrorCode ierr;
1620 
1621   PetscFunctionBegin;
1622   {
1623     PetscBool flg = PETSC_TRUE;
1624     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1625     if (!flg) velotab = NULL;
1626   }
1627   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1628   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1629   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1630   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1631 #if defined(PETSC_USE_DEBUG)
1632   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1633 #endif
1634   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1635   SCOTCH_stratExit(&stradat);
1636   SCOTCH_graphExit(&grafdat);
1637   PetscFunctionReturn(0);
1638 }
1639 
1640 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1641                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1642 {
1643   PetscMPIInt     procglbnbr;
1644   PetscMPIInt     proclocnum;
1645   SCOTCH_Arch     archdat;
1646   SCOTCH_Dgraph   grafdat;
1647   SCOTCH_Dmapping mappdat;
1648   SCOTCH_Strat    stradat;
1649   SCOTCH_Num      vertlocnbr;
1650   SCOTCH_Num      edgelocnbr;
1651   SCOTCH_Num*     veloloctab = vtxwgt;
1652   SCOTCH_Num*     edloloctab = adjwgt;
1653   SCOTCH_Num      flagval = strategy;
1654   double          kbalval = imbalance;
1655   PetscErrorCode  ierr;
1656 
1657   PetscFunctionBegin;
1658   {
1659     PetscBool flg = PETSC_TRUE;
1660     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1661     if (!flg) veloloctab = NULL;
1662   }
1663   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1664   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1665   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1666   edgelocnbr = xadj[vertlocnbr];
1667 
1668   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1669   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1670 #if defined(PETSC_USE_DEBUG)
1671   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1672 #endif
1673   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1674   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1675   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1676   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1677   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1678   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1679   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1680   SCOTCH_archExit(&archdat);
1681   SCOTCH_stratExit(&stradat);
1682   SCOTCH_dgraphExit(&grafdat);
1683   PetscFunctionReturn(0);
1684 }
1685 
1686 #endif /* PETSC_HAVE_PTSCOTCH */
1687 
1688 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1689 {
1690   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1691   PetscErrorCode             ierr;
1692 
1693   PetscFunctionBegin;
1694   ierr = PetscFree(p);CHKERRQ(ierr);
1695   PetscFunctionReturn(0);
1696 }
1697 
1698 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1699 {
1700   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1701   PetscErrorCode            ierr;
1702 
1703   PetscFunctionBegin;
1704   ierr = PetscViewerASCIIPrintf(viewer, "PTScotch Graph Partitioner:\n");CHKERRQ(ierr);
1705   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1706   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1707   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1708   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1709   PetscFunctionReturn(0);
1710 }
1711 
1712 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1713 {
1714   PetscBool      iascii;
1715   PetscErrorCode ierr;
1716 
1717   PetscFunctionBegin;
1718   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1719   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1720   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1721   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1722   PetscFunctionReturn(0);
1723 }
1724 
1725 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1726 {
1727   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1728   const char *const         *slist = PTScotchStrategyList;
1729   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1730   PetscBool                 flag;
1731   PetscErrorCode            ierr;
1732 
1733   PetscFunctionBegin;
1734   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1735   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1736   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1737   ierr = PetscOptionsTail();CHKERRQ(ierr);
1738   PetscFunctionReturn(0);
1739 }
1740 
1741 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1742 {
1743 #if defined(PETSC_HAVE_PTSCOTCH)
1744   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1745   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1746   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1747   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1748   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1749   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1750   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1751   PetscInt       v, i, *assignment, *points;
1752   PetscMPIInt    size, rank, p;
1753   PetscErrorCode ierr;
1754 
1755   PetscFunctionBegin;
1756   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1757   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1758   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr); /* vtxdist is indexed up to size below */
1759 
1760   /* Calculate vertex distribution */
1761   vtxdist[0] = 0;
1762   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1763   for (p = 2; p <= size; ++p) {
1764     vtxdist[p] += vtxdist[p-1];
1765   }
1766 
1767   if (nparts == 1) {
1768     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1769   } else {
1770     PetscSection section;
1771     /* Weight cells by dofs on cell by default */
1772     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1773     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1774     if (section) {
1775       PetscInt vStart, vEnd, dof;
1776       ierr = DMPlexGetHeightStratum(dm, 0, &vStart, &vEnd);CHKERRQ(ierr);
1777       for (v = vStart; v < vEnd; ++v) {
1778         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1779         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1780         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1781         if (v-vStart < numVertices) vwgt[v-vStart] = PetscMax(dof, 1);
1782       }
1783     } else {
1784       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1785     }
1786     {
1787       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1788       int                       strat = PTScotch_Strategy(pts->strategy);
1789       double                    imbal = (double)pts->imbalance;
1790 
1791       for (p = 0; p < size && !vtxdist[p+1]; ++p); /* test p < size before reading vtxdist[p+1] */
1792       if (p == size || vtxdist[p+1] == vtxdist[size]) { /* all vertices live on a single process */
1793         if (rank == p) {
1794           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1795         }
1796       } else {
1797         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1798       }
1799     }
1800     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1801   }
1802 
1803   /* Convert to PetscSection+IS */
1804   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1805   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1806   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1807   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1808   for (p = 0, i = 0; p < nparts; ++p) {
1809     for (v = 0; v < nvtxs; ++v) {
1810       if (assignment[v] == p) points[i++] = v;
1811     }
1812   }
1813   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1814   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1815 
1816   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1817   PetscFunctionReturn(0);
1818 #else
1819   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1820 #endif
1821 }
1822 
1823 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1824 {
1825   PetscFunctionBegin;
1826   part->ops->view           = PetscPartitionerView_PTScotch;
1827   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1828   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1829   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1830   PetscFunctionReturn(0);
1831 }
1832 
1833 /*MC
1834   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1835 
1836   Level: intermediate
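
  Options Database Keys (as registered in PetscPartitionerSetFromOptions_PTScotch() above):
+ -petscpartitioner_ptscotch_strategy <strategy> - Partitioning strategy (DEFAULT, QUALITY, SPEED, BALANCE, SAFETY, SCALABILITY, RECURSIVE, or REMAP)
- -petscpartitioner_ptscotch_imbalance <ratio> - Load imbalance ratio

  Example usage (a minimal sketch; part is assumed to be a PetscPartitioner):
.vb
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);CHKERRQ(ierr);
  ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
.ve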
1837 
1838 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1839 M*/
1840 
1841 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1842 {
1843   PetscPartitioner_PTScotch *p;
1844   PetscErrorCode          ierr;
1845 
1846   PetscFunctionBegin;
1847   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1848   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1849   part->data = p;
1850 
1851   p->strategy  = 0;
1852   p->imbalance = 0.01;
1853 
1854   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1855   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1856   PetscFunctionReturn(0);
1857 }
1858 
1859 
1860 /*@
1861   DMPlexGetPartitioner - Get the mesh partitioner
1862 
1863   Not collective
1864 
1865   Input Parameter:
1866 . dm - The DM
1867 
1868   Output Parameter:
1869 . part - The PetscPartitioner
1870 
1871   Level: developer
1872 
1873   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
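
  Example usage (a minimal sketch; dm is assumed to be a DMPlex):
.vb
  PetscPartitioner part;

  ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
  /* do not destroy part: the reference is borrowed from dm */
.ve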
1874 
1875 .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1876 @*/
1877 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1878 {
1879   DM_Plex       *mesh = (DM_Plex *) dm->data;
1880 
1881   PetscFunctionBegin;
1882   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1883   PetscValidPointer(part, 2);
1884   *part = mesh->partitioner;
1885   PetscFunctionReturn(0);
1886 }
1887 
1888 /*@
1889   DMPlexSetPartitioner - Set the mesh partitioner
1890 
1891   Logically collective on dm and part
1892 
1893   Input Parameters:
1894 + dm - The DM
1895 - part - The partitioner
1896 
1897   Level: developer
1898 
1899   Note: Any existing PetscPartitioner will be destroyed.
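
  Example usage (a minimal sketch; dm is assumed to be a DMPlex on comm):
.vb
  PetscPartitioner part;

  ierr = PetscPartitionerCreate(comm, &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);CHKERRQ(ierr);
  ierr = DMPlexSetPartitioner(dm, part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&part);CHKERRQ(ierr); /* dm keeps its own reference */
.ve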
1900 
1901 .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1902 @*/
1903 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1904 {
1905   DM_Plex       *mesh = (DM_Plex *) dm->data;
1906   PetscErrorCode ierr;
1907 
1908   PetscFunctionBegin;
1909   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1910   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1911   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1912   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1913   mesh->partitioner = part;
1914   PetscFunctionReturn(0);
1915 }
1916 
1917 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
1918 {
1919   PetscErrorCode ierr;
1920 
1921   PetscFunctionBegin;
1922   if (up) {
1923     PetscInt parent;
1924 
1925     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1926     if (parent != point) {
1927       PetscInt closureSize, *closure = NULL, i;
1928 
1929       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1930       for (i = 0; i < closureSize; i++) {
1931         PetscInt cpoint = closure[2*i];
1932 
1933         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
1934         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1935       }
1936       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1937     }
1938   }
1939   if (down) {
1940     PetscInt numChildren;
1941     const PetscInt *children;
1942 
1943     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1944     if (numChildren) {
1945       PetscInt i;
1946 
1947       for (i = 0; i < numChildren; i++) {
1948         PetscInt cpoint = children[i];
1949 
1950         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
1951         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1952       }
1953     }
1954   }
1955   PetscFunctionReturn(0);
1956 }
1957 
1958 /*@
1959   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1960 
1961   Input Parameters:
1962 + dm     - The DM
1963 - label  - DMLabel assigning ranks to remote roots
1964 
1965   Level: developer
1966 
1967 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1968 @*/
1969 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1970 {
1971   IS              rankIS,   pointIS;
1972   const PetscInt *ranks,   *points;
1973   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1974   PetscInt       *closure = NULL;
1975   DM_Plex        *mesh    = (DM_Plex *)dm->data;
1976   PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
1977   PetscErrorCode  ierr;
1978 
1979   PetscFunctionBegin;
1980   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1981   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1982   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1983 
1984   for (r = 0; r < numRanks; ++r) {
1985     const PetscInt rank = ranks[r];
1986     PetscHSetI     ht;
1987     PetscInt       nelems, *elems, off = 0;
1988 
1989     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1990     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1991     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1992     ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
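    /* Preallocate the hash set assuming roughly 16 closure points per labeled point (a heuristic) */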
1993     ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
1994     for (p = 0; p < numPoints; ++p) {
1995       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1996       for (c = 0; c < closureSize*2; c += 2) {
1997         ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
1998         if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
1999       }
2000     }
2001     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2002     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2003     ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
2004     ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
2005     ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
2006     ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
2007     ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
2008     ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
2009     ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
2010     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2011   }
2012 
2013   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
2014   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2015   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2016   PetscFunctionReturn(0);
2017 }
2018 
2019 /*@
2020   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2021 
2022   Input Parameters:
2023 + dm     - The DM
2024 - label  - DMLabel assigning ranks to remote roots
2025 
2026   Level: developer
2027 
2028 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2029 @*/
2030 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2031 {
2032   IS              rankIS,   pointIS;
2033   const PetscInt *ranks,   *points;
2034   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2035   PetscInt       *adj = NULL;
2036   PetscErrorCode  ierr;
2037 
2038   PetscFunctionBegin;
2039   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2040   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2041   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2042   for (r = 0; r < numRanks; ++r) {
2043     const PetscInt rank = ranks[r];
2044 
2045     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2046     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2047     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2048     for (p = 0; p < numPoints; ++p) {
2049       adjSize = PETSC_DETERMINE;
2050       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2051       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2052     }
2053     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2054     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2055   }
2056   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2057   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2058   ierr = PetscFree(adj);CHKERRQ(ierr);
2059   PetscFunctionReturn(0);
2060 }
2061 
2062 /*@
2063   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2064 
2065   Input Parameters:
2066 + dm     - The DM
2067 - label  - DMLabel assigning ranks to remote roots
2068 
2069   Level: developer
2070 
2071   Note: This is required when generating multi-level overlaps to capture
2072   overlap points from non-neighbouring partitions.
2073 
2074 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2075 @*/
2076 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2077 {
2078   MPI_Comm        comm;
2079   PetscMPIInt     rank;
2080   PetscSF         sfPoint;
2081   DMLabel         lblRoots, lblLeaves;
2082   IS              rankIS, pointIS;
2083   const PetscInt *ranks;
2084   PetscInt        numRanks, r;
2085   PetscErrorCode  ierr;
2086 
2087   PetscFunctionBegin;
2088   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2089   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2090   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2091   /* Pull point contributions from remote leaves into local roots */
2092   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2093   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2094   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2095   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2096   for (r = 0; r < numRanks; ++r) {
2097     const PetscInt remoteRank = ranks[r];
2098     if (remoteRank == rank) continue;
2099     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2100     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2101     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2102   }
2103   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2104   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2105   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2106   /* Push point contributions from roots into remote leaves */
2107   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2108   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2109   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2110   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2111   for (r = 0; r < numRanks; ++r) {
2112     const PetscInt remoteRank = ranks[r];
2113     if (remoteRank == rank) continue;
2114     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2115     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2116     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2117   }
2118   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2119   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2120   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2121   PetscFunctionReturn(0);
2122 }
2123 
2124 /*@
2125   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2126 
2127   Input Parameters:
2128 + dm        - The DM
2129 . rootLabel - DMLabel assigning ranks to local roots
2130 - processSF - A star forest mapping into the local index on each remote rank
2131 
2132   Output Parameter:
2133 . leafLabel - DMLabel assigning ranks to remote roots
2134 
2135   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2136   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
2137 
2138   Level: developer
2139 
2140 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2141 @*/
2142 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2143 {
2144   MPI_Comm           comm;
2145   PetscMPIInt        rank, size;
2146   PetscInt           p, n, numNeighbors, ssize, l, nleaves;
2147   PetscSF            sfPoint;
2148   PetscSFNode       *rootPoints, *leafPoints;
2149   PetscSection       rootSection, leafSection;
2150   const PetscSFNode *remote;
2151   const PetscInt    *local, *neighbors;
2152   IS                 valueIS;
2153   PetscErrorCode     ierr;
2154 
2155   PetscFunctionBegin;
2156   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2157   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2158   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2159   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2160 
2161   /* Convert to (point, rank) and use actual owners */
2162   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2163   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2164   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2165   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2166   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2167   for (n = 0; n < numNeighbors; ++n) {
2168     PetscInt numPoints;
2169 
2170     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2171     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2172   }
2173   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2174   ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
2175   ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
2176   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2177   for (n = 0; n < numNeighbors; ++n) {
2178     IS              pointIS;
2179     const PetscInt *points;
2180     PetscInt        off, numPoints, p;
2181 
2182     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2183     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2184     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2185     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2186     for (p = 0; p < numPoints; ++p) {
2187       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2188       else       {l = -1;}
2189       if (l >= 0) {rootPoints[off+p] = remote[l];}
2190       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2191     }
2192     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2193     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2194   }
2195   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2196   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2197   /* Communicate overlap */
2198   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
2199   ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2200   /* Insert the received (point, rank) pairs into the leaf label */
2201   ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
2202   for (p = 0; p < ssize; p++) {
2203     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2204   }
2205   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2206   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2207   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2208   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2209   PetscFunctionReturn(0);
2210 }
2211 
2212 /*@
2213   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2214 
2215   Input Parameters:
2216 + dm    - The DM
2217 - label - DMLabel assigning ranks to remote roots
2218 
2219   Output Parameter:
2220 . sf    - The star forest communication context encapsulating the defined mapping
2221 
2222   Note: The incoming label is a receiver mapping of remote points to their parent rank.
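
  Example usage (a minimal sketch; leafLabel is assumed to have been filled, e.g. by DMPlexPartitionLabelInvert()):
.vb
  PetscSF sf;

  ierr = DMPlexPartitionLabelCreateSF(dm, leafLabel, &sf);CHKERRQ(ierr);
  /* ... migrate point data over sf ... */
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
.ve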
2223 
2224   Level: developer
2225 
2226 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2227 @*/
2228 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2229 {
2230   PetscMPIInt     rank, size;
2231   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2232   PetscSFNode    *remotePoints;
2233   IS              remoteRootIS;
2234   const PetscInt *remoteRoots;
2235   PetscErrorCode ierr;
2236 
2237   PetscFunctionBegin;
2238   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2239   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2240 
2241   for (numRemote = 0, n = 0; n < size; ++n) {
2242     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2243     numRemote += numPoints;
2244   }
2245   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2246   /* Put owned points first */
2247   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2248   if (numPoints > 0) {
2249     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2250     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2251     for (p = 0; p < numPoints; p++) {
2252       remotePoints[idx].index = remoteRoots[p];
2253       remotePoints[idx].rank = rank;
2254       idx++;
2255     }
2256     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2257     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2258   }
2259   /* Now add remote points */
2260   for (n = 0; n < size; ++n) {
2261     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2262     if (numPoints <= 0 || n == rank) continue;
2263     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2264     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2265     for (p = 0; p < numPoints; p++) {
2266       remotePoints[idx].index = remoteRoots[p];
2267       remotePoints[idx].rank = n;
2268       idx++;
2269     }
2270     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2271     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2272   }
2273   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2274   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2275   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2276   PetscFunctionReturn(0);
2277 }
2278