xref: /petsc/src/dm/impls/plex/plexpartition.c (revision 2abdaa7022c6e53f8e087b5908ff978a78ad83a5)
1 #include <petsc/private/dmpleximpl.h>   /*I      "petscdmplex.h"   I*/
2 #include <petsc/private/hashseti.h>
3 
4 PetscClassId PETSCPARTITIONER_CLASSID = 0;
5 
6 PetscFunctionList PetscPartitionerList              = NULL;
7 PetscBool         PetscPartitionerRegisterAllCalled = PETSC_FALSE;
8 
9 PetscBool ChacoPartitionercite = PETSC_FALSE;
10 const char ChacoPartitionerCitation[] = "@inproceedings{Chaco95,\n"
11                                "  author    = {Bruce Hendrickson and Robert Leland},\n"
12                                "  title     = {A multilevel algorithm for partitioning graphs},\n"
13                                "  booktitle = {Supercomputing '95: Proceedings of the 1995 ACM/IEEE Conference on Supercomputing (CDROM)},\n"
14                                "  isbn      = {0-89791-816-9},\n"
15                                "  pages     = {28},\n"
16                                "  doi       = {http://doi.acm.org/10.1145/224170.224228},\n"
17                                "  publisher = {ACM Press},\n"
18                                "  address   = {New York},\n"
19                                "  year      = {1995}\n}\n";
20 
21 PetscBool ParMetisPartitionercite = PETSC_FALSE;
22 const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
23                                "  author  = {George Karypis and Vipin Kumar},\n"
24                                "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
25                                "  journal = {Journal of Parallel and Distributed Computing},\n"
26                                "  volume  = {48},\n"
27                                "  pages   = {71--85},\n"
28                                "  year    = {1998}\n}\n";
29 
30 /*@C
31   DMPlexCreatePartitionerGraph - Create a CSR graph of point connections for the partitioner
32 
33   Input Parameters:
34 + dm      - The mesh DM
35 - height  - Height of the strata from which to construct the graph
36 
37   Output Parameters:
38 + numVertices     - Number of vertices in the graph
39 . offsets         - Point offsets in the graph
40 . adjacency       - Point connectivity in the graph
41 - globalNumbering - A map from the local cell numbering to the global numbering used in "adjacency".  Negative indicates that the cell is a duplicate from another process.
42 
43   The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
44   DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
45   representation on the mesh.
46 
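  For example, the graph can be created and consumed as follows (a minimal sketch assuming an existing DM dm; error checking omitted):
.vb
    PetscInt numVertices, *offsets, *adjacency;
    IS       globalNumbering;

    DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &offsets, &adjacency, &globalNumbering);
    /* vertex v is adjacent to adjacency[offsets[v]], ..., adjacency[offsets[v+1]-1] */
    PetscFree(offsets);
    PetscFree(adjacency);
    ISDestroy(&globalNumbering);
.ve
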
47   Level: developer
48 
49 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
50 @*/
51 PetscErrorCode DMPlexCreatePartitionerGraph(DM dm, PetscInt height, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency, IS *globalNumbering)
52 {
53   PetscInt       p, pStart, pEnd, a, adjSize, idx, size, nroots;
54   PetscInt      *adj = NULL, *vOffsets = NULL, *graph = NULL;
55   IS             cellNumbering;
56   const PetscInt *cellNum;
57   PetscBool      useCone, useClosure;
58   PetscSection   section;
59   PetscSegBuffer adjBuffer;
60   PetscSF        sfPoint;
61   PetscMPIInt    rank;
62   PetscErrorCode ierr;
63 
64   PetscFunctionBegin;
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
66   ierr = DMPlexGetHeightStratum(dm, height, &pStart, &pEnd);CHKERRQ(ierr);
67   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
68   ierr = PetscSFGetGraph(sfPoint, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
69   /* Build adjacency graph via a section/segbuffer */
70   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section);CHKERRQ(ierr);
71   ierr = PetscSectionSetChart(section, pStart, pEnd);CHKERRQ(ierr);
72   ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&adjBuffer);CHKERRQ(ierr);
73   /* Always use FVM adjacency to create partitioner graph */
74   ierr = DMPlexGetAdjacencyUseCone(dm, &useCone);CHKERRQ(ierr);
75   ierr = DMPlexGetAdjacencyUseClosure(dm, &useClosure);CHKERRQ(ierr);
76   ierr = DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE);CHKERRQ(ierr);
77   ierr = DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE);CHKERRQ(ierr);
78   ierr = DMPlexCreateCellNumbering_Internal(dm, PETSC_TRUE, &cellNumbering);CHKERRQ(ierr);
79   if (globalNumbering) {
80     ierr = PetscObjectReference((PetscObject)cellNumbering);CHKERRQ(ierr);
81     *globalNumbering = cellNumbering;
82   }
83   ierr = ISGetIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
84   for (*numVertices = 0, p = pStart; p < pEnd; p++) {
85     /* Skip non-owned cells in parallel (ParMetis expects no overlap) */
86     if (nroots > 0) {if (cellNum[p] < 0) continue;}
87     adjSize = PETSC_DETERMINE;
88     ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
89     for (a = 0; a < adjSize; ++a) {
90       const PetscInt point = adj[a];
91       if (point != p && pStart <= point && point < pEnd) {
92         PetscInt *PETSC_RESTRICT pBuf;
93         ierr = PetscSectionAddDof(section, p, 1);CHKERRQ(ierr);
94         ierr = PetscSegBufferGetInts(adjBuffer, 1, &pBuf);CHKERRQ(ierr);
95         *pBuf = point;
96       }
97     }
98     (*numVertices)++;
99   }
100   ierr = DMPlexSetAdjacencyUseCone(dm, useCone);CHKERRQ(ierr);
101   ierr = DMPlexSetAdjacencyUseClosure(dm, useClosure);CHKERRQ(ierr);
102   /* Derive CSR graph from section/segbuffer */
103   ierr = PetscSectionSetUp(section);CHKERRQ(ierr);
104   ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
105   ierr = PetscMalloc1(*numVertices+1, &vOffsets);CHKERRQ(ierr);
106   for (idx = 0, p = pStart; p < pEnd; p++) {
107     if (nroots > 0) {if (cellNum[p] < 0) continue;}
108     ierr = PetscSectionGetOffset(section, p, &(vOffsets[idx++]));CHKERRQ(ierr);
109   }
110   vOffsets[*numVertices] = size;
111   if (offsets) *offsets = vOffsets;
112   ierr = PetscSegBufferExtractAlloc(adjBuffer, &graph);CHKERRQ(ierr);
113   {
114     ISLocalToGlobalMapping ltogCells;
115     PetscInt n, size, *cells_arr;
116     /* In parallel, apply a global cell numbering to the graph */
117     ierr = ISRestoreIndices(cellNumbering, &cellNum);CHKERRQ(ierr);
118     ierr = ISLocalToGlobalMappingCreateIS(cellNumbering, &ltogCells);CHKERRQ(ierr);
119     ierr = ISLocalToGlobalMappingGetSize(ltogCells, &size);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingGetIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
121     /* Convert to positive global cell numbers */
122     for (n=0; n<size; n++) {if (cells_arr[n] < 0) cells_arr[n] = -(cells_arr[n]+1);}
123     ierr = ISLocalToGlobalMappingRestoreIndices(ltogCells, (const PetscInt**)&cells_arr);CHKERRQ(ierr);
124     ierr = ISLocalToGlobalMappingApplyBlock(ltogCells, vOffsets[*numVertices], graph, graph);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingDestroy(&ltogCells);CHKERRQ(ierr);
126     ierr = ISDestroy(&cellNumbering);CHKERRQ(ierr);
127   }
128   if (adjacency) *adjacency = graph;
129   /* Clean up */
130   ierr = PetscSegBufferDestroy(&adjBuffer);CHKERRQ(ierr);
131   ierr = PetscSectionDestroy(&section);CHKERRQ(ierr);
132   ierr = PetscFree(adj);CHKERRQ(ierr);
133   PetscFunctionReturn(0);
134 }
135 
136 /*@C
137   DMPlexCreateNeighborCSR - Create a mesh graph (cell-cell adjacency) in parallel CSR format.
138 
139   Collective
140 
141   Input Arguments:
142 + dm - The DMPlex
143 - cellHeight - The height of mesh points to treat as cells (default should be 0)
144 
145   Output Arguments:
146 + numVertices - The number of local vertices in the graph, or cells in the mesh.
147 . offsets     - The offset to the adjacency list for each cell
148 - adjacency   - The adjacency list for all cells
149 
150   Note: This is suitable for input to a mesh partitioner like ParMetis.
151 
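  For example, the returned CSR arrays can be traversed as follows (a minimal sketch assuming an existing DM dm; error checking omitted):
.vb
    PetscInt numVertices, *offsets, *adjacency, c, n;

    DMPlexCreateNeighborCSR(dm, 0, &numVertices, &offsets, &adjacency);
    for (c = 0; c < numVertices; ++c) {
      for (n = offsets[c]; n < offsets[c+1]; ++n) {
        /* cell c shares a face with cell adjacency[n] */
      }
    }
    PetscFree(offsets);
    PetscFree(adjacency);
.ve
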
152   Level: advanced
153 
154 .seealso: DMPlexCreate()
155 @*/
156 PetscErrorCode DMPlexCreateNeighborCSR(DM dm, PetscInt cellHeight, PetscInt *numVertices, PetscInt **offsets, PetscInt **adjacency)
157 {
158   const PetscInt maxFaceCases = 30;
159   PetscInt       numFaceCases = 0;
160   PetscInt       numFaceVertices[30]; /* maxFaceCases; C89 requires a constant array bound */
161   PetscInt      *off, *adj;
162   PetscInt      *neighborCells = NULL;
163   PetscInt       dim, cellDim, depth = 0, faceDepth, cStart, cEnd, c, numCells, cell;
164   PetscErrorCode ierr;
165 
166   PetscFunctionBegin;
167   /* For parallel partitioning, I think you have to communicate supports */
168   ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
169   cellDim = dim - cellHeight;
170   ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
171   ierr = DMPlexGetHeightStratum(dm, cellHeight, &cStart, &cEnd);CHKERRQ(ierr);
172   if (cEnd - cStart == 0) {
173     if (numVertices) *numVertices = 0;
174     if (offsets)   *offsets   = NULL;
175     if (adjacency) *adjacency = NULL;
176     PetscFunctionReturn(0);
177   }
178   numCells  = cEnd - cStart;
179   faceDepth = depth - cellHeight;
180   if (dim == depth) {
181     PetscInt f, fStart, fEnd;
182 
183     ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
184     /* Count neighboring cells */
185     ierr = DMPlexGetHeightStratum(dm, cellHeight+1, &fStart, &fEnd);CHKERRQ(ierr);
186     for (f = fStart; f < fEnd; ++f) {
187       const PetscInt *support;
188       PetscInt        supportSize;
189       ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
190       ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
191       if (supportSize == 2) {
192         PetscInt numChildren;
193 
194         ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
195         if (!numChildren) {
196           ++off[support[0]-cStart+1];
197           ++off[support[1]-cStart+1];
198         }
199       }
200     }
201     /* Prefix sum */
202     for (c = 1; c <= numCells; ++c) off[c] += off[c-1];
203     if (adjacency) {
204       PetscInt *tmp;
205 
206       ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
207       ierr = PetscMalloc1(numCells+1, &tmp);CHKERRQ(ierr);
208       ierr = PetscMemcpy(tmp, off, (numCells+1) * sizeof(PetscInt));CHKERRQ(ierr);
209       /* Get neighboring cells */
210       for (f = fStart; f < fEnd; ++f) {
211         const PetscInt *support;
212         PetscInt        supportSize;
213         ierr = DMPlexGetSupportSize(dm, f, &supportSize);CHKERRQ(ierr);
214         ierr = DMPlexGetSupport(dm, f, &support);CHKERRQ(ierr);
215         if (supportSize == 2) {
216           PetscInt numChildren;
217 
218           ierr = DMPlexGetTreeChildren(dm,f,&numChildren,NULL);CHKERRQ(ierr);
219           if (!numChildren) {
220             adj[tmp[support[0]-cStart]++] = support[1];
221             adj[tmp[support[1]-cStart]++] = support[0];
222           }
223         }
224       }
225 #if defined(PETSC_USE_DEBUG)
226       for (c = 0; c < cEnd-cStart; ++c) if (tmp[c] != off[c+1]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Offset %d != %d for cell %d", tmp[c], off[c+1], c+cStart);
227 #endif
228       ierr = PetscFree(tmp);CHKERRQ(ierr);
229     }
230     if (numVertices) *numVertices = numCells;
231     if (offsets)   *offsets   = off;
232     if (adjacency) *adjacency = adj;
233     PetscFunctionReturn(0);
234   }
235   /* Setup face recognition */
236   if (faceDepth == 1) {
237     PetscInt cornersSeen[30] = {0}; /* zero-initialized; could use PetscBT */
238 
239     for (c = cStart; c < cEnd; ++c) {
240       PetscInt corners;
241 
242       ierr = DMPlexGetConeSize(dm, c, &corners);CHKERRQ(ierr);
243       if (!cornersSeen[corners]) {
244         PetscInt nFV;
245 
246         if (numFaceCases >= maxFaceCases) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Exceeded maximum number of face recognition cases");
247         cornersSeen[corners] = 1;
248 
249         ierr = DMPlexGetNumFaceVertices(dm, cellDim, corners, &nFV);CHKERRQ(ierr);
250 
251         numFaceVertices[numFaceCases++] = nFV;
252       }
253     }
254   }
255   ierr = PetscCalloc1(numCells+1, &off);CHKERRQ(ierr);
256   /* Count neighboring cells */
257   for (cell = cStart; cell < cEnd; ++cell) {
258     PetscInt numNeighbors = PETSC_DETERMINE, n;
259 
260     ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
261     /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
262     for (n = 0; n < numNeighbors; ++n) {
263       PetscInt        cellPair[2];
264       PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
265       PetscInt        meetSize = 0;
266       const PetscInt *meet    = NULL;
267 
268       cellPair[0] = cell; cellPair[1] = neighborCells[n];
269       if (cellPair[0] == cellPair[1]) continue;
270       if (!found) {
271         ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
272         if (meetSize) {
273           PetscInt f;
274 
275           for (f = 0; f < numFaceCases; ++f) {
276             if (numFaceVertices[f] == meetSize) {
277               found = PETSC_TRUE;
278               break;
279             }
280           }
281         }
282         ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
283       }
284       if (found) ++off[cell-cStart+1];
285     }
286   }
287   /* Prefix sum */
288   for (cell = 1; cell <= numCells; ++cell) off[cell] += off[cell-1];
289 
290   if (adjacency) {
291     ierr = PetscMalloc1(off[numCells], &adj);CHKERRQ(ierr);
292     /* Get neighboring cells */
293     for (cell = cStart; cell < cEnd; ++cell) {
294       PetscInt numNeighbors = PETSC_DETERMINE, n;
295       PetscInt cellOffset   = 0;
296 
297       ierr = DMPlexGetAdjacency_Internal(dm, cell, PETSC_TRUE, PETSC_FALSE, PETSC_FALSE, &numNeighbors, &neighborCells);CHKERRQ(ierr);
298       /* Get meet with each cell, and check with recognizer (could optimize to check each pair only once) */
299       for (n = 0; n < numNeighbors; ++n) {
300         PetscInt        cellPair[2];
301         PetscBool       found    = faceDepth > 1 ? PETSC_TRUE : PETSC_FALSE;
302         PetscInt        meetSize = 0;
303         const PetscInt *meet    = NULL;
304 
305         cellPair[0] = cell; cellPair[1] = neighborCells[n];
306         if (cellPair[0] == cellPair[1]) continue;
307         if (!found) {
308           ierr = DMPlexGetMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
309           if (meetSize) {
310             PetscInt f;
311 
312             for (f = 0; f < numFaceCases; ++f) {
313               if (numFaceVertices[f] == meetSize) {
314                 found = PETSC_TRUE;
315                 break;
316               }
317             }
318           }
319           ierr = DMPlexRestoreMeet(dm, 2, cellPair, &meetSize, &meet);CHKERRQ(ierr);
320         }
321         if (found) {
322           adj[off[cell-cStart]+cellOffset] = neighborCells[n];
323           ++cellOffset;
324         }
325       }
326     }
327   }
328   ierr = PetscFree(neighborCells);CHKERRQ(ierr);
329   if (numVertices) *numVertices = numCells;
330   if (offsets)   *offsets   = off;
331   if (adjacency) *adjacency = adj;
332   PetscFunctionReturn(0);
333 }
334 
335 /*@C
336   PetscPartitionerRegister - Adds a new PetscPartitioner implementation
337 
338   Not Collective
339 
340   Input Parameters:
341 + name        - The name of a new user-defined creation routine
342 - create_func - The creation routine itself
343 
344   Notes:
345   PetscPartitionerRegister() may be called multiple times to add several user-defined PetscPartitioners
346 
347   Sample usage:
348 .vb
349     PetscPartitionerRegister("my_part", MyPetscPartitionerCreate);
350 .ve
351 
352   Then, your PetscPartitioner type can be chosen with the procedural interface via
353 .vb
354     PetscPartitionerCreate(MPI_Comm, PetscPartitioner *);
355     PetscPartitionerSetType(PetscPartitioner, "my_part");
356 .ve
357    or at runtime via the option
358 .vb
359     -petscpartitioner_type my_part
360 .ve
361 
362   Level: advanced
363 
364 .keywords: PetscPartitioner, register
365 .seealso: PetscPartitionerRegisterAll(), PetscPartitionerRegisterDestroy()
366 
367 @*/
368 PetscErrorCode PetscPartitionerRegister(const char sname[], PetscErrorCode (*function)(PetscPartitioner))
369 {
370   PetscErrorCode ierr;
371 
372   PetscFunctionBegin;
373   ierr = PetscFunctionListAdd(&PetscPartitionerList, sname, function);CHKERRQ(ierr);
374   PetscFunctionReturn(0);
375 }
376 
377 /*@C
378   PetscPartitionerSetType - Builds a particular PetscPartitioner
379 
380   Collective on PetscPartitioner
381 
382   Input Parameters:
383 + part - The PetscPartitioner object
384 - name - The kind of partitioner
385 
386   Options Database Key:
387 . -petscpartitioner_type <type> - Sets the PetscPartitioner type; use -help for a list of available types
388 
389   Level: intermediate
390 
391 .keywords: PetscPartitioner, set, type
392 .seealso: PetscPartitionerGetType(), PetscPartitionerCreate()
393 @*/
394 PetscErrorCode PetscPartitionerSetType(PetscPartitioner part, PetscPartitionerType name)
395 {
396   PetscErrorCode (*r)(PetscPartitioner);
397   PetscBool      match;
398   PetscErrorCode ierr;
399 
400   PetscFunctionBegin;
401   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
402   ierr = PetscObjectTypeCompare((PetscObject) part, name, &match);CHKERRQ(ierr);
403   if (match) PetscFunctionReturn(0);
404 
405   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
406   ierr = PetscFunctionListFind(PetscPartitionerList, name, &r);CHKERRQ(ierr);
407   if (!r) SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown PetscPartitioner type: %s", name);
408 
409   if (part->ops->destroy) {
410     ierr              = (*part->ops->destroy)(part);CHKERRQ(ierr);
411   }
412   ierr = PetscMemzero(part->ops, sizeof(struct _PetscPartitionerOps));CHKERRQ(ierr);
413   ierr = (*r)(part);CHKERRQ(ierr);
414   ierr = PetscObjectChangeTypeName((PetscObject) part, name);CHKERRQ(ierr);
415   PetscFunctionReturn(0);
416 }
417 
418 /*@C
419   PetscPartitionerGetType - Gets the PetscPartitioner type name (as a string) from the object.
420 
421   Not Collective
422 
423   Input Parameter:
424 . part - The PetscPartitioner
425 
426   Output Parameter:
427 . name - The PetscPartitioner type name
428 
429   Level: intermediate
430 
431 .keywords: PetscPartitioner, get, type, name
432 .seealso: PetscPartitionerSetType(), PetscPartitionerCreate()
433 @*/
434 PetscErrorCode PetscPartitionerGetType(PetscPartitioner part, PetscPartitionerType *name)
435 {
436   PetscErrorCode ierr;
437 
438   PetscFunctionBegin;
439   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
440   PetscValidPointer(name, 2);
441   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
442   *name = ((PetscObject) part)->type_name;
443   PetscFunctionReturn(0);
444 }
445 
446 /*@C
447   PetscPartitionerView - Views a PetscPartitioner
448 
449   Collective on PetscPartitioner
450 
451   Input Parameters:
452 + part - the PetscPartitioner object to view
453 - v    - the viewer
454 
455   Level: developer
456 
457 .seealso: PetscPartitionerDestroy()
458 @*/
459 PetscErrorCode PetscPartitionerView(PetscPartitioner part, PetscViewer v)
460 {
461   PetscMPIInt    size;
462   PetscBool      isascii;
463   PetscErrorCode ierr;
464 
465   PetscFunctionBegin;
466   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
467   if (!v) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) part), &v);CHKERRQ(ierr);}
468   ierr = PetscObjectTypeCompare((PetscObject) v, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
469   if (isascii) {
470     ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
471     ierr = PetscViewerASCIIPrintf(v, "Graph Partitioner: %d MPI Process%s\n", size, size > 1 ? "es" : "");CHKERRQ(ierr);
472     ierr = PetscViewerASCIIPrintf(v, "  type: %s\n", part->hdr.type_name);CHKERRQ(ierr);
473     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
474     ierr = PetscViewerASCIIPrintf(v, "edge cut: %D\n", part->edgeCut);CHKERRQ(ierr);
475     ierr = PetscViewerASCIIPrintf(v, "balance:  %.2g\n", part->balance);CHKERRQ(ierr);
476     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
477   }
478   if (part->ops->view) {ierr = (*part->ops->view)(part, v);CHKERRQ(ierr);}
479   PetscFunctionReturn(0);
480 }
481 
482 static PetscErrorCode PetscPartitionerGetDefaultType(const char *currentType, const char **defaultType)
483 {
484   PetscFunctionBegin;
485   if (!currentType) {
486 #if defined(PETSC_HAVE_CHACO)
487     *defaultType = PETSCPARTITIONERCHACO;
488 #elif defined(PETSC_HAVE_PARMETIS)
489     *defaultType = PETSCPARTITIONERPARMETIS;
490 #elif defined(PETSC_HAVE_PTSCOTCH)
491     *defaultType = PETSCPARTITIONERPTSCOTCH;
492 #else
493     *defaultType = PETSCPARTITIONERSIMPLE;
494 #endif
495   } else {
496     *defaultType = currentType;
497   }
498   PetscFunctionReturn(0);
499 }
500 
501 /*@
502   PetscPartitionerSetFromOptions - sets parameters in a PetscPartitioner from the options database
503 
504   Collective on PetscPartitioner
505 
506   Input Parameter:
507 . part - the PetscPartitioner object to set options for
508 
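  For example, the partitioner type handled by this routine can be selected at runtime with
.vb
    -petscpartitioner_type simple
.ve
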
509   Level: developer
510 
511 .seealso: PetscPartitionerView()
512 @*/
513 PetscErrorCode PetscPartitionerSetFromOptions(PetscPartitioner part)
514 {
515   const char    *defaultType;
516   char           name[256];
517   PetscBool      flg;
518   PetscErrorCode ierr;
519 
520   PetscFunctionBegin;
521   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
522   ierr = PetscPartitionerRegisterAll();CHKERRQ(ierr);
523   ierr = PetscPartitionerGetDefaultType(((PetscObject) part)->type_name,&defaultType);CHKERRQ(ierr);
524   ierr = PetscObjectOptionsBegin((PetscObject) part);CHKERRQ(ierr);
525   ierr = PetscOptionsFList("-petscpartitioner_type", "Graph partitioner", "PetscPartitionerSetType", PetscPartitionerList, defaultType, name, sizeof(name), &flg);CHKERRQ(ierr);
526   if (flg) {
527     ierr = PetscPartitionerSetType(part, name);CHKERRQ(ierr);
528   } else if (!((PetscObject) part)->type_name) {
529     ierr = PetscPartitionerSetType(part, defaultType);CHKERRQ(ierr);
530   }
531   if (part->ops->setfromoptions) {
532     ierr = (*part->ops->setfromoptions)(PetscOptionsObject,part);CHKERRQ(ierr);
533   }
534   /* process any options handlers added with PetscObjectAddOptionsHandler() */
535   ierr = PetscObjectProcessOptionsHandlers(PetscOptionsObject,(PetscObject) part);CHKERRQ(ierr);
536   ierr = PetscOptionsEnd();CHKERRQ(ierr);
537   PetscFunctionReturn(0);
538 }
539 
540 /*@C
541   PetscPartitionerSetUp - Construct data structures for the PetscPartitioner
542 
543   Collective on PetscPartitioner
544 
545   Input Parameter:
546 . part - the PetscPartitioner object to setup
547 
548   Level: developer
549 
550 .seealso: PetscPartitionerView(), PetscPartitionerDestroy()
551 @*/
552 PetscErrorCode PetscPartitionerSetUp(PetscPartitioner part)
553 {
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin;
557   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
558   if (part->ops->setup) {ierr = (*part->ops->setup)(part);CHKERRQ(ierr);}
559   PetscFunctionReturn(0);
560 }
561 
562 /*@
563   PetscPartitionerDestroy - Destroys a PetscPartitioner object
564 
565   Collective on PetscPartitioner
566 
567   Input Parameter:
568 . part - the PetscPartitioner object to destroy
569 
570   Level: developer
571 
572 .seealso: PetscPartitionerView()
573 @*/
574 PetscErrorCode PetscPartitionerDestroy(PetscPartitioner *part)
575 {
576   PetscErrorCode ierr;
577 
578   PetscFunctionBegin;
579   if (!*part) PetscFunctionReturn(0);
580   PetscValidHeaderSpecific((*part), PETSCPARTITIONER_CLASSID, 1);
581 
582   if (--((PetscObject)(*part))->refct > 0) {*part = NULL; PetscFunctionReturn(0);}
583   ((PetscObject) (*part))->refct = 0;
584 
585   if ((*part)->ops->destroy) {ierr = (*(*part)->ops->destroy)(*part);CHKERRQ(ierr);}
586   ierr = PetscHeaderDestroy(part);CHKERRQ(ierr);
587   PetscFunctionReturn(0);
588 }
589 
590 /*@
591   PetscPartitionerCreate - Creates an empty PetscPartitioner object. The type can then be set with PetscPartitionerSetType().
592 
593   Collective on MPI_Comm
594 
595   Input Parameter:
596 . comm - The communicator for the PetscPartitioner object
597 
598   Output Parameter:
599 . part - The PetscPartitioner object
600 
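  A typical lifecycle is sketched below (error checking omitted):
.vb
    PetscPartitioner part;

    PetscPartitionerCreate(PETSC_COMM_WORLD, &part);
    PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE);
    PetscPartitionerSetFromOptions(part);
    /* use the partitioner, e.g. through DMPlexDistribute() */
    PetscPartitionerDestroy(&part);
.ve
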
601   Level: beginner
602 
603 .seealso: PetscPartitionerSetType(), PETSCPARTITIONERCHACO, PETSCPARTITIONERPARMETIS, PETSCPARTITIONERSHELL, PETSCPARTITIONERSIMPLE, PETSCPARTITIONERGATHER
604 @*/
605 PetscErrorCode PetscPartitionerCreate(MPI_Comm comm, PetscPartitioner *part)
606 {
607   PetscPartitioner p;
608   const char       *partitionerType = NULL;
609   PetscErrorCode   ierr;
610 
611   PetscFunctionBegin;
612   PetscValidPointer(part, 2);
613   *part = NULL;
614   ierr = DMInitializePackage();CHKERRQ(ierr);
615 
616   ierr = PetscHeaderCreate(p, PETSCPARTITIONER_CLASSID, "PetscPartitioner", "Graph Partitioner", "PetscPartitioner", comm, PetscPartitionerDestroy, PetscPartitionerView);CHKERRQ(ierr);
617   ierr = PetscPartitionerGetDefaultType(NULL,&partitionerType);CHKERRQ(ierr);
618   ierr = PetscPartitionerSetType(p,partitionerType);CHKERRQ(ierr);
619 
620   p->edgeCut = 0;
621   p->balance = 0.0;
622 
623   *part = p;
624   PetscFunctionReturn(0);
625 }
626 
627 /*@
628   PetscPartitionerPartition - Create a non-overlapping partition of the cells in the mesh
629 
630   Collective on DM
631 
632   Input Parameters:
633 + part    - The PetscPartitioner
634 - dm      - The mesh DM
635 
636   Output Parameters:
637 + partSection     - The PetscSection giving the division of points by partition
638 - partition       - The list of points by partition
639 
640   Note: Currently only cells (height 0) are partitioned; requesting any other point height generates an error.
641 
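  A typical call sequence is sketched below (a minimal sketch assuming a configured PetscPartitioner part and DM dm; error checking omitted):
.vb
    PetscSection partSection;
    IS           partition;

    PetscSectionCreate(PetscObjectComm((PetscObject) dm), &partSection);
    PetscPartitionerPartition(part, dm, partSection, &partition);
    /* partSection gives the number of points assigned to each rank,
       partition lists those points rank by rank */
    ISDestroy(&partition);
    PetscSectionDestroy(&partSection);
.ve
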
642   Level: developer
643 
644 .seealso: DMPlexDistribute(), PetscPartitionerCreate()
645 @*/
646 PetscErrorCode PetscPartitionerPartition(PetscPartitioner part, DM dm, PetscSection partSection, IS *partition)
647 {
648   PetscMPIInt    size;
649   PetscErrorCode ierr;
650 
651   PetscFunctionBegin;
652   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
653   PetscValidHeaderSpecific(dm, DM_CLASSID, 2);
654   PetscValidHeaderSpecific(partSection, PETSC_SECTION_CLASSID, 3);
655   PetscValidPointer(partition, 4);
656   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) part), &size);CHKERRQ(ierr);
657   if (size == 1) {
658     PetscInt *points;
659     PetscInt  cStart, cEnd, c;
660 
661     ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
662     ierr = PetscSectionSetChart(partSection, 0, size);CHKERRQ(ierr);
663     ierr = PetscSectionSetDof(partSection, 0, cEnd-cStart);CHKERRQ(ierr);
664     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
665     ierr = PetscMalloc1(cEnd-cStart, &points);CHKERRQ(ierr);
666     for (c = cStart; c < cEnd; ++c) points[c] = c;
667     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), cEnd-cStart, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
668     PetscFunctionReturn(0);
669   }
670   if (part->height == 0) {
671     PetscInt numVertices;
672     PetscInt *start     = NULL;
673     PetscInt *adjacency = NULL;
674     IS       globalNumbering;
675 
676     ierr = DMPlexCreatePartitionerGraph(dm, 0, &numVertices, &start, &adjacency, &globalNumbering);CHKERRQ(ierr);
677     if (!part->ops->partition) SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_WRONGSTATE, "PetscPartitioner has no type");
678     ierr = (*part->ops->partition)(part, dm, size, numVertices, start, adjacency, partSection, partition);CHKERRQ(ierr);
679     ierr = PetscFree(start);CHKERRQ(ierr);
680     ierr = PetscFree(adjacency);CHKERRQ(ierr);
681     if (globalNumbering) { /* partition is wrt global unique numbering: change this to be wrt local numbering */
682       const PetscInt *globalNum;
683       const PetscInt *partIdx;
684       PetscInt *map, cStart, cEnd;
685       PetscInt *adjusted, i, localSize, offset;
686       IS    newPartition;
687 
688       ierr = ISGetLocalSize(*partition,&localSize);CHKERRQ(ierr);
689       ierr = PetscMalloc1(localSize,&adjusted);CHKERRQ(ierr);
690       ierr = ISGetIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
691       ierr = ISGetIndices(*partition,&partIdx);CHKERRQ(ierr);
692       ierr = PetscMalloc1(localSize,&map);CHKERRQ(ierr);
693       ierr = DMPlexGetHeightStratum(dm, part->height, &cStart, &cEnd);CHKERRQ(ierr);
694       for (i = cStart, offset = 0; i < cEnd; i++) {
695         if (globalNum[i - cStart] >= 0) map[offset++] = i;
696       }
697       for (i = 0; i < localSize; i++) {
698         adjusted[i] = map[partIdx[i]];
699       }
700       ierr = PetscFree(map);CHKERRQ(ierr);
701       ierr = ISRestoreIndices(*partition,&partIdx);CHKERRQ(ierr);
702       ierr = ISRestoreIndices(globalNumbering,&globalNum);CHKERRQ(ierr);
703       ierr = ISCreateGeneral(PETSC_COMM_SELF,localSize,adjusted,PETSC_OWN_POINTER,&newPartition);CHKERRQ(ierr);
704       ierr = ISDestroy(&globalNumbering);CHKERRQ(ierr);
705       ierr = ISDestroy(partition);CHKERRQ(ierr);
706       *partition = newPartition;
707     }
708   } else SETERRQ1(PetscObjectComm((PetscObject) part), PETSC_ERR_ARG_OUTOFRANGE, "Invalid height %D for points to partition", part->height);
709   ierr = PetscPartitionerViewFromOptions(part, NULL, "-petscpartitioner_view");CHKERRQ(ierr);
710   PetscFunctionReturn(0);
711 }
712 
713 static PetscErrorCode PetscPartitionerDestroy_Shell(PetscPartitioner part)
714 {
715   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
716   PetscErrorCode          ierr;
717 
718   PetscFunctionBegin;
719   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
720   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
721   ierr = PetscFree(p);CHKERRQ(ierr);
722   PetscFunctionReturn(0);
723 }
724 
725 static PetscErrorCode PetscPartitionerView_Shell_Ascii(PetscPartitioner part, PetscViewer viewer)
726 {
727   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
728   PetscErrorCode          ierr;
729 
730   PetscFunctionBegin;
731   if (p->random) {
732     ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
733     ierr = PetscViewerASCIIPrintf(viewer, "using random partition\n");CHKERRQ(ierr);
734     ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
735   }
736   PetscFunctionReturn(0);
737 }
738 
739 static PetscErrorCode PetscPartitionerView_Shell(PetscPartitioner part, PetscViewer viewer)
740 {
741   PetscBool      iascii;
742   PetscErrorCode ierr;
743 
744   PetscFunctionBegin;
745   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
746   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
747   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
748   if (iascii) {ierr = PetscPartitionerView_Shell_Ascii(part, viewer);CHKERRQ(ierr);}
749   PetscFunctionReturn(0);
750 }
751 
752 static PetscErrorCode PetscPartitionerSetFromOptions_Shell(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
753 {
754   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
755   PetscErrorCode          ierr;
756 
757   PetscFunctionBegin;
758   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner Shell Options");CHKERRQ(ierr);
759   ierr = PetscOptionsBool("-petscpartitioner_shell_random", "Use a random partition", "PetscPartitionerView", PETSC_FALSE, &p->random, NULL);CHKERRQ(ierr);
760   ierr = PetscOptionsTail();CHKERRQ(ierr);
761   PetscFunctionReturn(0);
762 }
763 
764 static PetscErrorCode PetscPartitionerPartition_Shell(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
765 {
766   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
767   PetscInt                np;
768   PetscErrorCode          ierr;
769 
770   PetscFunctionBegin;
771   if (p->random) {
772     PetscRandom r;
773     PetscInt   *sizes, *points, v, i;
774     PetscMPIInt rank;
775 
776     ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
777     ierr = PetscRandomCreate(PETSC_COMM_SELF, &r);CHKERRQ(ierr);
778     ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) nparts);CHKERRQ(ierr);
779     ierr = PetscRandomSetFromOptions(r);CHKERRQ(ierr);
780     ierr = PetscCalloc2(nparts, &sizes, numVertices, &points);CHKERRQ(ierr);
781     for (v = 0; v < numVertices; ++v) {points[v] = v;}
782     for (i = 0; i < nparts; ++i) {sizes[i] = numVertices/nparts + (PetscInt) (i < numVertices % nparts);}
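    /* Fisher-Yates shuffle: swap points[v] with a uniformly chosen points[w], w <= v */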
783     for (v = numVertices-1; v > 0; --v) {
784       PetscReal val;
785       PetscInt  w, tmp;
786 
787       ierr = PetscRandomSetInterval(r, 0.0, (PetscScalar) (v+1));CHKERRQ(ierr);
788       ierr = PetscRandomGetValueReal(r, &val);CHKERRQ(ierr);
789       w    = PetscFloorReal(val);
790       tmp       = points[v];
791       points[v] = points[w];
792       points[w] = tmp;
793     }
794     ierr = PetscRandomDestroy(&r);CHKERRQ(ierr);
795     ierr = PetscPartitionerShellSetPartition(part, nparts, sizes, points);CHKERRQ(ierr);
796     ierr = PetscFree2(sizes, points);CHKERRQ(ierr);
797   }
798   if (!p->section) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONG, "Shell partitioner information not provided. Please call PetscPartitionerShellSetPartition()");
799   ierr = PetscSectionGetChart(p->section, NULL, &np);CHKERRQ(ierr);
800   if (nparts != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of requested partitions %D != configured partitions %D", nparts, np);
801   ierr = ISGetLocalSize(p->partition, &np);CHKERRQ(ierr);
802   if (numVertices != np) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of input vertices %D != configured vertices %D", numVertices, np);
803   ierr = PetscSectionCopy(p->section, partSection);CHKERRQ(ierr);
804   *partition = p->partition;
805   ierr = PetscObjectReference((PetscObject) p->partition);CHKERRQ(ierr);
806   PetscFunctionReturn(0);
807 }
808 
809 static PetscErrorCode PetscPartitionerInitialize_Shell(PetscPartitioner part)
810 {
811   PetscFunctionBegin;
812   part->ops->view           = PetscPartitionerView_Shell;
813   part->ops->setfromoptions = PetscPartitionerSetFromOptions_Shell;
814   part->ops->destroy        = PetscPartitionerDestroy_Shell;
815   part->ops->partition      = PetscPartitionerPartition_Shell;
816   PetscFunctionReturn(0);
817 }
818 
819 /*MC
820   PETSCPARTITIONERSHELL = "shell" - A PetscPartitioner object
821 
822   Level: intermediate
823 
824 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
825 M*/
826 
827 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Shell(PetscPartitioner part)
828 {
829   PetscPartitioner_Shell *p;
830   PetscErrorCode          ierr;
831 
832   PetscFunctionBegin;
833   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
834   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
835   part->data = p;
836 
837   ierr = PetscPartitionerInitialize_Shell(part);CHKERRQ(ierr);
838   p->random = PETSC_FALSE;
839   PetscFunctionReturn(0);
840 }
841 
842 /*@C
843   PetscPartitionerShellSetPartition - Set an artificial partition for a mesh
844 
845   Collective on PetscPartitioner
846 
847   Input Parameters:
848 + part     - The PetscPartitioner
849 . size     - The number of partitions
850 . sizes    - array of length size (or NULL) providing the number of points in each partition
851 - points   - array of length sum(sizes) (may be NULL iff sizes is NULL), a permutation of the points that groups those assigned to each partition in order (i.e., partition 0 first, partition 1 next, etc.)
852 
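  Sample usage, assigning cells 0-1 to partition 0 and cells 2-4 to partition 1 (a sketch assuming a PetscPartitioner part and a mesh with five local cells):
.vb
    PetscInt sizes[2]  = {2, 3};
    PetscInt points[5] = {0, 1, 2, 3, 4};

    PetscPartitionerShellSetPartition(part, 2, sizes, points);
.ve
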
853   Level: developer
854 
855   Notes:
856 
857     It is safe to free the sizes and points arrays after use in this routine.
858 
859 .seealso: DMPlexDistribute(), PetscPartitionerCreate()
860 @*/
861 PetscErrorCode PetscPartitionerShellSetPartition(PetscPartitioner part, PetscInt size, const PetscInt sizes[], const PetscInt points[])
862 {
863   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
864   PetscInt                proc, numPoints;
865   PetscErrorCode          ierr;
866 
867   PetscFunctionBegin;
868   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
869   if (sizes)  {PetscValidPointer(sizes, 3);}
870   if (points) {PetscValidPointer(points, 4);}
871   ierr = PetscSectionDestroy(&p->section);CHKERRQ(ierr);
872   ierr = ISDestroy(&p->partition);CHKERRQ(ierr);
873   ierr = PetscSectionCreate(PetscObjectComm((PetscObject) part), &p->section);CHKERRQ(ierr);
874   ierr = PetscSectionSetChart(p->section, 0, size);CHKERRQ(ierr);
875   if (sizes) {
876     for (proc = 0; proc < size; ++proc) {
877       ierr = PetscSectionSetDof(p->section, proc, sizes[proc]);CHKERRQ(ierr);
878     }
879   }
880   ierr = PetscSectionSetUp(p->section);CHKERRQ(ierr);
881   ierr = PetscSectionGetStorageSize(p->section, &numPoints);CHKERRQ(ierr);
882   ierr = ISCreateGeneral(PetscObjectComm((PetscObject) part), numPoints, points, PETSC_COPY_VALUES, &p->partition);CHKERRQ(ierr);
883   PetscFunctionReturn(0);
884 }
885 
886 /*@
887   PetscPartitionerShellSetRandom - Set the flag to use a random partition
888 
889   Collective on PetscPartitioner
890 
891   Input Parameters:
892 + part   - The PetscPartitioner
893 - random - The flag to use a random partition
894 
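  The flag can also be set from the options database with the option registered by this implementation:
.vb
    -petscpartitioner_shell_random
.ve
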
895   Level: intermediate
896 
897 .seealso: PetscPartitionerShellGetRandom(), PetscPartitionerCreate()
898 @*/
899 PetscErrorCode PetscPartitionerShellSetRandom(PetscPartitioner part, PetscBool random)
900 {
901   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
902 
903   PetscFunctionBegin;
904   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
905   p->random = random;
906   PetscFunctionReturn(0);
907 }
908 
909 /*@
910   PetscPartitionerShellGetRandom - get the flag to use a random partition
911 
912   Not Collective
913 
914   Input Parameter:
915 . part   - The PetscPartitioner
916 
917   Output Parameter:
918 . random - The flag to use a random partition
919 
920   Level: intermediate
921 
922 .seealso: PetscPartitionerShellSetRandom(), PetscPartitionerCreate()
923 @*/
924 PetscErrorCode PetscPartitionerShellGetRandom(PetscPartitioner part, PetscBool *random)
925 {
926   PetscPartitioner_Shell *p = (PetscPartitioner_Shell *) part->data;
927 
928   PetscFunctionBegin;
929   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
930   PetscValidPointer(random, 2);
931   *random = p->random;
932   PetscFunctionReturn(0);
933 }
934 
935 static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
936 {
937   PetscPartitioner_Simple *p = (PetscPartitioner_Simple *) part->data;
938   PetscErrorCode          ierr;
939 
940   PetscFunctionBegin;
941   ierr = PetscFree(p);CHKERRQ(ierr);
942   PetscFunctionReturn(0);
943 }
944 
945 static PetscErrorCode PetscPartitionerView_Simple_Ascii(PetscPartitioner part, PetscViewer viewer)
946 {
947   PetscFunctionBegin;
948   PetscFunctionReturn(0);
949 }
950 
951 static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
952 {
953   PetscBool      iascii;
954   PetscErrorCode ierr;
955 
956   PetscFunctionBegin;
957   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
958   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
959   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
960   if (iascii) {ierr = PetscPartitionerView_Simple_Ascii(part, viewer);CHKERRQ(ierr);}
961   PetscFunctionReturn(0);
962 }
963 
964 static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
965 {
966   MPI_Comm       comm;
967   PetscInt       np;
968   PetscMPIInt    size;
969   PetscErrorCode ierr;
970 
971   PetscFunctionBegin;
972   comm = PetscObjectComm((PetscObject)dm);
973   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
974   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
975   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
976   if (size == 1) {
977     for (np = 0; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, numVertices/nparts + ((numVertices % nparts) > np));CHKERRQ(ierr);}
978   }
979   else {
980     PetscMPIInt rank;
981     PetscInt nvGlobal, *offsets, myFirst, myLast;
982 
983     ierr = PetscMalloc1(size+1,&offsets);CHKERRQ(ierr);
984     offsets[0] = 0;
985     ierr = MPI_Allgather(&numVertices,1,MPIU_INT,&offsets[1],1,MPIU_INT,comm);CHKERRQ(ierr);
986     for (np = 2; np <= size; np++) {
987       offsets[np] += offsets[np-1];
988     }
989     nvGlobal = offsets[size];
990     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
991     myFirst = offsets[rank];
992     myLast  = offsets[rank + 1] - 1;
993     ierr = PetscFree(offsets);CHKERRQ(ierr);
994     if (numVertices) {
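      /* Parts are sized as evenly as possible: the first rem parts receive pBig = nvGlobal/nparts + 1
         vertices and the remaining parts receive pSmall = nvGlobal/nparts. Find the range of parts
         [firstPart, lastPart] overlapped by this rank's contiguous vertices [myFirst, myLast]. */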
995       PetscInt firstPart = 0, firstLargePart = 0;
996       PetscInt lastPart = 0, lastLargePart = 0;
997       PetscInt rem = nvGlobal % nparts;
998       PetscInt pSmall = nvGlobal/nparts;
999       PetscInt pBig = nvGlobal/nparts + 1;
1000 
1002       if (rem) {
1003         firstLargePart = myFirst / pBig;
1004         lastLargePart  = myLast  / pBig;
1005 
1006         if (firstLargePart < rem) {
1007           firstPart = firstLargePart;
1008         }
1009         else {
1010           firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
1011         }
1012         if (lastLargePart < rem) {
1013           lastPart = lastLargePart;
1014         }
1015         else {
1016           lastPart = rem + (myLast - (rem * pBig)) / pSmall;
1017         }
1018       }
1019       else {
1020         firstPart = myFirst / (nvGlobal/nparts);
1021         lastPart  = myLast  / (nvGlobal/nparts);
1022       }
1023 
1024       for (np = firstPart; np <= lastPart; np++) {
1025         PetscInt PartStart =  np    * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np);
1026         PetscInt PartEnd   = (np+1) * (nvGlobal/nparts) + PetscMin(nvGlobal % nparts,np+1);
1027 
1028         PartStart = PetscMax(PartStart,myFirst);
1029         PartEnd   = PetscMin(PartEnd,myLast+1);
1030         ierr = PetscSectionSetDof(partSection,np,PartEnd-PartStart);CHKERRQ(ierr);
1031       }
1032     }
1033   }
1034   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1035   PetscFunctionReturn(0);
1036 }
1037 
1038 static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
1039 {
1040   PetscFunctionBegin;
1041   part->ops->view      = PetscPartitionerView_Simple;
1042   part->ops->destroy   = PetscPartitionerDestroy_Simple;
1043   part->ops->partition = PetscPartitionerPartition_Simple;
1044   PetscFunctionReturn(0);
1045 }
1046 
1047 /*MC
1048   PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object
1049 
1050   Level: intermediate
1051 
1052 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1053 M*/
1054 
1055 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
1056 {
1057   PetscPartitioner_Simple *p;
1058   PetscErrorCode           ierr;
1059 
1060   PetscFunctionBegin;
1061   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1062   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1063   part->data = p;
1064 
1065   ierr = PetscPartitionerInitialize_Simple(part);CHKERRQ(ierr);
1066   PetscFunctionReturn(0);
1067 }
1068 
1069 static PetscErrorCode PetscPartitionerDestroy_Gather(PetscPartitioner part)
1070 {
1071   PetscPartitioner_Gather *p = (PetscPartitioner_Gather *) part->data;
1072   PetscErrorCode          ierr;
1073 
1074   PetscFunctionBegin;
1075   ierr = PetscFree(p);CHKERRQ(ierr);
1076   PetscFunctionReturn(0);
1077 }
1078 
1079 static PetscErrorCode PetscPartitionerView_Gather_Ascii(PetscPartitioner part, PetscViewer viewer)
1080 {
1081   PetscFunctionBegin;
1082   PetscFunctionReturn(0);
1083 }
1084 
1085 static PetscErrorCode PetscPartitionerView_Gather(PetscPartitioner part, PetscViewer viewer)
1086 {
1087   PetscBool      iascii;
1088   PetscErrorCode ierr;
1089 
1090   PetscFunctionBegin;
1091   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1092   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1093   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1094   if (iascii) {ierr = PetscPartitionerView_Gather_Ascii(part, viewer);CHKERRQ(ierr);}
1095   PetscFunctionReturn(0);
1096 }
1097 
1098 static PetscErrorCode PetscPartitionerPartition_Gather(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1099 {
1100   PetscInt       np;
1101   PetscErrorCode ierr;
1102 
1103   PetscFunctionBegin;
1104   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1105   ierr = ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition);CHKERRQ(ierr);
1106   ierr = PetscSectionSetDof(partSection,0,numVertices);CHKERRQ(ierr);
1107   for (np = 1; np < nparts; ++np) {ierr = PetscSectionSetDof(partSection, np, 0);CHKERRQ(ierr);}
1108   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1109   PetscFunctionReturn(0);
1110 }
1111 
1112 static PetscErrorCode PetscPartitionerInitialize_Gather(PetscPartitioner part)
1113 {
1114   PetscFunctionBegin;
1115   part->ops->view      = PetscPartitionerView_Gather;
1116   part->ops->destroy   = PetscPartitionerDestroy_Gather;
1117   part->ops->partition = PetscPartitionerPartition_Gather;
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 /*MC
1122   PETSCPARTITIONERGATHER = "gather" - A PetscPartitioner object
1123 
1124   Level: intermediate
1125 
1126 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1127 M*/
1128 
1129 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Gather(PetscPartitioner part)
1130 {
1131   PetscPartitioner_Gather *p;
1132   PetscErrorCode           ierr;
1133 
1134   PetscFunctionBegin;
1135   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1136   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1137   part->data = p;
1138 
1139   ierr = PetscPartitionerInitialize_Gather(part);CHKERRQ(ierr);
1140   PetscFunctionReturn(0);
1141 }
1142 
1144 static PetscErrorCode PetscPartitionerDestroy_Chaco(PetscPartitioner part)
1145 {
1146   PetscPartitioner_Chaco *p = (PetscPartitioner_Chaco *) part->data;
1147   PetscErrorCode          ierr;
1148 
1149   PetscFunctionBegin;
1150   ierr = PetscFree(p);CHKERRQ(ierr);
1151   PetscFunctionReturn(0);
1152 }
1153 
1154 static PetscErrorCode PetscPartitionerView_Chaco_Ascii(PetscPartitioner part, PetscViewer viewer)
1155 {
1156   PetscFunctionBegin;
1157   PetscFunctionReturn(0);
1158 }
1159 
1160 static PetscErrorCode PetscPartitionerView_Chaco(PetscPartitioner part, PetscViewer viewer)
1161 {
1162   PetscBool      iascii;
1163   PetscErrorCode ierr;
1164 
1165   PetscFunctionBegin;
1166   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1167   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1168   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1169   if (iascii) {ierr = PetscPartitionerView_Chaco_Ascii(part, viewer);CHKERRQ(ierr);}
1170   PetscFunctionReturn(0);
1171 }
1172 
1173 #if defined(PETSC_HAVE_CHACO)
1174 #if defined(PETSC_HAVE_UNISTD_H)
1175 #include <unistd.h>
1176 #endif
1177 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1178 #include <chaco.h>
1179 #else
1180 /* Older versions of Chaco do not have an include file */
1181 PETSC_EXTERN int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
1182                        float *ewgts, float *x, float *y, float *z, char *outassignname,
1183                        char *outfilename, short *assignment, int architecture, int ndims_tot,
1184                        int mesh_dims[3], double *goal, int global_method, int local_method,
1185                        int rqi_flag, int vmax, int ndims, double eigtol, long seed);
1186 #endif
1187 extern int FREE_GRAPH;
1188 #endif
1189 
1190 static PetscErrorCode PetscPartitionerPartition_Chaco(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1191 {
1192 #if defined(PETSC_HAVE_CHACO)
1193   enum {DEFAULT_METHOD = 1, INERTIAL_METHOD = 3};
1194   MPI_Comm       comm;
1195   int            nvtxs          = numVertices; /* number of vertices in full graph */
1196   int           *vwgts          = NULL;   /* weights for all vertices */
1197   float         *ewgts          = NULL;   /* weights for all edges */
1198   float         *x              = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
1199   char          *outassignname  = NULL;   /*  name of assignment output file */
1200   char          *outfilename    = NULL;   /* output file name */
1201   int            architecture   = 1;      /* 0 => hypercube, d => d-dimensional mesh */
1202   int            ndims_tot      = 0;      /* total number of cube dimensions to divide */
1203   int            mesh_dims[3];            /* dimensions of mesh of processors */
1204   double        *goal          = NULL;    /* desired set sizes for each set */
1205   int            global_method = 1;       /* global partitioning algorithm */
1206   int            local_method  = 1;       /* local partitioning algorithm */
1207   int            rqi_flag      = 0;       /* should I use RQI/Symmlq eigensolver? */
1208   int            vmax          = 200;     /* how many vertices to coarsen down to? */
1209   int            ndims         = 1;       /* number of eigenvectors (2^d sets) */
1210   double         eigtol        = 0.001;   /* tolerance on eigenvectors */
1211   long           seed          = 123636512; /* for random graph mutations */
1212 #if defined(PETSC_HAVE_CHACO_INT_ASSIGNMENT)
1213   int           *assignment;              /* Output partition */
1214 #else
1215   short int     *assignment;              /* Output partition */
1216 #endif
1217   int            fd_stdout, fd_pipe[2];
1218   PetscInt      *points;
1219   int            i, v, p;
1220   PetscErrorCode ierr;
1221 
1222   PetscFunctionBegin;
1223   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1224 #if defined (PETSC_USE_DEBUG)
1225   {
1226     int ival,isum;
1227     PetscBool distributed;
1228 
1229     ival = (numVertices > 0);
1230     ierr = MPI_Allreduce(&ival, &isum, 1, MPI_INT, MPI_SUM, comm);CHKERRQ(ierr);
1231     distributed = (isum > 1) ? PETSC_TRUE : PETSC_FALSE;
1232     if (distributed) SETERRQ(comm, PETSC_ERR_SUP, "Chaco cannot partition a distributed graph");
1233   }
1234 #endif
1235   if (!numVertices) {
1236     ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1237     ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1238     ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1239     PetscFunctionReturn(0);
1240   }
1241   FREE_GRAPH = 0;                         /* Do not let Chaco free my memory */
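  /* Chaco numbers vertices starting at 1, so shift the 0-based adjacency values up here and back down after the call */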
1242   for (i = 0; i < start[numVertices]; ++i) ++adjacency[i];
1243 
1244   if (global_method == INERTIAL_METHOD) {
1245     /* manager.createCellCoordinates(nvtxs, &x, &y, &z); */
1246     SETERRQ(comm, PETSC_ERR_SUP, "Inertial partitioning not yet supported");
1247   }
1248   mesh_dims[0] = nparts;
1249   mesh_dims[1] = 1;
1250   mesh_dims[2] = 1;
1251   ierr = PetscMalloc1(nvtxs, &assignment);CHKERRQ(ierr);
1252   /* Chaco outputs to stdout. We redirect this to a buffer. */
1253   /* TODO: check error codes for UNIX calls */
1254 #if defined(PETSC_HAVE_UNISTD_H)
1255   {
1256     int piperet;
1257     piperet = pipe(fd_pipe);
1258     if (piperet) SETERRQ(comm,PETSC_ERR_SYS,"Could not create pipe");
1259     fd_stdout = dup(1);
1260     close(1);
1261     dup2(fd_pipe[1], 1);
1262   }
1263 #endif
1264   ierr = interface(nvtxs, (int*) start, (int*) adjacency, vwgts, ewgts, x, y, z, outassignname, outfilename,
1265                    assignment, architecture, ndims_tot, mesh_dims, goal, global_method, local_method, rqi_flag,
1266                    vmax, ndims, eigtol, seed);
1267 #if defined(PETSC_HAVE_UNISTD_H)
1268   {
1269     char msgLog[10000];
1270     int  count;
1271 
1272     fflush(stdout);
1273     count = read(fd_pipe[0], msgLog, (10000-1)*sizeof(char));
1274     if (count < 0) count = 0;
1275     msgLog[count] = 0;
1276     close(1);
1277     dup2(fd_stdout, 1);
1278     close(fd_stdout);
1279     close(fd_pipe[0]);
1280     close(fd_pipe[1]);
1281     if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", msgLog);
1282   }
1283 #else
1284   if (ierr) SETERRQ1(comm, PETSC_ERR_LIB, "Error in Chaco library: %s", "error in stdout");
1285 #endif
1286   /* Convert to PetscSection+IS */
1287   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1288   for (v = 0; v < nvtxs; ++v) {
1289     ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);
1290   }
1291   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1292   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1293   for (p = 0, i = 0; p < nparts; ++p) {
1294     for (v = 0; v < nvtxs; ++v) {
1295       if (assignment[v] == p) points[i++] = v;
1296     }
1297   }
1298   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %d should be %d", i, nvtxs);
1299   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1300   if (global_method == INERTIAL_METHOD) {
1301     /* manager.destroyCellCoordinates(nvtxs, &x, &y, &z); */
1302   }
1303   ierr = PetscFree(assignment);CHKERRQ(ierr);
1304   for (i = 0; i < start[numVertices]; ++i) --adjacency[i];
1305   PetscFunctionReturn(0);
1306 #else
1307   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-chaco.");
1308 #endif
1309 }
1310 
1311 static PetscErrorCode PetscPartitionerInitialize_Chaco(PetscPartitioner part)
1312 {
1313   PetscFunctionBegin;
1314   part->ops->view      = PetscPartitionerView_Chaco;
1315   part->ops->destroy   = PetscPartitionerDestroy_Chaco;
1316   part->ops->partition = PetscPartitionerPartition_Chaco;
1317   PetscFunctionReturn(0);
1318 }
1319 
1320 /*MC
1321   PETSCPARTITIONERCHACO = "chaco" - A PetscPartitioner object using the Chaco library
1322 
1323   Level: intermediate
1324 
1325 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1326 M*/
1327 
1328 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Chaco(PetscPartitioner part)
1329 {
1330   PetscPartitioner_Chaco *p;
1331   PetscErrorCode          ierr;
1332 
1333   PetscFunctionBegin;
1334   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1335   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1336   part->data = p;
1337 
1338   ierr = PetscPartitionerInitialize_Chaco(part);CHKERRQ(ierr);
1339   ierr = PetscCitationsRegister(ChacoPartitionerCitation, &ChacoPartitionercite);CHKERRQ(ierr);
1340   PetscFunctionReturn(0);
1341 }
1342 
1343 static const char *ptypes[] = {"kway", "rb"};
1344 
1345 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
1346 {
1347   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1348   PetscErrorCode             ierr;
1349 
1350   PetscFunctionBegin;
1351   ierr = PetscFree(p);CHKERRQ(ierr);
1352   PetscFunctionReturn(0);
1353 }
1354 
1355 static PetscErrorCode PetscPartitionerView_ParMetis_Ascii(PetscPartitioner part, PetscViewer viewer)
1356 {
1357   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1358   PetscErrorCode             ierr;
1359 
1360   PetscFunctionBegin;
1361   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1362   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
1363   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
1364   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
1365   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1366   PetscFunctionReturn(0);
1367 }
1368 
1369 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
1370 {
1371   PetscBool      iascii;
1372   PetscErrorCode ierr;
1373 
1374   PetscFunctionBegin;
1375   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1376   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1377   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1378   if (iascii) {ierr = PetscPartitionerView_ParMetis_Ascii(part, viewer);CHKERRQ(ierr);}
1379   PetscFunctionReturn(0);
1380 }
1381 
1382 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1383 {
1384   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
1385   PetscErrorCode            ierr;
1386 
1387   PetscFunctionBegin;
1388   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
1389   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
1390   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
1391   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
1392   ierr = PetscOptionsTail();CHKERRQ(ierr);
1393   PetscFunctionReturn(0);
1394 }
1395 
1396 #if defined(PETSC_HAVE_PARMETIS)
1397 #include <parmetis.h>
1398 #endif
1399 
1400 static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1401 {
1402 #if defined(PETSC_HAVE_PARMETIS)
1403   PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
1404   MPI_Comm       comm;
1405   PetscSection   section;
1406   PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
1407   PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
1408   PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
1409   PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
1410   PetscInt      *vwgt        = NULL;        /* Vertex weights */
1411   PetscInt      *adjwgt      = NULL;        /* Edge weights */
1412   PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
1413   PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
1414   PetscInt       ncon        = 1;           /* The number of weights per vertex */
1415   PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
1416   real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
1417   real_t        *ubvec;                     /* The imbalance tolerance for each vertex weight constraint */
1418   PetscInt       options[64];               /* Options */
1419   /* Outputs */
1420   PetscInt       v, i, *assignment, *points;
1421   PetscMPIInt    size, rank, p;
1422   PetscErrorCode ierr;
1423 
1424   PetscFunctionBegin;
1425   ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
1426   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1427   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1428   /* Calculate vertex distribution */
1429   ierr = PetscMalloc5(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment,nvtxs,&vwgt);CHKERRQ(ierr);
1430   vtxdist[0] = 0;
1431   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1432   for (p = 2; p <= size; ++p) {
1433     vtxdist[p] += vtxdist[p-1];
1434   }
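  /* Example: with local vertex counts {3, 4, 2} on three ranks, the allgather and prefix sum
     above give vtxdist = {0, 3, 7, 9}, so rank p owns global vertices [vtxdist[p], vtxdist[p+1]) */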
1435   /* Calculate partition weights */
1436   for (p = 0; p < nparts; ++p) {
1437     tpwgts[p] = 1.0/nparts;
1438   }
1439   ubvec[0] = pm->imbalanceRatio;
1440   /* Weight cells by dofs on cell by default */
1441   ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1442   if (section) {
1443     PetscInt cStart, cEnd, dof;
1444 
1445     ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1446     for (v = cStart; v < cEnd; ++v) {
1447       ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1448       /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1449       /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1450       if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1451     }
1452   } else {
1453     for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1454   }
1455   wgtflag |= 2; /* have weights on graph vertices */
1456 
1457   if (nparts == 1) {
1458     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1459   } else {
1460     for (p = 0; p < size && !vtxdist[p+1]; ++p);
1461     if (vtxdist[p+1] == vtxdist[size]) {
1462       if (rank == p) {
1463         ierr = METIS_SetDefaultOptions(options); /* initialize all defaults */
1464         if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
1465         if (metis_ptype == 1) {
1466           PetscStackPush("METIS_PartGraphRecursive");
1467           ierr = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1468           PetscStackPop;
1469           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
1470         } else {
1471           /*
1472            It would be nice to activate the two options below, but they would need some actual testing.
1473            - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
1474            - If CONTIG is set to 1, METIS will exit with an error if the graph is disconnected, despite the manual saying the option is ignored in such a case.
1475           */
1476           /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
1477           /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
1478           PetscStackPush("METIS_PartGraphKway");
1479           ierr = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
1480           PetscStackPop;
1481           if (ierr != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
1482         }
1483       }
1484     } else {
1485       options[0] = 1;
1486       options[1] = pm->debugFlag;
1487       PetscStackPush("ParMETIS_V3_PartKway");
1488       ierr = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &comm);
1489       PetscStackPop;
1490       if (ierr != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", ierr);
1491     }
1492   }
1493   /* Convert to PetscSection+IS */
1494   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1495   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1496   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1497   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1498   for (p = 0, i = 0; p < nparts; ++p) {
1499     for (v = 0; v < nvtxs; ++v) {
1500       if (assignment[v] == p) points[i++] = v;
1501     }
1502   }
1503   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1504   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1505   ierr = PetscFree5(vtxdist,tpwgts,ubvec,assignment,vwgt);CHKERRQ(ierr);
1506   PetscFunctionReturn(0);
1507 #else
1508   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
1509 #endif
1510 }
1511 
1512 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
1513 {
1514   PetscFunctionBegin;
1515   part->ops->view           = PetscPartitionerView_ParMetis;
1516   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
1517   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
1518   part->ops->partition      = PetscPartitionerPartition_ParMetis;
1519   PetscFunctionReturn(0);
1520 }
1521 
1522 /*MC
1523   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMetis library
1524 
1525   Level: intermediate
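
  Example: a minimal sketch of selecting and tuning this partitioner from the options database (assumes PETSc was configured with --download-parmetis)
.vb
  -petscpartitioner_type parmetis -petscpartitioner_parmetis_type kway -petscpartitioner_parmetis_imbalance_ratio 1.05
.ve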
1526 
1527 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1528 M*/
1529 
1530 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
1531 {
1532   PetscPartitioner_ParMetis *p;
1533   PetscErrorCode          ierr;
1534 
1535   PetscFunctionBegin;
1536   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1537   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
1538   part->data = p;
1539 
1540   p->ptype          = 0;
1541   p->imbalanceRatio = 1.05;
1542   p->debugFlag      = 0;
1543 
1544   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
1545   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionercite);CHKERRQ(ierr);
1546   PetscFunctionReturn(0);
1547 }
1548 
1549 
1550 PetscBool PTScotchPartitionercite = PETSC_FALSE;
1551 const char PTScotchPartitionerCitation[] =
1552   "@article{PTSCOTCH,\n"
1553   "  author  = {C. Chevalier and F. Pellegrini},\n"
1554   "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
1555   "  journal = {Parallel Computing},\n"
1556   "  volume  = {34},\n"
1557   "  number  = {6},\n"
1558   "  pages   = {318--331},\n"
1559   "  year    = {2008},\n"
1560   "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
1561   "}\n";
1562 
1563 typedef struct {
1564   PetscInt  strategy;
1565   PetscReal imbalance;
1566 } PetscPartitioner_PTScotch;
1567 
1568 static const char *const
1569 PTScotchStrategyList[] = {
1570   "DEFAULT",
1571   "QUALITY",
1572   "SPEED",
1573   "BALANCE",
1574   "SAFETY",
1575   "SCALABILITY",
1576   "RECURSIVE",
1577   "REMAP"
1578 };
1579 
1580 #if defined(PETSC_HAVE_PTSCOTCH)
1581 
1582 EXTERN_C_BEGIN
1583 #include <ptscotch.h>
1584 EXTERN_C_END
1585 
1586 #define CHKERRPTSCOTCH(ierr) do { if (ierr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while(0)
1587 
1588 static int PTScotch_Strategy(PetscInt strategy)
1589 {
1590   switch (strategy) {
1591   case  0: return SCOTCH_STRATDEFAULT;
1592   case  1: return SCOTCH_STRATQUALITY;
1593   case  2: return SCOTCH_STRATSPEED;
1594   case  3: return SCOTCH_STRATBALANCE;
1595   case  4: return SCOTCH_STRATSAFETY;
1596   case  5: return SCOTCH_STRATSCALABILITY;
1597   case  6: return SCOTCH_STRATRECURSIVE;
1598   case  7: return SCOTCH_STRATREMAP;
1599   default: return SCOTCH_STRATDEFAULT;
1600   }
1601 }
1602 
1603 
1604 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1605                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[])
1606 {
1607   SCOTCH_Graph   grafdat;
1608   SCOTCH_Strat   stradat;
1609   SCOTCH_Num     vertnbr = n;
1610   SCOTCH_Num     edgenbr = xadj[n];
1611   SCOTCH_Num*    velotab = vtxwgt;
1612   SCOTCH_Num*    edlotab = adjwgt;
1613   SCOTCH_Num     flagval = strategy;
1614   double         kbalval = imbalance;
1615   PetscErrorCode ierr;
1616 
1617   PetscFunctionBegin;
1618   {
1619     PetscBool flg = PETSC_TRUE;
1620     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1621     if (!flg) velotab = NULL;
1622   }
1623   ierr = SCOTCH_graphInit(&grafdat);CHKERRPTSCOTCH(ierr);
1624   ierr = SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);CHKERRPTSCOTCH(ierr);
1625   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1626   ierr = SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1627 #if defined(PETSC_USE_DEBUG)
1628   ierr = SCOTCH_graphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1629 #endif
1630   ierr = SCOTCH_graphPart(&grafdat, nparts, &stradat, part);CHKERRPTSCOTCH(ierr);
1631   SCOTCH_stratExit(&stradat);
1632   SCOTCH_graphExit(&grafdat);
1633   PetscFunctionReturn(0);
1634 }
1635 
1636 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
1637                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num part[], MPI_Comm comm)
1638 {
1639   PetscMPIInt     procglbnbr;
1640   PetscMPIInt     proclocnum;
1641   SCOTCH_Arch     archdat;
1642   SCOTCH_Dgraph   grafdat;
1643   SCOTCH_Dmapping mappdat;
1644   SCOTCH_Strat    stradat;
1645   SCOTCH_Num      vertlocnbr;
1646   SCOTCH_Num      edgelocnbr;
1647   SCOTCH_Num*     veloloctab = vtxwgt;
1648   SCOTCH_Num*     edloloctab = adjwgt;
1649   SCOTCH_Num      flagval = strategy;
1650   double          kbalval = imbalance;
1651   PetscErrorCode  ierr;
1652 
1653   PetscFunctionBegin;
1654   {
1655     PetscBool flg = PETSC_TRUE;
1656     ierr = PetscOptionsGetBool(NULL, NULL, "-petscpartitioner_ptscotch_vertex_weight", &flg, NULL);CHKERRQ(ierr);
1657     if (!flg) veloloctab = NULL;
1658   }
1659   ierr = MPI_Comm_size(comm, &procglbnbr);CHKERRQ(ierr);
1660   ierr = MPI_Comm_rank(comm, &proclocnum);CHKERRQ(ierr);
1661   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
1662   edgelocnbr = xadj[vertlocnbr];
1663 
1664   ierr = SCOTCH_dgraphInit(&grafdat, comm);CHKERRPTSCOTCH(ierr);
1665   ierr = SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);CHKERRPTSCOTCH(ierr);
1666 #if defined(PETSC_USE_DEBUG)
1667   ierr = SCOTCH_dgraphCheck(&grafdat);CHKERRPTSCOTCH(ierr);
1668 #endif
1669   ierr = SCOTCH_stratInit(&stradat);CHKERRPTSCOTCH(ierr);
1670   ierr = SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);CHKERRPTSCOTCH(ierr);
1671   ierr = SCOTCH_archInit(&archdat);CHKERRPTSCOTCH(ierr);
1672   ierr = SCOTCH_archCmplt(&archdat, nparts);CHKERRPTSCOTCH(ierr);
1673   ierr = SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);CHKERRPTSCOTCH(ierr);
1674   ierr = SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);CHKERRPTSCOTCH(ierr);
1675   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
1676   SCOTCH_archExit(&archdat);
1677   SCOTCH_stratExit(&stradat);
1678   SCOTCH_dgraphExit(&grafdat);
1679   PetscFunctionReturn(0);
1680 }
1681 
1682 #endif /* PETSC_HAVE_PTSCOTCH */
1683 
1684 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
1685 {
1686   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1687   PetscErrorCode             ierr;
1688 
1689   PetscFunctionBegin;
1690   ierr = PetscFree(p);CHKERRQ(ierr);
1691   PetscFunctionReturn(0);
1692 }
1693 
1694 static PetscErrorCode PetscPartitionerView_PTScotch_Ascii(PetscPartitioner part, PetscViewer viewer)
1695 {
1696   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1697   PetscErrorCode            ierr;
1698 
1699   PetscFunctionBegin;
1700   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1701   ierr = PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]);CHKERRQ(ierr);
1702   ierr = PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n",(double)p->imbalance);CHKERRQ(ierr);
1703   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1704   PetscFunctionReturn(0);
1705 }
1706 
1707 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
1708 {
1709   PetscBool      iascii;
1710   PetscErrorCode ierr;
1711 
1712   PetscFunctionBegin;
1713   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1714   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
1715   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
1716   if (iascii) {ierr = PetscPartitionerView_PTScotch_Ascii(part, viewer);CHKERRQ(ierr);}
1717   PetscFunctionReturn(0);
1718 }
1719 
1720 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
1721 {
1722   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
1723   const char *const         *slist = PTScotchStrategyList;
1724   PetscInt                  nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
1725   PetscBool                 flag;
1726   PetscErrorCode            ierr;
1727 
1728   PetscFunctionBegin;
1729   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");CHKERRQ(ierr);
1730   ierr = PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag);CHKERRQ(ierr);
1731   ierr = PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag);CHKERRQ(ierr);
1732   ierr = PetscOptionsTail();CHKERRQ(ierr);
1733   PetscFunctionReturn(0);
1734 }
1735 
1736 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, DM dm, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection partSection, IS *partition)
1737 {
1738 #if defined(PETSC_HAVE_PTSCOTCH)
1739   MPI_Comm       comm       = PetscObjectComm((PetscObject)part);
1740   PetscInt       nvtxs      = numVertices; /* The number of vertices in full graph */
1741   PetscInt      *vtxdist;                  /* Distribution of vertices across processes */
1742   PetscInt      *xadj       = start;       /* Start of edge list for each vertex */
1743   PetscInt      *adjncy     = adjacency;   /* Edge lists for all vertices */
1744   PetscInt      *vwgt       = NULL;        /* Vertex weights */
1745   PetscInt      *adjwgt     = NULL;        /* Edge weights */
1746   PetscInt       v, i, *assignment, *points;
1747   PetscMPIInt    size, rank, p;
1748   PetscErrorCode ierr;
1749 
1750   PetscFunctionBegin;
1751   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1752   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1753   ierr = PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment);CHKERRQ(ierr); /* vtxdist is indexed up to size below */
1754 
1755   /* Calculate vertex distribution */
1756   vtxdist[0] = 0;
1757   ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRQ(ierr);
1758   for (p = 2; p <= size; ++p) {
1759     vtxdist[p] += vtxdist[p-1];
1760   }
1761 
1762   if (nparts == 1) {
1763     ierr = PetscMemzero(assignment, nvtxs * sizeof(PetscInt));CHKERRQ(ierr);
1764   } else {
1765     PetscSection section;
1766     /* Weight cells by dofs on cell by default */
1767     ierr = PetscMalloc1(PetscMax(nvtxs,1),&vwgt);CHKERRQ(ierr);
1768     ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1769     if (section) {
1770       PetscInt cStart, cEnd, dof;
1771       ierr = DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);CHKERRQ(ierr);
1772       for (v = cStart; v < cEnd; ++v) {
1773         ierr = PetscSectionGetDof(section, v, &dof);CHKERRQ(ierr);
1774         /* WARNING: Assumes that meshes with overlap have the overlapped cells at the end of the stratum. */
1775         /* To do this properly, we should use the cell numbering created in DMPlexCreatePartitionerGraph. */
1776         if (v-cStart < numVertices) vwgt[v-cStart] = PetscMax(dof, 1);
1777       }
1778     } else {
1779       for (v = 0; v < nvtxs; ++v) vwgt[v] = 1;
1780     }
1781     {
1782       PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
1783       int                       strat = PTScotch_Strategy(pts->strategy);
1784       double                    imbal = (double)pts->imbalance;
1785 
1786       for (p = 0; p < size && !vtxdist[p+1]; ++p);
1787       if (vtxdist[p+1] == vtxdist[size]) {
1788         if (rank == p) {
1789           ierr = PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, assignment);CHKERRQ(ierr);
1790         }
1791       } else {
1792         ierr = PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, assignment, comm);CHKERRQ(ierr);
1793       }
1794     }
1795     ierr = PetscFree(vwgt);CHKERRQ(ierr);
1796   }
1797 
1798   /* Convert to PetscSection+IS */
1799   ierr = PetscSectionSetChart(partSection, 0, nparts);CHKERRQ(ierr);
1800   for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
1801   ierr = PetscSectionSetUp(partSection);CHKERRQ(ierr);
1802   ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
1803   for (p = 0, i = 0; p < nparts; ++p) {
1804     for (v = 0; v < nvtxs; ++v) {
1805       if (assignment[v] == p) points[i++] = v;
1806     }
1807   }
1808   if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
1809   ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
1810 
1811   ierr = PetscFree2(vtxdist,assignment);CHKERRQ(ierr);
1812   PetscFunctionReturn(0);
1813 #else
1814   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
1815 #endif
1816 }
1817 
1818 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
1819 {
1820   PetscFunctionBegin;
1821   part->ops->view           = PetscPartitionerView_PTScotch;
1822   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
1823   part->ops->partition      = PetscPartitionerPartition_PTScotch;
1824   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
1825   PetscFunctionReturn(0);
1826 }
1827 
1828 /*MC
1829   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
1830 
1831   Level: intermediate
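
  Example: a minimal sketch of selecting this partitioner and its strategy from the options database (assumes PETSc was configured with --download-ptscotch)
.vb
  -petscpartitioner_type ptscotch -petscpartitioner_ptscotch_strategy QUALITY -petscpartitioner_ptscotch_imbalance 0.01
.ve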
1832 
1833 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
1834 M*/
1835 
1836 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
1837 {
1838   PetscPartitioner_PTScotch *p;
1839   PetscErrorCode          ierr;
1840 
1841   PetscFunctionBegin;
1842   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
1843   ierr = PetscNewLog(part, &p);CHKERRQ(ierr);
1844   part->data = p;
1845 
1846   p->strategy  = 0;
1847   p->imbalance = 0.01;
1848 
1849   ierr = PetscPartitionerInitialize_PTScotch(part);CHKERRQ(ierr);
1850   ierr = PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionercite);CHKERRQ(ierr);
1851   PetscFunctionReturn(0);
1852 }
1853 
1854 
1855 /*@
1856   DMPlexGetPartitioner - Get the mesh partitioner
1857 
1858   Not collective
1859 
1860   Input Parameter:
1861 . dm - The DM
1862 
1863   Output Parameter:
1864 . part - The PetscPartitioner
1865 
1866   Level: developer
1867 
1868   Note: This gets a borrowed reference, so the user should not destroy this PetscPartitioner.
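
  Example: a minimal sketch of customizing the partitioner of an existing DMPlex dm; the reference is borrowed, so nothing is destroyed here
.vb
  PetscPartitioner part;

  ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
.ve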
1869 
1870 .seealso: DMPlexDistribute(), DMPlexSetPartitioner(), PetscPartitionerCreate()
1871 @*/
1872 PetscErrorCode DMPlexGetPartitioner(DM dm, PetscPartitioner *part)
1873 {
1874   DM_Plex       *mesh = (DM_Plex *) dm->data;
1875 
1876   PetscFunctionBegin;
1877   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1878   PetscValidPointer(part, 2);
1879   *part = mesh->partitioner;
1880   PetscFunctionReturn(0);
1881 }
1882 
1883 /*@
1884   DMPlexSetPartitioner - Set the mesh partitioner
1885 
1886   Logically collective on dm and part
1887 
1888   Input Parameters:
1889 + dm - The DM
1890 - part - The partitioner
1891 
1892   Level: developer
1893 
1894   Note: Any existing PetscPartitioner will be destroyed.
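
  Example: a minimal sketch of replacing the partitioner; the DM takes its own reference, so the caller drops its handle afterwards
.vb
  PetscPartitioner part;

  ierr = PetscPartitionerCreate(PetscObjectComm((PetscObject) dm), &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS);CHKERRQ(ierr);
  ierr = DMPlexSetPartitioner(dm, part);CHKERRQ(ierr);
  ierr = PetscPartitionerDestroy(&part);CHKERRQ(ierr);
.ve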
1895 
1896 .seealso: DMPlexDistribute(), DMPlexGetPartitioner(), PetscPartitionerCreate()
1897 @*/
1898 PetscErrorCode DMPlexSetPartitioner(DM dm, PetscPartitioner part)
1899 {
1900   DM_Plex       *mesh = (DM_Plex *) dm->data;
1901   PetscErrorCode ierr;
1902 
1903   PetscFunctionBegin;
1904   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1905   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 2);
1906   ierr = PetscObjectReference((PetscObject)part);CHKERRQ(ierr);
1907   ierr = PetscPartitionerDestroy(&mesh->partitioner);CHKERRQ(ierr);
1908   mesh->partitioner = part;
1909   PetscFunctionReturn(0);
1910 }
1911 
1912 static PetscErrorCode DMPlexAddClosure_Tree(DM dm, PetscHSetI ht, PetscInt point, PetscBool up, PetscBool down)
1913 {
1914   PetscErrorCode ierr;
1915 
1916   PetscFunctionBegin;
1917   if (up) {
1918     PetscInt parent;
1919 
1920     ierr = DMPlexGetTreeParent(dm,point,&parent,NULL);CHKERRQ(ierr);
1921     if (parent != point) {
1922       PetscInt closureSize, *closure = NULL, i;
1923 
1924       ierr = DMPlexGetTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1925       for (i = 0; i < closureSize; i++) {
1926         PetscInt cpoint = closure[2*i];
1927 
1928         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
1929         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_TRUE,PETSC_FALSE);CHKERRQ(ierr);
1930       }
1931       ierr = DMPlexRestoreTransitiveClosure(dm,parent,PETSC_TRUE,&closureSize,&closure);CHKERRQ(ierr);
1932     }
1933   }
1934   if (down) {
1935     PetscInt numChildren;
1936     const PetscInt *children;
1937 
1938     ierr = DMPlexGetTreeChildren(dm,point,&numChildren,&children);CHKERRQ(ierr);
1939     if (numChildren) {
1940       PetscInt i;
1941 
1942       for (i = 0; i < numChildren; i++) {
1943         PetscInt cpoint = children[i];
1944 
1945         ierr = PetscHSetIAdd(ht, cpoint);CHKERRQ(ierr);
1946         ierr = DMPlexAddClosure_Tree(dm,ht,cpoint,PETSC_FALSE,PETSC_TRUE);CHKERRQ(ierr);
1947       }
1948     }
1949   }
1950   PetscFunctionReturn(0);
1951 }
1952 
1953 /*@
1954   DMPlexPartitionLabelClosure - Add the closure of all points to the partition label
1955 
1956   Input Parameters:
1957 + dm     - The DM
1958 - label  - DMLabel assigning ranks to remote roots
1959 
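  Example: a minimal sketch, assuming point is a locally owned mesh point and r the rank it is being sent to
.vb
  ierr = DMLabelSetValue(label, point, r);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelClosure(dm, label);CHKERRQ(ierr);
.ve
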
1960   Level: developer
1961 
1962 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
1963 @*/
1964 PetscErrorCode DMPlexPartitionLabelClosure(DM dm, DMLabel label)
1965 {
1966   IS              rankIS,   pointIS;
1967   const PetscInt *ranks,   *points;
1968   PetscInt        numRanks, numPoints, r, p, c, closureSize;
1969   PetscInt       *closure = NULL;
1970   DM_Plex        *mesh    = (DM_Plex *)dm->data;
1971   PetscBool       hasTree = (mesh->parentSection || mesh->childSection) ? PETSC_TRUE : PETSC_FALSE;
1972   PetscErrorCode  ierr;
1973 
1974   PetscFunctionBegin;
1975   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
1976   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
1977   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
1978 
1979   for (r = 0; r < numRanks; ++r) {
1980     const PetscInt rank = ranks[r];
1981     PetscHSetI     ht;
1982     PetscInt       nelems, *elems, off = 0;
1983 
1984     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
1985     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
1986     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
1987     ierr = PetscHSetICreate(&ht);CHKERRQ(ierr);
1988     ierr = PetscHSetIResize(ht, numPoints*16);CHKERRQ(ierr);
1989     for (p = 0; p < numPoints; ++p) {
1990       ierr = DMPlexGetTransitiveClosure(dm, points[p], PETSC_TRUE, &closureSize, &closure);CHKERRQ(ierr);
1991       for (c = 0; c < closureSize*2; c += 2) {
1992         ierr = PetscHSetIAdd(ht, closure[c]);CHKERRQ(ierr);
1993         if (hasTree) {ierr = DMPlexAddClosure_Tree(dm, ht, closure[c], PETSC_TRUE, PETSC_TRUE);CHKERRQ(ierr);}
1994       }
1995     }
1996     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
1997     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
1998     ierr = PetscHSetIGetSize(ht, &nelems);CHKERRQ(ierr);
1999     ierr = PetscMalloc1(nelems, &elems);CHKERRQ(ierr);
2000     ierr = PetscHSetIGetElems(ht, &off, elems);CHKERRQ(ierr);
2001     ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
2002     ierr = PetscSortInt(nelems, elems);CHKERRQ(ierr);
2003     ierr = ISCreateGeneral(PETSC_COMM_SELF, nelems, elems, PETSC_OWN_POINTER, &pointIS);CHKERRQ(ierr);
2004     ierr = DMLabelSetStratumIS(label, rank, pointIS);CHKERRQ(ierr);
2005     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2006   }
2007 
2008   if (closure) {ierr = DMPlexRestoreTransitiveClosure(dm, 0, PETSC_TRUE, NULL, &closure);CHKERRQ(ierr);}
2009   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2010   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2011   PetscFunctionReturn(0);
2012 }
2013 
2014 /*@
2015   DMPlexPartitionLabelAdjacency - Add one level of adjacent points to the partition label
2016 
2017   Input Parameters:
2018 + dm     - The DM
2019 - label  - DMLabel assigning ranks to remote roots
2020 
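  Example: a minimal sketch, assuming label already marks the points being sent to each rank; each call widens the marked set by one level of adjacency (l and nlevels are hypothetical)
.vb
  for (l = 0; l < nlevels; ++l) {
    ierr = DMPlexPartitionLabelAdjacency(dm, label);CHKERRQ(ierr);
  }
.ve
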
2021   Level: developer
2022 
2023 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2024 @*/
2025 PetscErrorCode DMPlexPartitionLabelAdjacency(DM dm, DMLabel label)
2026 {
2027   IS              rankIS,   pointIS;
2028   const PetscInt *ranks,   *points;
2029   PetscInt        numRanks, numPoints, r, p, a, adjSize;
2030   PetscInt       *adj = NULL;
2031   PetscErrorCode  ierr;
2032 
2033   PetscFunctionBegin;
2034   ierr = DMLabelGetValueIS(label, &rankIS);CHKERRQ(ierr);
2035   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2036   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2037   for (r = 0; r < numRanks; ++r) {
2038     const PetscInt rank = ranks[r];
2039 
2040     ierr = DMLabelGetStratumIS(label, rank, &pointIS);CHKERRQ(ierr);
2041     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2042     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2043     for (p = 0; p < numPoints; ++p) {
2044       adjSize = PETSC_DETERMINE;
2045       ierr = DMPlexGetAdjacency(dm, points[p], &adjSize, &adj);CHKERRQ(ierr);
2046       for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(label, adj[a], rank);CHKERRQ(ierr);}
2047     }
2048     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2049     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2050   }
2051   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2052   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2053   ierr = PetscFree(adj);CHKERRQ(ierr);
2054   PetscFunctionReturn(0);
2055 }
2056 
2057 /*@
2058   DMPlexPartitionLabelPropagate - Propagate points in a partition label over the point SF
2059 
2060   Input Parameters:
2061 + dm     - The DM
2062 - label  - DMLabel assigning ranks to remote roots
2063 
2064   Level: developer
2065 
2066   Note: This is required when generating multi-level overlaps to capture
2067   overlap points from non-neighbouring partitions.
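
  Example: a minimal sketch of growing a two-level overlap, propagating after each adjacency step so that points reaching non-neighbouring ranks are not lost
.vb
  ierr = DMPlexPartitionLabelAdjacency(dm, label);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelPropagate(dm, label);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelAdjacency(dm, label);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelPropagate(dm, label);CHKERRQ(ierr);
.ve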
2068 
2069 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2070 @*/
2071 PetscErrorCode DMPlexPartitionLabelPropagate(DM dm, DMLabel label)
2072 {
2073   MPI_Comm        comm;
2074   PetscMPIInt     rank;
2075   PetscSF         sfPoint;
2076   DMLabel         lblRoots, lblLeaves;
2077   IS              rankIS, pointIS;
2078   const PetscInt *ranks;
2079   PetscInt        numRanks, r;
2080   PetscErrorCode  ierr;
2081 
2082   PetscFunctionBegin;
2083   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2084   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2085   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2086   /* Pull point contributions from remote leaves into local roots */
2087   ierr = DMLabelGather(label, sfPoint, &lblLeaves);CHKERRQ(ierr);
2088   ierr = DMLabelGetValueIS(lblLeaves, &rankIS);CHKERRQ(ierr);
2089   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2090   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2091   for (r = 0; r < numRanks; ++r) {
2092     const PetscInt remoteRank = ranks[r];
2093     if (remoteRank == rank) continue;
2094     ierr = DMLabelGetStratumIS(lblLeaves, remoteRank, &pointIS);CHKERRQ(ierr);
2095     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2096     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2097   }
2098   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2099   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2100   ierr = DMLabelDestroy(&lblLeaves);CHKERRQ(ierr);
2101   /* Push point contributions from roots into remote leaves */
2102   ierr = DMLabelDistribute(label, sfPoint, &lblRoots);CHKERRQ(ierr);
2103   ierr = DMLabelGetValueIS(lblRoots, &rankIS);CHKERRQ(ierr);
2104   ierr = ISGetLocalSize(rankIS, &numRanks);CHKERRQ(ierr);
2105   ierr = ISGetIndices(rankIS, &ranks);CHKERRQ(ierr);
2106   for (r = 0; r < numRanks; ++r) {
2107     const PetscInt remoteRank = ranks[r];
2108     if (remoteRank == rank) continue;
2109     ierr = DMLabelGetStratumIS(lblRoots, remoteRank, &pointIS);CHKERRQ(ierr);
2110     ierr = DMLabelInsertIS(label, pointIS, remoteRank);CHKERRQ(ierr);
2111     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2112   }
2113   ierr = ISRestoreIndices(rankIS, &ranks);CHKERRQ(ierr);
2114   ierr = ISDestroy(&rankIS);CHKERRQ(ierr);
2115   ierr = DMLabelDestroy(&lblRoots);CHKERRQ(ierr);
2116   PetscFunctionReturn(0);
2117 }
2118 
2119 /*@
2120   DMPlexPartitionLabelInvert - Create a partition label of remote roots from a local root label
2121 
2122   Input Parameters:
2123 + dm        - The DM
2124 . rootLabel - DMLabel assigning ranks to local roots
2125 - processSF - A star forest mapping into the local index on each remote rank
2126 
2127   Output Parameter:
2128 . leafLabel - DMLabel assigning ranks to remote roots
2129 
2130   Note: The rootLabel defines a send pattern by mapping local points to remote target ranks. The
2131   resulting leafLabel is a receiver mapping of remote roots to their parent rank.
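
  Example: a minimal sketch, assuming rootLabel marks local points with their destination ranks and leafLabel starts empty
.vb
  ierr = DMPlexPartitionLabelInvert(dm, rootLabel, processSF, leafLabel);CHKERRQ(ierr);
.ve
  Afterwards leafLabel records, for each point this rank will receive, the rank that currently owns it.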
2132 
2133   Level: developer
2134 
2135 .seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexCreateOverlap()
2136 @*/
2137 PetscErrorCode DMPlexPartitionLabelInvert(DM dm, DMLabel rootLabel, PetscSF processSF, DMLabel leafLabel)
2138 {
2139   MPI_Comm           comm;
2140   PetscMPIInt        rank, size;
2141   PetscInt           p, n, numNeighbors, ssize, l, nleaves;
2142   PetscSF            sfPoint;
2143   PetscSFNode       *rootPoints, *leafPoints;
2144   PetscSection       rootSection, leafSection;
2145   const PetscSFNode *remote;
2146   const PetscInt    *local, *neighbors;
2147   IS                 valueIS;
2148   PetscErrorCode     ierr;
2149 
2150   PetscFunctionBegin;
2151   ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
2152   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2153   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2154   ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
2155 
2156   /* Convert to (point, rank) and use actual owners */
2157   ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
2158   ierr = PetscSectionSetChart(rootSection, 0, size);CHKERRQ(ierr);
2159   ierr = DMLabelGetValueIS(rootLabel, &valueIS);CHKERRQ(ierr);
2160   ierr = ISGetLocalSize(valueIS, &numNeighbors);CHKERRQ(ierr);
2161   ierr = ISGetIndices(valueIS, &neighbors);CHKERRQ(ierr);
2162   for (n = 0; n < numNeighbors; ++n) {
2163     PetscInt numPoints;
2164 
2165     ierr = DMLabelGetStratumSize(rootLabel, neighbors[n], &numPoints);CHKERRQ(ierr);
2166     ierr = PetscSectionAddDof(rootSection, neighbors[n], numPoints);CHKERRQ(ierr);
2167   }
2168   ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
2169   ierr = PetscSectionGetStorageSize(rootSection, &ssize);CHKERRQ(ierr);
2170   ierr = PetscMalloc1(ssize, &rootPoints);CHKERRQ(ierr);
2171   ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
2172   for (n = 0; n < numNeighbors; ++n) {
2173     IS              pointIS;
2174     const PetscInt *points;
2175     PetscInt        off, numPoints, p;
2176 
2177     ierr = PetscSectionGetOffset(rootSection, neighbors[n], &off);CHKERRQ(ierr);
2178     ierr = DMLabelGetStratumIS(rootLabel, neighbors[n], &pointIS);CHKERRQ(ierr);
2179     ierr = ISGetLocalSize(pointIS, &numPoints);CHKERRQ(ierr);
2180     ierr = ISGetIndices(pointIS, &points);CHKERRQ(ierr);
2181     for (p = 0; p < numPoints; ++p) {
2182       if (local) {ierr = PetscFindInt(points[p], nleaves, local, &l);CHKERRQ(ierr);}
2183       else       {l = -1;}
2184       if (l >= 0) {rootPoints[off+p] = remote[l];}
2185       else        {rootPoints[off+p].index = points[p]; rootPoints[off+p].rank = rank;}
2186     }
2187     ierr = ISRestoreIndices(pointIS, &points);CHKERRQ(ierr);
2188     ierr = ISDestroy(&pointIS);CHKERRQ(ierr);
2189   }
2190   ierr = ISRestoreIndices(valueIS, &neighbors);CHKERRQ(ierr);
2191   ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
2192   /* Communicate overlap */
2193   ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
2194   ierr = DMPlexDistributeData(dm, processSF, rootSection, MPIU_2INT, rootPoints, leafSection, (void**) &leafPoints);CHKERRQ(ierr);
2195   /* Insert the received (point, rank) pairs into the leaf label */
2196   ierr = PetscSectionGetStorageSize(leafSection, &ssize);CHKERRQ(ierr);
2197   for (p = 0; p < ssize; p++) {
2198     ierr = DMLabelSetValue(leafLabel, leafPoints[p].index, leafPoints[p].rank);CHKERRQ(ierr);
2199   }
2200   ierr = PetscFree(rootPoints);CHKERRQ(ierr);
2201   ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
2202   ierr = PetscFree(leafPoints);CHKERRQ(ierr);
2203   ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
2204   PetscFunctionReturn(0);
2205 }
2206 
2207 /*@
2208   DMPlexPartitionLabelCreateSF - Create a star forest from a label that assigns ranks to points
2209 
2210   Input Parameters:
2211 + dm    - The DM
2212 - label - DMLabel assigning ranks to remote roots
2213 
2214   Output Parameter:
2215 . sf    - The star forest communication context encapsulating the defined mapping
2216 
2217   Note: The incoming label is a receiver mapping of remote points to their parent rank.
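
  Example: a minimal sketch of the typical flow, inverting a send-side label and wrapping the receive-side result in an SF (rootLabel and processSF as in DMPlexPartitionLabelInvert())
.vb
  ierr = DMPlexPartitionLabelInvert(dm, rootLabel, processSF, leafLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelCreateSF(dm, leafLabel, &migrationSF);CHKERRQ(ierr);
.ve
  Here migrationSF maps each local point to its owning (point, rank) pair and can then drive point and data migration.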
2218 
2219   Level: developer
2220 
2221 .seealso: DMPlexDistribute(), DMPlexCreateOverlap()
2222 @*/
2223 PetscErrorCode DMPlexPartitionLabelCreateSF(DM dm, DMLabel label, PetscSF *sf)
2224 {
2225   PetscMPIInt     rank, size;
2226   PetscInt        n, numRemote, p, numPoints, pStart, pEnd, idx = 0;
2227   PetscSFNode    *remotePoints;
2228   IS              remoteRootIS;
2229   const PetscInt *remoteRoots;
2230   PetscErrorCode ierr;
2231 
2232   PetscFunctionBegin;
2233   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2234   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
2235 
2236   for (numRemote = 0, n = 0; n < size; ++n) {
2237     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2238     numRemote += numPoints;
2239   }
2240   ierr = PetscMalloc1(numRemote, &remotePoints);CHKERRQ(ierr);
2241   /* Put owned points first */
2242   ierr = DMLabelGetStratumSize(label, rank, &numPoints);CHKERRQ(ierr);
2243   if (numPoints > 0) {
2244     ierr = DMLabelGetStratumIS(label, rank, &remoteRootIS);CHKERRQ(ierr);
2245     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2246     for (p = 0; p < numPoints; p++) {
2247       remotePoints[idx].index = remoteRoots[p];
2248       remotePoints[idx].rank = rank;
2249       idx++;
2250     }
2251     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2252     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2253   }
2254   /* Now add remote points */
2255   for (n = 0; n < size; ++n) {
2256     ierr = DMLabelGetStratumSize(label, n, &numPoints);CHKERRQ(ierr);
2257     if (numPoints <= 0 || n == rank) continue;
2258     ierr = DMLabelGetStratumIS(label, n, &remoteRootIS);CHKERRQ(ierr);
2259     ierr = ISGetIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2260     for (p = 0; p < numPoints; p++) {
2261       remotePoints[idx].index = remoteRoots[p];
2262       remotePoints[idx].rank = n;
2263       idx++;
2264     }
2265     ierr = ISRestoreIndices(remoteRootIS, &remoteRoots);CHKERRQ(ierr);
2266     ierr = ISDestroy(&remoteRootIS);CHKERRQ(ierr);
2267   }
2268   ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), sf);CHKERRQ(ierr);
2269   ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
2270   ierr = PetscSFSetGraph(*sf, pEnd-pStart, numRemote, NULL, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
2271   PetscFunctionReturn(0);
2272 }
2273