xref: /petsc/src/dm/partitioner/impls/ptscotch/partptscotch.c (revision 9d0fdff2e8b5ebdd0e57bf4dfe77e45fec5df614)
1 #include <petsc/private/partitionerimpl.h> /*I "petscpartitioner.h" I*/
2 
3 #if defined(PETSC_HAVE_PTSCOTCH)
4 EXTERN_C_BEGIN
5 #include <ptscotch.h>
6 EXTERN_C_END
7 #endif
8 
/* Citation for the PT-Scotch library, registered once per process via PetscCitationsRegister()
   in PetscPartitionerCreate_PTScotch(); the flag records whether registration has happened. */
PetscBool  PTScotchPartitionerCite       = PETSC_FALSE;
const char PTScotchPartitionerCitation[] = "@article{PTSCOTCH,\n"
                                           "  author  = {C. Chevalier and F. Pellegrini},\n"
                                           "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
                                           "  journal = {Parallel Computing},\n"
                                           "  volume  = {34},\n"
                                           "  number  = {6},\n"
                                           "  pages   = {318--331},\n"
                                           "  year    = {2008},\n"
                                           "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
                                           "}\n";
20 
/* Private data for the PT-Scotch partitioner implementation */
typedef struct {
  MPI_Comm  pcomm;     /* Private duplicate of the partitioner's communicator (dup'ed in Create, freed in Destroy) */
  PetscInt  strategy;  /* Index into PTScotchStrategyList, mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* Maximum allowed load imbalance ratio passed to SCOTCH strategy builders */
} PetscPartitioner_PTScotch;
26 
27 #if defined(PETSC_HAVE_PTSCOTCH)
28 
/* SCOTCH routines return a nonzero int on failure; convert that into a PETSc PETSC_ERR_LIB error.
   Note this is distinct from PetscCall(), which expects a PetscErrorCode, not a SCOTCH status. */
#define PetscCallPTSCOTCH(...) \
  do { PetscCheck(!(__VA_ARGS__), PETSC_COMM_SELF, PETSC_ERR_LIB, "Error calling PT-Scotch library"); } while (0)
31 
32 static int PTScotch_Strategy(PetscInt strategy) {
33   switch (strategy) {
34   case 0: return SCOTCH_STRATDEFAULT;
35   case 1: return SCOTCH_STRATQUALITY;
36   case 2: return SCOTCH_STRATSPEED;
37   case 3: return SCOTCH_STRATBALANCE;
38   case 4: return SCOTCH_STRATSAFETY;
39   case 5: return SCOTCH_STRATSCALABILITY;
40   case 6: return SCOTCH_STRATRECURSIVE;
41   case 7: return SCOTCH_STRATREMAP;
42   default: return SCOTCH_STRATDEFAULT;
43   }
44 }
45 
/* Partition a graph held entirely on one process using (sequential) Scotch.

   Input:
     strategy  - a SCOTCH_STRAT* flag (see PTScotch_Strategy())
     imbalance - maximum allowed load imbalance ratio
     n         - number of vertices
     xadj      - CSR row offsets into adjncy, length n+1
     adjncy    - CSR adjacency lists
     vtxwgt    - optional vertex weights (may be NULL)
     adjwgt    - optional edge weights (may be NULL)
     nparts    - number of target parts
     tpart     - optional target part weights (may be NULL -> equal-weight parts)
   Output:
     part      - part assignment for each vertex, length n

   The SCOTCH init/build/map/exit calls below are order-sensitive; do not reorder. */
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[]) {
  SCOTCH_Arch  archdat;
  SCOTCH_Graph grafdat;
  SCOTCH_Strat stradat;
  SCOTCH_Num   vertnbr = n;
  SCOTCH_Num   edgenbr = xadj[n]; /* total edge count is the last CSR offset */
  SCOTCH_Num  *velotab = vtxwgt;
  SCOTCH_Num  *edlotab = adjwgt;
  SCOTCH_Num   flagval = strategy;
  double       kbalval = imbalance;

  PetscFunctionBegin;
  {
    /* Honor the deprecated (and intentionally misspelled, as historically released)
       option that disables vertex weights */
    PetscBool flg = PETSC_TRUE;
    PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights"));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL));
    if (!flg) velotab = NULL;
  }
  PetscCallPTSCOTCH(SCOTCH_graphInit(&grafdat));
  PetscCallPTSCOTCH(SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab));
  PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
  PetscCallPTSCOTCH(SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval));
  PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
  if (tpart) {
    /* complete graph architecture with per-part target weights */
    PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
  } else {
    PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
  }
  PetscCallPTSCOTCH(SCOTCH_graphMap(&grafdat, &archdat, &stradat, part));
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
80 
81 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm) {
82   PetscMPIInt     procglbnbr;
83   PetscMPIInt     proclocnum;
84   SCOTCH_Arch     archdat;
85   SCOTCH_Dgraph   grafdat;
86   SCOTCH_Dmapping mappdat;
87   SCOTCH_Strat    stradat;
88   SCOTCH_Num      vertlocnbr;
89   SCOTCH_Num      edgelocnbr;
90   SCOTCH_Num     *veloloctab = vtxwgt;
91   SCOTCH_Num     *edloloctab = adjwgt;
92   SCOTCH_Num      flagval    = strategy;
93   double          kbalval    = imbalance;
94 
95   PetscFunctionBegin;
96   {
97     PetscBool flg = PETSC_TRUE;
98     PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights"));
99     PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL));
100     if (!flg) veloloctab = NULL;
101   }
102   PetscCallMPI(MPI_Comm_size(comm, &procglbnbr));
103   PetscCallMPI(MPI_Comm_rank(comm, &proclocnum));
104   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
105   edgelocnbr = xadj[vertlocnbr];
106 
107   PetscCallPTSCOTCH(SCOTCH_dgraphInit(&grafdat, comm));
108   PetscCallPTSCOTCH(SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab));
109   PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
110   PetscCall(SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval));
111   PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
112   if (tpart) { /* target partition weights */
113     PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
114   } else {
115     PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
116   }
117   PetscCallPTSCOTCH(SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part));
118   PetscCallPTSCOTCH(SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat));
119   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
120   SCOTCH_archExit(&archdat);
121   SCOTCH_stratExit(&stradat);
122   SCOTCH_dgraphExit(&grafdat);
123   PetscFunctionReturn(0);
124 }
125 
126 #endif /* PETSC_HAVE_PTSCOTCH */
127 
/* User-visible strategy names; indices must match the mapping in PTScotch_Strategy() */
static const char *const PTScotchStrategyList[] = {"DEFAULT", "QUALITY", "SPEED", "BALANCE", "SAFETY", "SCALABILITY", "RECURSIVE", "REMAP"};
129 
130 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part) {
131   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;
132 
133   PetscFunctionBegin;
134   PetscCallMPI(MPI_Comm_free(&p->pcomm));
135   PetscCall(PetscFree(part->data));
136   PetscFunctionReturn(0);
137 }
138 
139 static PetscErrorCode PetscPartitionerView_PTScotch_ASCII(PetscPartitioner part, PetscViewer viewer) {
140   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;
141 
142   PetscFunctionBegin;
143   PetscCall(PetscViewerASCIIPushTab(viewer));
144   PetscCall(PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n", PTScotchStrategyList[p->strategy]));
145   PetscCall(PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n", (double)p->imbalance));
146   PetscCall(PetscViewerASCIIPopTab(viewer));
147   PetscFunctionReturn(0);
148 }
149 
150 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer) {
151   PetscBool iascii;
152 
153   PetscFunctionBegin;
154   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
155   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
156   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
157   if (iascii) PetscCall(PetscPartitionerView_PTScotch_ASCII(part, viewer));
158   PetscFunctionReturn(0);
159 }
160 
161 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscPartitioner part, PetscOptionItems *PetscOptionsObject) {
162   PetscPartitioner_PTScotch *p     = (PetscPartitioner_PTScotch *)part->data;
163   const char *const         *slist = PTScotchStrategyList;
164   PetscInt                   nlist = PETSC_STATIC_ARRAY_LENGTH(PTScotchStrategyList);
165   PetscBool                  flag;
166 
167   PetscFunctionBegin;
168   PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner PTScotch Options");
169   PetscCall(PetscOptionsEList("-petscpartitioner_ptscotch_strategy", "Partitioning strategy", "", slist, nlist, slist[p->strategy], &p->strategy, &flag));
170   PetscCall(PetscOptionsReal("-petscpartitioner_ptscotch_imbalance", "Load imbalance ratio", "", p->imbalance, &p->imbalance, &flag));
171   PetscOptionsHeadEnd();
172   PetscFunctionReturn(0);
173 }
174 
175 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) {
176 #if defined(PETSC_HAVE_PTSCOTCH)
177   MPI_Comm    comm;
178   PetscInt    nvtxs = numVertices; /* The number of vertices in full graph */
179   PetscInt   *vtxdist;             /* Distribution of vertices across processes */
180   PetscInt   *xadj   = start;      /* Start of edge list for each vertex */
181   PetscInt   *adjncy = adjacency;  /* Edge lists for all vertices */
182   PetscInt   *vwgt   = NULL;       /* Vertex weights */
183   PetscInt   *adjwgt = NULL;       /* Edge weights */
184   PetscInt    v, i, *assignment, *points;
185   PetscMPIInt size, rank, p;
186   PetscBool   hasempty = PETSC_FALSE;
187   PetscInt   *tpwgts   = NULL;
188 
189   PetscFunctionBegin;
190   PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
191   PetscCallMPI(MPI_Comm_size(comm, &size));
192   PetscCallMPI(MPI_Comm_rank(comm, &rank));
193   PetscCall(PetscMalloc2(size + 1, &vtxdist, PetscMax(nvtxs, 1), &assignment));
194   /* Calculate vertex distribution */
195   vtxdist[0] = 0;
196   PetscCallMPI(MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm));
197   for (p = 2; p <= size; ++p) {
198     hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]);
199     vtxdist[p] += vtxdist[p - 1];
200   }
201   /* null graph */
202   if (vtxdist[size] == 0) {
203     PetscCall(PetscFree2(vtxdist, assignment));
204     PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
205     PetscFunctionReturn(0);
206   }
207 
208   /* Calculate vertex weights */
209   if (vertSection) {
210     PetscCall(PetscMalloc1(nvtxs, &vwgt));
211     for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v]));
212   }
213 
214   /* Calculate partition weights */
215   if (targetSection) {
216     PetscInt sumw;
217 
218     PetscCall(PetscCalloc1(nparts, &tpwgts));
219     for (p = 0, sumw = 0; p < nparts; ++p) {
220       PetscCall(PetscSectionGetDof(targetSection, p, &tpwgts[p]));
221       sumw += tpwgts[p];
222     }
223     if (!sumw) PetscCall(PetscFree(tpwgts));
224   }
225 
226   {
227     PetscPartitioner_PTScotch *pts   = (PetscPartitioner_PTScotch *)part->data;
228     int                        strat = PTScotch_Strategy(pts->strategy);
229     double                     imbal = (double)pts->imbalance;
230 
231     for (p = 0; !vtxdist[p + 1] && p < size; ++p)
232       ;
233     if (vtxdist[p + 1] == vtxdist[size]) {
234       if (rank == p) PetscCall(PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment));
235     } else {
236       MPI_Comm pcomm = pts->pcomm;
237 
238       if (hasempty) {
239         PetscInt cnt;
240 
241         PetscCallMPI(MPI_Comm_split(pts->pcomm, !!nvtxs, rank, &pcomm));
242         for (p = 0, cnt = 0; p < size; p++) {
243           if (vtxdist[p + 1] != vtxdist[p]) {
244             vtxdist[cnt + 1] = vtxdist[p + 1];
245             cnt++;
246           }
247         }
248       };
249       if (nvtxs) PetscCall(PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm));
250       if (hasempty) PetscCallMPI(MPI_Comm_free(&pcomm));
251     }
252   }
253   PetscCall(PetscFree(vwgt));
254   PetscCall(PetscFree(tpwgts));
255 
256   /* Convert to PetscSection+IS */
257   for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
258   PetscCall(PetscMalloc1(nvtxs, &points));
259   for (p = 0, i = 0; p < nparts; ++p) {
260     for (v = 0; v < nvtxs; ++v) {
261       if (assignment[v] == p) points[i++] = v;
262     }
263   }
264   PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
265   PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));
266 
267   PetscCall(PetscFree2(vtxdist, assignment));
268   PetscFunctionReturn(0);
269 #else
270   SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
271 #endif
272 }
273 
274 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part) {
275   PetscFunctionBegin;
276   part->noGraph             = PETSC_FALSE;
277   part->ops->view           = PetscPartitionerView_PTScotch;
278   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
279   part->ops->partition      = PetscPartitionerPartition_PTScotch;
280   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
281   PetscFunctionReturn(0);
282 }
283 
284 /*MC
285   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
286 
287   Level: intermediate
288 
289   Options Database Keys:
290 +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
291 -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
292 
293   Notes: when the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch
294 
295 .seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
296 M*/
297 
298 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part) {
299   PetscPartitioner_PTScotch *p;
300 
301   PetscFunctionBegin;
302   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
303   PetscCall(PetscNew(&p));
304   part->data = p;
305 
306   PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm));
307   p->strategy  = 0;
308   p->imbalance = 0.01;
309 
310   PetscCall(PetscPartitionerInitialize_PTScotch(part));
311   PetscCall(PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionerCite));
312   PetscFunctionReturn(0);
313 }
314