xref: /petsc/src/dm/partitioner/impls/ptscotch/partptscotch.c (revision ed00a6c129cbc4f1b4754b50ab0c4e7536acfadd)
1 #include <petsc/private/partitionerimpl.h> /*I "petscpartitioner.h" I*/
2 
3 #if defined(PETSC_HAVE_PTSCOTCH)
4 EXTERN_C_BEGIN
5   #include <ptscotch.h>
6 EXTERN_C_END
7 #endif
8 
/* Citation for the PT-Scotch library, registered once in PetscPartitionerCreate_PTScotch() */
PetscBool  PTScotchPartitionerCite       = PETSC_FALSE;
const char PTScotchPartitionerCitation[] = "@article{PTSCOTCH,\n"
                                           "  author  = {C. Chevalier and F. Pellegrini},\n"
                                           "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
                                           "  journal = {Parallel Computing},\n"
                                           "  volume  = {34},\n"
                                           "  number  = {6},\n"
                                           "  pages   = {318--331},\n"
                                           "  year    = {2008},\n"
                                           "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
                                           "}\n";
20 
/* Private context for the PT-Scotch partitioner implementation */
typedef struct {
  MPI_Comm  pcomm;     /* private communicator, duplicated from the partitioner's communicator in PetscPartitionerCreate_PTScotch() */
  PetscInt  strategy;  /* index into PTScotchStrategyList[]; mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* load imbalance ratio passed to the SCOTCH strategy builders */
} PetscPartitioner_PTScotch;
26 
27 #if defined(PETSC_HAVE_PTSCOTCH)
28 
  /* Invoke a PT-Scotch routine and raise PETSC_ERR_LIB if it returns a nonzero error code */
  #define PetscCallPTSCOTCH(...) \
    do { \
      PetscCheck(!(__VA_ARGS__), PETSC_COMM_SELF, PETSC_ERR_LIB, "Error calling PT-Scotch library"); \
    } while (0)
33 
34 static int PTScotch_Strategy(PetscInt strategy)
35 {
36   switch (strategy) {
37   case 0:
38     return SCOTCH_STRATDEFAULT;
39   case 1:
40     return SCOTCH_STRATQUALITY;
41   case 2:
42     return SCOTCH_STRATSPEED;
43   case 3:
44     return SCOTCH_STRATBALANCE;
45   case 4:
46     return SCOTCH_STRATSAFETY;
47   case 5:
48     return SCOTCH_STRATSCALABILITY;
49   case 6:
50     return SCOTCH_STRATRECURSIVE;
51   case 7:
52     return SCOTCH_STRATREMAP;
53   default:
54     return SCOTCH_STRATDEFAULT;
55   }
56 }
57 
58 static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
59 {
60   SCOTCH_Arch  archdat;
61   SCOTCH_Graph grafdat;
62   SCOTCH_Strat stradat;
63   SCOTCH_Num   vertnbr = n;
64   SCOTCH_Num   edgenbr = xadj[n];
65   SCOTCH_Num  *velotab = vtxwgt;
66   SCOTCH_Num  *edlotab = adjwgt;
67   SCOTCH_Num   flagval = strategy;
68   double       kbalval = imbalance;
69 
70   PetscFunctionBegin;
71   {
72     PetscBool flg = PETSC_TRUE;
73     PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", "-petscpartitioner_use_vertex_weights", "3.13", NULL));
74     /*
75        Cannot remove the PetscOptionsGetBool() below since the PetscOptionsDeprecatedNoObject() above is called after the non-deprecated version
76        has already been checked in PetscPartitionerSetFromOptions().
77     */
78     PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_use_vertex_weight", &flg, NULL));
79     if (!flg) velotab = NULL;
80   }
81   PetscCallPTSCOTCH(SCOTCH_graphInit(&grafdat));
82   PetscCallPTSCOTCH(SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab));
83   PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
84   PetscCallPTSCOTCH(SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval));
85   PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
86   if (tpart) {
87     PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
88   } else {
89     PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
90   }
91   PetscCallPTSCOTCH(SCOTCH_graphMap(&grafdat, &archdat, &stradat, part));
92   SCOTCH_archExit(&archdat);
93   SCOTCH_stratExit(&stradat);
94   SCOTCH_graphExit(&grafdat);
95   PetscFunctionReturn(PETSC_SUCCESS);
96 }
97 
98 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[], SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
99 {
100   PetscMPIInt     procglbnbr;
101   PetscMPIInt     proclocnum;
102   SCOTCH_Arch     archdat;
103   SCOTCH_Dgraph   grafdat;
104   SCOTCH_Dmapping mappdat;
105   SCOTCH_Strat    stradat;
106   SCOTCH_Num      vertlocnbr;
107   SCOTCH_Num      edgelocnbr;
108   SCOTCH_Num     *veloloctab = vtxwgt;
109   SCOTCH_Num     *edloloctab = adjwgt;
110   SCOTCH_Num      flagval    = strategy;
111   double          kbalval    = imbalance;
112 
113   PetscFunctionBegin;
114   {
115     PetscBool flg = PETSC_TRUE;
116     PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", "-petscpartitioner_use_vertex_weights", "3.13", NULL));
117     /*
118        Cannot remove the PetscOptionsGetBool() below since the PetscOptionsDeprecatedNoObject() above is called after the non-deprecated version
119        has already been checked in PetscPartitionerSetFromOptions().
120     */
121     PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_use_vertex_weight", &flg, NULL));
122     if (!flg) veloloctab = NULL;
123   }
124   PetscCallMPI(MPI_Comm_size(comm, &procglbnbr));
125   PetscCallMPI(MPI_Comm_rank(comm, &proclocnum));
126   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
127   edgelocnbr = xadj[vertlocnbr];
128 
129   PetscCallPTSCOTCH(SCOTCH_dgraphInit(&grafdat, comm));
130   PetscCallPTSCOTCH(SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab));
131   PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
132   PetscCallPTSCOTCH(SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval));
133   PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
134   if (tpart) { /* target partition weights */
135     PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
136   } else {
137     PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
138   }
139   PetscCallPTSCOTCH(SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part));
140   PetscCallPTSCOTCH(SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat));
141   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
142   SCOTCH_archExit(&archdat);
143   SCOTCH_stratExit(&stradat);
144   SCOTCH_dgraphExit(&grafdat);
145   PetscFunctionReturn(PETSC_SUCCESS);
146 }
147 
148 #endif /* PETSC_HAVE_PTSCOTCH */
149 
/* Names accepted by -petscpartitioner_ptscotch_strategy; order matches the mapping in PTScotch_Strategy() */
static const char *const PTScotchStrategyList[] = {"DEFAULT", "QUALITY", "SPEED", "BALANCE", "SAFETY", "SCALABILITY", "RECURSIVE", "REMAP"};
151 
/* Release the implementation context: free the private communicator duplicated at creation, then the struct itself. */
static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_free(&p->pcomm)); /* duplicated in PetscPartitionerCreate_PTScotch() */
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}
161 
162 static PetscErrorCode PetscPartitionerView_PTScotch_ASCII(PetscPartitioner part, PetscViewer viewer)
163 {
164   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *)part->data;
165 
166   PetscFunctionBegin;
167   PetscCall(PetscViewerASCIIPushTab(viewer));
168   PetscCall(PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n", PTScotchStrategyList[p->strategy]));
169   PetscCall(PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n", (double)p->imbalance));
170   PetscCall(PetscViewerASCIIPopTab(viewer));
171   PetscFunctionReturn(PETSC_SUCCESS);
172 }
173 
174 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
175 {
176   PetscBool iascii;
177 
178   PetscFunctionBegin;
179   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
180   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
181   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
182   if (iascii) PetscCall(PetscPartitionerView_PTScotch_ASCII(part, viewer));
183   PetscFunctionReturn(PETSC_SUCCESS);
184 }
185 
186 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
187 {
188   PetscPartitioner_PTScotch *p     = (PetscPartitioner_PTScotch *)part->data;
189   const char *const         *slist = PTScotchStrategyList;
190   PetscInt                   nlist = PETSC_STATIC_ARRAY_LENGTH(PTScotchStrategyList);
191   PetscBool                  flag;
192 
193   PetscFunctionBegin;
194   PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner PTScotch Options");
195   PetscCall(PetscOptionsEList("-petscpartitioner_ptscotch_strategy", "Partitioning strategy", "", slist, nlist, slist[p->strategy], &p->strategy, &flag));
196   PetscCall(PetscOptionsReal("-petscpartitioner_ptscotch_imbalance", "Load imbalance ratio", "", p->imbalance, &p->imbalance, &flag));
197   PetscOptionsHeadEnd();
198   PetscFunctionReturn(PETSC_SUCCESS);
199 }
200 
/*
  Partition a distributed CSR graph (numVertices local vertices, start/adjacency in CSR form)
  into nparts pieces.  The global vertex distribution is gathered first; if the whole graph
  lives on one process the sequential Scotch interface is used there, otherwise the parallel
  PT-Scotch interface runs (on a sub-communicator that excludes vertex-less processes).  The
  assignment is returned as partSection (vertex count per part) plus an IS listing the local
  vertices grouped by part.

  NOTE(review): PetscInt arrays are passed directly to the SCOTCH_Num-based helpers below,
  which assumes sizeof(SCOTCH_Num) == sizeof(PetscInt) -- presumably enforced at configure
  time; confirm before changing integer widths.
*/
static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PTSCOTCH)
  MPI_Comm    comm;
  PetscInt    nvtxs = numVertices; /* The number of vertices in full graph */
  PetscInt   *vtxdist;             /* Distribution of vertices across processes */
  PetscInt   *xadj   = start;      /* Start of edge list for each vertex */
  PetscInt   *adjncy = adjacency;  /* Edge lists for all vertices */
  PetscInt   *vwgt   = NULL;       /* Vertex weights */
  PetscInt   *adjwgt = NULL;       /* Edge weights */
  PetscInt    v, i, *assignment, *points;
  PetscMPIInt size, rank, p;
  PetscBool   hasempty = PETSC_FALSE;
  PetscInt   *tpwgts   = NULL;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(PetscMalloc2(size + 1, &vtxdist, PetscMax(nvtxs, 1), &assignment));
  /* Calculate vertex distribution: gather the local counts, then prefix-sum in place */
  vtxdist[0] = 0;
  PetscCallMPI(MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm));
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]); /* some process owns no vertices */
    vtxdist[p] += vtxdist[p - 1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    PetscCall(PetscFree2(vtxdist, assignment));
    PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* Calculate vertex weights (dof count of each local vertex in vertSection) */
  if (vertSection) {
    PetscCall(PetscMalloc1(nvtxs, &vwgt));
    for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v]));
  }

  /* Calculate partition weights; an all-zero target section is treated as "no target weights" */
  if (targetSection) {
    PetscInt sumw;

    PetscCall(PetscCalloc1(nparts, &tpwgts));
    for (p = 0, sumw = 0; p < nparts; ++p) {
      PetscCall(PetscSectionGetDof(targetSection, p, &tpwgts[p]));
      sumw += tpwgts[p];
    }
    if (!sumw) PetscCall(PetscFree(tpwgts));
  }

  {
    PetscPartitioner_PTScotch *pts   = (PetscPartitioner_PTScotch *)part->data;
    int                        strat = PTScotch_Strategy(pts->strategy);
    double                     imbal = (double)pts->imbalance;

    /* Find the first process owning vertices.  Termination relies on vtxdist[size] > 0
       (null graph handled above); NOTE(review): the vtxdist[p + 1] access precedes the
       p < size test, so the bound check alone would not prevent an overrun. */
    for (p = 0; !vtxdist[p + 1] && p < size; ++p)
      ;
    if (vtxdist[p + 1] == vtxdist[size]) {
      /* Every vertex lives on process p: run sequential Scotch there only */
      if (rank == p) PetscCall(PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment));
    } else {
      MPI_Comm pcomm = pts->pcomm;

      if (hasempty) {
        PetscInt cnt;

        /* Split off processes with no vertices and compress vtxdist to match the sub-communicator */
        PetscCallMPI(MPI_Comm_split(pts->pcomm, !!nvtxs, rank, &pcomm));
        for (p = 0, cnt = 0; p < size; p++) {
          if (vtxdist[p + 1] != vtxdist[p]) {
            vtxdist[cnt + 1] = vtxdist[p + 1];
            cnt++;
          }
        }
      };
      if (nvtxs) PetscCall(PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm));
      if (hasempty) PetscCallMPI(MPI_Comm_free(&pcomm));
    }
  }
  PetscCall(PetscFree(vwgt));
  PetscCall(PetscFree(tpwgts));

  /* Convert to PetscSection+IS: count vertices per part, then list local vertices grouped by part */
  for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
  PetscCall(PetscMalloc1(nvtxs, &points));
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
  PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));

  PetscCall(PetscFree2(vtxdist, assignment));
  PetscFunctionReturn(PETSC_SUCCESS);
#else
  SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
#endif
}
300 
301 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
302 {
303   PetscFunctionBegin;
304   part->noGraph             = PETSC_FALSE;
305   part->ops->view           = PetscPartitionerView_PTScotch;
306   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
307   part->ops->partition      = PetscPartitionerPartition_PTScotch;
308   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
309   PetscFunctionReturn(PETSC_SUCCESS);
310 }
311 
312 /*MC
313   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
314 
315   Level: intermediate
316 
317   Options Database Keys:
318 +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
319 -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
320 
  Note:
  When the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch
322 
323 .seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
324 M*/
325 
326 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
327 {
328   PetscPartitioner_PTScotch *p;
329 
330   PetscFunctionBegin;
331   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
332   PetscCall(PetscNew(&p));
333   part->data = p;
334 
335   PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm));
336   p->strategy  = 0;
337   p->imbalance = 0.01;
338 
339   PetscCall(PetscPartitionerInitialize_PTScotch(part));
340   PetscCall(PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionerCite));
341   PetscFunctionReturn(PETSC_SUCCESS);
342 }
343