xref: /petsc/src/dm/partitioner/impls/ptscotch/partptscotch.c (revision 3fc23ce8277e00a65c4b1be6e2b4535b56ba7978)
1 #include <petsc/private/partitionerimpl.h>        /*I "petscpartitioner.h" I*/
2 
3 #if defined(PETSC_HAVE_PTSCOTCH)
4 EXTERN_C_BEGIN
5 #include <ptscotch.h>
6 EXTERN_C_END
7 #endif
8 
/* BibTeX citation for PT-Scotch, registered once via PetscCitationsRegister()
   when the first PT-Scotch partitioner is created (see PetscPartitionerCreate_PTScotch()) */
PetscBool  PTScotchPartitionerCite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";
21 
/* Private context for the PT-Scotch partitioner */
typedef struct {
  MPI_Comm  pcomm;     /* duplicated communicator, so PT-Scotch traffic is isolated from user messages */
  PetscInt  strategy;  /* index into PTScotchStrategyList; mapped to a SCOTCH_STRAT* flag by PTScotch_Strategy() */
  PetscReal imbalance; /* allowed load imbalance ratio passed to the Scotch strategy */
} PetscPartitioner_PTScotch;
27 
28 #if defined(PETSC_HAVE_PTSCOTCH)
29 
/* PT-Scotch routines return a nonzero int on failure; convert that into a PETSc PETSC_ERR_LIB error */
#define PetscCallPTSCOTCH(...) do { PetscCheck(!(__VA_ARGS__),PETSC_COMM_SELF,PETSC_ERR_LIB,"Error calling PT-Scotch library"); } while (0)
31 
32 static int PTScotch_Strategy(PetscInt strategy)
33 {
34   switch (strategy) {
35   case  0: return SCOTCH_STRATDEFAULT;
36   case  1: return SCOTCH_STRATQUALITY;
37   case  2: return SCOTCH_STRATSPEED;
38   case  3: return SCOTCH_STRATBALANCE;
39   case  4: return SCOTCH_STRATSAFETY;
40   case  5: return SCOTCH_STRATSCALABILITY;
41   case  6: return SCOTCH_STRATRECURSIVE;
42   case  7: return SCOTCH_STRATREMAP;
43   default: return SCOTCH_STRATDEFAULT;
44   }
45 }
46 
/*
  PTScotch_PartGraph_Seq - partition a serial CSR graph with (sequential) Scotch

  strategy  - a SCOTCH_STRAT* flag (see PTScotch_Strategy())
  imbalance - allowed load imbalance ratio
  n         - number of vertices
  xadj      - CSR row offsets, length n+1 (xadj[n] is the edge count)
  adjncy    - CSR adjacency lists
  vtxwgt    - optional vertex weights, may be NULL
  adjwgt    - optional edge weights, may be NULL
  nparts    - number of parts requested
  tpart     - optional target partition weights, may be NULL
  part      - output: part assignment per vertex, length n
*/
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Arch    archdat;
  SCOTCH_Graph   grafdat;
  SCOTCH_Strat   stradat;
  SCOTCH_Num     vertnbr = n;
  SCOTCH_Num     edgenbr = xadj[n];
  SCOTCH_Num*    velotab = vtxwgt;
  SCOTCH_Num*    edlotab = adjwgt;
  SCOTCH_Num     flagval = strategy;
  double         kbalval = imbalance;

  PetscFunctionBegin;
  {
    /* Deprecated escape hatch (misspelling is historical): lets users disable vertex weights */
    PetscBool flg = PETSC_TRUE;
    PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights"));
    PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL));
    if (!flg) velotab = NULL;
  }
  PetscCallPTSCOTCH(SCOTCH_graphInit(&grafdat));
  /* baseval 0, no vertex labels (velotab carries weights, vlbltab NULL) */
  PetscCallPTSCOTCH(SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab));
  PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
  PetscCallPTSCOTCH(SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval));
  PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
  if (tpart) {
    /* complete graph architecture with per-part target weights */
    PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
  } else {
    /* unweighted complete graph architecture */
    PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
  }
  PetscCallPTSCOTCH(SCOTCH_graphMap(&grafdat, &archdat, &stradat, part));
  /* tear down in reverse order of construction */
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  PetscFunctionReturn(0);
}
83 
84 static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
85                                              SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
86 {
87   PetscMPIInt     procglbnbr;
88   PetscMPIInt     proclocnum;
89   SCOTCH_Arch     archdat;
90   SCOTCH_Dgraph   grafdat;
91   SCOTCH_Dmapping mappdat;
92   SCOTCH_Strat    stradat;
93   SCOTCH_Num      vertlocnbr;
94   SCOTCH_Num      edgelocnbr;
95   SCOTCH_Num*     veloloctab = vtxwgt;
96   SCOTCH_Num*     edloloctab = adjwgt;
97   SCOTCH_Num      flagval = strategy;
98   double          kbalval = imbalance;
99 
100   PetscFunctionBegin;
101   {
102     PetscBool flg = PETSC_TRUE;
103     PetscCall(PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight",NULL,"3.13","Use -petscpartitioner_use_vertex_weights"));
104     PetscCall(PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL));
105     if (!flg) veloloctab = NULL;
106   }
107   PetscCallMPI(MPI_Comm_size(comm, &procglbnbr));
108   PetscCallMPI(MPI_Comm_rank(comm, &proclocnum));
109   vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
110   edgelocnbr = xadj[vertlocnbr];
111 
112   PetscCallPTSCOTCH(SCOTCH_dgraphInit(&grafdat, comm));
113   PetscCallPTSCOTCH(SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab));
114   PetscCallPTSCOTCH(SCOTCH_stratInit(&stradat));
115   PetscCall(SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval));
116   PetscCallPTSCOTCH(SCOTCH_archInit(&archdat));
117   if (tpart) { /* target partition weights */
118     PetscCallPTSCOTCH(SCOTCH_archCmpltw(&archdat, nparts, tpart));
119   } else {
120     PetscCallPTSCOTCH(SCOTCH_archCmplt(&archdat, nparts));
121   }
122   PetscCallPTSCOTCH(SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part));
123   PetscCallPTSCOTCH(SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat));
124   SCOTCH_dgraphMapExit(&grafdat, &mappdat);
125   SCOTCH_archExit(&archdat);
126   SCOTCH_stratExit(&stradat);
127   SCOTCH_dgraphExit(&grafdat);
128   PetscFunctionReturn(0);
129 }
130 
131 #endif /* PETSC_HAVE_PTSCOTCH */
132 
/* Option names for the supported strategies; the order must match the
   index -> SCOTCH_STRAT* mapping in PTScotch_Strategy() */
static const char *const
PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};
144 
145 static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
146 {
147   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
148 
149   PetscFunctionBegin;
150   PetscCallMPI(MPI_Comm_free(&p->pcomm));
151   PetscCall(PetscFree(part->data));
152   PetscFunctionReturn(0);
153 }
154 
155 static PetscErrorCode PetscPartitionerView_PTScotch_ASCII(PetscPartitioner part, PetscViewer viewer)
156 {
157   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
158 
159   PetscFunctionBegin;
160   PetscCall(PetscViewerASCIIPushTab(viewer));
161   PetscCall(PetscViewerASCIIPrintf(viewer,"using partitioning strategy %s\n",PTScotchStrategyList[p->strategy]));
162   PetscCall(PetscViewerASCIIPrintf(viewer,"using load imbalance ratio %g\n",(double)p->imbalance));
163   PetscCall(PetscViewerASCIIPopTab(viewer));
164   PetscFunctionReturn(0);
165 }
166 
167 static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
168 {
169   PetscBool iascii;
170 
171   PetscFunctionBegin;
172   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
173   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
174   PetscCall(PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii));
175   if (iascii) PetscCall(PetscPartitionerView_PTScotch_ASCII(part, viewer));
176   PetscFunctionReturn(0);
177 }
178 
179 static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
180 {
181   PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;
182   const char *const         *slist = PTScotchStrategyList;
183   PetscInt                  nlist = PETSC_STATIC_ARRAY_LENGTH(PTScotchStrategyList);
184   PetscBool                 flag;
185 
186   PetscFunctionBegin;
187   PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner PTScotch Options");
188   PetscCall(PetscOptionsEList("-petscpartitioner_ptscotch_strategy","Partitioning strategy","",slist,nlist,slist[p->strategy],&p->strategy,&flag));
189   PetscCall(PetscOptionsReal("-petscpartitioner_ptscotch_imbalance","Load imbalance ratio","",p->imbalance,&p->imbalance,&flag));
190   PetscOptionsHeadEnd();
191   PetscFunctionReturn(0);
192 }
193 
194 static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
195 {
196 #if defined(PETSC_HAVE_PTSCOTCH)
197   MPI_Comm     comm;
198   PetscInt     nvtxs    = numVertices; /* The number of vertices in full graph */
199   PetscInt    *vtxdist;         /* Distribution of vertices across processes */
200   PetscInt    *xadj     = start; /* Start of edge list for each vertex */
201   PetscInt    *adjncy   = adjacency; /* Edge lists for all vertices */
202   PetscInt    *vwgt     = NULL; /* Vertex weights */
203   PetscInt    *adjwgt   = NULL; /* Edge weights */
204   PetscInt     v, i, *assignment, *points;
205   PetscMPIInt  size, rank, p;
206   PetscBool    hasempty = PETSC_FALSE;
207   PetscInt     *tpwgts  = NULL;
208 
209   PetscFunctionBegin;
210   PetscCall(PetscObjectGetComm((PetscObject)part,&comm));
211   PetscCallMPI(MPI_Comm_size(comm, &size));
212   PetscCallMPI(MPI_Comm_rank(comm, &rank));
213   PetscCall(PetscMalloc2(size+1,&vtxdist,PetscMax(nvtxs,1),&assignment));
214   /* Calculate vertex distribution */
215   vtxdist[0] = 0;
216   PetscCallMPI(MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm));
217   for (p = 2; p <= size; ++p) {
218     hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
219     vtxdist[p] += vtxdist[p-1];
220   }
221   /* null graph */
222   if (vtxdist[size] == 0) {
223     PetscCall(PetscFree2(vtxdist, assignment));
224     PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
225     PetscFunctionReturn(0);
226   }
227 
228   /* Calculate vertex weights */
229   if (vertSection) {
230     PetscCall(PetscMalloc1(nvtxs,&vwgt));
231     for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v]));
232   }
233 
234   /* Calculate partition weights */
235   if (targetSection) {
236     PetscInt sumw;
237 
238     PetscCall(PetscCalloc1(nparts,&tpwgts));
239     for (p = 0, sumw = 0; p < nparts; ++p) {
240       PetscCall(PetscSectionGetDof(targetSection,p,&tpwgts[p]));
241       sumw += tpwgts[p];
242     }
243     if (!sumw) PetscCall(PetscFree(tpwgts));
244   }
245 
246   {
247     PetscPartitioner_PTScotch *pts = (PetscPartitioner_PTScotch *) part->data;
248     int                       strat = PTScotch_Strategy(pts->strategy);
249     double                    imbal = (double)pts->imbalance;
250 
251     for (p = 0; !vtxdist[p+1] && p < size; ++p);
252     if (vtxdist[p+1] == vtxdist[size]) {
253       if (rank == p) PetscCall(PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment));
254     } else {
255       MPI_Comm pcomm = pts->pcomm;
256 
257       if (hasempty) {
258         PetscInt cnt;
259 
260         PetscCallMPI(MPI_Comm_split(pts->pcomm,!!nvtxs,rank,&pcomm));
261         for (p=0,cnt=0;p<size;p++) {
262           if (vtxdist[p+1] != vtxdist[p]) {
263             vtxdist[cnt+1] = vtxdist[p+1];
264             cnt++;
265           }
266         }
267       };
268       if (nvtxs) PetscCall(PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm));
269       if (hasempty) PetscCallMPI(MPI_Comm_free(&pcomm));
270     }
271   }
272   PetscCall(PetscFree(vwgt));
273   PetscCall(PetscFree(tpwgts));
274 
275   /* Convert to PetscSection+IS */
276   for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
277   PetscCall(PetscMalloc1(nvtxs, &points));
278   for (p = 0, i = 0; p < nparts; ++p) {
279     for (v = 0; v < nvtxs; ++v) {
280       if (assignment[v] == p) points[i++] = v;
281     }
282   }
283   PetscCheck(i == nvtxs,comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
284   PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));
285 
286   PetscCall(PetscFree2(vtxdist,assignment));
287   PetscFunctionReturn(0);
288 #else
289   SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
290 #endif
291 }
292 
293 static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
294 {
295   PetscFunctionBegin;
296   part->noGraph             = PETSC_FALSE;
297   part->ops->view           = PetscPartitionerView_PTScotch;
298   part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
299   part->ops->partition      = PetscPartitionerPartition_PTScotch;
300   part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
301   PetscFunctionReturn(0);
302 }
303 
304 /*MC
305   PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library
306 
307   Level: intermediate
308 
309   Options Database Keys:
310 +  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default quality speed balance safety scalability recursive remap
311 -  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio
312 
313   Notes: when the graph is on a single process, this partitioner actually uses Scotch and not PT-Scotch
314 
315 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
316 M*/
317 
318 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
319 {
320   PetscPartitioner_PTScotch *p;
321 
322   PetscFunctionBegin;
323   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
324   PetscCall(PetscNewLog(part, &p));
325   part->data = p;
326 
327   PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm));
328   p->strategy  = 0;
329   p->imbalance = 0.01;
330 
331   PetscCall(PetscPartitionerInitialize_PTScotch(part));
332   PetscCall(PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionerCite));
333   PetscFunctionReturn(0);
334 }
335