xref: /petsc/src/dm/partitioner/impls/parmetis/partparmetis.c (revision 9d0fdff2e8b5ebdd0e57bf4dfe77e45fec5df614)
#include <petsc/private/partitionerimpl.h> /*I "petscpartitioner.h" I*/

#if defined(PETSC_HAVE_PARMETIS)
#include <parmetis.h>
#endif

PetscBool  ParMetisPartitionerCite       = PETSC_FALSE;
const char ParMetisPartitionerCitation[] = "@article{KarypisKumar98,\n"
                                           "  author  = {George Karypis and Vipin Kumar},\n"
                                           "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
                                           "  journal = {Journal of Parallel and Distributed Computing},\n"
                                           "  volume  = {48},\n"
                                           "  pages   = {71--85},\n"
                                           "  year    = {1998},\n"
                                           "  doi     = {https://doi.org/10.1006/jpdc.1997.1403}\n"
                                           "}\n";

typedef struct {
  MPI_Comm  pcomm;          /* communicator dedicated to the ParMETIS calls */
  PetscInt  ptype;          /* partitioning method: 0 = kway, 1 = rb (recursive bisection) */
  PetscReal imbalanceRatio; /* load imbalance tolerance, passed as ubvec[0] */
  PetscInt  debugFlag;      /* debugging level for METIS/ParMETIS */
  PetscInt  randomSeed;     /* random seed; -1 selects the library default */
} PetscPartitioner_ParMetis;

static const char *ptypes[] = {"kway", "rb"};

static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part) {
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_free(&p->pcomm));
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_ParMetis_ASCII(PetscPartitioner part, PetscViewer viewer) {
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]));
  PetscCall(PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double)p->imbalanceRatio));
  PetscCall(PetscViewerASCIIPrintf(viewer, "debug flag %" PetscInt_FMT "\n", p->debugFlag));
  PetscCall(PetscViewerASCIIPrintf(viewer, "random seed %" PetscInt_FMT "\n", p->randomSeed));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer) {
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_ParMetis_ASCII(part, viewer));
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscPartitioner part, PetscOptionItems *PetscOptionsObject) {
  PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *)part->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner ParMetis Options");
  PetscCall(PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL));
  PetscCall(PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL));
  PetscCall(PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL));
  PetscCall(PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) {
#if defined(PETSC_HAVE_PARMETIS)
  PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *)part->data;
  MPI_Comm                   comm;
  PetscInt                   nvtxs = numVertices;     /* The number of vertices in the full graph */
  PetscInt                  *vtxdist;                 /* Distribution of vertices across processes */
  PetscInt                  *xadj        = start;     /* Start of edge list for each vertex */
  PetscInt                  *adjncy      = adjacency; /* Edge lists for all vertices */
  PetscInt                  *vwgt        = NULL;      /* Vertex weights */
  PetscInt                  *adjwgt      = NULL;      /* Edge weights */
  PetscInt                   wgtflag     = 0;         /* Indicates which weights are present */
  PetscInt                   numflag     = 0;         /* Indicates initial offset (0 or 1) */
  PetscInt                   ncon        = 1;         /* The number of weights per vertex */
  PetscInt                   metis_ptype = pm->ptype; /* kway or recursive bisection */
  real_t                    *tpwgts;                  /* The fraction of vertex weights assigned to each partition */
  real_t                    *ubvec;                   /* The imbalance tolerance for vertex weights */
  PetscInt                   options[64];             /* Options */
  PetscInt                   v, i, *assignment, *points;
  PetscMPIInt                p, size, rank;
  PetscBool                  hasempty = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  /* Calculate vertex distribution */
  PetscCall(PetscMalloc4(size + 1, &vtxdist, nparts * ncon, &tpwgts, ncon, &ubvec, nvtxs, &assignment));
  vtxdist[0] = 0;
  PetscCallMPI(MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm));
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p - 1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p - 1];
  }
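  /*
     vtxdist now holds the prefix-sum layout ParMETIS expects: rank r owns global
     vertices [vtxdist[r], vtxdist[r+1]). As an illustrative example (not taken from
     a run), local counts {3, 0, 2} on three ranks give vtxdist = {0, 3, 3, 5}, and
     hasempty is set because rank 1 owns no vertices.
  */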
  /* null graph */
  if (vtxdist[size] == 0) {
    PetscCall(PetscFree4(vtxdist, tpwgts, ubvec, assignment));
    PetscCall(ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition));
    PetscFunctionReturn(0);
  }
  /* Calculate partition weights */
  if (targetSection) {
    PetscInt p;
    real_t   sumt = 0.0;

    for (p = 0; p < nparts; ++p) {
      PetscInt tpd;

      PetscCall(PetscSectionGetDof(targetSection, p, &tpd));
      sumt += tpd;
      tpwgts[p] = tpd;
    }
    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
      for (p = 0, sumt = 0.0; p < nparts; ++p) {
        tpwgts[p] = PetscMax(tpwgts[p], PETSC_SMALL);
        sumt += tpwgts[p];
      }
      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
      for (p = 0, sumt = 0.0; p < nparts - 1; ++p) sumt += tpwgts[p];
      tpwgts[nparts - 1] = 1. - sumt;
    }
  } else {
    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0 / nparts;
  }
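  /*
     Illustrative example (not from a run): target dofs {2, 1, 1} produce
     tpwgts = {0.5, 0.25, 0.25}. The last entry is recomputed as 1 minus the sum
     of the others so the fractions add up to exactly 1, as ParMETIS requires.
  */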
  ubvec[0] = pm->imbalanceRatio;

  /* Weight cells */
  if (vertSection) {
    PetscCall(PetscMalloc1(nvtxs, &vwgt));
    for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionGetDof(vertSection, v, &vwgt[v]));
    wgtflag |= 2; /* have weights on graph vertices */
  }

  /* Find the first rank that owns vertices; if it owns them all, call serial METIS on that rank */
  for (p = 0; p < size && !vtxdist[p + 1]; ++p)
    ;
  if (vtxdist[p + 1] == vtxdist[size]) {
    if (rank == p) {
      int err;
      err = METIS_SetDefaultOptions(options); /* initialize all defaults */
      PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
      options[METIS_OPTION_SEED]   = pm->randomSeed;
      if (metis_ptype == 1) {
        PetscStackPushExternal("METIS_PartGraphRecursive");
        err = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
      } else {
        /*
         It would be nice to activate the two options below, but they would need some actual testing.
         - Turning on these options may exercise paths of the METIS code that have bugs and may break production runs.
         - If CONTIG is set to 1, METIS exits with an error if the graph is disconnected, despite the manual saying the option is ignored in that case.
        */
        /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
        PetscStackPushExternal("METIS_PartGraphKway");
        err = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    }
  } else {
    MPI_Comm pcomm = pm->pcomm;

    options[0] = 1; /* use options */
    options[1] = pm->debugFlag;
    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

    if (hasempty) { /* ParMETIS does not support empty graphs on some of the processes */
      PetscInt cnt;

      PetscCallMPI(MPI_Comm_split(pm->pcomm, !!nvtxs, rank, &pcomm));
      for (p = 0, cnt = 0; p < size; p++) {
        if (vtxdist[p + 1] != vtxdist[p]) {
          vtxdist[cnt + 1] = vtxdist[p + 1];
          cnt++;
        }
      }
    }
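    /*
       Illustrative example (not from a run): with vtxdist = {0, 3, 3, 5}, the empty
       rank 1 is dropped and vtxdist is compacted to {0, 3, 5}, matching the ranks
       kept in the split communicator pcomm.
    */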
    if (nvtxs) {
      int err;
      PetscStackPushExternal("ParMETIS_V3_PartKway");
      err = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
      PetscStackPop;
      PetscCheck(err == METIS_OK, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", err);
    }
    if (hasempty) PetscCallMPI(MPI_Comm_free(&pcomm));
  }

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) PetscCall(PetscSectionAddDof(partSection, assignment[v], 1));
  PetscCall(PetscMalloc1(nvtxs, &points));
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
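  /*
     points now lists the local vertices grouped by target partition. Illustrative
     example (not from a run): assignment = {1, 0, 1} yields points = {1, 0, 2},
     with partSection recording one point for partition 0 and two for partition 1.
  */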
  PetscCheck(i == nvtxs, comm, PETSC_ERR_PLIB, "Number of points %" PetscInt_FMT " should be %" PetscInt_FMT, i, nvtxs);
  PetscCall(ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition));
  PetscCall(PetscFree4(vtxdist, tpwgts, ubvec, assignment));
  PetscCall(PetscFree(vwgt));
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}

static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part) {
  PetscFunctionBegin;
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_ParMetis;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
  part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
  part->ops->partition      = PetscPartitionerPartition_ParMetis;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library

  Options Database Keys:
+  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
.  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
.  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
-  -petscpartitioner_parmetis_seed <int> - Random seed

  Level: intermediate

  Note:
  When the graph is on a single process, this partitioner calls METIS directly rather than ParMETIS.

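  Example:
  A minimal command-line sketch; the suboption names are registered in PetscPartitionerSetFromOptions_ParMetis() above, and the imbalance value 1.1 is illustrative:
.vb
  -petscpartitioner_type parmetis -petscpartitioner_parmetis_type rb -petscpartitioner_parmetis_imbalance_ratio 1.1
.ve
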
.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part) {
  PetscPartitioner_ParMetis *p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscCall(PetscNew(&p));
  part->data = p;

  PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm));
  p->ptype          = 0;
  p->imbalanceRatio = 1.05;
  p->debugFlag      = 0;
  p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */

  PetscCall(PetscPartitionerInitialize_ParMetis(part));
  PetscCall(PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionerCite));
  PetscFunctionReturn(0);
}