xref: /petsc/src/dm/partitioner/impls/parmetis/partparmetis.c (revision ccb4e88a40f0b86eaeca07ff64c64e4de2fae686)
1 #include <petsc/private/partitionerimpl.h>        /*I "petscpartitioner.h" I*/
2 
3 #if defined(PETSC_HAVE_PARMETIS)
4 #include <parmetis.h>
5 #endif
6 
7 PetscBool  ParMetisPartitionerCite = PETSC_FALSE;
8 const char ParMetisPartitionerCitation[] =
9   "@article{KarypisKumar98,\n"
10   "  author  = {George Karypis and Vipin Kumar},\n"
11   "  title   = {A Parallel Algorithm for Multilevel Graph Partitioning and Sparse Matrix Ordering},\n"
12   "  journal = {Journal of Parallel and Distributed Computing},\n"
13   "  volume  = {48},\n"
14   "  pages   = {71--85},\n"
15   "  year    = {1998}\n"
16   "  doi     = {https://doi.org/10.1006/jpdc.1997.1403}\n"
17   "}\n";
18 
/* Implementation data for the ParMETIS partitioner, stored in part->data */
typedef struct {
  MPI_Comm  pcomm;          /* Private duplicate of the partitioner's communicator, passed to ParMETIS */
  PetscInt  ptype;          /* Partitioning method: index into ptypes[] below (0 = kway, 1 = rb) */
  PetscReal imbalanceRatio; /* Load imbalance tolerance handed to (Par)METIS as ubvec[0] */
  PetscInt  debugFlag;      /* Debug/verbosity flag forwarded to (Par)METIS options */
  PetscInt  randomSeed;     /* RNG seed; -1 means use the library default */
} PetscPartitioner_ParMetis;

/* Names for the -petscpartitioner_parmetis_type option; order must match the ptype index */
static const char *ptypes[] = {"kway", "rb"};
28 
29 static PetscErrorCode PetscPartitionerDestroy_ParMetis(PetscPartitioner part)
30 {
31   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
32   PetscErrorCode             ierr;
33 
34   PetscFunctionBegin;
35   ierr = MPI_Comm_free(&p->pcomm);CHKERRMPI(ierr);
36   ierr = PetscFree(part->data);CHKERRQ(ierr);
37   PetscFunctionReturn(0);
38 }
39 
40 static PetscErrorCode PetscPartitionerView_ParMetis_ASCII(PetscPartitioner part, PetscViewer viewer)
41 {
42   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
43   PetscErrorCode             ierr;
44 
45   PetscFunctionBegin;
46   ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
47   ierr = PetscViewerASCIIPrintf(viewer, "ParMetis type: %s\n", ptypes[p->ptype]);CHKERRQ(ierr);
48   ierr = PetscViewerASCIIPrintf(viewer, "load imbalance ratio %g\n", (double) p->imbalanceRatio);CHKERRQ(ierr);
49   ierr = PetscViewerASCIIPrintf(viewer, "debug flag %D\n", p->debugFlag);CHKERRQ(ierr);
50   ierr = PetscViewerASCIIPrintf(viewer, "random seed %D\n", p->randomSeed);CHKERRQ(ierr);
51   ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
52   PetscFunctionReturn(0);
53 }
54 
55 static PetscErrorCode PetscPartitionerView_ParMetis(PetscPartitioner part, PetscViewer viewer)
56 {
57   PetscBool      iascii;
58   PetscErrorCode ierr;
59 
60   PetscFunctionBegin;
61   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
62   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
63   ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
64   if (iascii) {ierr = PetscPartitionerView_ParMetis_ASCII(part, viewer);CHKERRQ(ierr);}
65   PetscFunctionReturn(0);
66 }
67 
68 static PetscErrorCode PetscPartitionerSetFromOptions_ParMetis(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
69 {
70   PetscPartitioner_ParMetis *p = (PetscPartitioner_ParMetis *) part->data;
71   PetscErrorCode            ierr;
72 
73   PetscFunctionBegin;
74   ierr = PetscOptionsHead(PetscOptionsObject, "PetscPartitioner ParMetis Options");CHKERRQ(ierr);
75   ierr = PetscOptionsEList("-petscpartitioner_parmetis_type", "Partitioning method", "", ptypes, 2, ptypes[p->ptype], &p->ptype, NULL);CHKERRQ(ierr);
76   ierr = PetscOptionsReal("-petscpartitioner_parmetis_imbalance_ratio", "Load imbalance ratio limit", "", p->imbalanceRatio, &p->imbalanceRatio, NULL);CHKERRQ(ierr);
77   ierr = PetscOptionsInt("-petscpartitioner_parmetis_debug", "Debugging flag", "", p->debugFlag, &p->debugFlag, NULL);CHKERRQ(ierr);
78   ierr = PetscOptionsInt("-petscpartitioner_parmetis_seed", "Random seed", "", p->randomSeed, &p->randomSeed, NULL);CHKERRQ(ierr);
79   ierr = PetscOptionsTail();CHKERRQ(ierr);
80   PetscFunctionReturn(0);
81 }
82 
/* Partition the distributed CSR graph (start[]/adjacency[]) into nparts pieces.

   If the entire graph resides on a single process, serial METIS is invoked on
   that process (either k-way or recursive bisection according to pm->ptype);
   otherwise ParMETIS_V3_PartKway() is called collectively.  The result is
   written into partSection (dofs per partition) and *partition (local vertex
   numbers grouped by partition, created with ISCreateGeneral).

   vertSection   - optional: per-vertex integer weights
   targetSection - optional: per-partition target sizes (normalized to fractions)
*/
static PetscErrorCode PetscPartitionerPartition_ParMetis(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PARMETIS)
  PetscPartitioner_ParMetis *pm = (PetscPartitioner_ParMetis *) part->data;
  MPI_Comm       comm;
  PetscInt       nvtxs       = numVertices; /* The number of vertices in full graph */
  PetscInt      *vtxdist;                   /* Distribution of vertices across processes */
  PetscInt      *xadj        = start;       /* Start of edge list for each vertex */
  PetscInt      *adjncy      = adjacency;   /* Edge lists for all vertices */
  PetscInt      *vwgt        = NULL;        /* Vertex weights */
  PetscInt      *adjwgt      = NULL;        /* Edge weights */
  PetscInt       wgtflag     = 0;           /* Indicates which weights are present */
  PetscInt       numflag     = 0;           /* Indicates initial offset (0 or 1) */
  PetscInt       ncon        = 1;           /* The number of weights per vertex */
  PetscInt       metis_ptype = pm->ptype;   /* kway or recursive bisection */
  real_t        *tpwgts;                    /* The fraction of vertex weights assigned to each partition */
  real_t        *ubvec;                     /* The balance intolerance for vertex weights */
  PetscInt       options[64];               /* Options */
  PetscInt       v, i, *assignment, *points;
  PetscMPIInt    p, size, rank;
  PetscBool      hasempty = PETSC_FALSE;    /* Set if any process owns zero vertices (ParMETIS cannot handle that) */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject) part, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  /* Calculate vertex distribution: after the prefix sum, vtxdist[p] is the
     global index of the first vertex owned by process p, vtxdist[size] the total */
  ierr = PetscMalloc4(size+1,&vtxdist,nparts*ncon,&tpwgts,ncon,&ubvec,nvtxs,&assignment);CHKERRQ(ierr);
  vtxdist[0] = 0;
  ierr = MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);CHKERRMPI(ierr);
  for (p = 2; p <= size; ++p) {
    /* Detect empty ranks while the entries still hold per-rank counts (before they are summed) */
    hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p-1];
  }
  /* null graph: nothing to partition, return an empty IS */
  if (vtxdist[size] == 0) {
    ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  /* Calculate partition weights: tpwgts[] must be positive fractions summing exactly to 1 */
  if (targetSection) {
    PetscInt p;
    real_t   sumt = 0.0;

    for (p = 0; p < nparts; ++p) {
      PetscInt tpd;

      ierr = PetscSectionGetDof(targetSection,p,&tpd);CHKERRQ(ierr);
      sumt += tpd;
      tpwgts[p] = tpd;
    }
    if (sumt) { /* METIS/ParMETIS do not like exactly zero weight */
      /* Clamp each weight to PETSC_SMALL, renormalize, then force the last entry
         so the fractions sum to exactly 1 despite floating-point rounding */
      for (p = 0, sumt = 0.0; p < nparts; ++p) {
        tpwgts[p] = PetscMax(tpwgts[p],PETSC_SMALL);
        sumt += tpwgts[p];
      }
      for (p = 0; p < nparts; ++p) tpwgts[p] /= sumt;
      for (p = 0, sumt = 0.0; p < nparts-1; ++p) sumt += tpwgts[p];
      tpwgts[nparts - 1] = 1. - sumt;
    }
  } else {
    /* No target sizes given: aim for equally sized partitions */
    for (p = 0; p < nparts; ++p) tpwgts[p] = 1.0/nparts;
  }
  ubvec[0] = pm->imbalanceRatio;

  /* Weight cells */
  if (vertSection) {
    ierr = PetscMalloc1(nvtxs,&vwgt);CHKERRQ(ierr);
    for (v = 0; v < nvtxs; ++v) {
      ierr = PetscSectionGetDof(vertSection, v, &vwgt[v]);CHKERRQ(ierr);
    }
    wgtflag |= 2; /* have weights on graph vertices */
  }

  /* Find the first rank that owns vertices; terminates before p == size because
     the null-graph case returned above, so some vtxdist entry is nonzero */
  for (p = 0; !vtxdist[p+1] && p < size; ++p);
  if (vtxdist[p+1] == vtxdist[size]) {
    /* All vertices live on a single process p: run serial METIS there */
    if (rank == p) {
      int err;
      err = METIS_SetDefaultOptions(options); /* initialize all defaults */
      options[METIS_OPTION_DBGLVL] = pm->debugFlag;
      options[METIS_OPTION_SEED]   = pm->randomSeed;
      if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_SetDefaultOptions()");
      if (metis_ptype == 1) {
        PetscStackPush("METIS_PartGraphRecursive");
        err = METIS_PartGraphRecursive(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphRecursive()");
      } else {
        /*
         It would be nice to activate the two options below, but they would need some actual testing.
         - Turning on these options may exercise path of the METIS code that have bugs and may break production runs.
         - If CONTIG is set to 1, METIS will exit with error if the graph is disconnected, despite the manual saying the option is ignored in such case.
        */
        /* options[METIS_OPTION_CONTIG]  = 1; */ /* try to produce partitions that are contiguous */
        /* options[METIS_OPTION_MINCONN] = 1; */ /* minimize the maximum degree of the subdomain graph */
        PetscStackPush("METIS_PartGraphKway");
        err = METIS_PartGraphKway(&nvtxs, &ncon, xadj, adjncy, vwgt, NULL, adjwgt, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment);
        PetscStackPop;
        if (err != METIS_OK) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in METIS_PartGraphKway()");
      }
    }
  } else {
    /* Truly distributed graph: call ParMETIS collectively */
    MPI_Comm pcomm = pm->pcomm;

    options[0] = 1; /*use options */
    options[1] = pm->debugFlag;
    options[2] = (pm->randomSeed == -1) ? 15 : pm->randomSeed; /* default is GLOBAL_SEED=15 from `libparmetis/defs.h` */

    if (hasempty) { /* parmetis does not support empty graphs on some of the processes */
      PetscInt cnt;

      /* Split off the nonempty ranks into their own communicator and compact
         vtxdist in place to match the new (smaller) communicator */
      ierr = MPI_Comm_split(pm->pcomm,!!nvtxs,rank,&pcomm);CHKERRMPI(ierr);
      for (p=0,cnt=0;p<size;p++) {
        if (vtxdist[p+1] != vtxdist[p]) {
          vtxdist[cnt+1] = vtxdist[p+1];
          cnt++;
        }
      }
    }
    if (nvtxs) {
      int err;
      PetscStackPush("ParMETIS_V3_PartKway");
      err = ParMETIS_V3_PartKway(vtxdist, xadj, adjncy, vwgt, adjwgt, &wgtflag, &numflag, &ncon, &nparts, tpwgts, ubvec, options, &part->edgeCut, assignment, &pcomm);
      PetscStackPop;
      if (err != METIS_OK) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Error %d in ParMETIS_V3_PartKway()", err);
    }
    if (hasempty) {
      /* pcomm was created by MPI_Comm_split above (even on empty ranks) and must be freed */
      ierr = MPI_Comm_free(&pcomm);CHKERRMPI(ierr);
    }
  }

  /* Convert to PetscSection+IS: count vertices per partition, then list local
     vertex numbers sorted by their assigned partition */
  for (v = 0; v < nvtxs; ++v) {ierr = PetscSectionAddDof(partSection, assignment[v], 1);CHKERRQ(ierr);}
  ierr = PetscMalloc1(nvtxs, &points);CHKERRQ(ierr);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  if (i != nvtxs) SETERRQ2(comm, PETSC_ERR_PLIB, "Number of points %D should be %D", i, nvtxs);
  /* The IS takes ownership of points; vtxdist/tpwgts/ubvec/assignment/vwgt are freed here */
  ierr = ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);CHKERRQ(ierr);
  ierr = PetscFree4(vtxdist,tpwgts,ubvec,assignment);CHKERRQ(ierr);
  ierr = PetscFree(vwgt);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-parmetis.");
#endif
}
233 
234 static PetscErrorCode PetscPartitionerInitialize_ParMetis(PetscPartitioner part)
235 {
236   PetscFunctionBegin;
237   part->noGraph             = PETSC_FALSE;
238   part->ops->view           = PetscPartitionerView_ParMetis;
239   part->ops->setfromoptions = PetscPartitionerSetFromOptions_ParMetis;
240   part->ops->destroy        = PetscPartitionerDestroy_ParMetis;
241   part->ops->partition      = PetscPartitionerPartition_ParMetis;
242   PetscFunctionReturn(0);
243 }
244 
245 /*MC
246   PETSCPARTITIONERPARMETIS = "parmetis" - A PetscPartitioner object using the ParMETIS library
247 
248   Level: intermediate
249 
250   Options Database Keys:
251 +  -petscpartitioner_parmetis_type <string> - ParMETIS partitioning type. Either "kway" or "rb" (recursive bisection)
252 .  -petscpartitioner_parmetis_imbalance_ratio <value> - Load imbalance ratio limit
253 .  -petscpartitioner_parmetis_debug <int> - Debugging flag passed to ParMETIS/METIS routines
254 -  -petscpartitioner_parmetis_seed <int> - Random seed
255 
256   Notes: when the graph is on a single process, this partitioner actually calls METIS and not ParMETIS
257 
258 .seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
259 M*/
260 
261 PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_ParMetis(PetscPartitioner part)
262 {
263   PetscPartitioner_ParMetis *p;
264   PetscErrorCode          ierr;
265 
266   PetscFunctionBegin;
267   PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
268   ierr       = PetscNewLog(part, &p);CHKERRQ(ierr);
269   part->data = p;
270 
271   ierr = MPI_Comm_dup(PetscObjectComm((PetscObject)part),&p->pcomm);CHKERRMPI(ierr);
272   p->ptype          = 0;
273   p->imbalanceRatio = 1.05;
274   p->debugFlag      = 0;
275   p->randomSeed     = -1; /* defaults to GLOBAL_SEED=15 from `libparmetis/defs.h` */
276 
277   ierr = PetscPartitionerInitialize_ParMetis(part);CHKERRQ(ierr);
278   ierr = PetscCitationsRegister(ParMetisPartitionerCitation, &ParMetisPartitionerCite);CHKERRQ(ierr);
279   PetscFunctionReturn(0);
280 }
281 
282