xref: /petsc/src/ksp/pc/impls/gamg/agg.c (revision bd412c90fba8895e9763ccee76c7dd2e09a982d7)
/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
4 
5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
6 #include <petscblaslapack.h>
7 #include <petscdm.h>
8 #include <petsc/private/kspimpl.h>
9 
/* Private context for the aggregation (AGG) flavor of PCGAMG */
typedef struct {
  PetscInt  nsmooths;                     // number of prolongator smoothing steps (1 is typical), set with -pc_gamg_agg_nsmooths
  PetscBool symmetrize_graph;             // symmetrize the graph before computing the aggregation, set with -pc_gamg_symmetrize_graph
  PetscInt  aggressive_coarsening_levels; // number of aggressive coarsening levels (square or MISk)
} PC_GAMG_AGG;
15 
16 /*@
17    PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical) used for multigrid on all the levels
18 
19    Logically Collective on pc
20 
   Input Parameters:
+  pc - the preconditioner context
-  n - the number of smoothing steps

24    Options Database Key:
25 .  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation
26 
27    Level: intermediate
28 
29 .seealso: `PCMG`, `PCGAMG`
30 @*/
PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  /* dispatches to PCGAMGSetNSmooths_AGG() when the AGG type is active; silently a no-op otherwise */
  PetscTryMethod(pc, "PCGAMGSetNSmooths_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(0);
}
39 
40 static PetscErrorCode PCGAMGSetNSmooths_AGG(PC pc, PetscInt n)
41 {
42   PC_MG       *mg          = (PC_MG *)pc->data;
43   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
44   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
45 
46   PetscFunctionBegin;
47   pc_gamg_agg->nsmooths = n;
48   PetscFunctionReturn(0);
49 }
50 
51 /*@
52    PCGAMGSetSymmetrizeGraph - Symmetrize the graph before computing the aggregation. Some algorithms require the graph be symmetric
53 
54    Logically Collective on pc
55 
56    Input Parameters:
57 +  pc - the preconditioner context
58 -  n - `PETSC_TRUE` or `PETSC_FALSE`
59 
60    Options Database Key:
61 .  -pc_gamg_symmetrize_graph <true,default=false> - symmetrize the graph before computing the aggregation
62 
63    Level: intermediate
64 
65    Developer Note:
66    If the aggregation can hang with a nonsymmetric graph then the graph should always be symmetrized before calling the aggregation,
67    it should not be up to the user.
68 
69 .seealso: `PCGAMG`, `PCGAMGSetAggressiveLevels()`
70 @*/
PetscErrorCode PCGAMGSetSymmetrizeGraph(PC pc, PetscBool n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, n, 2);
  /* dispatches to PCGAMGSetSymmetrizeGraph_AGG() when the AGG type is active; silently a no-op otherwise */
  PetscTryMethod(pc, "PCGAMGSetSymmetrizeGraph_C", (PC, PetscBool), (pc, n));
  PetscFunctionReturn(0);
}
79 
80 static PetscErrorCode PCGAMGSetSymmetrizeGraph_AGG(PC pc, PetscBool n)
81 {
82   PC_MG       *mg          = (PC_MG *)pc->data;
83   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
84   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
85 
86   PetscFunctionBegin;
87   pc_gamg_agg->symmetrize_graph = n;
88   PetscFunctionReturn(0);
89 }
90 
91 /*@
92    PCGAMGSetAggressiveLevels -  Use aggressive coarsening on first n levels
93 
94    Logically Collective on pc
95 
96    Input Parameters:
97 +  pc - the preconditioner context
98 -  n - 0, 1 or more
99 
   Options Database Key:
.  -pc_gamg_aggressive_coarsening <n,default=1> - Number of levels, starting from the finest, on which aggressive (MIS-2) coarsening is used
102 
103    Level: intermediate
104 
105 .seealso: `PCGAMG`, `PCGAMGSetSymmetrizeGraph()`, `PCGAMGSetThreshold()`
106 @*/
PetscErrorCode PCGAMGSetAggressiveLevels(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  /* dispatches to PCGAMGSetAggressiveLevels_AGG() when the AGG type is active; silently a no-op otherwise */
  PetscTryMethod(pc, "PCGAMGSetAggressiveLevels_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(0);
}
115 
116 static PetscErrorCode PCGAMGSetAggressiveLevels_AGG(PC pc, PetscInt n)
117 {
118   PC_MG       *mg          = (PC_MG *)pc->data;
119   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
120   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
121 
122   PetscFunctionBegin;
123   pc_gamg_agg->aggressive_coarsening_levels = n;
124   PetscFunctionReturn(0);
125 }
126 
/* Process the -pc_gamg_agg_* options database entries for the AGG type */
static PetscErrorCode PCSetFromOptions_GAMG_AGG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "GAMG-AGG options");
  {
    PetscBool flg;
    PetscCall(PetscOptionsInt("-pc_gamg_agg_nsmooths", "smoothing steps for smoothed aggregation, usually 1", "PCGAMGSetNSmooths", pc_gamg_agg->nsmooths, &pc_gamg_agg->nsmooths, NULL));
    PetscCall(PetscOptionsBool("-pc_gamg_symmetrize_graph", "Set for asymmetric matrices", "PCGAMGSetSymmetrizeGraph", pc_gamg_agg->symmetrize_graph, &pc_gamg_agg->symmetrize_graph, NULL));
    /* NOTE(review): this unconditionally resets the value to 1, clobbering anything set via
       PCGAMGSetAggressiveLevels() before PCSetFromOptions() — confirm this is intended */
    pc_gamg_agg->aggressive_coarsening_levels = 1;
    /* -pc_gamg_square_graph is a deprecated alias for -pc_gamg_aggressive_coarsening */
    PetscCall(
      PetscOptionsInt("-pc_gamg_square_graph", "Number of aggressive coarsening (MIS-2) levels from finest (alias for -pc_gamg_aggressive_coarsening, deprecated)", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, &flg));
    if (!flg) {
      /* only the new option (or neither) was given */
      PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening", "Number of aggressive coarsening (MIS-2) levels from finest", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, NULL));
    } else {
      /* deprecated alias was given; the new option, if also given, overrides it and triggers a warning */
      PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening", "Number of aggressive coarsening (MIS-2) levels from finest", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, &flg));
      if (flg) PetscCall(PetscInfo(pc, "Warning: both -pc_gamg_square_graph and -pc_gamg_aggressive_coarsening are used. -pc_gamg_square_graph is deprecated, Number of aggressive levels is %d\n", (int)pc_gamg_agg->aggressive_coarsening_levels));
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}
152 
153 static PetscErrorCode PCDestroy_GAMG_AGG(PC pc)
154 {
155   PC_MG   *mg      = (PC_MG *)pc->data;
156   PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx;
157 
158   PetscFunctionBegin;
159   PetscCall(PetscFree(pc_gamg->subctx));
160   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", NULL));
161   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetSymmetrizeGraph_C", NULL));
162   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", NULL));
163   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
164   PetscFunctionReturn(0);
165 }
166 
/*
   PCSetCoordinates_AGG
     - collective

   Input Parameters:
   . pc - the preconditioner context
   . ndm - dimension of the data (used for dof/vertex for Stokes)
   . a_nloc - number of local vertices
   . coords - [a_nloc][ndm] - interleaved coordinate data: {x_0, y_0, z_0, x_1, y_1, ...}
*/
177 
static PetscErrorCode PCSetCoordinates_AGG(PC pc, PetscInt ndm, PetscInt a_nloc, PetscReal *coords)
{
  PC_MG   *mg      = (PC_MG *)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx;
  PetscInt arrsz, kk, ii, jj, nloc, ndatarows, ndf;
  Mat      mat = pc->pmat;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); /* NOTE(review): mat is derived from pc, not argument 1 — index looks off but is harmless */
  nloc = a_nloc;

  /* SA: null space vectors */
  PetscCall(MatGetBlockSize(mat, &ndf));               /* this does not work for Stokes */
  if (coords && ndf == 1) pc_gamg->data_cell_cols = 1; /* scalar w/ coords and SA (not needed) */
  else if (coords) {
    /* vector problem with coordinates: build rigid-body modes (3 in 2D, 6 in 3D) */
    PetscCheck(ndm <= ndf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "degrees of motion %" PetscInt_FMT " > block size %" PetscInt_FMT, ndm, ndf);
    pc_gamg->data_cell_cols = (ndm == 2 ? 3 : 6); /* displacement elasticity */
    if (ndm != ndf) PetscCheck(pc_gamg->data_cell_cols == ndf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Don't know how to create null space for ndm=%" PetscInt_FMT ", ndf=%" PetscInt_FMT ".  Use MatSetNearNullSpace().", ndm, ndf);
  } else pc_gamg->data_cell_cols = ndf; /* no data, force SA with constant null space vectors */
  pc_gamg->data_cell_rows = ndatarows = ndf;
  PetscCheck(pc_gamg->data_cell_cols > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "pc_gamg->data_cell_cols %" PetscInt_FMT " <= 0", pc_gamg->data_cell_cols);
  /* total size of the column-oriented null-space data array */
  arrsz = nloc * pc_gamg->data_cell_rows * pc_gamg->data_cell_cols;

  /* (re)allocate only when the required size changed */
  if (!pc_gamg->data || (pc_gamg->data_sz != arrsz)) {
    PetscCall(PetscFree(pc_gamg->data));
    PetscCall(PetscMalloc1(arrsz + 1, &pc_gamg->data));
  }
  /* copy data in - column oriented */
  for (kk = 0; kk < nloc; kk++) {
    const PetscInt M    = nloc * pc_gamg->data_cell_rows; /* stride into data */
    PetscReal     *data = &pc_gamg->data[kk * ndatarows]; /* start of cell */
    if (pc_gamg->data_cell_cols == 1) *data = 1.0;
    else {
      /* translational modes: identity block per node */
      for (ii = 0; ii < ndatarows; ii++) {
        for (jj = 0; jj < ndatarows; jj++) {
          if (ii == jj) data[ii * M + jj] = 1.0;
          else data[ii * M + jj] = 0.0;
        }
      }

      /* rotational modes */
      if (coords) {
        if (ndm == 2) {
          /* single in-plane rotation: (-y, x) */
          data += 2 * M;
          data[0] = -coords[2 * kk + 1];
          data[1] = coords[2 * kk];
        } else {
          /* three rotation modes (columns), skew-symmetric cross-product pattern of (x, y, z) */
          data += 3 * M;
          data[0]         = 0.0;
          data[M + 0]     = coords[3 * kk + 2];
          data[2 * M + 0] = -coords[3 * kk + 1];
          data[1]         = -coords[3 * kk + 2];
          data[M + 1]     = 0.0;
          data[2 * M + 1] = coords[3 * kk];
          data[2]         = coords[3 * kk + 1];
          data[M + 2]     = -coords[3 * kk];
          data[2 * M + 2] = 0.0;
        }
      }
    }
  }
  pc_gamg->data_sz = arrsz;
  PetscFunctionReturn(0);
}
244 
245 /*
246    PCSetData_AGG - called if data is not set with PCSetCoordinates.
247       Looks in Mat for near null space.
248       Does not work for Stokes
249 
250   Input Parameter:
251    . pc -
252    . a_A - matrix to get (near) null space out of.
253 */
static PetscErrorCode PCSetData_AGG(PC pc, Mat a_A)
{
  PC_MG       *mg      = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg = (PC_GAMG *)mg->innerctx;
  MatNullSpace mnull;

  PetscFunctionBegin;
  /* first choice: a near null space attached to the matrix */
  PetscCall(MatGetNearNullSpace(a_A, &mnull));
  if (!mnull) {
    /* fall back to a (near) null space composed on field 0 of the PC's or matrix's DM, if any */
    DM dm;
    PetscCall(PCGetDM(pc, &dm));
    if (!dm) PetscCall(MatGetDM(a_A, &dm));
    if (dm) {
      PetscObject deformation;
      PetscInt    Nf;

      PetscCall(DMGetNumFields(dm, &Nf));
      if (Nf) {
        PetscCall(DMGetField(dm, 0, NULL, &deformation));
        PetscCall(PetscObjectQuery((PetscObject)deformation, "nearnullspace", (PetscObject *)&mnull));
        if (!mnull) PetscCall(PetscObjectQuery((PetscObject)deformation, "nullspace", (PetscObject *)&mnull));
      }
    }
  }

  if (!mnull) {
    /* no null space found anywhere: build default (constant per dof) data from the block size */
    PetscInt bs, NN, MM;
    PetscCall(MatGetBlockSize(a_A, &bs));
    PetscCall(MatGetLocalSize(a_A, &MM, &NN));
    PetscCheck(MM % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MM %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, MM, bs);
    PetscCall(PCSetCoordinates_AGG(pc, bs, MM / bs, NULL));
  } else {
    /* copy the null-space vectors (preceded by a constant column when present) into pc_gamg->data, column oriented */
    PetscReal         *nullvec;
    PetscBool          has_const;
    PetscInt           i, j, mlocal, nvec, bs;
    const Vec         *vecs;
    const PetscScalar *v;

    PetscCall(MatGetLocalSize(a_A, &mlocal, NULL));
    PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
    pc_gamg->data_sz = (nvec + !!has_const) * mlocal;
    PetscCall(PetscMalloc1((nvec + !!has_const) * mlocal, &nullvec));
    /* constant vector, when present, is the first column */
    if (has_const)
      for (i = 0; i < mlocal; i++) nullvec[i] = 1.0;
    for (i = 0; i < nvec; i++) {
      PetscCall(VecGetArrayRead(vecs[i], &v));
      /* NOTE(review): only the real part is kept when PetscScalar is complex */
      for (j = 0; j < mlocal; j++) nullvec[(i + !!has_const) * mlocal + j] = PetscRealPart(v[j]);
      PetscCall(VecRestoreArrayRead(vecs[i], &v));
    }
    /* ownership of nullvec transfers to pc_gamg->data (freed elsewhere) */
    pc_gamg->data           = nullvec;
    pc_gamg->data_cell_cols = (nvec + !!has_const);
    PetscCall(MatGetBlockSize(a_A, &bs));
    pc_gamg->data_cell_rows = bs;
  }
  PetscFunctionReturn(0);
}
310 
311 /*
312   formProl0 - collect null space data for each aggregate, do QR, put R in coarse grid data and Q in P_0
313 
314   Input Parameter:
315    . agg_llists - list of arrays with aggregates -- list from selected vertices of aggregate unselected vertices
316    . bs - row block size
317    . nSAvec - column bs of new P
318    . my0crs - global index of start of locals
319    . data_stride - bs*(nloc nodes + ghost nodes) [data_stride][nSAvec]
320    . data_in[data_stride*nSAvec] - local data on fine grid
321    . flid_fgid[data_stride/bs] - make local to global IDs, includes ghosts in 'locals_llist'
322 
323   Output Parameter:
324    . a_data_out - in with fine grid data (w/ghosts), out with coarse grid data
325    . a_Prol - prolongation operator
326 */
static PetscErrorCode formProl0(PetscCoarsenData *agg_llists, PetscInt bs, PetscInt nSAvec, PetscInt my0crs, PetscInt data_stride, PetscReal data_in[], const PetscInt flid_fgid[], PetscReal **a_data_out, Mat a_Prol)
{
  PetscInt        Istart, my0, Iend, nloc, clid, flid = 0, aggID, kk, jj, ii, mm, nSelected, minsz, nghosts, out_data_stride;
  MPI_Comm        comm;
  PetscReal      *out_data;
  PetscCDIntNd   *pos;
  PCGAMGHashTable fgid_flid; /* global fine ID -> local index, for ghost nodes only */

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)a_Prol, &comm));
  PetscCall(MatGetOwnershipRange(a_Prol, &Istart, &Iend));
  nloc = (Iend - Istart) / bs;
  my0  = Istart / bs;
  PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, Iend, Istart, bs);
  Iend /= bs;
  nghosts = data_stride / bs - nloc;

  /* build ghost lookup; local IDs are resolved below by an ownership-range test instead */
  PetscCall(PCGAMGHashTableCreate(2 * nghosts + 1, &fgid_flid));
  for (kk = 0; kk < nghosts; kk++) PetscCall(PCGAMGHashTableAdd(&fgid_flid, flid_fgid[nloc + kk], nloc + kk));

  /* count selected -- same as number of cols of P */
  for (nSelected = mm = 0; mm < nloc; mm++) {
    PetscBool ise;
    PetscCall(PetscCDEmptyAt(agg_llists, mm, &ise));
    if (!ise) nSelected++;
  }
  /* consistency check: our column ownership of P must match the aggregate count */
  PetscCall(MatGetOwnershipRangeColumn(a_Prol, &ii, &jj));
  PetscCheck((ii / nSAvec) == my0crs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "ii %" PetscInt_FMT " /nSAvec %" PetscInt_FMT "  != my0crs %" PetscInt_FMT, ii, nSAvec, my0crs);
  PetscCheck(nSelected == (jj - ii) / nSAvec, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nSelected %" PetscInt_FMT " != (jj %" PetscInt_FMT " - ii %" PetscInt_FMT ")/nSAvec %" PetscInt_FMT, nSelected, jj, ii, nSAvec);

  /* aloc space for coarse point data (output) */
  out_data_stride = nSelected * nSAvec;

  PetscCall(PetscMalloc1(out_data_stride * nSAvec, &out_data));
  /* PETSC_MAX_REAL marks "not yet written"; checked before each write below */
  for (ii = 0; ii < out_data_stride * nSAvec; ii++) out_data[ii] = PETSC_MAX_REAL;
  *a_data_out = out_data; /* output - stride nSelected*nSAvec */

  /* find points and set prolongation */
  minsz = 100; /* NOTE(review): minsz is computed but not used after this loop */
  for (mm = clid = 0; mm < nloc; mm++) {
    PetscCall(PetscCDSizeAt(agg_llists, mm, &jj));
    if (jj > 0) {
      const PetscInt lid = mm, cgid = my0crs + clid;
      PetscInt       cids[100]; /* max bs */ /* NOTE(review): assumes nSAvec (N) <= 100 — confirm upstream bound */
      PetscBLASInt   asz = jj, M = asz * bs, N = nSAvec, INFO;
      PetscBLASInt   Mdata = M + ((N - M > 0) ? N - M : 0), LDA = Mdata, LWORK = N * bs; /* pad rows so Mdata >= N as QR requires */
      PetscScalar   *qqc, *qqr, *TAU, *WORK;
      PetscInt      *fids;
      PetscReal     *data;

      /* count agg */
      if (asz < minsz) minsz = asz;

      /* get block */
      PetscCall(PetscMalloc5(Mdata * N, &qqc, M * N, &qqr, N, &TAU, LWORK, &WORK, M, &fids));

      /* gather this aggregate's null-space block B_i into qqc (column oriented) */
      aggID = 0;
      PetscCall(PetscCDGetHeadPos(agg_llists, lid, &pos));
      while (pos) {
        PetscInt gid1;
        PetscCall(PetscCDIntNdGetID(pos, &gid1));
        PetscCall(PetscCDGetNextPos(agg_llists, lid, &pos));

        if (gid1 >= my0 && gid1 < Iend) flid = gid1 - my0;
        else {
          PetscCall(PCGAMGHashTableFind(&fgid_flid, gid1, &flid));
          PetscCheck(flid >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot find gid1 in table");
        }
        /* copy in B_i matrix - column oriented */
        data = &data_in[flid * bs];
        for (ii = 0; ii < bs; ii++) {
          for (jj = 0; jj < N; jj++) {
            PetscReal d                       = data[jj * data_stride + ii];
            qqc[jj * Mdata + aggID * bs + ii] = d;
          }
        }
        /* set fine IDs */
        for (kk = 0; kk < bs; kk++) fids[aggID * bs + kk] = flid_fgid[flid] * bs + kk;
        aggID++;
      }

      /* pad with zeros */
      /* NOTE(review): the kk++ below looks vestigial; kk is reassigned before its next use */
      for (ii = asz * bs; ii < Mdata; ii++) {
        for (jj = 0; jj < N; jj++, kk++) qqc[jj * Mdata + ii] = .0;
      }

      /* QR */
      PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
      PetscCallBLAS("LAPACKgeqrf", LAPACKgeqrf_(&Mdata, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCall(PetscFPTrapPop());
      PetscCheck(INFO == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "xGEQRF error");
      /* get R - column oriented - output B_{i+1} */
      {
        PetscReal *data = &out_data[clid * nSAvec];
        for (jj = 0; jj < nSAvec; jj++) {
          for (ii = 0; ii < nSAvec; ii++) {
            PetscCheck(data[jj * out_data_stride + ii] == PETSC_MAX_REAL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "data[jj*out_data_stride + ii] != %e", (double)PETSC_MAX_REAL);
            /* R is upper triangular (ii <= jj); zero the strictly lower part */
            if (ii <= jj) data[jj * out_data_stride + ii] = PetscRealPart(qqc[jj * Mdata + ii]);
            else data[jj * out_data_stride + ii] = 0.;
          }
        }
      }

      /* get Q - row oriented */
      PetscCallBLAS("LAPACKorgqr", LAPACKorgqr_(&Mdata, &N, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCheck(INFO == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "xORGQR error arg %" PetscBLASInt_FMT, -INFO);

      /* transpose the first M rows of Q into qqr for row-oriented MatSetValues */
      for (ii = 0; ii < M; ii++) {
        for (jj = 0; jj < N; jj++) qqr[N * ii + jj] = qqc[jj * Mdata + ii];
      }

      /* add diagonal block of P0 */
      for (kk = 0; kk < N; kk++) { cids[kk] = N * cgid + kk; /* global col IDs in P0 */ }
      PetscCall(MatSetValues(a_Prol, M, fids, N, cids, qqr, INSERT_VALUES));
      PetscCall(PetscFree5(qqc, qqr, TAU, WORK, fids));
      clid++;
    } /* coarse agg */
  }   /* for all fine nodes */
  PetscCall(MatAssemblyBegin(a_Prol, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a_Prol, MAT_FINAL_ASSEMBLY));
  PetscCall(PCGAMGHashTableDestroy(&fgid_flid));
  PetscFunctionReturn(0);
}
450 
451 static PetscErrorCode PCView_GAMG_AGG(PC pc, PetscViewer viewer)
452 {
453   PC_MG       *mg          = (PC_MG *)pc->data;
454   PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
455   PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
456 
457   PetscFunctionBegin;
458   PetscCall(PetscViewerASCIIPrintf(viewer, "      AGG specific options\n"));
459   PetscCall(PetscViewerASCIIPrintf(viewer, "        Symmetric graph %s\n", pc_gamg_agg->symmetrize_graph ? "true" : "false"));
460   PetscCall(PetscViewerASCIIPrintf(viewer, "        Number of levels to square graph %d\n", (int)pc_gamg_agg->aggressive_coarsening_levels));
461   PetscCall(PetscViewerASCIIPrintf(viewer, "        Number smoothing steps %d\n", (int)pc_gamg_agg->nsmooths));
462   PetscFunctionReturn(0);
463 }
464 
/*
   PCGAMGGraph_AGG - build the (optionally symmetrized, threshold-filtered) graph of Amat

  Input Parameters:
   . pc - this preconditioner
   . Amat - matrix on this fine level

  Output Parameter:
   . a_Gmat - the graph used for coarsening
*/
475 static PetscErrorCode PCGAMGGraph_AGG(PC pc, Mat Amat, Mat *a_Gmat)
476 {
477   PC_MG                    *mg          = (PC_MG *)pc->data;
478   PC_GAMG                  *pc_gamg     = (PC_GAMG *)mg->innerctx;
479   const PetscReal           vfilter     = pc_gamg->threshold[pc_gamg->current_level];
480   PC_GAMG_AGG              *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
481   Mat                       Gmat, F = NULL;
482   MPI_Comm                  comm;
483   PetscBool /* set,flg , */ symm;
484 
485   PetscFunctionBegin;
486   PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
487 
488   /* PetscCall(MatIsSymmetricKnown(Amat, &set, &flg)); || !(set && flg) -- this causes lot of symm calls */
489   symm = (PetscBool)(pc_gamg_agg->symmetrize_graph); /* && !pc_gamg_agg->square_graph; */
490 
491   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_GRAPH], 0, 0, 0, 0));
492   PetscCall(MatCreateGraph(Amat, symm, PETSC_TRUE, &Gmat));
493   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_GRAPH], 0, 0, 0, 0));
494 
495   PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_FILTER], 0, 0, 0, 0));
496   PetscCall(MatFilter(Gmat, vfilter, &F));
497   if (F) {
498     PetscCall(MatDestroy(&Gmat));
499     Gmat = F;
500   }
501   PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_FILTER], 0, 0, 0, 0));
502 
503   *a_Gmat = Gmat;
504 
505   PetscFunctionReturn(0);
506 }
507 
508 /*
509    PCGAMGCoarsen_AGG - supports squaring the graph (deprecated) and new graph for
510      communication of QR data used with HEM and MISk coarsening
511 
512   Input Parameter:
513    . a_pc - this
514 
515   Input/Output Parameter:
516    . a_Gmat1 - graph to coarsen (in), graph off processor edges for QR gather scatter (out)
517 
518   Output Parameter:
519    . agg_lists - list of aggregates
520 
521 */
static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData **agg_lists)
{
  PC_MG       *mg          = (PC_MG *)a_pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  Mat          mat, Gmat1 = *a_Gmat1; /* aggressive graph */
  IS           perm;
  PetscInt     Istart, Iend, Ii, nloc, bs, nn;
  PetscInt    *permute, *degree;
  PetscBool   *bIndexSet;
  MatCoarsen   crs;
  MPI_Comm     comm;
  PetscReal    hashfact;
  PetscInt     iSwapIndex;
  PetscRandom  random;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
  PetscCall(PetscObjectGetComm((PetscObject)Gmat1, &comm));
  PetscCall(MatGetLocalSize(Gmat1, &nn, NULL));
  PetscCall(MatGetBlockSize(Gmat1, &bs));
  PetscCheck(bs == 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bs %" PetscInt_FMT " must be 1", bs);
  nloc = nn / bs;

  /* get MIS aggs - randomize */
  PetscCall(PetscMalloc2(nloc, &permute, nloc, &degree));
  PetscCall(PetscCalloc1(nloc, &bIndexSet));
  for (Ii = 0; Ii < nloc; Ii++) permute[Ii] = Ii;
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &random));
  PetscCall(MatGetOwnershipRange(Gmat1, &Istart, &Iend));
  /* record the degree (nonzeros per row) of each local vertex */
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscInt nc;
    PetscCall(MatGetRow(Gmat1, Istart + Ii, &nc, NULL, NULL));
    degree[Ii] = nc;
    PetscCall(MatRestoreRow(Gmat1, Istart + Ii, &nc, NULL, NULL));
  }
  /* random pairwise swaps (permute and degree swapped together); bIndexSet prevents a slot being a swap target twice */
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscCall(PetscRandomGetValueReal(random, &hashfact));
    iSwapIndex = (PetscInt)(hashfact * nloc) % nloc;
    if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) {
      PetscInt iTemp        = permute[iSwapIndex];
      permute[iSwapIndex]   = permute[Ii];
      permute[Ii]           = iTemp;
      iTemp                 = degree[iSwapIndex];
      degree[iSwapIndex]    = degree[Ii];
      degree[Ii]            = iTemp;
      bIndexSet[iSwapIndex] = PETSC_TRUE;
    }
  }
  // create minimum degree ordering
  PetscCall(PetscSortIntWithArray(nloc, degree, permute));

  PetscCall(PetscFree(bIndexSet));
  PetscCall(PetscRandomDestroy(&random));
  /* PETSC_USE_POINTER: perm borrows permute, so the IS is destroyed before permute is freed below */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nloc, permute, PETSC_USE_POINTER, &perm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_MIS], 0, 0, 0, 0));
  PetscCall(MatCoarsenCreate(comm, &crs));
  PetscCall(MatCoarsenSetFromOptions(crs));
  PetscCall(MatCoarsenSetGreedyOrdering(crs, perm));
  PetscCall(MatCoarsenSetAdjacency(crs, Gmat1));
  PetscCall(MatCoarsenSetStrictAggs(crs, PETSC_TRUE));
  /* aggressive (MIS-2) coarsening on the first aggressive_coarsening_levels levels, plain MIS after */
  if (pc_gamg->current_level < pc_gamg_agg->aggressive_coarsening_levels) PetscCall(MatCoarsenMISKSetDistance(crs, 2)); // hardwire to MIS-2
  else PetscCall(MatCoarsenMISKSetDistance(crs, 1));                                                                    // MIS
  PetscCall(MatCoarsenApply(crs));
  PetscCall(MatCoarsenViewFromOptions(crs, NULL, "-mat_coarsen_view"));
  PetscCall(MatCoarsenGetData(crs, agg_lists)); /* output */
  PetscCall(MatCoarsenDestroy(&crs));

  PetscCall(ISDestroy(&perm));
  PetscCall(PetscFree2(permute, degree));
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_MIS], 0, 0, 0, 0));

  {
    PetscCoarsenData *llist = *agg_lists;
    /* see if we have a matrix that takes precedence (returned from MatCoarsenApply) */
    PetscCall(PetscCDGetMat(llist, &mat));
    if (mat) {
      PetscCall(MatDestroy(&Gmat1));
      *a_Gmat1 = mat; /* output */
    }
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
  PetscFunctionReturn(0);
}
606 
/*
 PCGAMGProlongator_AGG - form the unsmoothed aggregation prolongator P0

 Input Parameters:
 . pc - this preconditioner
 . Amat - matrix on this fine level
 . Gmat - graph, used to get ghost data for the nodes in the aggregates
 . agg_lists - list of aggregates

 Output Parameter:
 . a_P_out - prolongation operator to the next level (NULL if no coarse points remain)
 */
static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, Mat Gmat, PetscCoarsenData *agg_lists, Mat *a_P_out)
{
  PC_MG         *mg      = (PC_MG *)pc->data;
  PC_GAMG       *pc_gamg = (PC_GAMG *)mg->innerctx;
  const PetscInt col_bs  = pc_gamg->data_cell_cols; /* number of null-space vectors = column block size of P */
  PetscInt       Istart, Iend, nloc, ii, jj, kk, my0, nLocalSelected, bs;
  Mat            Prol;
  PetscMPIInt    size;
  MPI_Comm       comm;
  PetscReal     *data_w_ghost;
  PetscInt       myCrs0, nbnodes = 0, *flid_fgid;
  MatType        mtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
  PetscCheck(col_bs >= 1, comm, PETSC_ERR_PLIB, "Column bs cannot be less than 1");
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(MatGetOwnershipRange(Amat, &Istart, &Iend));
  PetscCall(MatGetBlockSize(Amat, &bs));
  nloc = (Iend - Istart) / bs;
  my0  = Istart / bs;
  PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT ") not divisible by bs %" PetscInt_FMT, Iend, Istart, bs);

  /* get 'nLocalSelected' -- number of locally rooted aggregates = local coarse nodes */
  for (ii = 0, nLocalSelected = 0; ii < nloc; ii++) {
    PetscBool ise;
    /* filter out singletons 0 or 1? */
    PetscCall(PetscCDEmptyAt(agg_lists, ii, &ise));
    if (!ise) nLocalSelected++;
  }

  /* create prolongator, create P matrix */
  PetscCall(MatGetType(Amat, &mtype));
  PetscCall(MatCreate(comm, &Prol));
  PetscCall(MatSetSizes(Prol, nloc * bs, nLocalSelected * col_bs, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetBlockSizes(Prol, bs, col_bs));
  PetscCall(MatSetType(Prol, mtype));
  /* each fine row couples to exactly one coarse block (its aggregate): col_bs nonzeros per row */
  PetscCall(MatSeqAIJSetPreallocation(Prol, col_bs, NULL));
  PetscCall(MatMPIAIJSetPreallocation(Prol, col_bs, NULL, col_bs, NULL));

  /* can get all points "removed" */
  PetscCall(MatGetSize(Prol, &kk, &ii));
  if (!ii) {
    PetscCall(PetscInfo(pc, "%s: No selected points on coarse grid\n", ((PetscObject)pc)->prefix));
    PetscCall(MatDestroy(&Prol));
    *a_P_out = NULL; /* out */
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
    PetscFunctionReturn(0);
  }
  PetscCall(PetscInfo(pc, "%s: New grid %" PetscInt_FMT " nodes\n", ((PetscObject)pc)->prefix, ii / col_bs));
  PetscCall(MatGetOwnershipRangeColumn(Prol, &myCrs0, &kk));

  PetscCheck((kk - myCrs0) % col_bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(kk %" PetscInt_FMT " -myCrs0 %" PetscInt_FMT ") not divisible by col_bs %" PetscInt_FMT, kk, myCrs0, col_bs);
  myCrs0 = myCrs0 / col_bs;
  PetscCheck((kk / col_bs - myCrs0) == nLocalSelected, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(kk %" PetscInt_FMT "/col_bs %" PetscInt_FMT " - myCrs0 %" PetscInt_FMT ") != nLocalSelected %" PetscInt_FMT ")", kk, col_bs, myCrs0, nLocalSelected);

  /* create global vector of data in 'data_w_ghost' */
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0));
  if (size > 1) { /* get ghost null space data */
    PetscReal *tmp_gdata, *tmp_ldata, *tp2;
    PetscCall(PetscMalloc1(nloc, &tmp_ldata));
    /* exchange one (column jj, dof kk) slice at a time; interleave the ghosted result into data_w_ghost */
    for (jj = 0; jj < col_bs; jj++) {
      for (kk = 0; kk < bs; kk++) {
        PetscInt         ii, stride;
        const PetscReal *tp = pc_gamg->data + jj * bs * nloc + kk;
        for (ii = 0; ii < nloc; ii++, tp += bs) tmp_ldata[ii] = *tp;

        PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, tmp_ldata, &stride, &tmp_gdata));

        if (!jj && !kk) { /* now I know how many total nodes - allocate TODO: move below and do in one 'col_bs' call */
          PetscCall(PetscMalloc1(stride * bs * col_bs, &data_w_ghost));
          nbnodes = bs * stride;
        }
        tp2 = data_w_ghost + jj * bs * stride + kk;
        for (ii = 0; ii < stride; ii++, tp2 += bs) *tp2 = tmp_gdata[ii];
        PetscCall(PetscFree(tmp_gdata));
      }
    }
    PetscCall(PetscFree(tmp_ldata));
  } else {
    /* serial: no ghosts, use the fine-grid data directly (not freed below) */
    nbnodes      = bs * nloc;
    data_w_ghost = (PetscReal *)pc_gamg->data;
  }

  /* get 'flid_fgid' TODO - move up to get 'stride' and do get null space data above in one step (jj loop) */
  if (size > 1) {
    PetscReal *fid_glid_loc, *fiddata;
    PetscInt   stride;

    PetscCall(PetscMalloc1(nloc, &fid_glid_loc));
    /* ship global IDs as reals through the same ghost-exchange path, then convert back to ints */
    for (kk = 0; kk < nloc; kk++) fid_glid_loc[kk] = (PetscReal)(my0 + kk);
    PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, fid_glid_loc, &stride, &fiddata));
    PetscCall(PetscMalloc1(stride, &flid_fgid)); /* copy real data to in */
    for (kk = 0; kk < stride; kk++) flid_fgid[kk] = (PetscInt)fiddata[kk];
    PetscCall(PetscFree(fiddata));

    PetscCheck(stride == nbnodes / bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "stride %" PetscInt_FMT " != nbnodes %" PetscInt_FMT "/bs %" PetscInt_FMT, stride, nbnodes, bs);
    PetscCall(PetscFree(fid_glid_loc));
  } else {
    PetscCall(PetscMalloc1(nloc, &flid_fgid));
    for (kk = 0; kk < nloc; kk++) flid_fgid[kk] = my0 + kk;
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0));
  /* get P0 */
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0));
  {
    PetscReal *data_out = NULL;
    /* formProl0 fills Prol and returns the coarse-grid null-space data, which replaces pc_gamg->data */
    PetscCall(formProl0(agg_lists, bs, col_bs, myCrs0, nbnodes, data_w_ghost, flid_fgid, &data_out, Prol));
    PetscCall(PetscFree(pc_gamg->data));

    pc_gamg->data           = data_out;
    pc_gamg->data_cell_rows = col_bs;
    pc_gamg->data_sz        = col_bs * col_bs * nLocalSelected;
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0));
  if (size > 1) PetscCall(PetscFree(data_w_ghost));
  PetscCall(PetscFree(flid_fgid));

  *a_P_out = Prol; /* out */

  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
  PetscFunctionReturn(0);
}
742 
743 /*
744    PCGAMGOptProlongator_AGG
745 
746   Input Parameter:
747    . pc - this
748    . Amat - matrix on this fine level
749  In/Output Parameter:
750    . a_P - prolongation operator to the next level
751 */
static PetscErrorCode PCGAMGOptProlongator_AGG(PC pc, Mat Amat, Mat *a_P)
{
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  PetscInt     jj;
  Mat          Prol = *a_P; /* tentative prolongator; replaced by the smoothed one below */
  MPI_Comm     comm;
  KSP          eksp; /* temporary solver used only for the eigenvalue estimate */
  Vec          bb, xx;
  PC           epc;
  PetscReal    alpha, emax, emin;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0));

  /* compute maximum singular value of operator to be used in smoother */
  if (0 < pc_gamg_agg->nsmooths) {
    /* get eigen estimates: use user-supplied bounds when set, otherwise run a few
       Krylov iterations on a noisy right-hand side and read off the extreme singular values */
    if (pc_gamg->emax > 0) {
      emin = pc_gamg->emin;
      emax = pc_gamg->emax;
    } else {
      const char *prefix;

      PetscCall(MatCreateVecs(Amat, &bb, NULL));
      PetscCall(MatCreateVecs(Amat, &xx, NULL));
      /* fill bb with a "noisy" vector so the Krylov space samples the spectrum broadly */
      PetscCall(KSPSetNoisy_Private(bb));

      PetscCall(KSPCreate(comm, &eksp));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(eksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(eksp, "pc_gamg_esteig_")); /* user-tunable via -<prefix>pc_gamg_esteig_* */
      {
        PetscBool isset, sflg;
        /* use CG for the estimate when the matrix is known to be SPD */
        PetscCall(MatIsSPDKnown(Amat, &isset, &sflg));
        if (isset && sflg) PetscCall(KSPSetType(eksp, KSPCG));
      }
      PetscCall(KSPSetErrorIfNotConverged(eksp, pc->erroriffailure));
      PetscCall(KSPSetNormType(eksp, KSP_NORM_NONE)); /* no convergence test; just run the fixed iteration count */

      PetscCall(KSPSetInitialGuessNonzero(eksp, PETSC_FALSE));
      PetscCall(KSPSetOperators(eksp, Amat, Amat));

      PetscCall(KSPGetPC(eksp, &epc));
      PetscCall(PCSetType(epc, PCJACOBI)); /* smoother in smoothed agg.: we want the spectrum of D^{-1}A */

      PetscCall(KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10)); // 10 is safer, but 5 is often fine, can override with -pc_gamg_esteig_ksp_max_it -mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.2

      PetscCall(KSPSetFromOptions(eksp)); /* last, so command-line options override all settings above */
      PetscCall(KSPSetComputeSingularValues(eksp, PETSC_TRUE));
      PetscCall(KSPSolve(eksp, bb, xx));
      PetscCall(KSPCheckSolve(eksp, pc, xx));

      PetscCall(KSPComputeExtremeSingularValues(eksp, &emax, &emin));
      PetscCall(PetscInfo(pc, "%s: Smooth P0: max eigen=%e min=%e PC=%s\n", ((PetscObject)pc)->prefix, (double)emax, (double)emin, PCJACOBI));
      PetscCall(VecDestroy(&xx));
      PetscCall(VecDestroy(&bb));
      PetscCall(KSPDestroy(&eksp));
    }
    if (pc_gamg->use_sa_esteig) {
      /* cache the spectrum bounds so this level's smoother (e.g. Chebyshev) can reuse them */
      mg->min_eigen_DinvA[pc_gamg->current_level] = emin;
      mg->max_eigen_DinvA[pc_gamg->current_level] = emax;
      PetscCall(PetscInfo(pc, "%s: Smooth P0: level %" PetscInt_FMT ", cache spectra %g %g\n", ((PetscObject)pc)->prefix, pc_gamg->current_level, (double)emin, (double)emax));
    } else {
      /* estimate computed but reuse disabled: leave the cache zeroed */
      mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
      mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
    }
  } else {
    /* no prolongator smoothing requested: nothing to estimate or cache */
    mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
    mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
  }

  /* smooth P0: apply nsmooths damped-Jacobi passes to the tentative prolongator */
  for (jj = 0; jj < pc_gamg_agg->nsmooths; jj++) {
    Mat tMat;
    Vec diag;

    PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0));

    /* smooth P1 := (I - omega/lam D^{-1}A)P0 */
    PetscCall(PetscLogEventBegin(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0));
    PetscCall(MatMatMult(Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat)); /* tMat = A P0 */
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0));
    PetscCall(MatProductClear(tMat)); /* detach product state so tMat can be modified in place below */
    PetscCall(MatCreateVecs(Amat, &diag, NULL));
    PetscCall(MatGetDiagonal(Amat, diag)); /* effectively PCJACOBI */
    PetscCall(VecReciprocal(diag));
    PetscCall(MatDiagonalScale(tMat, diag, NULL)); /* tMat = D^{-1} A P0 */
    PetscCall(VecDestroy(&diag));

    /* TODO: Set a PCFailedReason and exit the building of the AMG preconditioner */
    PetscCheck(emax != 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Computed maximum singular value as zero");
    /* TODO: Document the 1.4 and don't hardwire it in this routine */
    alpha = -1.4 / emax; /* -omega/lam with hardwired damping omega = 1.4 */

    /* MatAYPX: tMat = alpha*tMat + Prol = (I - (1.4/emax) D^{-1}A) P0 */
    PetscCall(MatAYPX(tMat, alpha, Prol, SUBSET_NONZERO_PATTERN));
    PetscCall(MatDestroy(&Prol));
    Prol = tMat; /* smoothed operator becomes the prolongator for the next pass / output */
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0));
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0));
  *a_P = Prol;
  PetscFunctionReturn(0);
}
858 
859 /*
860    PCCreateGAMG_AGG
861 
862   Input Parameter:
863    . pc -
864 */
865 PetscErrorCode PCCreateGAMG_AGG(PC pc)
866 {
867   PC_MG       *mg      = (PC_MG *)pc->data;
868   PC_GAMG     *pc_gamg = (PC_GAMG *)mg->innerctx;
869   PC_GAMG_AGG *pc_gamg_agg;
870 
871   PetscFunctionBegin;
872   /* create sub context for SA */
873   PetscCall(PetscNew(&pc_gamg_agg));
874   pc_gamg->subctx = pc_gamg_agg;
875 
876   pc_gamg->ops->setfromoptions = PCSetFromOptions_GAMG_AGG;
877   pc_gamg->ops->destroy        = PCDestroy_GAMG_AGG;
878   /* reset does not do anything; setup not virtual */
879 
880   /* set internal function pointers */
881   pc_gamg->ops->graph             = PCGAMGGraph_AGG;
882   pc_gamg->ops->coarsen           = PCGAMGCoarsen_AGG;
883   pc_gamg->ops->prolongator       = PCGAMGProlongator_AGG;
884   pc_gamg->ops->optprolongator    = PCGAMGOptProlongator_AGG;
885   pc_gamg->ops->createdefaultdata = PCSetData_AGG;
886   pc_gamg->ops->view              = PCView_GAMG_AGG;
887 
888   pc_gamg_agg->aggressive_coarsening_levels = 0;
889   pc_gamg_agg->symmetrize_graph             = PETSC_FALSE;
890   pc_gamg_agg->nsmooths                     = 1;
891 
892   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", PCGAMGSetNSmooths_AGG));
893   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetSymmetrizeGraph_C", PCGAMGSetSymmetrizeGraph_AGG));
894   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", PCGAMGSetAggressiveLevels_AGG));
895   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_AGG));
896   PetscFunctionReturn(0);
897 }
898