xref: /petsc/src/ksp/pc/impls/mg/mg.c (revision efba34859514f7b59f26d05af8927335bdcc93cf)
1 /*
2     Defines the multigrid preconditioner interface.
3 */
4 #include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/
5 #include <petsc/private/kspimpl.h>
6 #include <petscdm.h>
7 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);
8 
9 /*
10    Contains the list of registered coarse space construction routines
11 */
12 PetscFunctionList PCMGCoarseList = NULL;
13 
/*
  PCMGMCycle_Private - Applies one multiplicative multigrid cycle (V- or W-cycle) starting
  at the level given by *mglevelsin and recursing down to the coarsest level.

  Input Parameters:
+   pc         - the PCMG preconditioner context
.   mglevelsin - pointer into the array of level structures, positioned at the current level;
                 *(mglevelsin - 1) is the next coarser level
.   transpose  - if PETSC_TRUE, apply the transpose of the cycle (smoothers solve transposed
                 systems and the roles of the restriction/interpolation operators are swapped)
.   matapp     - if PETSC_TRUE, operate on blocks of right-hand sides stored as dense matrices
                 (the B/X/R fields) instead of single vectors (b/x/r); not supported together
                 with transpose
-   reason     - optional; on the finest level, when convergence tolerances were set up by
                 PCApplyRichardson_MG() (mg->ttol nonzero), receives the convergence reason

  Note:
  The right-hand side (b or B) of the current level must already be filled in; the solution
  (x or X) of the current level is updated in place.
*/
PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason)
{
  PC_MG        *mg = (PC_MG *)pc->data;
  PC_MG_Levels *mgc, *mglevels = *mglevelsin;
  /* from level 1 only one recursion is needed even for a W-cycle, since level 0 is the coarse solve */
  PetscInt      cycles = (mglevels->level == 1) ? 1 : mglevels->cycles;

  PetscFunctionBegin;
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (!transpose) {
    if (matapp) {
      PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
    } else {
      PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
  } else {
    PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
    PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
    PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
  }
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (mglevels->level) { /* not the coarsest grid */
    if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
    /* lazily create the block residual workspace the first time it is needed */
    if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
    if (!transpose) {
      if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    } else {
      if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    }
    if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));

    /* if on finest level and have convergence criteria set */
    if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
      PetscReal rnorm;

      PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
      if (rnorm <= mg->ttol) {
        if (rnorm < mg->abstol) {
          *reason = PCRICHARDSON_CONVERGED_ATOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
        } else {
          *reason = PCRICHARDSON_CONVERGED_RTOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
        }
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }

    mgc = *(mglevelsin - 1);
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
    } else {
      /* transpose cycle: restrict the residual with the (transposed) interpolation operator */
      if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    /* zero the initial guess on the coarser level before recursing */
    if (matapp) {
      if (!mgc->X) {
        PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
      } else {
        PetscCall(MatZeroEntries(mgc->X));
      }
    } else {
      PetscCall(VecZeroEntries(mgc->x));
    }
    while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
      else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
    } else {
      /* transpose cycle: interpolate the correction with the (transposed) restriction operator */
      PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) {
        PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
      } else {
        PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
      }
    } else {
      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
    if (mglevels->cr) {
      Mat crA;

      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      /* TODO Turn on copy and turn off noisy if we have an exact solution
      PetscCall(VecCopy(mglevels->x, mglevels->crx));
      PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
      PetscCall(KSPGetOperators(mglevels->cr, &crA, NULL));
      PetscCall(KSPSetNoisy_Private(crA, mglevels->crx));
      PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
      PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
    }
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
123 
/*
  PCApplyRichardson_MG - Implements the Richardson iteration for PCMG by running full
  multiplicative multigrid cycles until convergence or `its` iterations have been taken.

  The caller's b and x are lent directly to the finest level (no copy is made) unless a
  finest-level smoother's PC reports via PCPreSolveChangeRHS() that it modifies the
  right-hand side during PreSolve, in which case b is copied into a private work vector.
  The borrowed reference to b is dropped again before returning.

  Output Parameters:
+   outits - number of cycles actually taken (the loop index at exit)
-   reason - why the iteration terminated (ATOL/RTOL from the cycle, or CONVERGED_ITS)
*/
static PetscErrorCode PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscBool      changeu, changed;
  PetscInt       levels = mglevels[0]->levels, i;

  PetscFunctionBegin;
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* does either finest-level smoother's PC change the right-hand side during PreSolve? */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changed && !changeu) {
    /* safe to borrow the caller's b directly; the alias is cleared before returning */
    PetscCall(VecDestroy(&mglevels[levels - 1]->b));
    mglevels[levels - 1]->b = b;
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (!mglevels[levels - 1]->b) {
      Vec *vec;

      PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
      mglevels[levels - 1]->b = *vec;
      PetscCall(PetscFree(vec));
    }
    PetscCall(VecCopy(b, mglevels[levels - 1]->b));
  }
  mglevels[levels - 1]->x = x;

  mg->rtol   = rtol;
  mg->abstol = abstol;
  mg->dtol   = dtol;
  if (rtol) {
    /* compute initial residual norm for relative convergence test */
    PetscReal rnorm;

    if (zeroguess) {
      PetscCall(VecNorm(b, NORM_2, &rnorm));
    } else {
      PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
      PetscCall(VecNorm(w, NORM_2, &rnorm));
    }
    mg->ttol = PetscMax(rtol * rnorm, abstol);
  } else if (abstol) mg->ttol = abstol;
  else mg->ttol = 0.0;

  /* since smoother is applied to full system, not just residual we need to make sure that smoothers don't
     stop prematurely due to small residual */
  for (i = 1; i < levels; i++) {
    PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
    if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
      /* For Richardson the initial guess is nonzero since it is solving in each cycle the original system not just applying as a preconditioner */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
      PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT));
    }
  }

  *reason = PCRICHARDSON_NOT_SET;
  for (i = 0; i < its; i++) {
    PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
    if (*reason) break;
  }
  if (*reason == PCRICHARDSON_NOT_SET) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = i;
  /* stop aliasing the caller's b so it is not destroyed by PCReset_MG */
  if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
198 
199 PetscErrorCode PCReset_MG(PC pc)
200 {
201   PC_MG         *mg       = (PC_MG *)pc->data;
202   PC_MG_Levels **mglevels = mg->levels;
203   PetscInt       i, n;
204 
205   PetscFunctionBegin;
206   if (mglevels) {
207     n = mglevels[0]->levels;
208     for (i = 0; i < n - 1; i++) {
209       PetscCall(VecDestroy(&mglevels[i + 1]->r));
210       PetscCall(VecDestroy(&mglevels[i]->b));
211       PetscCall(VecDestroy(&mglevels[i]->x));
212       PetscCall(MatDestroy(&mglevels[i + 1]->R));
213       PetscCall(MatDestroy(&mglevels[i]->B));
214       PetscCall(MatDestroy(&mglevels[i]->X));
215       PetscCall(VecDestroy(&mglevels[i]->crx));
216       PetscCall(VecDestroy(&mglevels[i]->crb));
217       PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
218       PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
219       PetscCall(MatDestroy(&mglevels[i + 1]->inject));
220       PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
221     }
222     PetscCall(VecDestroy(&mglevels[n - 1]->crx));
223     PetscCall(VecDestroy(&mglevels[n - 1]->crb));
224     /* this is not null only if the smoother on the finest level
225        changes the rhs during PreSolve */
226     PetscCall(VecDestroy(&mglevels[n - 1]->b));
227     PetscCall(MatDestroy(&mglevels[n - 1]->B));
228 
229     for (i = 0; i < n; i++) {
230       PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
231       PetscCall(MatDestroy(&mglevels[i]->A));
232       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
233       PetscCall(KSPReset(mglevels[i]->smoothu));
234       if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
235     }
236     mg->Nc = 0;
237   }
238   PetscFunctionReturn(PETSC_SUCCESS);
239 }
240 
241 /* Implementing CR
242 
243 We only want to make corrections that ``do not change'' the coarse solution. What we mean by not changing is that if I prolong my coarse solution to the fine grid and then inject that fine solution back to the coarse grid, I get the same answer. Injection is what Brannick calls R. We want the complementary projector to Inj, which we will call S, after Brannick, so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is
244 
245   Inj^T (Inj Inj^T)^{-1} Inj
246 
247 and if Inj is a VecScatter, as it is now in PETSc, we have
248 
249   Inj^T Inj
250 
251 and
252 
253   S = I - Inj^T Inj
254 
255 since
256 
257   Inj S = Inj - (Inj Inj^T) Inj = 0.
258 
259 Brannick suggests
260 
261   A \to S^T A S  \qquad\mathrm{and}\qquad M \to S^T M S
262 
263 but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so :math:`S^T S = S^2 = S`. We will use
264 
265   M^{-1} A \to S M^{-1} A S
266 
267 In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just apply S on the left.
268 
269   Check: || Inj P - I ||_F < tol
270   Check: In general, Inj Inj^T = I
271 */
272 
/* Context for the compatible-relaxation (CR) shell preconditioner: holds the injection
   operator for level l of the PCMG object and the complementary projector S built from it
   (see CRSetup_Private). */
typedef struct {
  PC       mg;  /* The PCMG object */
  PetscInt l;   /* The multigrid level for this solver */
  Mat      Inj; /* The injection matrix */
  Mat      S;   /* I - Inj^T Inj */
} CRContext;
279 
280 static PetscErrorCode CRSetup_Private(PC pc)
281 {
282   CRContext *ctx;
283   Mat        It;
284 
285   PetscFunctionBeginUser;
286   PetscCall(PCShellGetContext(pc, &ctx));
287   PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
288   PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
289   PetscCall(MatCreateTranspose(It, &ctx->Inj));
290   PetscCall(MatCreateNormal(ctx->Inj, &ctx->S));
291   PetscCall(MatScale(ctx->S, -1.0));
292   PetscCall(MatShift(ctx->S, 1.0));
293   PetscFunctionReturn(PETSC_SUCCESS);
294 }
295 
296 static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y)
297 {
298   CRContext *ctx;
299 
300   PetscFunctionBeginUser;
301   PetscCall(PCShellGetContext(pc, &ctx));
302   PetscCall(MatMult(ctx->S, x, y));
303   PetscFunctionReturn(PETSC_SUCCESS);
304 }
305 
306 static PetscErrorCode CRDestroy_Private(PC pc)
307 {
308   CRContext *ctx;
309 
310   PetscFunctionBeginUser;
311   PetscCall(PCShellGetContext(pc, &ctx));
312   PetscCall(MatDestroy(&ctx->Inj));
313   PetscCall(MatDestroy(&ctx->S));
314   PetscCall(PetscFree(ctx));
315   PetscCall(PCShellSetContext(pc, NULL));
316   PetscFunctionReturn(PETSC_SUCCESS);
317 }
318 
319 static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr)
320 {
321   CRContext *ctx;
322 
323   PetscFunctionBeginUser;
324   PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
325   PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
326   PetscCall(PetscCalloc1(1, &ctx));
327   ctx->mg = pc;
328   ctx->l  = l;
329   PetscCall(PCSetType(*cr, PCSHELL));
330   PetscCall(PCShellSetContext(*cr, ctx));
331   PetscCall(PCShellSetApply(*cr, CRApply_Private));
332   PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
333   PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
334   PetscFunctionReturn(PETSC_SUCCESS);
335 }
336 
337 PETSC_EXTERN PetscErrorCode PetscOptionsFindPairPrefix_Private(PetscOptions, const char[], const char[], const char *[], const char *[], PetscBool *);
338 
/*
  PCMGSetLevels_MG - Implementation of PCMGSetLevels() for PCMG: (re)allocates the array of
  per-level structures and creates a default smoother KSP on each level.

  Defaults established here: with more than one level, level 0 gets KSPPREONLY with
  (redundant) LU and options prefix "mg_coarse_"; every other level gets Chebyshev + SOR
  with prefix "mg_levels_<i>_" (or "mg_fine_" on the finest level when such options were
  supplied). If comms is non-NULL it provides one communicator per level; a rank passing
  MPI_COMM_NULL for a level creates no smoother there.
*/
PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms)
{
  PC_MG         *mg = (PC_MG *)pc->data;
  MPI_Comm       comm;
  PC_MG_Levels **mglevels = mg->levels;
  PCMGType       mgtype   = mg->am;
  PetscInt       mgctype  = (PetscInt)PC_MG_CYCLE_V;
  PetscInt       i;
  PetscMPIInt    size;
  const char    *prefix;
  PC             ipc;
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, levels, 2);
  if (mg->nlevels == levels) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (mglevels) {
    mgctype = mglevels[0]->cycles; /* preserve the cycle type across the rebuild */
    /* changing the number of levels so free up the previous stuff */
    PetscCall(PCReset_MG(pc));
    n = mglevels[0]->levels;
    for (i = 0; i < n; i++) {
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
      PetscCall(KSPDestroy(&mglevels[i]->smoothu));
      PetscCall(KSPDestroy(&mglevels[i]->cr));
      PetscCall(PetscFree(mglevels[i]));
    }
    PetscCall(PetscFree(mg->levels));
  }

  mg->nlevels = levels;

  PetscCall(PetscMalloc1(levels, &mglevels));

  PetscCall(PCGetOptionsPrefix(pc, &prefix));

  mg->stageApply = 0;
  for (i = 0; i < levels; i++) {
    PetscCall(PetscNew(&mglevels[i]));

    mglevels[i]->level               = i;
    mglevels[i]->levels              = levels;
    mglevels[i]->cycles              = mgctype;
    mg->default_smoothu              = 2;
    mg->default_smoothd              = 2;
    mglevels[i]->eventsmoothsetup    = 0;
    mglevels[i]->eventsmoothsolve    = 0;
    mglevels[i]->eventresidual       = 0;
    mglevels[i]->eventinterprestrict = 0;

    if (comms) comm = comms[i];
    /* a rank that passes MPI_COMM_NULL for this level does not participate in it */
    if (comm != MPI_COMM_NULL) {
      PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
      PetscCall(KSPSetNestLevel(mglevels[i]->smoothd, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
      if (i == 0 && levels > 1) { // coarse grid
        PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));

        /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */
        PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
        PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
        PetscCallMPI(MPI_Comm_size(comm, &size));
        if (size > 1) {
          PetscCall(PCSetType(ipc, PCREDUNDANT));
        } else {
          PetscCall(PCSetType(ipc, PCLU));
        }
        PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
      } else {
        char tprefix[128];

        PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
        PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
        PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
        PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
        PetscCall(PCSetType(ipc, PCSOR));
        PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));

        if (i == levels - 1 && levels > 1) { // finest level: use 'mg_fine_' prefix if such options were given, otherwise 'mg_levels_<i>_'
          PetscBool set;

          PetscCall(PetscOptionsFindPairPrefix_Private(((PetscObject)mglevels[i]->smoothd)->options, ((PetscObject)mglevels[i]->smoothd)->prefix, "-mg_fine_", NULL, NULL, &set));
          if (set) {
            if (prefix) PetscCall(PetscSNPrintf(tprefix, 128, "%smg_fine_", prefix));
            else PetscCall(PetscSNPrintf(tprefix, 128, "mg_fine_"));
            PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, tprefix));
          } else {
            PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
            PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
          }
        } else {
          PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%" PetscInt_FMT "_", i));
          PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
        }
      }
    }
    mglevels[i]->smoothu = mglevels[i]->smoothd; /* smoothu aliases smoothd until PCMGSetDistinctSmoothUp() is used */
    mg->rtol             = 0.0;
    mg->abstol           = 0.0;
    mg->dtol             = 0.0;
    mg->ttol             = 0.0;
    mg->cyclesperpcapply = 1;
  }
  mg->levels = mglevels;
  PetscCall(PCMGSetType(pc, mgtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
451 
452 /*@C
453   PCMGSetLevels - Sets the number of levels to use with `PCMG`.
454   Must be called before any other `PCMG` routine.
455 
456   Logically Collective
457 
458   Input Parameters:
459 + pc     - the preconditioner context
460 . levels - the number of levels
461 - comms  - optional communicators for each level; this is to allow solving the coarser problems
462            on smaller sets of processes. For processes that are not included in the computation
463            you must pass `MPI_COMM_NULL`. Use comms = `NULL` to specify that all processes
464            should participate in each level of problem.
465 
466   Options Database Key:
467 . -pc_mg_levels <levels> - set the number of levels to use
468 
469   Level: intermediate
470 
471   Notes:
472   If the number of levels is one then the multigrid uses the `-mg_levels` prefix
473   for setting the level options rather than the `-mg_coarse` or `-mg_fine` prefix.
474 
475   You can free the information in comms after this routine is called.
476 
477   The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level
478   are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on
479   the two levels, rank 0 in the original communicator will pass in an array of 2 communicators
480 of size 2 and 1, while rank 1 in the original communicator will pass in an array of 2 communicators,
481 the first of size 2 and the second of value `MPI_COMM_NULL`, since rank 1 does not participate
482   in the coarse grid solve.
483 
484   Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one
485   must take special care in providing the restriction and interpolation operation. We recommend
486   providing these as two step operations; first perform a standard restriction or interpolation on
487   the full number of ranks for that level and then use an MPI call to copy the resulting vector
488   array entries (after calls to VecGetArray()) to the smaller or larger number of ranks, note in both
489   cases the MPI calls must be made on the larger of the two communicators. Traditional MPI send and
490   receives or `MPI_AlltoAllv()` could be used to do the reshuffling of the vector entries.
491 
492   Fortran Notes:
493   Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of `NULL` in the C interface. Note `PETSC_NULL_MPI_COMM`
494   is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc.
495 
496 .seealso: [](ch_ksp), `PCMGSetType()`, `PCMGGetLevels()`
497 @*/
PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (comms) PetscAssertPointer(comms, 3);
  /* dispatch to the implementation composed on pc (PCMGSetLevels_MG for PCMG);
     a no-op if pc is not a multigrid type */
  PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms));
  PetscFunctionReturn(PETSC_SUCCESS);
}
506 
507 PetscErrorCode PCDestroy_MG(PC pc)
508 {
509   PC_MG         *mg       = (PC_MG *)pc->data;
510   PC_MG_Levels **mglevels = mg->levels;
511   PetscInt       i, n;
512 
513   PetscFunctionBegin;
514   PetscCall(PCReset_MG(pc));
515   if (mglevels) {
516     n = mglevels[0]->levels;
517     for (i = 0; i < n; i++) {
518       if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
519       PetscCall(KSPDestroy(&mglevels[i]->smoothu));
520       PetscCall(KSPDestroy(&mglevels[i]->cr));
521       PetscCall(PetscFree(mglevels[i]));
522     }
523     PetscCall(PetscFree(mg->levels));
524   }
525   PetscCall(PetscFree(pc->data));
526   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
527   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
528   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL));
529   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetReusePreconditioner_C", NULL));
530   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL));
531   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL));
532   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
533   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
534   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", NULL));
535   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL));
536   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL));
537   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL));
538   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL));
539   PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL));
540   PetscFunctionReturn(PETSC_SUCCESS);
541 }
542 
543 /*
544    PCApply_MG - Runs either an additive, multiplicative, Kaskadic
545              or full cycle of multigrid.
546 
547   Note:
548   A simple wrapper which calls PCMGMCycle(),PCMGACycle(), or PCMGFCycle().
549 */
/*
  PCApply_MG_Internal - Common driver for PCApply_MG, PCApplyTranspose_MG, PCMatApply_MG,
  and PCMatApplyTranspose_MG. Exactly one of (b,x) (single right-hand side as vectors) or
  (B,X) (a block of right-hand sides as dense matrices) must be supplied. Lends the input
  right-hand side and solution to the finest level, then runs the cycle type selected by
  mg->am (multiplicative, additive, Kaskade, or full).
*/
static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscInt       levels = mglevels[0]->levels, i;
  PetscBool      changeu, changed, matapp;

  PetscFunctionBegin;
  matapp = (PetscBool)(B && X);
  if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply));
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* does either finest-level smoother's PC change the right-hand side during PreSolve? */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changeu && !changed) {
    /* borrow the caller's right-hand side directly; the alias is cleared before returning */
    if (matapp) {
      PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      mglevels[levels - 1]->B = B;
    } else {
      PetscCall(VecDestroy(&mglevels[levels - 1]->b));
      mglevels[levels - 1]->b = b;
    }
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (matapp) {
      /* a previously cached B with a different column count or type cannot be reused */
      if (mglevels[levels - 1]->B) {
        PetscInt  N1, N2;
        PetscBool flg;

        PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
        PetscCall(MatGetSize(B, NULL, &N2));
        PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
        if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      }
      if (!mglevels[levels - 1]->B) {
        PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
      } else {
        PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
      }
    } else {
      if (!mglevels[levels - 1]->b) {
        Vec *vec;

        PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
        mglevels[levels - 1]->b = *vec;
        PetscCall(PetscFree(vec));
      }
      PetscCall(VecCopy(b, mglevels[levels - 1]->b));
    }
  }
  if (matapp) {
    mglevels[levels - 1]->X = X;
  } else {
    mglevels[levels - 1]->x = x;
  }

  /* If coarser Xs are present, it means we have already block applied the PC at least once
     Reset operators if sizes/type do no match */
  if (matapp && levels > 1 && mglevels[levels - 2]->X) {
    PetscInt  Xc, Bc;
    PetscBool flg;

    PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
    PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
    PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
    if (Xc != Bc || !flg) {
      PetscCall(MatDestroy(&mglevels[levels - 1]->R));
      for (i = 0; i < levels - 1; i++) {
        PetscCall(MatDestroy(&mglevels[i]->R));
        PetscCall(MatDestroy(&mglevels[i]->B));
        PetscCall(MatDestroy(&mglevels[i]->X));
      }
    }
  }

  /* run the cycle type selected for this PCMG */
  if (mg->am == PC_MG_MULTIPLICATIVE) {
    if (matapp) PetscCall(MatZeroEntries(X));
    else PetscCall(VecZeroEntries(x));
    for (i = 0; i < mg->cyclesperpcapply; i++) PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL));
  } else if (mg->am == PC_MG_ADDITIVE) {
    PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp));
  } else if (mg->am == PC_MG_KASKADE) {
    PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp));
  } else {
    PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp));
  }
  if (mg->stageApply) PetscCall(PetscLogStagePop());
  /* stop aliasing the caller's right-hand side */
  if (!changeu && !changed) {
    if (matapp) {
      mglevels[levels - 1]->B = NULL;
    } else {
      mglevels[levels - 1]->b = NULL;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
654 
/* PCApply_MG - applies the multigrid preconditioner to a single right-hand side vector */
static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
661 
/* PCApplyTranspose_MG - applies the transpose of the multigrid preconditioner to a vector */
static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
668 
/* PCMatApply_MG - applies the multigrid preconditioner to a block of right-hand sides
   stored as the columns of a dense matrix */
static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
675 
/* PCMatApplyTranspose_MG - applies the transpose of the multigrid preconditioner to a
   block of right-hand sides stored as the columns of a dense matrix */
static PetscErrorCode PCMatApplyTranspose_MG(PC pc, Mat b, Mat x)
{
  PetscFunctionBegin;
  PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
682 
/*
    PCSetFromOptions_MG - Processes the -pc_mg_* options database entries for a PCMG
    preconditioner: number of levels, cycle type, Galerkin mode, adaptive coarse
    spaces, per-level logging events, and related settings.
*/
PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems PetscOptionsObject)
{
  PetscInt            levels, cycles;
  PetscBool           flg, flg2;
  PC_MG              *mg = (PC_MG *)pc->data;
  PC_MG_Levels      **mglevels;
  PCMGType            mgtype;
  PCMGCycleType       mgctype;
  PCMGGalerkinType    gtype;
  PCMGCoarseSpaceType coarseSpaceType;

  PetscFunctionBegin;
  levels = PetscMax(mg->nlevels, 1);
  PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options");
  PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg));
  /* if the user gave no level count and a DM is attached, derive the number of levels from the DM refinement level */
  if (!flg && !mg->levels && pc->dm) {
    PetscCall(DMGetRefineLevel(pc->dm, &levels));
    levels++;
    mg->usedmfornumberoflevels = PETSC_TRUE;
  }
  PetscCall(PCMGSetLevels(pc, levels, NULL));
  mglevels = mg->levels; /* PCMGSetLevels() may have (re)allocated the level array */

  mgctype = (PCMGCycleType)mglevels[0]->cycles;
  PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg));
  if (flg) PetscCall(PCMGSetCycleType(pc, mgctype));
  coarseSpaceType = mg->coarseSpaceType;
  PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg));
  if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType));
  PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg));
  PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg));
  flg2 = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg));
  if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2));
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL));
  if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc));
  PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)mg->galerkin, (PetscEnum *)&gtype, &flg));
  if (flg) PetscCall(PCMGSetGalerkin(pc, gtype));
  mgtype = mg->am;
  PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg));
  if (flg) PetscCall(PCMGSetType(pc, mgtype));
  /* multiple cycles per application only make sense for the multiplicative form */
  if (mg->am == PC_MG_MULTIPLICATIVE) {
    PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg));
    if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles));
  }
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL));
  if (flg) {
    /* register per-level logging events: setup/smooth on every level, residual/interp only on non-coarsest levels */
    PetscInt i;
    char     eventname[128];

    levels = mglevels[0]->levels;
    for (i = 0; i < levels; i++) {
      PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSetup Level %" PetscInt_FMT, i));
      PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup));
      PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGSmooth Level %" PetscInt_FMT, i));
      PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve));
      if (i) {
        PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGResid Level %" PetscInt_FMT, i));
        PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual));
        PetscCall(PetscSNPrintf(eventname, PETSC_STATIC_ARRAY_LENGTH(eventname), "MGInterp Level %" PetscInt_FMT, i));
        PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict));
      }
    }

    if (PetscDefined(USE_LOG)) {
      const char sname[] = "MG Apply";

      /* reuse the stage if it was registered on an earlier call; register it otherwise */
      PetscCall(PetscLogStageGetId(sname, &mg->stageApply));
      if (mg->stageApply < 0) PetscCall(PetscLogStageRegister(sname, &mg->stageApply));
    }
  }
  PetscOptionsHeadEnd();
  /* Check option consistency */
  PetscCall(PCMGGetGalerkin(pc, &gtype));
  PetscCall(PCMGGetAdaptInterpolation(pc, &flg));
  /* adaptive interpolation needs Galerkin-built coarse operators (gtype >= PC_MG_GALERKIN_NONE means Galerkin is off or external) */
  PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the interpolator");
  PetscFunctionReturn(PETSC_SUCCESS);
}
763 
/*
   Human-readable names for the PCMG enumerations, used by the options/viewer code above
   (e.g. PetscOptionsEnum() and PCView_MG()). Each list ends with the enum type name,
   a common prefix, and a NULL sentinel, following the PetscEnum string-array convention.
   NOTE(review): the entry order must stay in sync with the corresponding enum
   declarations in the PETSc headers -- confirm before editing.
*/
const char *const PCMGTypes[]            = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL};
const char *const PCMGCycleTypes[]       = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL};
const char *const PCMGGalerkinTypes[]    = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL};
const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL};
768 
769 #include <petscdraw.h>
770 PetscErrorCode PCView_MG(PC pc, PetscViewer viewer)
771 {
772   PC_MG         *mg       = (PC_MG *)pc->data;
773   PC_MG_Levels **mglevels = mg->levels;
774   PetscInt       levels   = mglevels ? mglevels[0]->levels : 0, i;
775   PetscBool      isascii, isbinary, isdraw;
776 
777   PetscFunctionBegin;
778   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
779   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
780   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
781   if (isascii) {
782     const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? "v" : "w") : "unknown";
783     PetscCall(PetscViewerASCIIPrintf(viewer, "  type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename));
784     if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, "    Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply));
785     if (mg->galerkin == PC_MG_GALERKIN_BOTH) {
786       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices\n"));
787     } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) {
788       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices for pmat\n"));
789     } else if (mg->galerkin == PC_MG_GALERKIN_MAT) {
790       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using Galerkin computed coarse grid matrices for mat\n"));
791     } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) {
792       PetscCall(PetscViewerASCIIPrintf(viewer, "    Using externally compute Galerkin coarse grid matrices\n"));
793     } else {
794       PetscCall(PetscViewerASCIIPrintf(viewer, "    Not using Galerkin computed coarse grid matrices\n"));
795     }
796     if (mg->view) PetscCall((*mg->view)(pc, viewer));
797     for (i = 0; i < levels; i++) {
798       if (i) {
799         PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
800       } else {
801         PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i));
802       }
803       PetscCall(PetscViewerASCIIPushTab(viewer));
804       PetscCall(KSPView(mglevels[i]->smoothd, viewer));
805       PetscCall(PetscViewerASCIIPopTab(viewer));
806       if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) {
807         PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n"));
808       } else if (i) {
809         PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i));
810         PetscCall(PetscViewerASCIIPushTab(viewer));
811         PetscCall(KSPView(mglevels[i]->smoothu, viewer));
812         PetscCall(PetscViewerASCIIPopTab(viewer));
813       }
814       if (i && mglevels[i]->cr) {
815         PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i));
816         PetscCall(PetscViewerASCIIPushTab(viewer));
817         PetscCall(KSPView(mglevels[i]->cr, viewer));
818         PetscCall(PetscViewerASCIIPopTab(viewer));
819       }
820     }
821   } else if (isbinary) {
822     for (i = levels - 1; i >= 0; i--) {
823       PetscCall(KSPView(mglevels[i]->smoothd, viewer));
824       if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer));
825     }
826   } else if (isdraw) {
827     PetscDraw draw;
828     PetscReal x, w, y, bottom, th;
829     PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
830     PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y));
831     PetscCall(PetscDrawStringGetSize(draw, NULL, &th));
832     bottom = y - th;
833     for (i = levels - 1; i >= 0; i--) {
834       if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) {
835         PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom));
836         PetscCall(KSPView(mglevels[i]->smoothd, viewer));
837         PetscCall(PetscDrawPopCurrentPoint(draw));
838       } else {
839         w = 0.5 * PetscMin(1.0 - x, x);
840         PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom));
841         PetscCall(KSPView(mglevels[i]->smoothd, viewer));
842         PetscCall(PetscDrawPopCurrentPoint(draw));
843         PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom));
844         PetscCall(KSPView(mglevels[i]->smoothu, viewer));
845         PetscCall(PetscDrawPopCurrentPoint(draw));
846       }
847       PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL));
848       bottom -= th;
849     }
850   }
851   PetscFunctionReturn(PETSC_SUCCESS);
852 }
853 
854 #include <petsc/private/kspimpl.h>
855 
856 /*
857     Calls setup for the KSP on each level
858 */
859 PetscErrorCode PCSetUp_MG(PC pc)
860 {
861   PC_MG         *mg       = (PC_MG *)pc->data;
862   PC_MG_Levels **mglevels = mg->levels;
863   PetscInt       i, n;
864   PC             cpc;
865   PetscBool      dump = PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE;
866   Mat            dA, dB;
867   Vec            tvec;
868   DM            *dms;
869   PetscViewer    viewer = NULL;
870   PetscBool      dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE;
871   PetscBool      adaptInterpolation = mg->adaptInterpolation;
872 
873   PetscFunctionBegin;
874   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up");
875   n = mglevels[0]->levels;
876   /* FIX: Move this to PCSetFromOptions_MG? */
877   if (mg->usedmfornumberoflevels) {
878     PetscInt levels;
879 
880     PetscCall(DMGetRefineLevel(pc->dm, &levels));
881     levels++;
882     if (levels > n) { /* the problem is now being solved on a finer grid */
883       PetscCall(PCMGSetLevels(pc, levels, NULL));
884       n = levels;
885       PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */
886       mglevels = mg->levels;
887     }
888   }
889 
890   /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */
891   /* so use those from global PC */
892   /* Is this what we always want? What if user wants to keep old one? */
893   PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset));
894   if (opsset) {
895     Mat mmat;
896     PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat));
897     if (mmat == pc->pmat) opsset = PETSC_FALSE;
898   }
899   /* fine grid smoother inherits the reuse-pc flag */
900   PetscCall(KSPGetPC(mglevels[n - 1]->smoothd, &cpc));
901   cpc->reusepreconditioner = pc->reusepreconditioner;
902   PetscCall(KSPGetPC(mglevels[n - 1]->smoothu, &cpc));
903   cpc->reusepreconditioner = pc->reusepreconditioner;
904 
905   /* Create CR solvers */
906   PetscCall(PCMGGetAdaptCR(pc, &doCR));
907   if (doCR) {
908     const char *prefix;
909 
910     PetscCall(PCGetOptionsPrefix(pc, &prefix));
911     for (i = 1; i < n; ++i) {
912       PC   ipc, cr;
913       char crprefix[128];
914 
915       PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr));
916       PetscCall(KSPSetNestLevel(mglevels[i]->cr, pc->kspnestlevel));
917       PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE));
918       PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i));
919       PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix));
920       PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level));
921       PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV));
922       PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL));
923       PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED));
924       PetscCall(KSPGetPC(mglevels[i]->cr, &ipc));
925 
926       PetscCall(PCSetType(ipc, PCCOMPOSITE));
927       PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE));
928       PetscCall(PCCompositeAddPCType(ipc, PCSOR));
929       PetscCall(CreateCR_Private(pc, i, &cr));
930       PetscCall(PCCompositeAddPC(ipc, cr));
931       PetscCall(PCDestroy(&cr));
932 
933       PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, mg->default_smoothd));
934       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
935       PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%" PetscInt_FMT "_cr_", i));
936       PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix));
937     }
938   }
939 
940   if (!opsset) {
941     PetscCall(PCGetUseAmat(pc, &use_amat));
942     if (use_amat) {
943       PetscCall(PetscInfo(pc, "Using outer operators to define finest grid operator \n  because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
944       PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat));
945     } else {
946       PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n  because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n"));
947       PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat));
948     }
949   }
950 
951   for (i = n - 1; i > 0; i--) {
952     if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) {
953       missinginterpolate = PETSC_TRUE;
954       break;
955     }
956   }
957 
958   PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB));
959   if (dA == dB) dAeqdB = PETSC_TRUE;
960   if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */
961 
962   if (pc->dm && !pc->setupcalled) {
963     /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */
964     PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm));
965     PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE));
966     if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) {
967       PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm));
968       PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE));
969     }
970     if (mglevels[n - 1]->cr) {
971       PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm));
972       PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE));
973     }
974   }
975 
976   /*
977    Skipping if user has provided all interpolation/restriction needed (since DM might not be able to produce them (when coming from SNES/TS)
978    Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs?
979   */
980   if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) {
981     /* first see if we can compute a coarse space */
982     if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) {
983       for (i = n - 2; i > -1; i--) {
984         if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) {
985           PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace));
986           PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace));
987         }
988       }
989     } else { /* construct the interpolation from the DMs */
990       Mat p;
991       Vec rscale;
992 
993       PetscCheck(n == 1 || pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PC lacks a DM so cannot automatically construct a multigrid hierarchy. Number of levels requested %" PetscInt_FMT, n);
994       PetscCall(PetscMalloc1(n, &dms));
995       dms[n - 1] = pc->dm;
996       /* Separately create them so we do not get DMKSP interference between levels */
997       for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i]));
998       for (i = n - 2; i > -1; i--) {
999         DMKSP     kdm;
1000         PetscBool dmhasrestrict, dmhasinject;
1001 
1002         PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i]));
1003         if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE));
1004         if (mglevels[i]->smoothd != mglevels[i]->smoothu) {
1005           PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i]));
1006           if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE));
1007         }
1008         if (mglevels[i]->cr) {
1009           PetscCall(KSPSetDM(mglevels[i]->cr, dms[i]));
1010           if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE));
1011         }
1012         PetscCall(DMGetDMKSPWrite(dms[i], &kdm));
1013         /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
1014          * a bitwise OR of computing the matrix, RHS, and initial iterate. */
1015         kdm->ops->computerhs = NULL;
1016         kdm->rhsctx          = NULL;
1017         if (!mglevels[i + 1]->interpolate) {
1018           PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale));
1019           PetscCall(PCMGSetInterpolation(pc, i + 1, p));
1020           if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale));
1021           PetscCall(VecDestroy(&rscale));
1022           PetscCall(MatDestroy(&p));
1023         }
1024         PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict));
1025         if (dmhasrestrict && !mglevels[i + 1]->restrct) {
1026           PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p));
1027           PetscCall(PCMGSetRestriction(pc, i + 1, p));
1028           PetscCall(MatDestroy(&p));
1029         }
1030         PetscCall(DMHasCreateInjection(dms[i], &dmhasinject));
1031         if (dmhasinject && !mglevels[i + 1]->inject) {
1032           PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p));
1033           PetscCall(PCMGSetInjection(pc, i + 1, p));
1034           PetscCall(MatDestroy(&p));
1035         }
1036       }
1037 
1038       for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i]));
1039       PetscCall(PetscFree(dms));
1040     }
1041   }
1042 
1043   if (mg->galerkin < PC_MG_GALERKIN_NONE) {
1044     Mat       A, B;
1045     PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE;
1046     MatReuse  reuse = MAT_INITIAL_MATRIX;
1047 
1048     if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE;
1049     if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE;
1050     if (pc->setupcalled) reuse = MAT_REUSE_MATRIX;
1051     for (i = n - 2; i > -1; i--) {
1052       PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0");
1053       if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct));
1054       if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate));
1055       if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B));
1056       if (doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A));
1057       if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B));
1058       /* the management of the PetscObjectReference() and PetscObjecDereference() below is rather delicate */
1059       if (!doA && dAeqdB) {
1060         if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B));
1061         A = B;
1062       } else if (!doA && reuse == MAT_INITIAL_MATRIX) {
1063         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL));
1064         PetscCall(PetscObjectReference((PetscObject)A));
1065       }
1066       if (!doB && dAeqdB) {
1067         if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A));
1068         B = A;
1069       } else if (!doB && reuse == MAT_INITIAL_MATRIX) {
1070         PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B));
1071         PetscCall(PetscObjectReference((PetscObject)B));
1072       }
1073       if (reuse == MAT_INITIAL_MATRIX) {
1074         PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B));
1075         PetscCall(PetscObjectDereference((PetscObject)A));
1076         PetscCall(PetscObjectDereference((PetscObject)B));
1077       }
1078       dA = A;
1079       dB = B;
1080     }
1081   }
1082 
1083   /* Adapt interpolation matrices */
1084   if (adaptInterpolation) {
1085     for (i = 0; i < n; ++i) {
1086       if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace));
1087       if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace));
1088     }
1089     for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i));
1090   }
1091 
1092   if (needRestricts && pc->dm) {
1093     for (i = n - 2; i >= 0; i--) {
1094       DM  dmfine, dmcoarse;
1095       Mat Restrict, Inject;
1096       Vec rscale;
1097 
1098       PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine));
1099       PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse));
1100       PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict));
1101       PetscCall(PCMGGetRScale(pc, i + 1, &rscale));
1102       PetscCall(PCMGGetInjection(pc, i + 1, &Inject));
1103       PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse));
1104     }
1105   }
1106 
1107   if (!pc->setupcalled) {
1108     for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd));
1109     for (i = 1; i < n; i++) {
1110       if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu));
1111       if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr));
1112     }
1113     /* insure that if either interpolation or restriction is set the other one is set */
1114     for (i = 1; i < n; i++) {
1115       PetscCall(PCMGGetInterpolation(pc, i, NULL));
1116       PetscCall(PCMGGetRestriction(pc, i, NULL));
1117     }
1118     for (i = 0; i < n - 1; i++) {
1119       if (!mglevels[i]->b) {
1120         Vec *vec;
1121         PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL));
1122         PetscCall(PCMGSetRhs(pc, i, *vec));
1123         PetscCall(VecDestroy(vec));
1124         PetscCall(PetscFree(vec));
1125       }
1126       if (!mglevels[i]->r && i) {
1127         PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1128         PetscCall(PCMGSetR(pc, i, tvec));
1129         PetscCall(VecDestroy(&tvec));
1130       }
1131       if (!mglevels[i]->x) {
1132         PetscCall(VecDuplicate(mglevels[i]->b, &tvec));
1133         PetscCall(PCMGSetX(pc, i, tvec));
1134         PetscCall(VecDestroy(&tvec));
1135       }
1136       if (doCR) {
1137         PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx));
1138         PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb));
1139         PetscCall(VecZeroEntries(mglevels[i]->crb));
1140       }
1141     }
1142     if (n != 1 && !mglevels[n - 1]->r) {
1143       /* PCMGSetR() on the finest level if user did not supply it */
1144       Vec *vec;
1145 
1146       PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL));
1147       PetscCall(PCMGSetR(pc, n - 1, *vec));
1148       PetscCall(VecDestroy(vec));
1149       PetscCall(PetscFree(vec));
1150     }
1151     if (doCR) {
1152       PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx));
1153       PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb));
1154       PetscCall(VecZeroEntries(mglevels[n - 1]->crb));
1155     }
1156   }
1157 
1158   if (pc->dm) {
1159     /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */
1160     for (i = 0; i < n - 1; i++) {
1161       if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1162     }
1163   }
1164   // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g.,
1165   // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply.
1166   if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX;
1167 
1168   for (i = 1; i < n; i++) {
1169     if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) {
1170       /* if doing only down then initial guess is zero */
1171       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
1172     }
1173     if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1174     if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1175     PetscCall(KSPSetUp(mglevels[i]->smoothd));
1176     if (mglevels[i]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1177     if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1178     if (!mglevels[i]->residual) {
1179       Mat mat;
1180 
1181       PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1182       PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat));
1183     }
1184     if (!mglevels[i]->residualtranspose) {
1185       Mat mat;
1186 
1187       PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL));
1188       PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat));
1189     }
1190   }
1191   for (i = 1; i < n; i++) {
1192     if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) {
1193       Mat downmat, downpmat;
1194 
1195       /* check if operators have been set for up, if not use down operators to set them */
1196       PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL));
1197       if (!opsset) {
1198         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1199         PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat));
1200       }
1201 
1202       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE));
1203       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1204       PetscCall(KSPSetUp(mglevels[i]->smoothu));
1205       if (mglevels[i]->smoothu->reason) pc->failedreason = PC_SUBPC_ERROR;
1206       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1207     }
1208     if (mglevels[i]->cr) {
1209       Mat downmat, downpmat;
1210 
1211       /* check if operators have been set for up, if not use down operators to set them */
1212       PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL));
1213       if (!opsset) {
1214         PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat));
1215         PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat));
1216       }
1217 
1218       PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE));
1219       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1220       PetscCall(KSPSetUp(mglevels[i]->cr));
1221       if (mglevels[i]->cr->reason) pc->failedreason = PC_SUBPC_ERROR;
1222       if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0));
1223     }
1224   }
1225 
1226   if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1227   PetscCall(KSPSetUp(mglevels[0]->smoothd));
1228   if (mglevels[0]->smoothd->reason) pc->failedreason = PC_SUBPC_ERROR;
1229   if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0));
1230 
1231   /*
1232      Dump the interpolation/restriction matrices plus the
1233    Jacobian/stiffness on each level. This allows MATLAB users to
1234    easily check if the Galerkin condition A_c = R A_f R^T is satisfied.
1235 
1236    Only support one or the other at the same time.
1237   */
1238 #if defined(PETSC_USE_SOCKET_VIEWER)
1239   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL));
1240   if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc));
1241   dump = PETSC_FALSE;
1242 #endif
1243   PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL));
1244   if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc));
1245 
1246   if (viewer) {
1247     for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer));
1248     for (i = 0; i < n; i++) {
1249       PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc));
1250       PetscCall(MatView(pc->mat, viewer));
1251     }
1252   }
1253   PetscFunctionReturn(PETSC_SUCCESS);
1254 }
1255 
/* PCMG implementation of PCMGGetLevels(): report the stored level count */
PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels)
{
  PC_MG *mg = (PC_MG *)pc->data;

  PetscFunctionBegin;
  *levels = mg->nlevels;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1264 
1265 /*@
1266   PCMGGetLevels - Gets the number of levels to use with `PCMG`.
1267 
1268   Not Collective
1269 
1270   Input Parameter:
1271 . pc - the preconditioner context
1272 
1273   Output Parameter:
1274 . levels - the number of levels
1275 
1276   Level: advanced
1277 
1278 .seealso: [](ch_ksp), `PCMG`, `PCMGSetLevels()`
1279 @*/
PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(levels, 2);
  *levels = 0; /* default result when the PC is not a PCMG, since PetscTryMethod() is then a no-op */
  PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1289 
1290 /*@
1291   PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy
1292 
1293   Input Parameter:
1294 . pc - the preconditioner context
1295 
1296   Output Parameters:
1297 + gc - grid complexity = sum_i(n_i) / n_0
1298 - oc - operator complexity = sum_i(nnz_i) / nnz_0
1299 
1300   Level: advanced
1301 
  Note:
  This is often called the operator complexity in multigrid literature
1304 
1305 .seealso: [](ch_ksp), `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`
1306 @*/
PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc)
{
  PC_MG         *mg       = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       lev, N;
  PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0; /* finest-level nnz/size and running totals over all levels */
  MatInfo        info;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (gc) PetscAssertPointer(gc, 2);
  if (oc) PetscAssertPointer(oc, 3);
  /* before setup the level operators do not exist; report zero complexities */
  if (!pc->setupcalled) {
    if (gc) *gc = 0;
    if (oc) *oc = 0;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels");
  for (lev = 0; lev < mg->nlevels; lev++) {
    Mat dB;
    PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB));
    PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */
    PetscCall(MatGetSize(dB, &N, NULL));
    sgc += N;
    soc += info.nz_used;
    /* the last (finest) level provides the normalization denominators */
    if (lev == mg->nlevels - 1) {
      nnz0 = info.nz_used;
      n0   = N;
    }
  }
  /* NOTE(review): this check also fails when gc is NULL, making gc effectively
     mandatory even though the manpage marks both outputs as optional -- confirm
     whether an oc-only query should be allowed */
  PetscCheck(n0 > 0 && gc, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number for grid points on finest level is not available");
  *gc = (PetscReal)(sgc / n0);
  if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1342 
1343 /*@
1344   PCMGSetType - Determines the form of multigrid to use, either
1345   multiplicative, additive, full, or the Kaskade algorithm.
1346 
1347   Logically Collective
1348 
1349   Input Parameters:
1350 + pc   - the preconditioner context
1351 - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`
1352 
1353   Options Database Key:
1354 . -pc_mg_type <form> - Sets <form>, one of multiplicative, additive, full, kaskade
1355 
1356   Level: advanced
1357 
1358 .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType`
1359 @*/
1360 PetscErrorCode PCMGSetType(PC pc, PCMGType form)
1361 {
1362   PC_MG *mg = (PC_MG *)pc->data;
1363 
1364   PetscFunctionBegin;
1365   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1366   PetscValidLogicalCollectiveEnum(pc, form, 2);
1367   mg->am = form;
1368   if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG;
1369   else pc->ops->applyrichardson = NULL;
1370   PetscFunctionReturn(PETSC_SUCCESS);
1371 }
1372 
1373 /*@
1374   PCMGGetType - Finds the form of multigrid the `PCMG` is using  multiplicative, additive, full, or the Kaskade algorithm.
1375 
1376   Logically Collective
1377 
1378   Input Parameter:
1379 . pc - the preconditioner context
1380 
1381   Output Parameter:
1382 . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`, `PCMGCycleType`
1383 
1384   Level: advanced
1385 
1386 .seealso: [](ch_ksp), `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()`
1387 @*/
1388 PetscErrorCode PCMGGetType(PC pc, PCMGType *type)
1389 {
1390   PC_MG *mg = (PC_MG *)pc->data;
1391 
1392   PetscFunctionBegin;
1393   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1394   *type = mg->am;
1395   PetscFunctionReturn(PETSC_SUCCESS);
1396 }
1397 
1398 /*@
1399   PCMGSetCycleType - Sets the type cycles to use.  Use `PCMGSetCycleTypeOnLevel()` for more
1400   complicated cycling.
1401 
1402   Logically Collective
1403 
1404   Input Parameters:
1405 + pc - the multigrid context
1406 - n  - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W`
1407 
1408   Options Database Key:
1409 . -pc_mg_cycle_type <v,w> - provide the cycle desired
1410 
1411   Level: advanced
1412 
1413 .seealso: [](ch_ksp), `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType`
1414 @*/
1415 PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n)
1416 {
1417   PC_MG         *mg       = (PC_MG *)pc->data;
1418   PC_MG_Levels **mglevels = mg->levels;
1419   PetscInt       i, levels;
1420 
1421   PetscFunctionBegin;
1422   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1423   PetscValidLogicalCollectiveEnum(pc, n, 2);
1424   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1425   levels = mglevels[0]->levels;
1426   for (i = 0; i < levels; i++) mglevels[i]->cycles = n;
1427   PetscFunctionReturn(PETSC_SUCCESS);
1428 }
1429 
1430 /*@
1431   PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step
1432   of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE`
1433 
1434   Logically Collective
1435 
1436   Input Parameters:
1437 + pc - the multigrid context
1438 - n  - number of cycles (default is 1)
1439 
1440   Options Database Key:
1441 . -pc_mg_multiplicative_cycles n - set the number of cycles
1442 
1443   Level: advanced
1444 
1445   Note:
1446   This is not associated with setting a v or w cycle, that is set with `PCMGSetCycleType()`
1447 
1448 .seealso: [](ch_ksp), `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType`
1449 @*/
1450 PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n)
1451 {
1452   PC_MG *mg = (PC_MG *)pc->data;
1453 
1454   PetscFunctionBegin;
1455   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1456   PetscValidLogicalCollectiveInt(pc, n, 2);
1457   mg->cyclesperpcapply = n;
1458   PetscFunctionReturn(PETSC_SUCCESS);
1459 }
1460 
1461 /*
1462    Since the finest level KSP shares the original matrix (of the entire system), it's preconditioner
1463    should not be updated if the whole PC is supposed to reuse the preconditioner
1464 */
1465 static PetscErrorCode PCSetReusePreconditioner_MG(PC pc, PetscBool flag)
1466 {
1467   PC_MG         *mg       = (PC_MG *)pc->data;
1468   PC_MG_Levels **mglevels = mg->levels;
1469   PetscInt       levels;
1470   PC             tpc;
1471 
1472   PetscFunctionBegin;
1473   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1474   PetscValidLogicalCollectiveBool(pc, flag, 2);
1475   if (mglevels) {
1476     levels = mglevels[0]->levels;
1477     PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
1478     tpc->reusepreconditioner = flag;
1479     PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
1480     tpc->reusepreconditioner = flag;
1481   }
1482   PetscFunctionReturn(PETSC_SUCCESS);
1483 }
1484 
1485 static PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use)
1486 {
1487   PC_MG *mg = (PC_MG *)pc->data;
1488 
1489   PetscFunctionBegin;
1490   mg->galerkin = use;
1491   PetscFunctionReturn(PETSC_SUCCESS);
1492 }
1493 
/*@
  PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the
  finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i

  Logically Collective

  Input Parameters:
+ pc  - the multigrid context
- use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE`

  Options Database Key:
. -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process

  Level: intermediate

  Note:
  Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not
  use the `PCMG` construction of the coarser grids.

.seealso: [](ch_ksp), `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType`
@*/
PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use)); /* no-op if the PC type does not implement it */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1522 
1523 /*@
1524   PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. A_i-1 = r_i * A_i * p_i
1525 
1526   Not Collective
1527 
1528   Input Parameter:
1529 . pc - the multigrid context
1530 
1531   Output Parameter:
1532 . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL`
1533 
1534   Level: intermediate
1535 
1536 .seealso: [](ch_ksp), `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType`
1537 @*/
1538 PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin)
1539 {
1540   PC_MG *mg = (PC_MG *)pc->data;
1541 
1542   PetscFunctionBegin;
1543   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1544   *galerkin = mg->galerkin;
1545   PetscFunctionReturn(PETSC_SUCCESS);
1546 }
1547 
1548 static PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt)
1549 {
1550   PC_MG *mg = (PC_MG *)pc->data;
1551 
1552   PetscFunctionBegin;
1553   mg->adaptInterpolation = adapt;
1554   PetscFunctionReturn(PETSC_SUCCESS);
1555 }
1556 
1557 static PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt)
1558 {
1559   PC_MG *mg = (PC_MG *)pc->data;
1560 
1561   PetscFunctionBegin;
1562   *adapt = mg->adaptInterpolation;
1563   PetscFunctionReturn(PETSC_SUCCESS);
1564 }
1565 
1566 static PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype)
1567 {
1568   PC_MG *mg = (PC_MG *)pc->data;
1569 
1570   PetscFunctionBegin;
1571   mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? PETSC_TRUE : PETSC_FALSE;
1572   mg->coarseSpaceType    = ctype;
1573   PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_BOTH));
1574   PetscFunctionReturn(PETSC_SUCCESS);
1575 }
1576 
1577 static PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype)
1578 {
1579   PC_MG *mg = (PC_MG *)pc->data;
1580 
1581   PetscFunctionBegin;
1582   *ctype = mg->coarseSpaceType;
1583   PetscFunctionReturn(PETSC_SUCCESS);
1584 }
1585 
1586 static PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr)
1587 {
1588   PC_MG *mg = (PC_MG *)pc->data;
1589 
1590   PetscFunctionBegin;
1591   mg->compatibleRelaxation = cr;
1592   PetscFunctionReturn(PETSC_SUCCESS);
1593 }
1594 
1595 static PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr)
1596 {
1597   PC_MG *mg = (PC_MG *)pc->data;
1598 
1599   PetscFunctionBegin;
1600   *cr = mg->compatibleRelaxation;
1601   PetscFunctionReturn(PETSC_SUCCESS);
1602 }
1603 
/*@
  PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space.

  Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective

  Input Parameters:
+ pc    - the multigrid context
- ctype - the type of coarse space

  Options Database Keys:
+ -pc_mg_adapt_interp_n <int>             - The number of modes to use
- -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, `polynomial`, `harmonic`, `eigenvector`, `generalized_eigenvector`, `gdsw`

  Level: intermediate

  Note:
  Requires a `DM` with specific functionality be attached to the `PC`.

.seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`, `DM`
@*/
PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, ctype, 2);
  PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype)); /* no-op if the PC type does not implement it */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1634 
/*@
  PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. ctype - the type of coarse space

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()`
@*/
PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(ctype, 2);
  PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype)); /* errors if the PC type does not implement it */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1658 
/* MATT: REMOVE? */
/*@
  PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated.

  Logically Collective

  Input Parameters:
+ pc    - the multigrid context
- adapt - flag for adaptation of the interpolator

  Options Database Keys:
+ -pc_mg_adapt_interp                     - Turn on adaptation
. -pc_mg_adapt_interp_n <int>             - The number of modes to use, should be divisible by dimension
- -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt)); /* no-op if the PC type does not implement it */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1685 
/*@
  PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh,
  and thus accurately interpolated.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. adapt - flag for adaptation of the interpolator

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(adapt, 2);
  PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt)); /* errors if the PC type does not implement it */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1710 
/*@
  PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation.

  Logically Collective

  Input Parameters:
+ pc - the multigrid context
- cr - flag for compatible relaxation

  Options Database Key:
. -pc_mg_adapt_cr - Turn on compatible relaxation

  Level: intermediate

.seealso: [](ch_ksp), `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr)); /* no-op if the PC type does not implement it */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1734 
/*@
  PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation.

  Not Collective

  Input Parameter:
. pc - the multigrid context

  Output Parameter:
. cr - flag for compatible relaxation

  Level: intermediate

.seealso: [](ch_ksp), `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
@*/
PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(cr, 2);
  PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr)); /* errors if the PC type does not implement it */
  PetscFunctionReturn(PETSC_SUCCESS);
}
1758 
1759 /*@
1760   PCMGSetNumberSmooth - Sets the number of pre and post-smoothing steps to use
1761   on all levels.  Use `PCMGDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of
1762   pre- and post-smoothing steps.
1763 
1764   Logically Collective
1765 
1766   Input Parameters:
1767 + pc - the multigrid context
1768 - n  - the number of smoothing steps
1769 
1770   Options Database Key:
1771 . -mg_levels_ksp_max_it <n> - Sets number of pre and post-smoothing steps
1772 
1773   Level: advanced
1774 
1775   Note:
1776   This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
1777 
1778 .seealso: [](ch_ksp), `PCMG`, `PCMGSetDistinctSmoothUp()`
1779 @*/
1780 PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n)
1781 {
1782   PC_MG         *mg       = (PC_MG *)pc->data;
1783   PC_MG_Levels **mglevels = mg->levels;
1784   PetscInt       i, levels;
1785 
1786   PetscFunctionBegin;
1787   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1788   PetscValidLogicalCollectiveInt(pc, n, 2);
1789   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1790   levels = mglevels[0]->levels;
1791 
1792   for (i = 1; i < levels; i++) {
1793     PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1794     PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_CURRENT, PETSC_CURRENT, PETSC_CURRENT, n));
1795     mg->default_smoothu = n;
1796     mg->default_smoothd = n;
1797   }
1798   PetscFunctionReturn(PETSC_SUCCESS);
1799 }
1800 
1801 /*@
1802   PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels
1803   and adds the suffix _up to the options name
1804 
1805   Logically Collective
1806 
1807   Input Parameter:
1808 . pc - the preconditioner context
1809 
1810   Options Database Key:
1811 . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects
1812 
1813   Level: advanced
1814 
1815   Note:
1816   This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid.
1817 
1818 .seealso: [](ch_ksp), `PCMG`, `PCMGSetNumberSmooth()`
1819 @*/
1820 PetscErrorCode PCMGSetDistinctSmoothUp(PC pc)
1821 {
1822   PC_MG         *mg       = (PC_MG *)pc->data;
1823   PC_MG_Levels **mglevels = mg->levels;
1824   PetscInt       i, levels;
1825   KSP            subksp;
1826 
1827   PetscFunctionBegin;
1828   PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1829   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling");
1830   levels = mglevels[0]->levels;
1831 
1832   for (i = 1; i < levels; i++) {
1833     const char *prefix = NULL;
1834     /* make sure smoother up and down are different */
1835     PetscCall(PCMGGetSmootherUp(pc, i, &subksp));
1836     PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix));
1837     PetscCall(KSPSetOptionsPrefix(subksp, prefix));
1838     PetscCall(KSPAppendOptionsPrefix(subksp, "up_"));
1839   }
1840   PetscFunctionReturn(PETSC_SUCCESS);
1841 }
1842 
1843 /* No new matrices are created, and the coarse operator matrices are the references to the original ones */
1844 static PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[])
1845 {
1846   PC_MG         *mg       = (PC_MG *)pc->data;
1847   PC_MG_Levels **mglevels = mg->levels;
1848   Mat           *mat;
1849   PetscInt       l;
1850 
1851   PetscFunctionBegin;
1852   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1853   PetscCall(PetscMalloc1(mg->nlevels, &mat));
1854   for (l = 1; l < mg->nlevels; l++) {
1855     mat[l - 1] = mglevels[l]->interpolate;
1856     PetscCall(PetscObjectReference((PetscObject)mat[l - 1]));
1857   }
1858   *num_levels     = mg->nlevels;
1859   *interpolations = mat;
1860   PetscFunctionReturn(PETSC_SUCCESS);
1861 }
1862 
1863 /* No new matrices are created, and the coarse operator matrices are the references to the original ones */
1864 static PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
1865 {
1866   PC_MG         *mg       = (PC_MG *)pc->data;
1867   PC_MG_Levels **mglevels = mg->levels;
1868   PetscInt       l;
1869   Mat           *mat;
1870 
1871   PetscFunctionBegin;
1872   PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling");
1873   PetscCall(PetscMalloc1(mg->nlevels, &mat));
1874   for (l = 0; l < mg->nlevels - 1; l++) {
1875     PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &mat[l]));
1876     PetscCall(PetscObjectReference((PetscObject)mat[l]));
1877   }
1878   *num_levels      = mg->nlevels;
1879   *coarseOperators = mat;
1880   PetscFunctionReturn(PETSC_SUCCESS);
1881 }
1882 
/*@C
  PCMGRegisterCoarseSpaceConstructor -  Adds a method to the `PCMG` package for coarse space construction.

  Not Collective, No Fortran Support

  Input Parameters:
+ name     - name of the constructor
- function - constructor routine, see `PCMGCoarseSpaceConstructorFn`

  Level: advanced

  Developer Notes:
  This does not appear to be used anywhere

.seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn *function)
{
  PetscFunctionBegin;
  /* make sure the PC package is initialized before touching the registry */
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1906 
/*@C
  PCMGGetCoarseSpaceConstructor -  Returns the given coarse space construction method.

  Not Collective, No Fortran Support

  Input Parameter:
. name - name of the constructor

  Output Parameter:
. function - constructor routine

  Level: advanced

.seealso: [](ch_ksp), `PCMGCoarseSpaceConstructorFn`, `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()`
@*/
PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PCMGCoarseSpaceConstructorFn **function)
{
  PetscFunctionBegin;
  /* looks up name in the registry filled by PCMGRegisterCoarseSpaceConstructor() */
  PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1928 
1929 /*MC
1930    PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional information about the restriction/interpolation
1931    operators using `PCMGSetInterpolation()` and/or `PCMGSetRestriction()`(and possibly the coarser grid matrices) or a `DM` that can provide such information.
1932 
1933    Options Database Keys:
1934 +  -pc_mg_levels <nlevels>                            - number of levels including finest
1935 .  -pc_mg_cycle_type <v,w>                            - provide the cycle desired
1936 .  -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default
1937 .  -pc_mg_log                                         - log information about time spent on each level of the solver
1938 .  -pc_mg_distinct_smoothup                           - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes)
1939 .  -pc_mg_galerkin <both,pmat,mat,none>               - use the Galerkin process to compute coarser operators, i.e., $A_{coarse} = R A_{fine} R^T$
1940 .  -pc_mg_multiplicative_cycles                        - number of cycles to use as the preconditioner (defaults to 1)
1941 .  -pc_mg_dump_matlab                                  - dumps the matrices for each level and the restriction/interpolation matrices
1942                                                          to a `PETSCVIEWERSOCKET` for reading from MATLAB.
-  -pc_mg_dump_binary                                  - dumps the matrices for each level and the restriction/interpolation matrices
1944                                                         to the binary output file called binaryoutput
1945 
1946    Level: intermediate
1947 
1948    Notes:
1949    `PCMG` provides a general framework for implementing multigrid methods. Use `PCGAMG` for PETSc's algebraic multigrid preconditioner, `PCHYPRE` for hypre's
1950    BoomerAMG algebraic multigrid, and `PCML` for Trilinos's ML preconditioner. `PCAMGX` provides access to NVIDIA's AmgX algebraic multigrid.
1951 
1952    If you use `KSPSetDM()` (or `SNESSetDM()` or `TSSetDM()`) with an appropriate `DM`, such as `DMDA`, then `PCMG` will use the geometric information
1953    from the `DM` to generate appropriate restriction and interpolation information and construct a geometric multigrid.
1954 
1955    If you do not provide an appropriate `DM` and do not provide restriction or interpolation operators with `PCMGSetInterpolation()` and/or `PCMGSetRestriction()`,
1956    then `PCMG` will run multigrid with only a single level (so not really multigrid).
1957 
1958    The Krylov solver (if any) and preconditioner (smoother) and their parameters are controlled from the options database with the standard
1959    options database keywords prefixed with `-mg_levels_` to affect all the levels but the coarsest, which is controlled with `-mg_coarse_`,
1960    and the finest where `-mg_fine_` can override `-mg_levels_`.  One can set different preconditioners etc on specific levels with the prefix
   `-mg_levels_n_` where `n` is the level number (zero being the coarse level). For example
1962 .vb
1963    -mg_levels_ksp_type gmres -mg_levels_pc_type bjacobi -mg_coarse_pc_type svd -mg_levels_2_pc_type sor
1964 .ve
1965    These options also work for controlling the smoothers etc inside `PCGAMG`
1966 
   If one uses a Krylov method such as `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method
1968 
1969    When run with a single level the smoother options are used on that level NOT the coarse grid solver options
1970 
1971    When run with `KSPRICHARDSON` the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This
1972    is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing
1973    (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the
1974    residual is computed at the end of each cycle.
1975 
1976 .seealso: [](sec_mg), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE`
1977           `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`,
1978           `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`,
1979           `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`,
1980           `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`,
1981           `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()`
1982 M*/
1983 
PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc)
{
  PC_MG *mg;

  PetscFunctionBegin;
  /* allocate and attach the PCMG context with its defaults */
  PetscCall(PetscNew(&mg));
  pc->data               = mg;
  mg->nlevels            = -1; /* NOTE(review): -1 appears to mean "levels not yet set" — confirm against PCMGSetLevels_MG() */
  mg->am                 = PC_MG_MULTIPLICATIVE;
  mg->galerkin           = PC_MG_GALERKIN_NONE;
  mg->adaptInterpolation = PETSC_FALSE;
  mg->Nc                 = -1;
  mg->eigenvalue         = -1; /* replaced by the id obtained from PetscObjectComposedDataRegister() below */

  pc->useAmat = PETSC_TRUE;

  /* install the PCMG implementations of the generic PC operations */
  pc->ops->apply             = PCApply_MG;
  pc->ops->applytranspose    = PCApplyTranspose_MG;
  pc->ops->matapply          = PCMatApply_MG;
  pc->ops->matapplytranspose = PCMatApplyTranspose_MG;
  pc->ops->setup             = PCSetUp_MG;
  pc->ops->reset             = PCReset_MG;
  pc->ops->destroy           = PCDestroy_MG;
  pc->ops->setfromoptions    = PCSetFromOptions_MG;
  pc->ops->view              = PCView_MG;

  PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue));
  /* compose the type-specific methods dispatched to by the public PCMG* wrappers */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetReusePreconditioner_C", PCSetReusePreconditioner_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2025