xref: /petsc/src/ksp/pc/impls/hmg/hmg.c (revision d2522c19e8fa9bca20aaca277941d9a63e71db6a)
1 #include <petscdm.h>
2 #include <petscctable.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcmgimpl.h>
5 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
6 
/* Private context for PCHMG: a PCMG whose hierarchy (interpolations and coarse
   operators) is generated by an inner AMG preconditioner (PCGAMG or PCHYPRE) */
typedef struct {
  PC        innerpc;       /* Inner MG PC (PCHYPRE or PCGAMG) used to set up interpolations and coarse operators */
  char     *innerpctype;   /* Type name of the inner PC: PCGAMG or PCHYPRE */
  PetscBool reuseinterp;   /* Whether to reuse the interpolations from a previous setup */
  PetscBool subcoarsening; /* Whether to use a subspace-based (single-component) coarsening algorithm */
  PetscBool usematmaij;    /* Whether to use MatMAIJ for the expanded interpolations to save memory */
  PetscInt  component;     /* Which component (subspace) is used for the subspace-based coarsening algorithm */
} PC_HMG;
15 
16 PetscErrorCode PCSetFromOptions_HMG(PC, PetscOptionItems *);
17 PetscErrorCode PCReset_MG(PC);
18 
19 static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat, Mat *submat, MatReuse reuse, PetscInt component, PetscInt blocksize) {
20   IS       isrow;
21   PetscInt rstart, rend;
22   MPI_Comm comm;
23 
24   PetscFunctionBegin;
25   PetscCall(PetscObjectGetComm((PetscObject)pmat, &comm));
26   PetscCheck(component < blocksize, comm, PETSC_ERR_ARG_INCOMP, "Component %" PetscInt_FMT " should be less than block size %" PetscInt_FMT " ", component, blocksize);
27   PetscCall(MatGetOwnershipRange(pmat, &rstart, &rend));
28   PetscCheck((rend - rstart) % blocksize == 0, comm, PETSC_ERR_ARG_INCOMP, "Block size %" PetscInt_FMT " is inconsistent for [%" PetscInt_FMT ", %" PetscInt_FMT ") ", blocksize, rstart, rend);
29   PetscCall(ISCreateStride(comm, (rend - rstart) / blocksize, rstart + component, blocksize, &isrow));
30   PetscCall(MatCreateSubMatrix(pmat, isrow, isrow, reuse, submat));
31   PetscCall(ISDestroy(&isrow));
32   PetscFunctionReturn(0);
33 }
34 
/*
   Expand a single-component interpolation matrix to the full multicomponent space:
   each nonzero (r, c) of subinterp produces blocksize nonzeros
   (r*blocksize + i, c*blocksize + i), i = 0..blocksize-1, with the same value.
   The result is a freshly allocated AIJ matrix; the caller owns *interp.
 */
static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize) {
  PetscInt           subrstart, subrend, subrowsize, subcolsize, subcstart, subcend, rowsize, colsize;
  PetscInt           subrow, row, nz, *d_nnz, *o_nnz, i, j, dnz, onz, max_nz, *indices;
  const PetscInt    *idx;
  const PetscScalar *values;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)subinterp, &comm));
  PetscCall(MatGetOwnershipRange(subinterp, &subrstart, &subrend));
  subrowsize = subrend - subrstart;
  rowsize    = subrowsize * blocksize;
  /* Per-row preallocation counts (diagonal and off-diagonal parts) for the expanded matrix */
  PetscCall(PetscCalloc2(rowsize, &d_nnz, rowsize, &o_nnz));
  PetscCall(MatGetOwnershipRangeColumn(subinterp, &subcstart, &subcend));
  subcolsize = subcend - subcstart;
  colsize    = subcolsize * blocksize;
  max_nz     = 0;
  /* First pass: count nonzeros of each sub-row; every expanded row derived from
     that sub-row has identical diagonal/off-diagonal counts */
  for (subrow = subrstart; subrow < subrend; subrow++) {
    PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, NULL));
    if (max_nz < nz) max_nz = nz;
    dnz = 0;
    onz = 0;
    for (i = 0; i < nz; i++) {
      if (idx[i] >= subcstart && idx[i] < subcend) dnz++;
      else onz++;
    }
    for (i = 0; i < blocksize; i++) {
      d_nnz[(subrow - subrstart) * blocksize + i] = dnz;
      o_nnz[(subrow - subrstart) * blocksize + i] = onz;
    }
    PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, NULL));
  }
  PetscCall(MatCreateAIJ(comm, rowsize, colsize, PETSC_DETERMINE, PETSC_DETERMINE, 0, d_nnz, 0, o_nnz, interp));
  PetscCall(MatSetOption(*interp, MAT_IGNORE_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatSetOption(*interp, MAT_IGNORE_ZERO_ENTRIES, PETSC_TRUE));
  PetscCall(MatSetOption(*interp, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscCall(MatSetFromOptions(*interp));

  PetscCall(MatSetUp(*interp));
  PetscCall(PetscFree2(d_nnz, o_nnz));
  /* Scratch buffer for the shifted column indices of one expanded row */
  PetscCall(PetscMalloc1(max_nz, &indices));
  /* Second pass: copy each sub-row into its blocksize expanded rows, shifting
     the column indices by the component offset i */
  for (subrow = subrstart; subrow < subrend; subrow++) {
    PetscCall(MatGetRow(subinterp, subrow, &nz, &idx, &values));
    for (i = 0; i < blocksize; i++) {
      row = subrow * blocksize + i;
      for (j = 0; j < nz; j++) { indices[j] = idx[j] * blocksize + i; }
      PetscCall(MatSetValues(*interp, 1, &row, nz, indices, values, INSERT_VALUES));
    }
    PetscCall(MatRestoreRow(subinterp, subrow, &nz, &idx, &values));
  }
  PetscCall(PetscFree(indices));
  PetscCall(MatAssemblyBegin(*interp, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*interp, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
90 
91 PetscErrorCode PCSetUp_HMG(PC pc) {
92   Mat              PA, submat;
93   PC_MG           *mg  = (PC_MG *)pc->data;
94   PC_HMG          *hmg = (PC_HMG *)mg->innerctx;
95   MPI_Comm         comm;
96   PetscInt         level;
97   PetscInt         num_levels;
98   Mat             *operators, *interpolations;
99   PetscInt         blocksize;
100   const char      *prefix;
101   PCMGGalerkinType galerkin;
102 
103   PetscFunctionBegin;
104   PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
105   if (pc->setupcalled) {
106     if (hmg->reuseinterp) {
107       /* If we did not use Galerkin in the last call or we have a different sparsity pattern now,
108       * we have to build from scratch
109       * */
110       PetscCall(PCMGGetGalerkin(pc, &galerkin));
111       if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE;
112       PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
113       PetscCall(PCSetUp_MG(pc));
114       PetscFunctionReturn(0);
115     } else {
116       PetscCall(PCReset_MG(pc));
117       pc->setupcalled = PETSC_FALSE;
118     }
119   }
120 
121   /* Create an inner PC (GAMG or HYPRE) */
122   if (!hmg->innerpc) {
123     PetscCall(PCCreate(comm, &hmg->innerpc));
124     /* If users do not set an inner pc type, we need to set a default value */
125     if (!hmg->innerpctype) {
126       /* If hypre is available, use hypre, otherwise, use gamg */
127 #if PETSC_HAVE_HYPRE
128       PetscCall(PetscStrallocpy(PCHYPRE, &(hmg->innerpctype)));
129 #else
130       PetscCall(PetscStrallocpy(PCGAMG, &(hmg->innerpctype)));
131 #endif
132     }
133     PetscCall(PCSetType(hmg->innerpc, hmg->innerpctype));
134   }
135   PetscCall(PCGetOperators(pc, NULL, &PA));
136   /* Users need to correctly set a block size of matrix in order to use subspace coarsening */
137   PetscCall(MatGetBlockSize(PA, &blocksize));
138   if (blocksize <= 1) hmg->subcoarsening = PETSC_FALSE;
139   /* Extract a submatrix for constructing subinterpolations */
140   if (hmg->subcoarsening) {
141     PetscCall(PCHMGExtractSubMatrix_Private(PA, &submat, MAT_INITIAL_MATRIX, hmg->component, blocksize));
142     PA = submat;
143   }
144   PetscCall(PCSetOperators(hmg->innerpc, PA, PA));
145   if (hmg->subcoarsening) { PetscCall(MatDestroy(&PA)); }
146   /* Setup inner PC correctly. During this step, matrix will be coarsened */
147   PetscCall(PCSetUseAmat(hmg->innerpc, PETSC_FALSE));
148   PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix));
149   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc, prefix));
150   PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc, "hmg_inner_"));
151   PetscCall(PCSetFromOptions(hmg->innerpc));
152   PetscCall(PCSetUp(hmg->innerpc));
153 
154   /* Obtain interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */
155   PetscCall(PCGetInterpolations(hmg->innerpc, &num_levels, &interpolations));
156   /* We can reuse the coarse operators when we do the full space coarsening */
157   if (!hmg->subcoarsening) { PetscCall(PCGetCoarseOperators(hmg->innerpc, &num_levels, &operators)); }
158 
159   PetscCall(PCDestroy(&hmg->innerpc));
160   hmg->innerpc = NULL;
161   PetscCall(PCMGSetLevels_MG(pc, num_levels, NULL));
162   /* Set coarse matrices and interpolations to PCMG */
163   for (level = num_levels - 1; level > 0; level--) {
164     Mat P = NULL, pmat = NULL;
165     Vec b, x, r;
166     if (hmg->subcoarsening) {
167       if (hmg->usematmaij) {
168         PetscCall(MatCreateMAIJ(interpolations[level - 1], blocksize, &P));
169         PetscCall(MatDestroy(&interpolations[level - 1]));
170       } else {
171         /* Grow interpolation. In the future, we should use MAIJ */
172         PetscCall(PCHMGExpandInterpolation_Private(interpolations[level - 1], &P, blocksize));
173         PetscCall(MatDestroy(&interpolations[level - 1]));
174       }
175     } else {
176       P = interpolations[level - 1];
177     }
178     PetscCall(MatCreateVecs(P, &b, &r));
179     PetscCall(PCMGSetInterpolation(pc, level, P));
180     PetscCall(PCMGSetRestriction(pc, level, P));
181     PetscCall(MatDestroy(&P));
182     /* We reuse the matrices when we do not do subspace coarsening */
183     if ((level - 1) >= 0 && !hmg->subcoarsening) {
184       pmat = operators[level - 1];
185       PetscCall(PCMGSetOperators(pc, level - 1, pmat, pmat));
186       PetscCall(MatDestroy(&pmat));
187     }
188     PetscCall(PCMGSetRhs(pc, level - 1, b));
189 
190     PetscCall(PCMGSetR(pc, level, r));
191     PetscCall(VecDestroy(&r));
192 
193     PetscCall(VecDuplicate(b, &x));
194     PetscCall(PCMGSetX(pc, level - 1, x));
195     PetscCall(VecDestroy(&x));
196     PetscCall(VecDestroy(&b));
197   }
198   PetscCall(PetscFree(interpolations));
199   if (!hmg->subcoarsening) { PetscCall(PetscFree(operators)); }
200   /* Turn Galerkin off when we already have coarse operators */
201   PetscCall(PCMGSetGalerkin(pc, hmg->subcoarsening ? PC_MG_GALERKIN_PMAT : PC_MG_GALERKIN_NONE));
202   PetscCall(PCSetDM(pc, NULL));
203   PetscCall(PCSetUseAmat(pc, PETSC_FALSE));
204   PetscObjectOptionsBegin((PetscObject)pc);
205   PetscCall(PCSetFromOptions_MG(pc, PetscOptionsObject)); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */
206   PetscOptionsEnd();
207   PetscCall(PCSetUp_MG(pc));
208   PetscFunctionReturn(0);
209 }
210 
/* Destroy the HMG-specific context, then the underlying PCMG data, and finally
   remove the composed PCHMG method functions from the PC object */
PetscErrorCode PCDestroy_HMG(PC pc) {
  PC_MG  *mg  = (PC_MG *)pc->data;
  PC_HMG *hmg = (PC_HMG *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PCDestroy(&hmg->innerpc));
  PetscCall(PetscFree(hmg->innerpctype));
  PetscCall(PetscFree(hmg));
  PetscCall(PCDestroy_MG(pc)); /* releases pc->data (the PC_MG), which held hmg as innerctx */

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", NULL));
  PetscFunctionReturn(0);
}
228 
229 PetscErrorCode PCView_HMG(PC pc, PetscViewer viewer) {
230   PC_MG    *mg  = (PC_MG *)pc->data;
231   PC_HMG   *hmg = (PC_HMG *)mg->innerctx;
232   PetscBool iascii;
233 
234   PetscFunctionBegin;
235   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
236   if (iascii) {
237     PetscCall(PetscViewerASCIIPrintf(viewer, " Reuse interpolation: %s\n", hmg->reuseinterp ? "true" : "false"));
238     PetscCall(PetscViewerASCIIPrintf(viewer, " Use subspace coarsening: %s\n", hmg->subcoarsening ? "true" : "false"));
239     PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening component: %" PetscInt_FMT " \n", hmg->component));
240     PetscCall(PetscViewerASCIIPrintf(viewer, " Use MatMAIJ: %s \n", hmg->usematmaij ? "true" : "false"));
241     PetscCall(PetscViewerASCIIPrintf(viewer, " Inner PC type: %s \n", hmg->innerpctype));
242   }
243   PetscCall(PCView_MG(pc, viewer));
244   PetscFunctionReturn(0);
245 }
246 
247 PetscErrorCode PCSetFromOptions_HMG(PC pc, PetscOptionItems *PetscOptionsObject) {
248   PC_MG  *mg  = (PC_MG *)pc->data;
249   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
250 
251   PetscFunctionBegin;
252   PetscOptionsHeadBegin(PetscOptionsObject, "HMG");
253   PetscCall(PetscOptionsBool("-pc_hmg_reuse_interpolation", "Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)", "PCHMGSetReuseInterpolation", hmg->reuseinterp, &hmg->reuseinterp, NULL));
254   PetscCall(PetscOptionsBool("-pc_hmg_use_subspace_coarsening", "Use the subspace coarsening to compute the interpolations", "PCHMGSetUseSubspaceCoarsening", hmg->subcoarsening, &hmg->subcoarsening, NULL));
255   PetscCall(PetscOptionsBool("-pc_hmg_use_matmaij", "Use MatMAIJ store interpolation for saving memory", "PCHMGSetInnerPCType", hmg->usematmaij, &hmg->usematmaij, NULL));
256   PetscCall(PetscOptionsInt("-pc_hmg_coarsening_component", "Which component is chosen for the subspace-based coarsening algorithm", "PCHMGSetCoarseningComponent", hmg->component, &hmg->component, NULL));
257   PetscOptionsHeadEnd();
258   PetscFunctionReturn(0);
259 }
260 
261 static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse) {
262   PC_MG  *mg  = (PC_MG *)pc->data;
263   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
264 
265   PetscFunctionBegin;
266   hmg->reuseinterp = reuse;
267   PetscFunctionReturn(0);
268 }
269 
/*@
   PCHMGSetReuseInterpolation - Reuse interpolation matrices in HMG after changes to the operator

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  reuse - True indicates that HMG will reuse the interpolations

   Options Database Keys:
.  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it potentially saves compute time.

   Level: beginner

.keywords: HMG, multigrid, interpolation, reuse, set

.seealso: `PCHMG`
@*/
PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGSetReuseInterpolation_C", (PC, PetscBool), (pc, reuse));
  PetscFunctionReturn(0);
}
294 
295 static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace) {
296   PC_MG  *mg  = (PC_MG *)pc->data;
297   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
298 
299   PetscFunctionBegin;
300   hmg->subcoarsening = subspace;
301   PetscFunctionReturn(0);
302 }
303 
/*@
   PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in HMG

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  subspace - True indicates that HMG will use the subspace coarsening

   Options Database Keys:
.  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).

   Level: beginner

.keywords: HMG, multigrid, interpolation, subspace, coarsening

.seealso: `PCHMG`
@*/
PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGSetUseSubspaceCoarsening_C", (PC, PetscBool), (pc, subspace));
  PetscFunctionReturn(0);
}
328 
329 static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type) {
330   PC_MG  *mg  = (PC_MG *)pc->data;
331   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
332 
333   PetscFunctionBegin;
334   PetscCall(PetscStrallocpy(type, &(hmg->innerpctype)));
335   PetscFunctionReturn(0);
336 }
337 
/*@C
   PCHMGSetInnerPCType - Set the inner AMG PC type used to coarsen the matrix

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  type - <hypre, gamg> coarsening algorithm

   Options Database Keys:
.  -hmg_inner_pc_type <hypre, gamg> - What method is used to coarsen the matrix

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening

.seealso: `PCHMG`, `PCType`
@*/
PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGSetInnerPCType_C", (PC, PCType), (pc, type));
  PetscFunctionReturn(0);
}
362 
363 static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component) {
364   PC_MG  *mg  = (PC_MG *)pc->data;
365   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
366 
367   PetscFunctionBegin;
368   hmg->component = component;
369   PetscFunctionReturn(0);
370 }
371 
/*@
   PCHMGSetCoarseningComponent - Set which component of the block matrix is used for the subspace-based coarsening algorithm

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  component - which component PC will coarsen

   Options Database Keys:
.  -pc_hmg_coarsening_component <component> - Which component is chosen for the subspace-based coarsening algorithm

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening, component

.seealso: `PCHMG`, `PCType`
@*/
PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGSetCoarseningComponent_C", (PC, PetscInt), (pc, component));
  PetscFunctionReturn(0);
}
396 
397 static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij) {
398   PC_MG  *mg  = (PC_MG *)pc->data;
399   PC_HMG *hmg = (PC_HMG *)mg->innerctx;
400 
401   PetscFunctionBegin;
402   hmg->usematmaij = usematmaij;
403   PetscFunctionReturn(0);
404 }
405 
/*@
   PCHMGUseMatMAIJ - Set a flag that indicates whether or not to use MatMAIJ for the interpolations for saving memory

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  usematmaij - whether or not to use MatMAIJ for the interpolations. By default, it is true for saving memory

   Options Database Keys:
.  -pc_hmg_use_matmaij <true | false> - Whether or not to use MatMAIJ for the interpolations

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening, MatMAIJ

.seealso: `PCHMG`, `PCType`
@*/
PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCHMGUseMatMAIJ_C", (PC, PetscBool), (pc, usematmaij));
  PetscFunctionReturn(0);
}
430 
/*MC
   PCHMG - Hybrid of PETSc preconditioners (such as ASM, BJacobi, SOR, etc.) and Hypre BoomerAMG, GAMG or other multilevel methods. BoomerAMG, GAMG
           or other multilevel methods is used to coarsen matrix and generate a sequence of coarse matrices and interpolations. The matrices and
           interpolations are employed to construct PCMG, and then any available PETSc preconditioners can be chosen as smoothers and the coarse solver.

   Options Database Keys:
+  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it potentially save the compute time.
.  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).
.  -hmg_inner_pc_type <hypre, gamg, ...> - What method is used to coarsen matrix
-  -pc_hmg_use_matmaij <true | false> - Whether or not to use MatMAIJ for multicomponent problems for saving memory

   Notes:
    For multicomponent problems, we can just coarsen one submatrix associated with one particular component. In this way, the preconditioner setup
    time is significantly reduced. One typical use case is neutron transport equations. There are many variables on each mesh vertex due to the
    discretization of angle and energy. Each variable, in fact, corresponds to the same PDEs but with different material properties.

   Level: beginner

    References:
.   * - Fande Kong, Yaqi Wang, Derek R Gaston, Cody J Permann, Andrew E Slaughter, Alexander D Lindsay, Richard C Martineau, A highly parallel multilevel
    Newton-Krylov-Schwarz method with subspace-based coarsening and partition-based balancing for the multigroup neutron transport equations on
    3D unstructured meshes, arXiv preprint arXiv:1903.03659, 2019

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMG`, `PCHYPRE`, `PCHMG`, `PCGetCoarseOperators()`, `PCGetInterpolations()`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`,
          `PCHMGSetInnerPCType()`

M*/
PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc) {
  PC_HMG *hmg;
  PC_MG  *mg;

  PetscFunctionBegin;
  /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
  PetscTryTypeMethod(pc, destroy);
  pc->data = NULL;
  PetscCall(PetscFree(((PetscObject)pc)->type_name));

  /* Build on top of PCMG: create it first, then override type name, options, view, destroy, and setup */
  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCHMG));
  PetscCall(PetscNew(&hmg));

  /* Attach the HMG context to the PCMG data and set the defaults */
  mg                 = (PC_MG *)pc->data;
  mg->innerctx       = hmg;
  hmg->reuseinterp   = PETSC_FALSE;
  hmg->subcoarsening = PETSC_FALSE;
  hmg->usematmaij    = PETSC_TRUE;
  hmg->component     = 0;
  hmg->innerpc       = NULL;

  pc->ops->setfromoptions = PCSetFromOptions_HMG;
  pc->ops->view           = PCView_HMG;
  pc->ops->destroy        = PCDestroy_HMG;
  pc->ops->setup          = PCSetUp_HMG;

  /* Register the PCHMG API entry points on this object */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetReuseInterpolation_C", PCHMGSetReuseInterpolation_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetUseSubspaceCoarsening_C", PCHMGSetUseSubspaceCoarsening_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetInnerPCType_C", PCHMGSetInnerPCType_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGSetCoarseningComponent_C", PCHMGSetCoarseningComponent_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHMGUseMatMAIJ_C", PCHMGUseMatMAIJ_HMG));
  PetscFunctionReturn(0);
}
492