xref: /petsc/src/ksp/pc/impls/hmg/hmg.c (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 #include <petscdm.h>
2 #include <petscctable.h>
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/pcmgimpl.h>
5 #include <petsc/private/pcimpl.h>      /*I "petscpc.h" I*/
6 
7 typedef struct {
8   PC               innerpc;            /* A MG inner PC (Hypre or PCGAMG) to setup interpolations and coarse operators  */
9   char*            innerpctype;        /* PCGAMG or PCHYPRE */
10   PetscBool        reuseinterp;        /* A flag indicates if or not to reuse the interpolations */
11   PetscBool        subcoarsening;      /* If or not to use a subspace-based coarsening algorithm */
12   PetscBool        usematmaij;         /* If or not to use MatMAIJ for saving memory */
13   PetscInt         component;          /* Which subspace is used for the subspace-based coarsening algorithm? */
14 } PC_HMG;
15 
16 PetscErrorCode PCSetFromOptions_HMG(PetscOptionItems*,PC);
17 PetscErrorCode PCReset_MG(PC);
18 
19 static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat,Mat *submat,MatReuse reuse,PetscInt component,PetscInt blocksize)
20 {
21   IS             isrow;
22   PetscInt       rstart,rend;
23   MPI_Comm       comm;
24 
25   PetscFunctionBegin;
26   PetscCall(PetscObjectGetComm((PetscObject)pmat,&comm));
27   PetscCheck(component<blocksize,comm,PETSC_ERR_ARG_INCOMP,"Component %" PetscInt_FMT " should be less than block size %" PetscInt_FMT " ",component,blocksize);
28   PetscCall(MatGetOwnershipRange(pmat,&rstart,&rend));
29   PetscCheck((rend-rstart)%blocksize == 0,comm,PETSC_ERR_ARG_INCOMP,"Block size %" PetscInt_FMT " is inconsistent for [%" PetscInt_FMT ", %" PetscInt_FMT ") ",blocksize,rstart,rend);
30   PetscCall(ISCreateStride(comm,(rend-rstart)/blocksize,rstart+component,blocksize,&isrow));
31   PetscCall(MatCreateSubMatrix(pmat,isrow,isrow,reuse,submat));
32   PetscCall(ISDestroy(&isrow));
33   PetscFunctionReturn(0);
34 }
35 
/* Expand a per-component ("sub") interpolation matrix to the full multicomponent
 * space: each nonzero (r,c) of subinterp becomes blocksize nonzeros
 * (r*blocksize+i, c*blocksize+i), i = 0..blocksize-1, carrying the same value.
 * The expanded matrix is created as AIJ and returned in *interp. */
static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize)
{
  PetscInt              subrstart,subrend,subrowsize,subcolsize,subcstart,subcend,rowsize,colsize;
  PetscInt              subrow,row,nz,*d_nnz,*o_nnz,i,j,dnz,onz,max_nz,*indices;
  const PetscInt        *idx;
  const PetscScalar     *values;
  MPI_Comm              comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)subinterp,&comm));
  PetscCall(MatGetOwnershipRange(subinterp,&subrstart,&subrend));
  subrowsize = subrend-subrstart;
  rowsize = subrowsize*blocksize;
  PetscCall(PetscCalloc2(rowsize,&d_nnz,rowsize,&o_nnz));
  PetscCall(MatGetOwnershipRangeColumn(subinterp,&subcstart,&subcend));
  subcolsize = subcend - subcstart;
  colsize    = subcolsize*blocksize;
  /* First pass: count diagonal/off-diagonal nonzeros per sub-row for exact
   * preallocation. Each sub-row contributes the same counts to all blocksize
   * expanded rows derived from it. max_nz tracks the widest row seen so the
   * scratch index array below can be sized once. */
  max_nz = 0;
  for (subrow=subrstart;subrow<subrend;subrow++) {
    PetscCall(MatGetRow(subinterp,subrow,&nz,&idx,NULL));
    if (max_nz<nz) max_nz = nz;
    dnz = 0; onz = 0;
    for (i=0;i<nz;i++) {
      if (idx[i]>=subcstart && idx[i]<subcend) dnz++;
      else onz++;
    }
    for (i=0;i<blocksize;i++) {
      d_nnz[(subrow-subrstart)*blocksize+i] = dnz;
      o_nnz[(subrow-subrstart)*blocksize+i] = onz;
    }
    PetscCall(MatRestoreRow(subinterp,subrow,&nz,&idx,NULL));
  }
  PetscCall(MatCreateAIJ(comm,rowsize,colsize,PETSC_DETERMINE,PETSC_DETERMINE,0,d_nnz,0,o_nnz,interp));
  PetscCall(MatSetOption(*interp,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE));
  PetscCall(MatSetOption(*interp,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE));
  /* Preallocation above is exact, so any additional allocation indicates a bug */
  PetscCall(MatSetOption(*interp,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE));
  PetscCall(MatSetFromOptions(*interp));

  PetscCall(MatSetUp(*interp));
  PetscCall(PetscFree2(d_nnz,o_nnz));
  /* Second pass: scatter each sub-row's values into its blocksize expanded rows */
  PetscCall(PetscMalloc1(max_nz,&indices));
  for (subrow=subrstart; subrow<subrend; subrow++) {
    PetscCall(MatGetRow(subinterp,subrow,&nz,&idx,&values));
    for (i=0;i<blocksize;i++) {
      row = subrow*blocksize+i;
      for (j=0;j<nz;j++) {
        indices[j] = idx[j]*blocksize+i;
      }
      PetscCall(MatSetValues(*interp,1,&row,nz,indices,values,INSERT_VALUES));
    }
    PetscCall(MatRestoreRow(subinterp,subrow,&nz,&idx,&values));
  }
  PetscCall(PetscFree(indices));
  PetscCall(MatAssemblyBegin(*interp,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*interp,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
93 
94 PetscErrorCode PCSetUp_HMG(PC pc)
95 {
96   Mat                PA, submat;
97   PC_MG              *mg   = (PC_MG*)pc->data;
98   PC_HMG             *hmg   = (PC_HMG*) mg->innerctx;
99   MPI_Comm           comm;
100   PetscInt           level;
101   PetscInt           num_levels;
102   Mat                *operators,*interpolations;
103   PetscInt           blocksize;
104   const char         *prefix;
105   PCMGGalerkinType   galerkin;
106 
107   PetscFunctionBegin;
108   PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
109   if (pc->setupcalled) {
110    if (hmg->reuseinterp) {
111      /* If we did not use Galerkin in the last call or we have a different sparsity pattern now,
112       * we have to build from scratch
113       * */
114      PetscCall(PCMGGetGalerkin(pc,&galerkin));
115      if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE;
116      PetscCall(PCMGSetGalerkin(pc,PC_MG_GALERKIN_PMAT));
117      PetscCall(PCSetUp_MG(pc));
118      PetscFunctionReturn(0);
119     } else {
120      PetscCall(PCReset_MG(pc));
121      pc->setupcalled = PETSC_FALSE;
122     }
123   }
124 
125   /* Create an inner PC (GAMG or HYPRE) */
126   if (!hmg->innerpc) {
127     PetscCall(PCCreate(comm,&hmg->innerpc));
128     /* If users do not set an inner pc type, we need to set a default value */
129     if (!hmg->innerpctype) {
130     /* If hypre is available, use hypre, otherwise, use gamg */
131 #if PETSC_HAVE_HYPRE
132       PetscCall(PetscStrallocpy(PCHYPRE,&(hmg->innerpctype)));
133 #else
134       PetscCall(PetscStrallocpy(PCGAMG,&(hmg->innerpctype)));
135 #endif
136     }
137     PetscCall(PCSetType(hmg->innerpc,hmg->innerpctype));
138   }
139   PetscCall(PCGetOperators(pc,NULL,&PA));
140   /* Users need to correctly set a block size of matrix in order to use subspace coarsening */
141   PetscCall(MatGetBlockSize(PA,&blocksize));
142   if (blocksize<=1) hmg->subcoarsening = PETSC_FALSE;
143   /* Extract a submatrix for constructing subinterpolations */
144   if (hmg->subcoarsening) {
145     PetscCall(PCHMGExtractSubMatrix_Private(PA,&submat,MAT_INITIAL_MATRIX,hmg->component,blocksize));
146     PA = submat;
147   }
148   PetscCall(PCSetOperators(hmg->innerpc,PA,PA));
149   if (hmg->subcoarsening) {
150    PetscCall(MatDestroy(&PA));
151   }
152   /* Setup inner PC correctly. During this step, matrix will be coarsened */
153   PetscCall(PCSetUseAmat(hmg->innerpc,PETSC_FALSE));
154   PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc,&prefix));
155   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc,prefix));
156   PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc,"hmg_inner_"));
157   PetscCall(PCSetFromOptions(hmg->innerpc));
158   PetscCall(PCSetUp(hmg->innerpc));
159 
160   /* Obtain interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */
161   PetscCall(PCGetInterpolations(hmg->innerpc,&num_levels,&interpolations));
162   /* We can reuse the coarse operators when we do the full space coarsening */
163   if (!hmg->subcoarsening) {
164     PetscCall(PCGetCoarseOperators(hmg->innerpc,&num_levels,&operators));
165   }
166 
167   PetscCall(PCDestroy(&hmg->innerpc));
168   hmg->innerpc = NULL;
169   PetscCall(PCMGSetLevels_MG(pc,num_levels,NULL));
170   /* Set coarse matrices and interpolations to PCMG */
171   for (level=num_levels-1; level>0; level--) {
172     Mat P=NULL, pmat=NULL;
173     Vec b, x,r;
174     if (hmg->subcoarsening) {
175       if (hmg->usematmaij) {
176         PetscCall(MatCreateMAIJ(interpolations[level-1],blocksize,&P));
177         PetscCall(MatDestroy(&interpolations[level-1]));
178       } else {
179         /* Grow interpolation. In the future, we should use MAIJ */
180         PetscCall(PCHMGExpandInterpolation_Private(interpolations[level-1],&P,blocksize));
181         PetscCall(MatDestroy(&interpolations[level-1]));
182       }
183     } else {
184       P = interpolations[level-1];
185     }
186     PetscCall(MatCreateVecs(P,&b,&r));
187     PetscCall(PCMGSetInterpolation(pc,level,P));
188     PetscCall(PCMGSetRestriction(pc,level,P));
189     PetscCall(MatDestroy(&P));
190     /* We reuse the matrices when we do not do subspace coarsening */
191     if ((level-1)>=0 && !hmg->subcoarsening) {
192       pmat = operators[level-1];
193       PetscCall(PCMGSetOperators(pc,level-1,pmat,pmat));
194       PetscCall(MatDestroy(&pmat));
195     }
196     PetscCall(PCMGSetRhs(pc,level-1,b));
197 
198     PetscCall(PCMGSetR(pc,level,r));
199     PetscCall(VecDestroy(&r));
200 
201     PetscCall(VecDuplicate(b,&x));
202     PetscCall(PCMGSetX(pc,level-1,x));
203     PetscCall(VecDestroy(&x));
204     PetscCall(VecDestroy(&b));
205   }
206   PetscCall(PetscFree(interpolations));
207   if (!hmg->subcoarsening) {
208     PetscCall(PetscFree(operators));
209   }
210   /* Turn Galerkin off when we already have coarse operators */
211   PetscCall(PCMGSetGalerkin(pc,hmg->subcoarsening ? PC_MG_GALERKIN_PMAT:PC_MG_GALERKIN_NONE));
212   PetscCall(PCSetDM(pc,NULL));
213   PetscCall(PCSetUseAmat(pc,PETSC_FALSE));
214   PetscObjectOptionsBegin((PetscObject)pc);
215   PetscCall(PCSetFromOptions_MG(PetscOptionsObject,pc)); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */
216   PetscOptionsEnd();
217   PetscCall(PCSetUp_MG(pc));
218   PetscFunctionReturn(0);
219 }
220 
221 PetscErrorCode PCDestroy_HMG(PC pc)
222 {
223   PC_MG          *mg  = (PC_MG*)pc->data;
224   PC_HMG         *hmg = (PC_HMG*) mg->innerctx;
225 
226   PetscFunctionBegin;
227   PetscCall(PCDestroy(&hmg->innerpc));
228   PetscCall(PetscFree(hmg->innerpctype));
229   PetscCall(PetscFree(hmg));
230   PetscCall(PCDestroy_MG(pc));
231 
232   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetReuseInterpolation_C",NULL));
233   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetUseSubspaceCoarsening_C",NULL));
234   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetInnerPCType_C",NULL));
235   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetCoarseningComponent_C",NULL));
236   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGUseMatMAIJ_C",NULL));
237   PetscFunctionReturn(0);
238 }
239 
240 PetscErrorCode PCView_HMG(PC pc,PetscViewer viewer)
241 {
242   PC_MG          *mg = (PC_MG*)pc->data;
243   PC_HMG         *hmg = (PC_HMG*) mg->innerctx;
244   PetscBool      iascii;
245 
246   PetscFunctionBegin;
247   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
248   if (iascii) {
249     PetscCall(PetscViewerASCIIPrintf(viewer," Reuse interpolation: %s\n",hmg->reuseinterp? "true":"false"));
250     PetscCall(PetscViewerASCIIPrintf(viewer," Use subspace coarsening: %s\n",hmg->subcoarsening? "true":"false"));
251     PetscCall(PetscViewerASCIIPrintf(viewer," Coarsening component: %" PetscInt_FMT " \n",hmg->component));
252     PetscCall(PetscViewerASCIIPrintf(viewer," Use MatMAIJ: %s \n",hmg->usematmaij? "true":"false"));
253     PetscCall(PetscViewerASCIIPrintf(viewer," Inner PC type: %s \n",hmg->innerpctype));
254   }
255   PetscCall(PCView_MG(pc,viewer));
256   PetscFunctionReturn(0);
257 }
258 
259 PetscErrorCode PCSetFromOptions_HMG(PetscOptionItems *PetscOptionsObject,PC pc)
260 {
261   PC_MG          *mg = (PC_MG*)pc->data;
262   PC_HMG         *hmg = (PC_HMG*) mg->innerctx;
263 
264   PetscFunctionBegin;
265   PetscOptionsHeadBegin(PetscOptionsObject,"HMG");
266   PetscCall(PetscOptionsBool("-pc_hmg_reuse_interpolation","Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)","PCHMGSetReuseInterpolation",hmg->reuseinterp,&hmg->reuseinterp,NULL));
267   PetscCall(PetscOptionsBool("-pc_hmg_use_subspace_coarsening","Use the subspace coarsening to compute the interpolations","PCHMGSetUseSubspaceCoarsening",hmg->subcoarsening,&hmg->subcoarsening,NULL));
268   PetscCall(PetscOptionsBool("-pc_hmg_use_matmaij","Use MatMAIJ store interpolation for saving memory","PCHMGSetInnerPCType",hmg->usematmaij,&hmg->usematmaij,NULL));
269   PetscCall(PetscOptionsInt("-pc_hmg_coarsening_component","Which component is chosen for the subspace-based coarsening algorithm","PCHMGSetCoarseningComponent",hmg->component,&hmg->component,NULL));
270   PetscOptionsHeadEnd();
271   PetscFunctionReturn(0);
272 }
273 
/* Implementation behind the composed method PCHMGSetReuseInterpolation_C:
 * records the reuse-interpolation flag in the HMG context. */
static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse)
{
  PC_MG          *mg  = (PC_MG*)pc->data;
  PC_HMG         *hmg = (PC_HMG*) mg->innerctx;

  PetscFunctionBegin;
  hmg->reuseinterp = reuse;
  PetscFunctionReturn(0);
}
283 
/*@
   PCHMGSetReuseInterpolation - Reuse interpolation matrices in HMG

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  reuse - True indicates that HMG will reuse the interpolations

   Options Database Keys:
.  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it potentially saves the compute time.

   Level: beginner

.keywords: HMG, multigrid, interpolation, reuse, set

.seealso: `PCHMG`
@*/
PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGSetReuseInterpolation_C",(PC,PetscBool),(pc,reuse));
  PetscFunctionReturn(0);
}
309 
/* Implementation behind the composed method PCHMGSetUseSubspaceCoarsening_C:
 * records the subspace-coarsening flag in the HMG context. */
static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace)
{
  PC_MG          *mg  = (PC_MG*)pc->data;
  PC_HMG         *hmg = (PC_HMG*) mg->innerctx;

  PetscFunctionBegin;
  hmg->subcoarsening = subspace;
  PetscFunctionReturn(0);
}
319 
/*@
   PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in HMG

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  subspace - True indicates that HMG will use the subspace coarsening

   Options Database Keys:
.  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).

   Level: beginner

.keywords: HMG, multigrid, interpolation, subspace, coarsening

.seealso: `PCHMG`
@*/
PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGSetUseSubspaceCoarsening_C",(PC,PetscBool),(pc,subspace));
  PetscFunctionReturn(0);
}
345 
346 static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type)
347 {
348   PC_MG           *mg  = (PC_MG*)pc->data;
349   PC_HMG          *hmg = (PC_HMG*) mg->innerctx;
350 
351   PetscFunctionBegin;
352   PetscCall(PetscStrallocpy(type,&(hmg->innerpctype)));
353   PetscFunctionReturn(0);
354 }
355 
/*@C
   PCHMGSetInnerPCType - Set an inner PC type

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  type - <hypre, gamg> coarsening algorithm

   Options Database Keys:
.  -hmg_inner_pc_type <hypre, gamg> - What method is used to coarsen matrix

   Notes:
   The type string is copied into the HMG context; the caller keeps ownership
   of the passed-in string.

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening

.seealso: `PCHMG`, `PCType`
@*/
PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGSetInnerPCType_C",(PC,PCType),(pc,type));
  PetscFunctionReturn(0);
}
381 
/* Implementation behind the composed method PCHMGSetCoarseningComponent_C:
 * records which component the subspace coarsening will extract. */
static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component)
{
  PC_MG           *mg  = (PC_MG*)pc->data;
  PC_HMG          *hmg = (PC_HMG*) mg->innerctx;

  PetscFunctionBegin;
  hmg->component = component;
  PetscFunctionReturn(0);
}
391 
/*@
   PCHMGSetCoarseningComponent - Set which component is used for the subspace-based coarsening algorithm

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  component - which component PC will coarsen (must be smaller than the block size of the matrix; checked at setup time)

   Options Database Keys:
.  -pc_hmg_coarsening_component - Which component is chosen for the subspace-based coarsening algorithm

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening, component

.seealso: `PCHMG`, `PCType`
@*/
PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGSetCoarseningComponent_C",(PC,PetscInt),(pc,component));
  PetscFunctionReturn(0);
}
417 
/* Implementation behind the composed method PCHMGUseMatMAIJ_C: records
 * whether interpolations are stored as MatMAIJ (memory-saving) matrices. */
static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij)
{
  PC_MG           *mg  = (PC_MG*)pc->data;
  PC_HMG          *hmg = (PC_HMG*) mg->innerctx;

  PetscFunctionBegin;
  hmg->usematmaij = usematmaij;
  PetscFunctionReturn(0);
}
427 
/*@
   PCHMGUseMatMAIJ - Set a flag that indicates if or not to use MatMAIJ for interpolations for saving memory

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  usematmaij - if or not to use MatMAIJ for interpolations. By default, it is true for saving memory

   Options Database Keys:
.  -pc_hmg_use_matmaij - <true | false >

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening, MatMAIJ

.seealso: `PCHMG`, `PCType`
@*/
PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGUseMatMAIJ_C",(PC,PetscBool),(pc,usematmaij));
  PetscFunctionReturn(0);
}
453 
454 /*MC
   PCHMG - Hybrid of PETSc preconditioners (such as ASM, BJacobi, SOR, etc.) and Hypre BoomerAMG, GAMG or other multilevel methods. BoomerAMG, GAMG
           or other multilevel methods are used to coarsen the matrix and generate a sequence of coarse matrices and interpolations. The matrices and
           interpolations are employed to construct PCMG, and then any available PETSc preconditioners can be chosen as smoothers and the coarse solver.
458 
459    Options Database Keys:
460 +  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it potentially save the compute time.
461 .  -pc_hmg_use_subspace_coarsening  <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).
462 .  -hmg_inner_pc_type <hypre, gamg, ...> - What method is used to coarsen matrix
463 -  -pc_hmg_use_matmaij <true | false> - Whether or not to use MatMAIJ for multicomponent problems for saving memory
464 
   Notes:
    For multicomponent problems, we can just coarsen one submatrix associated with one particular component. In this way, the preconditioner setup
    time is significantly reduced. One typical use case is neutron transport equations. There are many variables on each mesh vertex due to the
    discretization of angle and energy. Each variable, in fact, corresponds to the same PDEs but with different material properties.
469 
470    Level: beginner
471 
472     References:
473 .   * - Fande Kong, Yaqi Wang, Derek R Gaston, Cody J Permann, Andrew E Slaughter, Alexander D Lindsay, Richard C Martineau, A highly parallel multilevel
474     Newton-Krylov-Schwarz method with subspace-based coarsening and partition-based balancing for the multigroup neutron transport equations on
475     3D unstructured meshes, arXiv preprint arXiv:1903.03659, 2019
476 
477 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMG`, `PCHYPRE`, `PCHMG`, `PCGetCoarseOperators()`, `PCGetInterpolations()`, `PCHMGSetReuseInterpolation()`, `PCHMGSetUseSubspaceCoarsening()`,
478           `PCHMGSetInnerPCType()`
479 
480 M*/
481 PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc)
482 {
483   PC_HMG         *hmg;
484   PC_MG          *mg;
485 
486   PetscFunctionBegin;
487   /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
488   if (pc->ops->destroy) {
489     PetscCall((*pc->ops->destroy)(pc));
490     pc->data = NULL;
491   }
492   PetscCall(PetscFree(((PetscObject)pc)->type_name));
493 
494   PetscCall(PCSetType(pc,PCMG));
495   PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCHMG));
496   PetscCall(PetscNew(&hmg));
497 
498   mg                      = (PC_MG*) pc->data;
499   mg->innerctx            = hmg;
500   hmg->reuseinterp        = PETSC_FALSE;
501   hmg->subcoarsening      = PETSC_FALSE;
502   hmg->usematmaij         = PETSC_TRUE;
503   hmg->component          = 0;
504   hmg->innerpc            = NULL;
505 
506   pc->ops->setfromoptions = PCSetFromOptions_HMG;
507   pc->ops->view           = PCView_HMG;
508   pc->ops->destroy        = PCDestroy_HMG;
509   pc->ops->setup          = PCSetUp_HMG;
510 
511   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetReuseInterpolation_C",PCHMGSetReuseInterpolation_HMG));
512   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetUseSubspaceCoarsening_C",PCHMGSetUseSubspaceCoarsening_HMG));
513   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetInnerPCType_C",PCHMGSetInnerPCType_HMG));
514   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetCoarseningComponent_C",PCHMGSetCoarseningComponent_HMG));
515   PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGUseMatMAIJ_C",PCHMGUseMatMAIJ_HMG));
516   PetscFunctionReturn(0);
517 }
518