1 #include <petscdm.h> 2 #include <petscctable.h> 3 #include <petsc/private/matimpl.h> 4 #include <petsc/private/pcmgimpl.h> 5 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 6 7 typedef struct { 8 PC innerpc; /* A MG inner PC (Hypre or PCGAMG) to setup interpolations and coarse operators */ 9 char* innerpctype; /* PCGAMG or PCHYPRE */ 10 PetscBool reuseinterp; /* A flag indicates if or not to reuse the interpolations */ 11 PetscBool subcoarsening; /* If or not to use a subspace-based coarsening algorithm */ 12 PetscBool usematmaij; /* If or not to use MatMAIJ for saving memory */ 13 PetscInt component; /* Which subspace is used for the subspace-based coarsening algorithm? */ 14 } PC_HMG; 15 16 PetscErrorCode PCSetFromOptions_HMG(PetscOptionItems*,PC); 17 PetscErrorCode PCReset_MG(PC); 18 19 static PetscErrorCode PCHMGExtractSubMatrix_Private(Mat pmat,Mat *submat,MatReuse reuse,PetscInt component,PetscInt blocksize) 20 { 21 IS isrow; 22 PetscInt rstart,rend; 23 MPI_Comm comm; 24 25 PetscFunctionBegin; 26 PetscCall(PetscObjectGetComm((PetscObject)pmat,&comm)); 27 PetscCheck(component<blocksize,comm,PETSC_ERR_ARG_INCOMP,"Component %" PetscInt_FMT " should be less than block size %" PetscInt_FMT " ",component,blocksize); 28 PetscCall(MatGetOwnershipRange(pmat,&rstart,&rend)); 29 PetscCheck((rend-rstart)%blocksize == 0,comm,PETSC_ERR_ARG_INCOMP,"Block size %" PetscInt_FMT " is inconsistent for [%" PetscInt_FMT ", %" PetscInt_FMT ") ",blocksize,rstart,rend); 30 PetscCall(ISCreateStride(comm,(rend-rstart)/blocksize,rstart+component,blocksize,&isrow)); 31 PetscCall(MatCreateSubMatrix(pmat,isrow,isrow,reuse,submat)); 32 PetscCall(ISDestroy(&isrow)); 33 PetscFunctionReturn(0); 34 } 35 36 static PetscErrorCode PCHMGExpandInterpolation_Private(Mat subinterp, Mat *interp, PetscInt blocksize) 37 { 38 PetscInt subrstart,subrend,subrowsize,subcolsize,subcstart,subcend,rowsize,colsize; 39 PetscInt subrow,row,nz,*d_nnz,*o_nnz,i,j,dnz,onz,max_nz,*indices; 40 const PetscInt 
*idx; 41 const PetscScalar *values; 42 MPI_Comm comm; 43 44 PetscFunctionBegin; 45 PetscCall(PetscObjectGetComm((PetscObject)subinterp,&comm)); 46 PetscCall(MatGetOwnershipRange(subinterp,&subrstart,&subrend)); 47 subrowsize = subrend-subrstart; 48 rowsize = subrowsize*blocksize; 49 PetscCall(PetscCalloc2(rowsize,&d_nnz,rowsize,&o_nnz)); 50 PetscCall(MatGetOwnershipRangeColumn(subinterp,&subcstart,&subcend)); 51 subcolsize = subcend - subcstart; 52 colsize = subcolsize*blocksize; 53 max_nz = 0; 54 for (subrow=subrstart;subrow<subrend;subrow++) { 55 PetscCall(MatGetRow(subinterp,subrow,&nz,&idx,NULL)); 56 if (max_nz<nz) max_nz = nz; 57 dnz = 0; onz = 0; 58 for (i=0;i<nz;i++) { 59 if (idx[i]>=subcstart && idx[i]<subcend) dnz++; 60 else onz++; 61 } 62 for (i=0;i<blocksize;i++) { 63 d_nnz[(subrow-subrstart)*blocksize+i] = dnz; 64 o_nnz[(subrow-subrstart)*blocksize+i] = onz; 65 } 66 PetscCall(MatRestoreRow(subinterp,subrow,&nz,&idx,NULL)); 67 } 68 PetscCall(MatCreateAIJ(comm,rowsize,colsize,PETSC_DETERMINE,PETSC_DETERMINE,0,d_nnz,0,o_nnz,interp)); 69 PetscCall(MatSetOption(*interp,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE)); 70 PetscCall(MatSetOption(*interp,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE)); 71 PetscCall(MatSetOption(*interp,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE)); 72 PetscCall(MatSetFromOptions(*interp)); 73 74 PetscCall(MatSetUp(*interp)); 75 PetscCall(PetscFree2(d_nnz,o_nnz)); 76 PetscCall(PetscMalloc1(max_nz,&indices)); 77 for (subrow=subrstart; subrow<subrend; subrow++) { 78 PetscCall(MatGetRow(subinterp,subrow,&nz,&idx,&values)); 79 for (i=0;i<blocksize;i++) { 80 row = subrow*blocksize+i; 81 for (j=0;j<nz;j++) { 82 indices[j] = idx[j]*blocksize+i; 83 } 84 PetscCall(MatSetValues(*interp,1,&row,nz,indices,values,INSERT_VALUES)); 85 } 86 PetscCall(MatRestoreRow(subinterp,subrow,&nz,&idx,&values)); 87 } 88 PetscCall(PetscFree(indices)); 89 PetscCall(MatAssemblyBegin(*interp,MAT_FINAL_ASSEMBLY)); 90 PetscCall(MatAssemblyEnd(*interp,MAT_FINAL_ASSEMBLY)); 91 
PetscFunctionReturn(0); 92 } 93 94 PetscErrorCode PCSetUp_HMG(PC pc) 95 { 96 Mat PA, submat; 97 PC_MG *mg = (PC_MG*)pc->data; 98 PC_HMG *hmg = (PC_HMG*) mg->innerctx; 99 MPI_Comm comm; 100 PetscInt level; 101 PetscInt num_levels; 102 Mat *operators,*interpolations; 103 PetscInt blocksize; 104 const char *prefix; 105 PCMGGalerkinType galerkin; 106 107 PetscFunctionBegin; 108 PetscCall(PetscObjectGetComm((PetscObject)pc,&comm)); 109 if (pc->setupcalled) { 110 if (hmg->reuseinterp) { 111 /* If we did not use Galerkin in the last call or we have a different sparsity pattern now, 112 * we have to build from scratch 113 * */ 114 PetscCall(PCMGGetGalerkin(pc,&galerkin)); 115 if (galerkin == PC_MG_GALERKIN_NONE || pc->flag != SAME_NONZERO_PATTERN) pc->setupcalled = PETSC_FALSE; 116 PetscCall(PCMGSetGalerkin(pc,PC_MG_GALERKIN_PMAT)); 117 PetscCall(PCSetUp_MG(pc)); 118 PetscFunctionReturn(0); 119 } else { 120 PetscCall(PCReset_MG(pc)); 121 pc->setupcalled = PETSC_FALSE; 122 } 123 } 124 125 /* Create an inner PC (GAMG or HYPRE) */ 126 if (!hmg->innerpc) { 127 PetscCall(PCCreate(comm,&hmg->innerpc)); 128 /* If users do not set an inner pc type, we need to set a default value */ 129 if (!hmg->innerpctype) { 130 /* If hypre is available, use hypre, otherwise, use gamg */ 131 #if PETSC_HAVE_HYPRE 132 PetscCall(PetscStrallocpy(PCHYPRE,&(hmg->innerpctype))); 133 #else 134 PetscCall(PetscStrallocpy(PCGAMG,&(hmg->innerpctype))); 135 #endif 136 } 137 PetscCall(PCSetType(hmg->innerpc,hmg->innerpctype)); 138 } 139 PetscCall(PCGetOperators(pc,NULL,&PA)); 140 /* Users need to correctly set a block size of matrix in order to use subspace coarsening */ 141 PetscCall(MatGetBlockSize(PA,&blocksize)); 142 if (blocksize<=1) hmg->subcoarsening = PETSC_FALSE; 143 /* Extract a submatrix for constructing subinterpolations */ 144 if (hmg->subcoarsening) { 145 PetscCall(PCHMGExtractSubMatrix_Private(PA,&submat,MAT_INITIAL_MATRIX,hmg->component,blocksize)); 146 PA = submat; 147 } 148 
PetscCall(PCSetOperators(hmg->innerpc,PA,PA)); 149 if (hmg->subcoarsening) { 150 PetscCall(MatDestroy(&PA)); 151 } 152 /* Setup inner PC correctly. During this step, matrix will be coarsened */ 153 PetscCall(PCSetUseAmat(hmg->innerpc,PETSC_FALSE)); 154 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc,&prefix)); 155 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)hmg->innerpc,prefix)); 156 PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)hmg->innerpc,"hmg_inner_")); 157 PetscCall(PCSetFromOptions(hmg->innerpc)); 158 PetscCall(PCSetUp(hmg->innerpc)); 159 160 /* Obtain interpolations IN PLACE. For BoomerAMG, (I,J,data) is reused to avoid memory overhead */ 161 PetscCall(PCGetInterpolations(hmg->innerpc,&num_levels,&interpolations)); 162 /* We can reuse the coarse operators when we do the full space coarsening */ 163 if (!hmg->subcoarsening) { 164 PetscCall(PCGetCoarseOperators(hmg->innerpc,&num_levels,&operators)); 165 } 166 167 PetscCall(PCDestroy(&hmg->innerpc)); 168 hmg->innerpc = NULL; 169 PetscCall(PCMGSetLevels_MG(pc,num_levels,NULL)); 170 /* Set coarse matrices and interpolations to PCMG */ 171 for (level=num_levels-1; level>0; level--) { 172 Mat P=NULL, pmat=NULL; 173 Vec b, x,r; 174 if (hmg->subcoarsening) { 175 if (hmg->usematmaij) { 176 PetscCall(MatCreateMAIJ(interpolations[level-1],blocksize,&P)); 177 PetscCall(MatDestroy(&interpolations[level-1])); 178 } else { 179 /* Grow interpolation. 
In the future, we should use MAIJ */ 180 PetscCall(PCHMGExpandInterpolation_Private(interpolations[level-1],&P,blocksize)); 181 PetscCall(MatDestroy(&interpolations[level-1])); 182 } 183 } else { 184 P = interpolations[level-1]; 185 } 186 PetscCall(MatCreateVecs(P,&b,&r)); 187 PetscCall(PCMGSetInterpolation(pc,level,P)); 188 PetscCall(PCMGSetRestriction(pc,level,P)); 189 PetscCall(MatDestroy(&P)); 190 /* We reuse the matrices when we do not do subspace coarsening */ 191 if ((level-1)>=0 && !hmg->subcoarsening) { 192 pmat = operators[level-1]; 193 PetscCall(PCMGSetOperators(pc,level-1,pmat,pmat)); 194 PetscCall(MatDestroy(&pmat)); 195 } 196 PetscCall(PCMGSetRhs(pc,level-1,b)); 197 198 PetscCall(PCMGSetR(pc,level,r)); 199 PetscCall(VecDestroy(&r)); 200 201 PetscCall(VecDuplicate(b,&x)); 202 PetscCall(PCMGSetX(pc,level-1,x)); 203 PetscCall(VecDestroy(&x)); 204 PetscCall(VecDestroy(&b)); 205 } 206 PetscCall(PetscFree(interpolations)); 207 if (!hmg->subcoarsening) { 208 PetscCall(PetscFree(operators)); 209 } 210 /* Turn Galerkin off when we already have coarse operators */ 211 PetscCall(PCMGSetGalerkin(pc,hmg->subcoarsening ? 
PC_MG_GALERKIN_PMAT:PC_MG_GALERKIN_NONE)); 212 PetscCall(PCSetDM(pc,NULL)); 213 PetscCall(PCSetUseAmat(pc,PETSC_FALSE)); 214 PetscObjectOptionsBegin((PetscObject)pc); 215 PetscCall(PCSetFromOptions_MG(PetscOptionsObject,pc)); /* should be called in PCSetFromOptions_HMG(), but cannot be called prior to PCMGSetLevels() */ 216 PetscOptionsEnd(); 217 PetscCall(PCSetUp_MG(pc)); 218 PetscFunctionReturn(0); 219 } 220 221 PetscErrorCode PCDestroy_HMG(PC pc) 222 { 223 PC_MG *mg = (PC_MG*)pc->data; 224 PC_HMG *hmg = (PC_HMG*) mg->innerctx; 225 226 PetscFunctionBegin; 227 PetscCall(PCDestroy(&hmg->innerpc)); 228 PetscCall(PetscFree(hmg->innerpctype)); 229 PetscCall(PetscFree(hmg)); 230 PetscCall(PCDestroy_MG(pc)); 231 232 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetReuseInterpolation_C",NULL)); 233 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetUseSubspaceCoarsening_C",NULL)); 234 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetInnerPCType_C",NULL)); 235 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetCoarseningComponent_C",NULL)); 236 PetscFunctionReturn(0); 237 } 238 239 PetscErrorCode PCView_HMG(PC pc,PetscViewer viewer) 240 { 241 PC_MG *mg = (PC_MG*)pc->data; 242 PC_HMG *hmg = (PC_HMG*) mg->innerctx; 243 PetscBool iascii; 244 245 PetscFunctionBegin; 246 PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii)); 247 if (iascii) { 248 PetscCall(PetscViewerASCIIPrintf(viewer," Reuse interpolation: %s\n",hmg->reuseinterp? "true":"false")); 249 PetscCall(PetscViewerASCIIPrintf(viewer," Use subspace coarsening: %s\n",hmg->subcoarsening? "true":"false")); 250 PetscCall(PetscViewerASCIIPrintf(viewer," Coarsening component: %" PetscInt_FMT " \n",hmg->component)); 251 PetscCall(PetscViewerASCIIPrintf(viewer," Use MatMAIJ: %s \n",hmg->usematmaij? 
"true":"false")); 252 PetscCall(PetscViewerASCIIPrintf(viewer," Inner PC type: %s \n",hmg->innerpctype)); 253 } 254 PetscCall(PCView_MG(pc,viewer)); 255 PetscFunctionReturn(0); 256 } 257 258 PetscErrorCode PCSetFromOptions_HMG(PetscOptionItems *PetscOptionsObject,PC pc) 259 { 260 PC_MG *mg = (PC_MG*)pc->data; 261 PC_HMG *hmg = (PC_HMG*) mg->innerctx; 262 263 PetscFunctionBegin; 264 PetscOptionsHeadBegin(PetscOptionsObject,"HMG"); 265 PetscCall(PetscOptionsBool("-pc_hmg_reuse_interpolation","Reuse the interpolation operators when possible (cheaper, weaker when matrix entries change a lot)","PCHMGSetReuseInterpolation",hmg->reuseinterp,&hmg->reuseinterp,NULL)); 266 PetscCall(PetscOptionsBool("-pc_hmg_use_subspace_coarsening","Use the subspace coarsening to compute the interpolations","PCHMGSetUseSubspaceCoarsening",hmg->subcoarsening,&hmg->subcoarsening,NULL)); 267 PetscCall(PetscOptionsBool("-pc_hmg_use_matmaij","Use MatMAIJ store interpolation for saving memory","PCHMGSetInnerPCType",hmg->usematmaij,&hmg->usematmaij,NULL)); 268 PetscCall(PetscOptionsInt("-pc_hmg_coarsening_component","Which component is chosen for the subspace-based coarsening algorithm","PCHMGSetCoarseningComponent",hmg->component,&hmg->component,NULL)); 269 PetscOptionsHeadEnd(); 270 PetscFunctionReturn(0); 271 } 272 273 static PetscErrorCode PCHMGSetReuseInterpolation_HMG(PC pc, PetscBool reuse) 274 { 275 PC_MG *mg = (PC_MG*)pc->data; 276 PC_HMG *hmg = (PC_HMG*) mg->innerctx; 277 278 PetscFunctionBegin; 279 hmg->reuseinterp = reuse; 280 PetscFunctionReturn(0); 281 } 282 283 /*@ 284 PCHMGSetReuseInterpolation - Reuse interpolation matrices in HMG 285 286 Logically Collective on PC 287 288 Input Parameters: 289 + pc - the HMG context 290 - reuse - True indicates that HMG will reuse the interpolations 291 292 Options Database Keys: 293 . -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it potentially save the compute time. 
   Level: beginner

.keywords: HMG, multigrid, interpolation, reuse, set

.seealso: PCHMG
@*/
PetscErrorCode PCHMGSetReuseInterpolation(PC pc, PetscBool reuse)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGSetReuseInterpolation_C",(PC,PetscBool),(pc,reuse));
  PetscFunctionReturn(0);
}

/* Implementation of PCHMGSetUseSubspaceCoarsening() dispatched via PetscUseMethod() */
static PetscErrorCode PCHMGSetUseSubspaceCoarsening_HMG(PC pc, PetscBool subspace)
{
  PC_MG *mg = (PC_MG*)pc->data;
  PC_HMG *hmg = (PC_HMG*) mg->innerctx;

  PetscFunctionBegin;
  hmg->subcoarsening = subspace;
  PetscFunctionReturn(0);
}

/*@
   PCHMGSetUseSubspaceCoarsening - Use subspace coarsening in HMG

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  subspace - True indicates that HMG will use the subspace coarsening

   Options Database Keys:
.  -pc_hmg_use_subspace_coarsening <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).

   Level: beginner

.keywords: HMG, multigrid, interpolation, subspace, coarsening

.seealso: PCHMG
@*/
PetscErrorCode PCHMGSetUseSubspaceCoarsening(PC pc, PetscBool subspace)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGSetUseSubspaceCoarsening_C",(PC,PetscBool),(pc,subspace));
  PetscFunctionReturn(0);
}

/* Implementation of PCHMGSetInnerPCType() dispatched via PetscUseMethod();
   copies the type name into the HMG context */
static PetscErrorCode PCHMGSetInnerPCType_HMG(PC pc, PCType type)
{
  PC_MG *mg = (PC_MG*)pc->data;
  PC_HMG *hmg = (PC_HMG*) mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PetscStrallocpy(type,&(hmg->innerpctype)));
  PetscFunctionReturn(0);
}

/*@C
   PCHMGSetInnerPCType - Set an inner PC type

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  type - <hypre, gamg> coarsening algorithm

   Options Database Keys:
.
  -hmg_inner_pc_type <hypre, gamg> - What method is used to coarsen matrix

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening

.seealso: PCHMG, PCType
@*/
PetscErrorCode PCHMGSetInnerPCType(PC pc, PCType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGSetInnerPCType_C",(PC,PCType),(pc,type));
  PetscFunctionReturn(0);
}

/* Implementation of PCHMGSetCoarseningComponent() dispatched via PetscUseMethod() */
static PetscErrorCode PCHMGSetCoarseningComponent_HMG(PC pc, PetscInt component)
{
  PC_MG *mg = (PC_MG*)pc->data;
  PC_HMG *hmg = (PC_HMG*) mg->innerctx;

  PetscFunctionBegin;
  hmg->component = component;
  PetscFunctionReturn(0);
}

/*@
   PCHMGSetCoarseningComponent - Set which component is used for the subspace-based coarsening algorithm

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  component - which component PC will coarsen

   Options Database Keys:
.  -pc_hmg_coarsening_component - Which component is chosen for the subspace-based coarsening algorithm

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening, component

.seealso: PCHMG, PCType
@*/
PetscErrorCode PCHMGSetCoarseningComponent(PC pc, PetscInt component)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGSetCoarseningComponent_C",(PC,PetscInt),(pc,component));
  PetscFunctionReturn(0);
}

/* Implementation of PCHMGUseMatMAIJ() dispatched via PetscUseMethod() */
static PetscErrorCode PCHMGUseMatMAIJ_HMG(PC pc, PetscBool usematmaij)
{
  PC_MG *mg = (PC_MG*)pc->data;
  PC_HMG *hmg = (PC_HMG*) mg->innerctx;

  PetscFunctionBegin;
  hmg->usematmaij = usematmaij;
  PetscFunctionReturn(0);
}

/*@
   PCHMGUseMatMAIJ - Set a flag that indicates if or not to use MatMAIJ for interpolations for saving memory

   Logically Collective on PC

   Input Parameters:
+  pc - the HMG context
-  usematmaij - if or not
   to use MatMAIJ for interpolations. By default, it is true for saving memory

   Options Database Keys:
.  -pc_hmg_use_matmaij - <true | false >

   Level: beginner

.keywords: HMG, multigrid, interpolation, coarsening, MatMAIJ

.seealso: PCHMG, PCType
@*/
PetscErrorCode PCHMGUseMatMAIJ(PC pc, PetscBool usematmaij)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCHMGUseMatMAIJ_C",(PC,PetscBool),(pc,usematmaij));
  PetscFunctionReturn(0);
}

/*MC
   PCHMG - Hybrid of PETSc preconditioners (such as ASM, BJacobi, SOR, etc.) and Hypre BoomerAMG, GAMG or other multilevel methods. BoomerAMG, GAMG
           or other multilevel methods is used to coarsen matrix and generate a sequence of coarse matrices and interpolations. The matrices and
           interpolations are employed to construct PCMG, and then any available PETSc preconditioners can be chosen as smoothers and the coarse solver.

   Options Database Keys:
+  -pc_hmg_reuse_interpolation <true | false> - Whether or not to reuse the interpolations. If true, it potentially save the compute time.
.  -pc_hmg_use_subspace_coarsening <true | false> - Whether or not to use subspace coarsening (that is, coarsen a submatrix).
.  -hmg_inner_pc_type <hypre, gamg, ...> - What method is used to coarsen matrix
-  -pc_hmg_use_matmaij <true | false> - Whether or not to use MatMAIJ for multicomponent problems for saving memory

   Notes:
    For multicomponent problems, we can just coarsen one submatrix associated with one particular component. In this way, the preconditioner setup
    time is significantly reduced. One typical use case is neutron transport equations. There are many variables on each mesh vertex due to the
    discretization of angle and energy. Each variable, in fact, corresponds to the same PDEs but with different material properties.

   Level: beginner

   References:
.
*  - Fande Kong, Yaqi Wang, Derek R Gaston, Cody J Permann, Andrew E Slaughter, Alexander D Lindsay, Richard C Martineau, A highly parallel multilevel
    Newton-Krylov-Schwarz method with subspace-based coarsening and partition-based balancing for the multigroup neutron transport equations on
    3D unstructured meshes, arXiv preprint arXiv:1903.03659, 2019

.seealso: PCCreate(), PCSetType(), PCType, PC, PCMG, PCHYPRE, PCHMG, PCGetCoarseOperators(), PCGetInterpolations(), PCHMGSetReuseInterpolation(), PCHMGSetUseSubspaceCoarsening(),
          PCHMGSetInnerPCType()

M*/
/* Create an HMG preconditioner. HMG is built on top of PCMG: the HMG-specific
   context is hung off the PC_MG innerctx and the PCMG operations are overridden. */
PETSC_EXTERN PetscErrorCode PCCreate_HMG(PC pc)
{
  PC_HMG *hmg;
  PC_MG *mg;

  PetscFunctionBegin;
  /* if type was previously mg; must manually destroy it because call to PCSetType(pc,PCMG) will not destroy it */
  if (pc->ops->destroy) {
    PetscCall((*pc->ops->destroy)(pc));
    pc->data = NULL;
  }
  PetscCall(PetscFree(((PetscObject)pc)->type_name));

  PetscCall(PCSetType(pc,PCMG));
  PetscCall(PetscObjectChangeTypeName((PetscObject)pc, PCHMG));
  PetscCall(PetscNew(&hmg));

  /* Attach the HMG context to the PCMG data and set the defaults */
  mg = (PC_MG*) pc->data;
  mg->innerctx = hmg;
  hmg->reuseinterp = PETSC_FALSE;
  hmg->subcoarsening = PETSC_FALSE;
  hmg->usematmaij = PETSC_TRUE;
  hmg->component = 0;
  hmg->innerpc = NULL;

  /* Override PCMG operations with HMG-specific ones */
  pc->ops->setfromoptions = PCSetFromOptions_HMG;
  pc->ops->view = PCView_HMG;
  pc->ops->destroy = PCDestroy_HMG;
  pc->ops->setup = PCSetUp_HMG;

  /* Compose the HMG API methods so PCHMGSet*()/PCHMGUseMatMAIJ() dispatch here */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetReuseInterpolation_C",PCHMGSetReuseInterpolation_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetUseSubspaceCoarsening_C",PCHMGSetUseSubspaceCoarsening_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetInnerPCType_C",PCHMGSetInnerPCType_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGSetCoarseningComponent_C",PCHMGSetCoarseningComponent_HMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCHMGUseMatMAIJ_C",PCHMGUseMatMAIJ_HMG));
  PetscFunctionReturn(0);
}