1 /* 2 Provides an interface to the LLNL package hypre 3 */ 4 5 #include <petscpkg_version.h> 6 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 7 /* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */ 8 #include <petsc/private/matimpl.h> 9 #include <petsc/private/vecimpl.h> 10 #include <../src/vec/vec/impls/hypre/vhyp.h> 11 #include <../src/mat/impls/hypre/mhypre.h> 12 #include <../src/dm/impls/da/hypre/mhyp.h> 13 #include <_hypre_parcsr_ls.h> 14 #include <petscmathypre.h> 15 16 #if defined(PETSC_HAVE_HYPRE_DEVICE) 17 #include <petsc/private/deviceimpl.h> 18 #endif 19 20 static PetscBool cite = PETSC_FALSE; /* flipped once the hypre citation has been registered (see PetscCitationsRegister calls below) */ 21 static const char hypreCitation[] = "@manual{hypre-web-page,\n title = {{\\sl hypre}: High Performance Preconditioners},\n organization = {Lawrence Livermore National Laboratory},\n note = " 22 "{\\url{https://www.llnl.gov/casc/hypre}}\n}\n"; 23 24 /* 25 Private context (data structure) for the preconditioner. 26 */ 27 typedef struct { /* wraps one hypre solver (BoomerAMG, Pilut, ParaSails, Euclid, AMS or ADS) plus the PETSc-side copies of its option values */ 28 HYPRE_Solver hsolver; 29 Mat hpmat; /* MatHYPRE */ 30 31 HYPRE_Int (*destroy)(HYPRE_Solver); 32 HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector); 33 HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector); 34 35 MPI_Comm comm_hypre; 36 char *hypre_type; 37 38 /* options for Pilut and BoomerAMG*/ 39 PetscInt maxiter; 40 PetscReal tol; 41 42 /* options for Pilut */ 43 PetscInt factorrowsize; 44 45 /* options for ParaSails */ 46 PetscInt nlevels; 47 PetscReal threshold; 48 PetscReal filter; 49 PetscReal loadbal; 50 PetscInt logging; 51 PetscInt ruse; 52 PetscInt symt; /* symmetry handling; legal values are described by symtlist in the ParaSails options code */ 53 54 /* options for BoomerAMG */ 55 PetscBool printstatistics; 56 57 /* options for BoomerAMG */ 58 PetscInt cycletype; 59 PetscInt maxlevels; 60 PetscReal strongthreshold; 61 PetscReal maxrowsum; 62 PetscInt gridsweeps[3]; 63 PetscInt coarsentype; 64 PetscInt measuretype; 65 PetscInt smoothtype; 66 PetscInt smoothnumlevels; 67
PetscInt eu_level; /* Number of levels for ILU(k) in Euclid */ 68 PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */ 69 PetscInt eu_bj; /* Defines use of Block Jacobi ILU in Euclid */ 70 PetscInt relaxtype[3]; 71 PetscReal relaxweight; 72 PetscReal outerrelaxweight; 73 PetscInt relaxorder; 74 PetscReal truncfactor; 75 PetscBool applyrichardson; 76 PetscInt pmax; 77 PetscInt interptype; 78 PetscInt maxc; 79 PetscInt minc; /* maxc/minc: presumably max/min coarse-grid size limits (HYPRE_BoomerAMGSetMaxCoarseSize / SetMinCoarseSize) — confirm where they are forwarded */ 80 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 81 char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG 82 #endif 83 /* GPU */ 84 PetscBool keeptranspose; 85 PetscInt rap2; 86 PetscInt mod_rap2; 87 88 /* AIR */ 89 PetscInt Rtype; 90 PetscReal Rstrongthreshold; 91 PetscReal Rfilterthreshold; 92 PetscInt Adroptype; 93 PetscReal Adroptol; 94 95 PetscInt agg_nl; 96 PetscInt agg_interptype; 97 PetscInt agg_num_paths; 98 PetscBool nodal_relax; 99 PetscInt nodal_relax_levels; 100 101 PetscInt nodal_coarsening; 102 PetscInt nodal_coarsening_diag; 103 PetscInt vec_interp_variant; 104 PetscInt vec_interp_qmax; 105 PetscBool vec_interp_smooth; 106 PetscInt interp_refine; 107 108 /* NearNullSpace support */ 109 VecHYPRE_IJVector *hmnull; 110 HYPRE_ParVector *phmnull; 111 PetscInt n_hmnull; /* count of filled entries in hmnull/phmnull */ 112 Vec hmnull_constant; /* normalized constant vector appended when the near null space contains constants (built in PCSetUp_HYPRE) */ 113 114 /* options for AS (Auxiliary Space preconditioners) */ 115 PetscInt as_print; 116 PetscInt as_max_iter; 117 PetscReal as_tol; 118 PetscInt as_relax_type; 119 PetscInt as_relax_times; 120 PetscReal as_relax_weight; 121 PetscReal as_omega; 122 PetscInt as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */ 123 PetscReal as_amg_alpha_theta; /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */ 124 PetscInt as_amg_beta_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */ 125 PetscReal as_amg_beta_theta; /* AMG strength for scalar
Poisson (AMS) or vector Poisson (ADS) */ 126 PetscInt ams_cycle_type; 127 PetscInt ads_cycle_type; 128 129 /* additional data */ 130 Mat G; /* MatHYPRE */ 131 Mat C; /* MatHYPRE */ 132 Mat alpha_Poisson; /* MatHYPRE */ 133 Mat beta_Poisson; /* MatHYPRE */ 134 135 /* extra information for AMS */ 136 PetscInt dim; /* geometrical dimension */ 137 VecHYPRE_IJVector coords[3]; 138 VecHYPRE_IJVector constants[3]; 139 VecHYPRE_IJVector interior; 140 Mat RT_PiFull, RT_Pi[3]; 141 Mat ND_PiFull, ND_Pi[3]; 142 PetscBool ams_beta_is_zero; 143 PetscBool ams_beta_is_zero_part; /* when set, gradients are projected out of the solution after each apply (see PCApply_HYPRE) */ 144 PetscInt ams_proj_freq; 145 } PC_HYPRE; 146 /* PCHYPREGetSolver - hands back the raw HYPRE_Solver held by this PC; plain pointer copy, ownership stays with the PC */ 147 PetscErrorCode PCHYPREGetSolver(PC pc, HYPRE_Solver *hsolver) 148 { 149 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 150 151 PetscFunctionBegin; 152 *hsolver = jac->hsolver; 153 PetscFunctionReturn(PETSC_SUCCESS); 154 } 155 156 /* 157 Matrices with AIJ format are created IN PLACE using the (I,J,data) arrays from BoomerAMG. Since the data format in hypre_ParCSRMatrix 158 is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after calling this routine. 159 It is used in PCHMG. Other users should avoid using this function.
160 */ 161 static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[]) 162 { 163 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 164 PetscBool same = PETSC_FALSE; 165 PetscInt num_levels, l; 166 Mat *mattmp; 167 hypre_ParCSRMatrix **A_array; 168 169 PetscFunctionBegin; 170 PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); 171 PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG "); 172 num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver)); 173 PetscCall(PetscMalloc1(num_levels, &mattmp)); 174 A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)(jac->hsolver)); /* hypre stores level operators finest-first; the output array is filled coarsest-first. The finest level (l = 0) is skipped, so mattmp[num_levels-1] is never assigned — callers (PCHMG) must account for this; TODO confirm */ 175 for (l = 1; l < num_levels; l++) { 176 PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[num_levels - 1 - l]))); 177 /* We want to own the data, and HYPRE can not touch this matrix any more */ 178 A_array[l] = NULL; 179 } 180 *nlevels = num_levels; 181 *operators = mattmp; 182 PetscFunctionReturn(PETSC_SUCCESS); 183 } 184 185 /* 186 Matrices with AIJ format are created IN PLACE using the (I,J,data) arrays from BoomerAMG. Since the data format in hypre_ParCSRMatrix 187 is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after calling this routine. 188 It is used in PCHMG. Other users should avoid using this function.
189 */ 190 static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[]) 191 { 192 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 193 PetscBool same = PETSC_FALSE; 194 PetscInt num_levels, l; 195 Mat *mattmp; 196 hypre_ParCSRMatrix **P_array; 197 198 PetscFunctionBegin; 199 PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); 200 PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG "); 201 num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver)); 202 PetscCall(PetscMalloc1(num_levels, &mattmp)); 203 P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)(jac->hsolver)); /* output is ordered coarsest-first; only num_levels-1 interpolations exist, so mattmp[num_levels-1] is left unset */ 204 for (l = 1; l < num_levels; l++) { 205 PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[l - 1]))); 206 /* We want to own the data, and HYPRE can not touch this matrix any more */ 207 P_array[num_levels - 1 - l] = NULL; 208 } 209 *nlevels = num_levels; 210 *interpolations = mattmp; 211 PetscFunctionReturn(PETSC_SUCCESS); 212 } 213 214 /* Resets (frees) Hypre's representation of the near null space */ 215 static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc) 216 { 217 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 218 PetscInt i; 219 220 PetscFunctionBegin; 221 for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i])); 222 PetscCall(PetscFree(jac->hmnull)); 223 PetscCall(PetscFree(jac->phmnull)); 224 PetscCall(VecDestroy(&jac->hmnull_constant)); 225 jac->n_hmnull = 0; 226 PetscFunctionReturn(PETSC_SUCCESS); 227 } 228 /* PCSetUp_HYPRE - converts pc->pmat to MATHYPRE if needed, forwards solver-specific data (near null space for BoomerAMG, gradient/curl/coordinate data for AMS/ADS) to hypre, then calls the solver-specific setup routine */ 229 static PetscErrorCode PCSetUp_HYPRE(PC pc) 230 { 231 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 232 Mat_HYPRE *hjac; 233 HYPRE_ParCSRMatrix hmat; 234 HYPRE_ParVector bv, xv; 235 PetscBool ishypre; 236 237 PetscFunctionBegin; 238 if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg")); /* default to BoomerAMG when the user never picked a hypre type */ 239 240 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre)); 241 if (!ishypre) { 242
PetscCall(MatDestroy(&jac->hpmat)); 243 PetscCall(MatConvert(pc->pmat, MATHYPRE, MAT_INITIAL_MATRIX, &jac->hpmat)); 244 } else { 245 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 246 PetscCall(MatDestroy(&jac->hpmat)); 247 jac->hpmat = pc->pmat; /* pmat is already MATHYPRE: share it (reference taken above) */ 248 } 249 /* allow debug */ 250 PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view")); 251 hjac = (Mat_HYPRE *)(jac->hpmat->data); 252 253 /* special case for BoomerAMG */ 254 if (jac->setup == HYPRE_BoomerAMGSetup) { 255 MatNullSpace mnull; 256 PetscBool has_const; 257 PetscInt bs, nvec, i; 258 const Vec *vecs; 259 260 PetscCall(MatGetBlockSize(pc->pmat, &bs)); 261 if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); 262 PetscCall(MatGetNearNullSpace(pc->mat, &mnull)); 263 if (mnull) { 264 PetscCall(PCHYPREResetNearNullSpace_Private(pc)); 265 PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs)); 266 PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull)); 267 PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull)); /* nvec + 1 slots leave room for the explicit constant vector appended below when has_const is set */ 268 for (i = 0; i < nvec; i++) { 269 PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i])); 270 PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i])); 271 PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]); 272 } 273 if (has_const) { 274 PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL)); 275 PetscCall(VecSet(jac->hmnull_constant, 1)); 276 PetscCall(VecNormalize(jac->hmnull_constant, NULL)); 277 PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec])); 278 PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec])); 279 PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]); 280 nvec++; 281 } 282 PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull); 283 jac->n_hmnull = nvec; 284 } 285 } 286 287 /* special case for AMS */ 288 if (jac->setup == HYPRE_AMSSetup) { 289 Mat_HYPRE *hm; 290
HYPRE_ParCSRMatrix parcsr; 291 if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) { 292 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()"); 293 } 294 if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim); 295 if (jac->constants[0]) { 296 HYPRE_ParVector ozz, zoz, zzo = NULL; /* edge constant vectors; constants[2] is optional and zzo stays NULL when it was never provided */ 297 PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz)); 298 PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz)); 299 if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo)); 300 PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo); 301 } 302 if (jac->coords[0]) { 303 HYPRE_ParVector coords[3]; 304 coords[0] = NULL; 305 coords[1] = NULL; 306 coords[2] = NULL; 307 if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0])); 308 if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1])); 309 if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2])); 310 PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]); 311 } 312 PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient"); 313 hm = (Mat_HYPRE *)(jac->G->data); 314 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 315 PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr); 316 if (jac->alpha_Poisson) { 317 hm = (Mat_HYPRE *)(jac->alpha_Poisson->data); 318 PetscCallExternal(HYPRE_IJMatrixGetObject,
hm->ij, (void **)(&parcsr)); 319 PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr); 320 } 321 if (jac->ams_beta_is_zero) { 322 PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL); 323 } else if (jac->beta_Poisson) { 324 hm = (Mat_HYPRE *)(jac->beta_Poisson->data); 325 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 326 PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr); 327 } else if (jac->ams_beta_is_zero_part) { 328 if (jac->interior) { 329 HYPRE_ParVector interior = NULL; 330 PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior)); 331 PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior); 332 } else { 333 jac->ams_beta_is_zero_part = PETSC_FALSE; /* no interior-node list was provided, so the partial-zero-beta mode cannot be used; quietly disable it */ 334 } 335 } /* ND (presumably Nedelec) interpolation: either the full Pi matrix or per-component pieces may have been supplied */ 336 if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) { 337 PetscInt i; 338 HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3]; 339 if (jac->ND_PiFull) { 340 hm = (Mat_HYPRE *)(jac->ND_PiFull->data); 341 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull)); 342 } else { 343 nd_parcsrfull = NULL; 344 } 345 for (i = 0; i < 3; ++i) { 346 if (jac->ND_Pi[i]) { 347 hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data); 348 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i])); 349 } else { 350 nd_parcsr[i] = NULL; 351 } 352 } 353 PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]); 354 } 355 } 356 /* special case for ADS */ 357 if (jac->setup == HYPRE_ADSSetup) { 358 Mat_HYPRE *hm; 359 HYPRE_ParCSRMatrix parcsr; 360 if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) { 361 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations"); 362 } else
PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead"); /* NOTE(review): this check also triggers when only interpolation matrices and no coordinates were supplied — confirm that is the intent */ 363 PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient"); 364 PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteGradient"); /* NOTE(review): the message mentions PCHYPRESetDiscreteGradient but the missing operator is the curl; it should presumably read PCHYPRESetDiscreteCurl (runtime string left untouched here) */ 365 if (jac->coords[0]) { 366 HYPRE_ParVector coords[3]; 367 coords[0] = NULL; 368 coords[1] = NULL; 369 coords[2] = NULL; 370 if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0])); 371 if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1])); 372 if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2])); 373 PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]); 374 } 375 hm = (Mat_HYPRE *)(jac->G->data); 376 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 377 PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr); 378 hm = (Mat_HYPRE *)(jac->C->data); 379 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 380 PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr); 381 if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) { 382 PetscInt i; 383 HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3]; 384 HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3]; 385 if (jac->RT_PiFull) { 386 hm = (Mat_HYPRE *)(jac->RT_PiFull->data); 387 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull)); 388 } else { 389 rt_parcsrfull = NULL; 390 } 391 for (i = 0; i < 3; ++i) { 392 if (jac->RT_Pi[i]) { 393
hm = (Mat_HYPRE *)(jac->RT_Pi[i]->data); 394 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i])); 395 } else { 396 rt_parcsr[i] = NULL; 397 } 398 } 399 if (jac->ND_PiFull) { 400 hm = (Mat_HYPRE *)(jac->ND_PiFull->data); 401 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull)); 402 } else { 403 nd_parcsrfull = NULL; 404 } 405 for (i = 0; i < 3; ++i) { 406 if (jac->ND_Pi[i]) { 407 hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data); 408 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i])); 409 } else { 410 nd_parcsr[i] = NULL; 411 } 412 } 413 PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]); 414 } 415 } 416 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 417 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv); 418 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv); /* disable FP traps around the external setup call */ 419 PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF)); 420 PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv); 421 PetscCall(PetscFPTrapPop()); 422 PetscFunctionReturn(PETSC_SUCCESS); 423 } 424 /* PCApply_HYPRE - one application of the wrapped hypre solver; x is zeroed first unless applyrichardson is set, in which case x is kept as the initial guess */ 425 static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x) 426 { 427 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 428 Mat_HYPRE *hjac = (Mat_HYPRE *)(jac->hpmat->data); 429 HYPRE_ParCSRMatrix hmat; 430 HYPRE_ParVector jbv, jxv; 431 432 PetscFunctionBegin; 433 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 434 if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0)); 435 PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b)); 436 if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x)); 437 else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x)); 438 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 439 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv); 440 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij,
(void **)&jxv); 441 PetscStackCallExternalVoid( 442 "Hypre solve", do { 443 HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv); 444 if (hierr) { /* HYPRE_ERROR_CONV (no convergence) is tolerated; anything else is fatal */ 445 PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 446 HYPRE_ClearAllErrors(); 447 } 448 } while (0)); 449 450 if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv); 451 PetscCall(VecHYPRE_IJVectorPopVec(hjac->x)); 452 PetscCall(VecHYPRE_IJVectorPopVec(hjac->b)); 453 PetscFunctionReturn(PETSC_SUCCESS); 454 } 455 /* PCMatApply_HYPRE_BoomerAMG - multi-RHS apply: the dense columns of B and X are aliased into hypre multivectors so no copies are made */ 456 static PetscErrorCode PCMatApply_HYPRE_BoomerAMG(PC pc, Mat B, Mat X) 457 { 458 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 459 Mat_HYPRE *hjac = (Mat_HYPRE *)(jac->hpmat->data); 460 hypre_ParCSRMatrix *par_matrix; 461 HYPRE_ParVector hb, hx; 462 const PetscScalar *b; 463 PetscScalar *x; 464 PetscInt m, N, lda; 465 hypre_Vector *x_local; 466 PetscMemType type; 467 468 PetscFunctionBegin; 469 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 470 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&par_matrix); 471 PetscCall(MatGetLocalSize(B, &m, NULL)); 472 PetscCall(MatGetSize(B, NULL, &N)); 473 PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hb); 474 PetscCallExternal(HYPRE_ParMultiVectorCreate, hypre_ParCSRMatrixComm(par_matrix), hypre_ParCSRMatrixGlobalNumRows(par_matrix), hypre_ParCSRMatrixRowStarts(par_matrix), N, &hx); 475 PetscCall(MatZeroEntries(X)); 476 PetscCall(MatDenseGetArrayReadAndMemType(B, &b, &type)); 477 PetscCall(MatDenseGetLDA(B, &lda)); 478 PetscCheck(lda == m, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m); 479 PetscCall(MatDenseGetLDA(X, &lda)); 480 PetscCheck(lda == m,
PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot use a LDA different than the number of local rows: % " PetscInt_FMT " != % " PetscInt_FMT, lda, m); 481 x_local = hypre_ParVectorLocalVector(hb); 482 PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0); /* hypre must not free memory owned by PETSc */ 483 hypre_VectorData(x_local) = (HYPRE_Complex *)b; /* const cast: hb is the right-hand side — assumed to be read-only inside the solve, TODO confirm */ 484 PetscCall(MatDenseGetArrayWriteAndMemType(X, &x, NULL)); 485 x_local = hypre_ParVectorLocalVector(hx); 486 PetscCallExternal(hypre_SeqVectorSetDataOwner, x_local, 0); 487 hypre_VectorData(x_local) = (HYPRE_Complex *)x; 488 PetscCallExternal(hypre_ParVectorInitialize_v2, hb, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE); 489 PetscCallExternal(hypre_ParVectorInitialize_v2, hx, type == PETSC_MEMTYPE_HOST ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE); 490 PetscStackCallExternalVoid( 491 "Hypre solve", do { 492 HYPRE_Int hierr = (*jac->solve)(jac->hsolver, par_matrix, hb, hx); 493 if (hierr) { 494 PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 495 HYPRE_ClearAllErrors(); 496 } 497 } while (0)); 498 PetscCallExternal(HYPRE_ParVectorDestroy, hb); 499 PetscCallExternal(HYPRE_ParVectorDestroy, hx); 500 PetscCall(MatDenseRestoreArrayReadAndMemType(B, &b)); 501 PetscCall(MatDenseRestoreArrayWriteAndMemType(X, &x)); 502 PetscFunctionReturn(PETSC_SUCCESS); 503 } 504 /* PCReset_HYPRE - releases every matrix/vector handed to hypre; the hypre solver object itself survives (it is destroyed in PCDestroy_HYPRE) */ 505 static PetscErrorCode PCReset_HYPRE(PC pc) 506 { 507 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 508 509 PetscFunctionBegin; 510 PetscCall(MatDestroy(&jac->hpmat)); 511 PetscCall(MatDestroy(&jac->G)); 512 PetscCall(MatDestroy(&jac->C)); 513 PetscCall(MatDestroy(&jac->alpha_Poisson)); 514 PetscCall(MatDestroy(&jac->beta_Poisson)); 515 PetscCall(MatDestroy(&jac->RT_PiFull)); 516 PetscCall(MatDestroy(&jac->RT_Pi[0])); 517 PetscCall(MatDestroy(&jac->RT_Pi[1])); 518 PetscCall(MatDestroy(&jac->RT_Pi[2])); 519 PetscCall(MatDestroy(&jac->ND_PiFull)); 520 PetscCall(MatDestroy(&jac->ND_Pi[0])); 521
PetscCall(MatDestroy(&jac->ND_Pi[1])); 522 PetscCall(MatDestroy(&jac->ND_Pi[2])); 523 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0])); 524 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1])); 525 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2])); 526 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0])); 527 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1])); 528 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2])); 529 PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior)); 530 PetscCall(PCHYPREResetNearNullSpace_Private(pc)); 531 jac->ams_beta_is_zero = PETSC_FALSE; 532 jac->ams_beta_is_zero_part = PETSC_FALSE; 533 jac->dim = 0; 534 PetscFunctionReturn(PETSC_SUCCESS); 535 } 536 /* PCDestroy_HYPRE - full teardown: reset, destroy the hypre solver, return the duplicated communicator, and unregister every composed method */ 537 static PetscErrorCode PCDestroy_HYPRE(PC pc) 538 { 539 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 540 541 PetscFunctionBegin; 542 PetscCall(PCReset_HYPRE(pc)); 543 if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver); /* type-specific HYPRE_*Destroy callback, if a type was ever set */ 544 PetscCall(PetscFree(jac->hypre_type)); 545 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 546 PetscCall(PetscFree(jac->spgemm_type)); 547 #endif 548 if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 549 PetscCall(PetscFree(pc->data)); 550 551 PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0)); 552 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL)); 553 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL)); 554 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL)); 555 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL)); 556 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL)); 557 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL)); 558 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL)); 559
PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL)); 560 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL)); 561 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 562 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 563 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL)); 564 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL)); 565 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL)); 566 PetscFunctionReturn(PETSC_SUCCESS); 567 } 568 /* options handler for the Pilut variant; each user-supplied value is forwarded to hypre immediately */ 569 static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems *PetscOptionsObject) 570 { 571 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 572 PetscBool flag; 573 574 PetscFunctionBegin; 575 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options"); 576 PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag)); 577 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter); 578 PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag)); 579 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol); 580 PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag)); 581 if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize); 582 PetscOptionsHeadEnd(); 583 PetscFunctionReturn(PETSC_SUCCESS); 584 } 585 /* ASCII view of the Pilut settings; PETSC_DEFAULT means the hypre built-in default is in effect */ 586 static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer) 587 { 588 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 589 PetscBool iascii; 590 591 PetscFunctionBegin; 592 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 593 if
(iascii) { 594 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Pilut preconditioning\n")); 595 if (jac->maxiter != PETSC_DEFAULT) { 596 PetscCall(PetscViewerASCIIPrintf(viewer, " maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter)); 597 } else { 598 PetscCall(PetscViewerASCIIPrintf(viewer, " default maximum number of iterations \n")); 599 } 600 if (jac->tol != PETSC_DEFAULT) { 601 PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->tol)); 602 } else { 603 PetscCall(PetscViewerASCIIPrintf(viewer, " default drop tolerance \n")); 604 } 605 if (jac->factorrowsize != PETSC_DEFAULT) { 606 PetscCall(PetscViewerASCIIPrintf(viewer, " factor row size %" PetscInt_FMT "\n", jac->factorrowsize)); 607 } else { 608 PetscCall(PetscViewerASCIIPrintf(viewer, " default factor row size \n")); 609 } 610 } 611 PetscFunctionReturn(PETSC_SUCCESS); 612 } 613 /* options handler for the Euclid (ILU(k)) variant */ 614 static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems *PetscOptionsObject) 615 { 616 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 617 PetscBool flag, eu_bj = jac->eu_bj ?
PETSC_TRUE : PETSC_FALSE; 618 619 PetscFunctionBegin; 620 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options"); 621 PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag)); 622 if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level); 623 624 PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag)); 625 if (flag) { 626 PetscMPIInt size; 627 628 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 629 PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance"); /* ILUT drop tolerance is only supported sequentially by Euclid */ 630 PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance); 631 } 632 633 PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag)); 634 if (flag) { 635 jac->eu_bj = eu_bj ? 1 : 0; 636 PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj); 637 } 638 PetscOptionsHeadEnd(); 639 PetscFunctionReturn(PETSC_SUCCESS); 640 } 641 /* ASCII view of the Euclid settings */ 642 static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer) 643 { 644 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 645 PetscBool iascii; 646 647 PetscFunctionBegin; 648 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 649 if (iascii) { 650 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Euclid preconditioning\n")); 651 if (jac->eu_level != PETSC_DEFAULT) { 652 PetscCall(PetscViewerASCIIPrintf(viewer, " factorization levels %" PetscInt_FMT "\n", jac->eu_level)); 653 } else { 654 PetscCall(PetscViewerASCIIPrintf(viewer, " default factorization levels \n")); 655 } 656 PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->eu_droptolerance)); 657 PetscCall(PetscViewerASCIIPrintf(viewer, " use Block-Jacobi? 
%" PetscInt_FMT "\n", jac->eu_bj)); 658 } 659 PetscFunctionReturn(PETSC_SUCCESS); 660 } 661 /* PCApplyTranspose_HYPRE_BoomerAMG - x = M^{-T} b via HYPRE_BoomerAMGSolveT. NOTE(review): b is pushed into hjac->x and x into hjac->b, the opposite pairing of PCApply_HYPRE — verify this matches SolveT's (f, u) argument order */ 662 static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x) 663 { 664 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 665 Mat_HYPRE *hjac = (Mat_HYPRE *)(jac->hpmat->data); 666 HYPRE_ParCSRMatrix hmat; 667 HYPRE_ParVector jbv, jxv; 668 669 PetscFunctionBegin; 670 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 671 PetscCall(VecSet(x, 0.0)); 672 PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->x, b)); 673 PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->b, x)); 674 675 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 676 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv); 677 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv); 678 679 PetscStackCallExternalVoid( 680 "Hypre Transpose solve", do { 681 HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv); 682 if (hierr) { 683 /* error code of 1 in BoomerAMG merely means convergence not achieved */ 684 PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr); 685 HYPRE_ClearAllErrors(); 686 } 687 } while (0)); 688 689 PetscCall(VecHYPRE_IJVectorPopVec(hjac->x)); 690 PetscCall(VecHYPRE_IJVectorPopVec(hjac->b)); 691 PetscFunctionReturn(PETSC_SUCCESS); 692 } 693 /* selects hypre's sparse matrix-matrix product backend (cusparse or hypre) used for Galerkin products; the choice can only be made once (see the PetscCheck below) */ 694 static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[]) 695 { 696 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 697 PetscBool flag; 698 699 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 700 PetscFunctionBegin; 701 if (jac->spgemm_type) { 702 PetscCall(PetscStrcmp(jac->spgemm_type, name, &flag)); 703 PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE SpGEMM (really we can)"); 704 PetscFunctionReturn(PETSC_SUCCESS); 705 } else { 706 PetscCall(PetscStrallocpy(name, &jac->spgemm_type)); 707 } 708 PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flag)); 709 if
(flag) { 710 PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1); 711 PetscFunctionReturn(PETSC_SUCCESS); 712 } 713 PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flag)); 714 if (flag) { 715 PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0); 716 PetscFunctionReturn(PETSC_SUCCESS); 717 } 718 jac->spgemm_type = NULL; /* NOTE(review): the string allocated by PetscStrallocpy above is dropped without PetscFree — this error path leaks */ 719 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEMM type %s; Choices are cusparse, hypre", name); 720 #endif /* NOTE(review): for HYPRE < 2.23 the function body is empty and control falls off the end without a return value — confirm callers are guarded for old hypre */ 721 } 722 723 static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[]) 724 { 725 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 726 727 PetscFunctionBegin; 728 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 729 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 730 *spgemm = jac->spgemm_type; 731 #endif /* NOTE(review): with HYPRE < 2.23, *spgemm is never assigned; callers would read an indeterminate pointer */ 732 PetscFunctionReturn(PETSC_SUCCESS); 733 } 734 /* human-readable names indexed by the corresponding hypre enum values; empty strings mark unused enum slots */ 735 static const char *HYPREBoomerAMGCycleType[] = {"", "V", "W"}; 736 static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"}; 737 static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"}; 738 /* The following corresponds to HYPRE_BoomerAMGSetRelaxType which has many missing numbers in the enum */ 739 static const char *HYPREBoomerAMGSmoothType[] = {"Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"}; 740 static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"}; 741 static const char *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", 
"standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"}; 742 static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems *PetscOptionsObject) 743 { 744 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 745 PetscInt bs, n, indx, level; 746 PetscBool flg, tmp_truth; 747 double tmpdbl, twodbl[2]; 748 const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"}; 749 const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"}; 750 751 PetscFunctionBegin; 752 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options"); 753 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg)); 754 if (flg) { 755 jac->cycletype = indx + 1; 756 PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype); 757 } 758 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg)); 759 if (flg) { 760 PetscCheck(jac->maxlevels >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of levels %" PetscInt_FMT " must be at least two", jac->maxlevels); 761 PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels); 762 } 763 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg)); 764 if (flg) { 765 PetscCheck(jac->maxiter >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of iterations %" PetscInt_FMT " must be at least one", jac->maxiter); 766 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 767 } 768 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg)); 769 if (flg) { 770 PetscCheck(jac->tol >= 0.0, 
PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Tolerance %g must be greater than or equal to zero", (double)jac->tol); 771 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 772 } 773 bs = 1; 774 if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs)); 775 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg)); 776 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); 777 778 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg)); 779 if (flg) { 780 PetscCheck(jac->truncfactor >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Truncation factor %g must be great than or equal zero", (double)jac->truncfactor); 781 PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor); 782 } 783 784 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg)); 785 if (flg) { 786 PetscCheck(jac->pmax >= 0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "P_max %" PetscInt_FMT " must be greater than or equal to zero", jac->pmax); 787 PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax); 788 } 789 790 PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels)); 791 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl); 792 793 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg)); 794 if (flg) { 795 PetscCheck(jac->agg_num_paths >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of paths %" PetscInt_FMT " 
must be greater than or equal to 1", jac->agg_num_paths); 796 PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths); 797 } 798 799 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg)); 800 if (flg) { 801 PetscCheck(jac->strongthreshold >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Strong threshold %g must be great than or equal zero", (double)jac->strongthreshold); 802 PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold); 803 } 804 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg)); 805 if (flg) { 806 PetscCheck(jac->maxrowsum >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be greater than zero", (double)jac->maxrowsum); 807 PetscCheck(jac->maxrowsum <= 1.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be less than or equal one", (double)jac->maxrowsum); 808 PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum); 809 } 810 811 /* Grid sweeps */ 812 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg)); 813 if (flg) { 814 PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx); 815 /* modify the jac structure so we can view the updated options with PC_View */ 816 jac->gridsweeps[0] = indx; 817 jac->gridsweeps[1] = indx; 818 /*defaults coarse to 1 */ 819 jac->gridsweeps[2] = 1; 820 } 821 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg)); 822 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening); 823 
PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg)); 824 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag); 825 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg)); 826 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant); 827 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg)); 828 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax); 829 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg)); 830 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth); 831 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg)); 832 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine); 833 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg)); 834 if (flg) { 835 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1); 836 jac->gridsweeps[0] = indx; 837 } 838 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], 
&indx, &flg)); 839 if (flg) { 840 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2); 841 jac->gridsweeps[1] = indx; 842 } 843 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg)); 844 if (flg) { 845 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3); 846 jac->gridsweeps[2] = indx; 847 } 848 849 /* Smooth type */ 850 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg)); 851 if (flg) { 852 jac->smoothtype = indx; 853 PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 6); 854 jac->smoothnumlevels = 25; 855 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25); 856 } 857 858 /* Number of smoothing levels */ 859 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg)); 860 if (flg && (jac->smoothtype != -1)) { 861 jac->smoothnumlevels = indx; 862 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx); 863 } 864 865 /* Number of levels for ILU(k) for Euclid */ 866 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg)); 867 if (flg && (jac->smoothtype == 3)) { 868 jac->eu_level = indx; 869 PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx); 870 } 871 872 /* Filter for ILU(k) for Euclid */ 873 double droptolerance; 874 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg)); 875 if (flg && (jac->smoothtype == 3)) { 876 jac->eu_droptolerance = droptolerance; 877 PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, 
droptolerance); 878 } 879 880 /* Use Block Jacobi ILUT for Euclid */ 881 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg)); 882 if (flg && (jac->smoothtype == 3)) { 883 jac->eu_bj = tmp_truth; 884 PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj); 885 } 886 887 /* Relax type */ 888 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 889 if (flg) { 890 jac->relaxtype[0] = jac->relaxtype[1] = indx; 891 PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx); 892 /* by default, coarse type set to 9 */ 893 jac->relaxtype[2] = 9; 894 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3); 895 } 896 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 897 if (flg) { 898 jac->relaxtype[0] = indx; 899 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1); 900 } 901 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 902 if (flg) { 903 jac->relaxtype[1] = indx; 904 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2); 905 } 906 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg)); 907 if (flg) { 908 jac->relaxtype[2] = indx; 909 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3); 
  }

  /* Relaxation Weight */
  PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg));
  if (flg) {
    PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl);
    jac->relaxweight = tmpdbl;
  }

  /* per-level relax weight supplied as the pair (weight,level) */
  n         = 2;
  twodbl[0] = twodbl[1] = 1.0;
  PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
  if (flg) {
    PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
    indx = (int)PetscAbsReal(twodbl[1]);
    PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx);
  }

  /* Outer relaxation Weight */
  PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, &tmpdbl, &flg));
  if (flg) {
    PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl);
    jac->outerrelaxweight = tmpdbl;
  }

  n         = 2;
  twodbl[0] = twodbl[1] = 1.0;
  PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg));
  if (flg) {
    PetscCheck(n == 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n);
    indx = (int)PetscAbsReal(twodbl[1]);
    PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx);
  }

  /* the Relax Order */
  PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg));

  if (flg && tmp_truth) {
    jac->relaxorder = 0;
    PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder);
  }
  PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg));
  if (flg) {
    jac->measuretype = indx;
    PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype);
  }
  /* update list length 3/07 */
  PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg));
  if (flg) {
    jac->coarsentype = indx;
    PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype);
  }

  PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg));
  if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc);
  PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg));
  if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc);
#if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
  // global parameter but is closely associated with BoomerAMG
  PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  if (!flg) indx = 0;
  PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx]));
#else
  PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, "hypre"));
#endif
#endif
  /* AIR */
#if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0)
  PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL));
  PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype);
  if (jac->Rtype) {
    jac->interptype = 100; /* no way we can pass this with strings... Set it as default as in MFEM, then users can still customize it back to a different one */

    PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL));
    PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold);

    PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL));
    PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold);

    PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL));
    PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol);

    PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL));
    PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype);
  }
#endif

#if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9)
  /* NOTE(review): the LE(9,9,9) guard is effectively always true -- presumably a
     placeholder until a hypre release supports AIR with aggressive coarsening */
  PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ")", jac->Rtype, jac->agg_nl);
#endif

  /* new 3/07 */
  PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg));
  if (flg || jac->Rtype) {
    if (flg) jac->interptype = indx;
    PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype);
  }

  PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", &flg));
  if (flg) {
    level = 3;
    PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL));

    jac->printstatistics = PETSC_TRUE;
    PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level);
  }

  PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg));
  if (flg) {
    level = 3;
    PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL));

    jac->printstatistics = PETSC_TRUE;
    PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level);
  }

  PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg));
  if (flg && tmp_truth) {
    PetscInt tmp_int;
    PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg));
    if (flg) jac->nodal_relax_levels = tmp_int;
    PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6);
    PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1);
    PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0);
    PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels);
  }

  PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL));
  PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 1 : 0);

  /* options for ParaSails solvers */
  PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg));
  if (flg) {
    jac->symt = indx;
    PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt);
  }

  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Run its*maxiter BoomerAMG iterations as a Richardson smoother; the solver's
   tolerance/max-iteration settings are temporarily overridden and restored on exit. */
static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  HYPRE_Int oits;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter);
  PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol);
  /* flag tells PCApply_HYPRE that a nonzero error code from hypre is acceptable here */
  jac->applyrichardson = PETSC_TRUE;
  PetscCall(PCApply_HYPRE(pc, b, y));
  jac->applyrichardson = PETSC_FALSE;
  PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits);
  *outits = oits;
  if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS;
  else *reason = PCRICHARDSON_CONVERGED_RTOL;
  /* restore the user-configured tolerance and iteration count */
  PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol);
  PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Print the current BoomerAMG configuration to an ASCII viewer */
static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII,
&iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE BoomerAMG preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Convergence tolerance PER hypre call %g\n", (double)jac->tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for strong coupling %g\n", (double)jac->strongthreshold));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation truncation factor %g\n", (double)jac->truncfactor));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax));
    if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths));

    PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum row sums %g\n", (double)jac->maxrowsum));

    PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps down %" PetscInt_FMT "\n", jac->gridsweeps[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps up %" PetscInt_FMT "\n", jac->gridsweeps[1]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps on coarse %" PetscInt_FMT "\n", jac->gridsweeps[2]));

    /* relax types are decoded through the shared name table */
    PetscCall(PetscViewerASCIIPrintf(viewer, " Relax down %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Relax up %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Relax on coarse %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]]));

    PetscCall(PetscViewerASCIIPrintf(viewer, " Relax weight (all) %g\n", (double)jac->relaxweight));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Outer relax weight (all) %g\n", (double)jac->outerrelaxweight));

    if (jac->relaxorder) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using CF-relaxation\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Not using CF-relaxation\n"));
    }
    if (jac->smoothtype != -1) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth type %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype]));
      PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth num levels %" PetscInt_FMT "\n", jac->smoothnumlevels));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Not using more complex smoothers.\n"));
    }
    /* smoothtype 3 == Euclid (see HYPREBoomerAMGSmoothType) */
    if (jac->smoothtype == 3) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level));
      PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance));
      PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, " Measure type %s\n", HYPREBoomerAMGMeasureType[jac->measuretype]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsen type %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype]));
    /* interptype 100 is the AIR "1pt" interpolation which has no table entry */
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation type %s\n", jac->interptype != 100 ? HYPREBoomerAMGInterpType[jac->interptype] : "1pt"));
    if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal coarsening with HYPRE_BOOMERAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening));
    if (jac->vec_interp_variant) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant));
      PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax));
      PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth));
    }
    if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels));
#if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
    PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", jac->spgemm_type));
#else
    PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", "hypre"));
#endif
    /* AIR */
    if (jac->Rtype) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype));
      PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for R %g\n", (double)jac->Rstrongthreshold));
      PetscCall(PetscViewerASCIIPrintf(viewer, " Filter for R %g\n", (double)jac->Rfilterthreshold));
      PetscCall(PetscViewerASCIIPrintf(viewer, " A drop tolerance %g\n", (double)jac->Adroptol));
      PetscCall(PetscViewerASCIIPrintf(viewer, " A drop type %" PetscInt_FMT "\n", jac->Adroptype));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Process runtime options for the ParaSails sparse approximate inverse preconditioner */
static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
  PetscInt    indx;
  PetscBool   flag;
  const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"};

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options");
  PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of number of levels", "None", jac->nlevels, &jac->nlevels, 0));
  /* threshold and nlevels are forwarded together since hypre takes them in one call */
  PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag));
  if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels);

  PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag));
  if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter);

  PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag));
  if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal);

  /* logging and ruse are PetscInt fields used as booleans, hence the casts */
  PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag));
  if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging);

  PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag));
  if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse);

  PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag));
  if (flag) {
    jac->symt = indx;
    PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt);
  }

  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Print the current ParaSails configuration to an ASCII viewer */
static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer)
{
  PC_HYPRE   *jac = (PC_HYPRE *)pc->data;
  PetscBool   iascii;
  const char *symt = 0;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ParaSails preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " nlevels %" PetscInt_FMT "\n", jac->nlevels));
    PetscCall(PetscViewerASCIIPrintf(viewer, " threshold %g\n", (double)jac->threshold));
    PetscCall(PetscViewerASCIIPrintf(viewer, " filter %g\n", (double)jac->filter));
    PetscCall(PetscViewerASCIIPrintf(viewer, " load balance %g\n", (double)jac->loadbal));
    PetscCall(PetscViewerASCIIPrintf(viewer, " reuse nonzero structure %s\n", PetscBools[jac->ruse]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " print info to screen %s\n", PetscBools[jac->logging]));
    /* decode the symmetry option (same encoding as symtlist in the options routine) */
    if (!jac->symt) symt = "nonsymmetric matrix and preconditioner";
    else if (jac->symt == 1) symt = "SPD matrix and preconditioner";
    else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner";
    else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt);
    PetscCall(PetscViewerASCIIPrintf(viewer, " %s\n", symt));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Process runtime options for the AMS (Auxiliary-space Maxwell Solver) preconditioner */
static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscInt  n;
  PetscBool flag, flag2, flag3, flag4;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options");
  PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag));
  if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print);
  PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag));
  if (flag)
    PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter);
  PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag));
  if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type);
  PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
  if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol);
  /* the four smoothing parameters go to hypre in one call when any of them changed */
  PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
  PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
  PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
  PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
  if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
  /* AMG settings for the vector Poisson (alpha) subsolver */
  PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
  n = 5;
  PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2));
  if (flag || flag2) {
    PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */
                      jac->as_amg_alpha_opts[1],                                            /* AMG agg_levels */
                      jac->as_amg_alpha_opts[2],                                            /* AMG relax_type */
                      jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],                   /* AMG interp_type */
                      jac->as_amg_alpha_opts[4]);                                           /* AMG Pmax */
  }
  /* AMG settings for the scalar Poisson (beta) subsolver */
  PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
  n = 5;
  PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2));
  if (flag || flag2) {
    PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
                      jac->as_amg_beta_opts[1],                                           /* AMG agg_levels */
                      jac->as_amg_beta_opts[2],                                           /* AMG relax_type */
                      jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],                   /* AMG interp_type */
                      jac->as_amg_beta_opts[4]);                                          /* AMG Pmax */
  }
  PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag));
  if (flag) { /* override HYPRE's default only if the options is used */
    PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq);
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Print the current AMS configuration to an ASCII viewer */
static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE AMS preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
    PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n",
(double)jac->as_tol)); 1270 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type)); 1271 PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times)); 1272 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight)); 1273 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega)); 1274 if (jac->alpha_Poisson) { 1275 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (passed in by user)\n")); 1276 } else { 1277 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (computed) \n")); 1278 } 1279 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0])); 1280 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1])); 1281 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2])); 1282 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3])); 1283 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4])); 1284 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta)); 1285 if (!jac->ams_beta_is_zero) { 1286 if (jac->beta_Poisson) { 1287 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (passed in by user)\n")); 1288 } else { 1289 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (computed) \n")); 1290 } 1291 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0])); 1292 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", 
jac->as_amg_beta_opts[1])); 1293 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2])); 1294 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3])); 1295 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4])); 1296 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta)); 1297 if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, " compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq)); 1298 } else { 1299 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver not used (zero-conductivity everywhere) \n")); 1300 } 1301 } 1302 PetscFunctionReturn(PETSC_SUCCESS); 1303 } 1304 1305 static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems *PetscOptionsObject) 1306 { 1307 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1308 PetscInt n; 1309 PetscBool flag, flag2, flag3, flag4; 1310 1311 PetscFunctionBegin; 1312 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options"); 1313 PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag)); 1314 if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print); 1315 PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag)); 1316 if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter); 1317 PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag)); 1318 if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type); 1319 
/* (continuation of PCSetFromOptions_HYPRE_ADS) remaining ADS options */
PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag));
  if (flag) PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol);
  /* the four smoother settings are forwarded to hypre together if any one of them was set */
  PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag));
  PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2));
  PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3));
  PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4));
  if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega);
  /* options for the AMS solver nested inside ADS */
  PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag));
  n = 5;
  PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2));
  PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3));
  if (flag || flag2 || flag3) {
    PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */
                      jac->as_amg_alpha_opts[0],                                 /* AMG coarsen type */
                      jac->as_amg_alpha_opts[1],                                 /* AMG agg_levels */
                      jac->as_amg_alpha_opts[2],                                 /* AMG relax_type */
                      jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3],        /* AMG interp_type */
                      jac->as_amg_alpha_opts[4]);                                /* AMG Pmax */
  }
  /* options for the vector AMG solver nested inside ADS */
  PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag));
  n = 5;
  PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2));
  if (flag || flag2) {
    PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */
                      jac->as_amg_beta_opts[1],                                       /* AMG agg_levels */
                      jac->as_amg_beta_opts[2],                                       /* AMG relax_type */
                      jac->as_amg_beta_theta, jac->as_amg_beta_opts[3],               /* AMG interp_type */
                      jac->as_amg_beta_opts[4]);                                      /* AMG Pmax */
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Print the ADS preconditioner configuration (subspace solver, smoother, and the
   nested AMS / vector AMG sub-solver settings) to an ASCII viewer */
static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ADS preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter));
    PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times));
    PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight));
    PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega));
    /* settings of the nested AMS solver */
    PetscCall(PetscViewerASCIIPrintf(viewer, " AMS solver using boomerAMG\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type));
    PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_alpha_theta));
    /* settings of the nested vector Poisson AMG solver */
    PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver using boomerAMG\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_beta_theta));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Store the discrete gradient matrix G in the PC context, converting it to
   MATHYPRE if needed (continues past this block) */
static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre));
if (ishypre) {
    /* already a MATHYPRE: just take a reference, dropping any previously stored G */
    PetscCall(PetscObjectReference((PetscObject)G));
    PetscCall(MatDestroy(&jac->G));
    jac->G = G;
  } else {
    /* convert a copy to MATHYPRE; the user's matrix is untouched */
    PetscCall(MatDestroy(&jac->G));
    PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, &jac->G));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads

  Collective

  Input Parameters:
+ pc - the preconditioning context
- G - the discrete gradient

  Level: intermediate

  Notes:
  G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh

  Each row of G has 2 nonzeros, with column indexes being the global indexes of edge's endpoints: matrix entries are +1 and -1 depending on edge orientation

  Developer Note:
  This automatically converts the matrix to `MATHYPRE` if it is not already of that type

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteCurl()`
@*/
PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(G, MAT_CLASSID, 2);
  PetscCheckSameComm(pc, 1, G, 2);
  PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Store the discrete curl matrix C in the PC context, converting it to MATHYPRE if needed */
static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre));
  if (ishypre) {
    /* already a MATHYPRE: just take a reference, dropping any previously stored C */
    PetscCall(PetscObjectReference((PetscObject)C));
    PetscCall(MatDestroy(&jac->C));
    jac->C = C;
  } else {
    PetscCall(MatDestroy(&jac->C));
    PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHYPRESetDiscreteCurl - Set discrete curl matrix for `PCHYPRE` type of ads

  Collective

  Input Parameters:
+ pc - the preconditioning context
- C - the discrete curl

  Level: intermediate

  Notes:
  C should have as many rows as the number of faces and as many columns as the number of edges in the mesh

  Each row of C has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge: matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation

  Developer Note:
  This automatically converts the matrix to `MATHYPRE` if it is not already of that type

  If this is only for `PCHYPRE` type of ads it should be called `PCHYPREADSSetDiscreteCurl()`

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`
@*/
PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(C, MAT_CLASSID, 2);
  PetscCheckSameComm(pc, 1, C, 2);
  PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Store the Raviart-Thomas and Nedelec interpolation matrices in the PC context,
   releasing any previously stored ones and converting each to MATHYPRE if needed */
static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool ishypre;
  PetscInt i;
  PetscFunctionBegin;

  /* drop any previously stored interpolations */
  PetscCall(MatDestroy(&jac->RT_PiFull));
  PetscCall(MatDestroy(&jac->ND_PiFull));
  for (i = 0; i < 3; ++i) {
    PetscCall(MatDestroy(&jac->RT_Pi[i]));
    PetscCall(MatDestroy(&jac->ND_Pi[i]));
  }

  jac->dim = dim;
  if (RT_PiFull) {
    PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre));
    if (ishypre) {
      PetscCall(PetscObjectReference((PetscObject)RT_PiFull));
      jac->RT_PiFull = RT_PiFull;
    } else {
      PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull));
    }
  }
  if (RT_Pi) {
    for (i = 0; i < dim; ++i) {
      if (RT_Pi[i]) {
        PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre));
        if (ishypre) {
          PetscCall(PetscObjectReference((PetscObject)RT_Pi[i]));
          jac->RT_Pi[i] = RT_Pi[i];
        } else {
          PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i]));
        }
      }
    }
  }
  if (ND_PiFull) {
    PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre));
    if (ishypre) {
      PetscCall(PetscObjectReference((PetscObject)ND_PiFull));
      jac->ND_PiFull = ND_PiFull;
    } else {
      PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull));
    }
  }
  if (ND_Pi) {
    for (i = 0; i < dim; ++i) {
      if (ND_Pi[i]) {
        PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre));
        if (ishypre) {
          PetscCall(PetscObjectReference((PetscObject)ND_Pi[i]));
          jac->ND_Pi[i] = ND_Pi[i];
        } else {
          PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i]));
        }
      }
    }
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads

  Collective

  Input Parameters:
+ pc - the preconditioning context
. dim - the dimension of the problem, only used in AMS
. RT_PiFull - Raviart-Thomas interpolation matrix
. RT_Pi - x/y/z component of Raviart-Thomas interpolation matrix
. ND_PiFull - Nedelec interpolation matrix
- ND_Pi - x/y/z component of Nedelec interpolation matrix

  Level: intermediate

  Notes:
  For AMS, only Nedelec interpolation matrices are needed, the Raviart-Thomas interpolation matrices can be set to NULL.

  For ADS, both type of interpolation matrices are needed.

  Developer Note:
  This automatically converts the matrix to `MATHYPRE` if it is not already of that type

.seealso: `PCHYPRE`
@*/
PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* each matrix argument is optional; validate only the ones that were provided */
  if (RT_PiFull) {
    PetscValidHeaderSpecific(RT_PiFull, MAT_CLASSID, 3);
    PetscCheckSameComm(pc, 1, RT_PiFull, 3);
  }
  if (RT_Pi) {
    PetscValidPointer(RT_Pi, 4);
    for (i = 0; i < dim; ++i) {
      if (RT_Pi[i]) {
        PetscValidHeaderSpecific(RT_Pi[i], MAT_CLASSID, 4);
        PetscCheckSameComm(pc, 1, RT_Pi[i], 4);
      }
    }
  }
  if (ND_PiFull) {
    PetscValidHeaderSpecific(ND_PiFull, MAT_CLASSID, 5);
    PetscCheckSameComm(pc, 1, ND_PiFull, 5);
  }
  if (ND_Pi) {
    PetscValidPointer(ND_Pi, 6);
    for (i = 0; i < dim; ++i) {
      if (ND_Pi[i]) {
        PetscValidHeaderSpecific(ND_Pi[i], MAT_CLASSID, 6);
        PetscCheckSameComm(pc, 1, ND_Pi[i], 6);
      }
    }
  }
  PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Store the alpha (vector, isalpha=PETSC_TRUE) or beta (scalar) Poisson matrix in
   the PC context, converting to MATHYPRE if needed; a NULL beta matrix marks the
   beta problem as identically zero (zero conductivity) */
static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  if (ishypre) {
    if (isalpha) {
      PetscCall(PetscObjectReference((PetscObject)A));
      PetscCall(MatDestroy(&jac->alpha_Poisson));
      jac->alpha_Poisson = A;
    } else {
      if (A) {
        PetscCall(PetscObjectReference((PetscObject)A));
      } else {
        jac->ams_beta_is_zero = PETSC_TRUE;
      }
      PetscCall(MatDestroy(&jac->beta_Poisson));
      jac->beta_Poisson = A;
    }
  } else {
    if (isalpha) {
      PetscCall(MatDestroy(&jac->alpha_Poisson));
      PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson));
    } else {
      if (A) {
        PetscCall(MatDestroy(&jac->beta_Poisson));
        PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson));
      } else {
        PetscCall(MatDestroy(&jac->beta_Poisson));
        jac->ams_beta_is_zero = PETSC_TRUE;
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams

  Collective

  Input Parameters:
+ pc - the preconditioning context
- A - the matrix

  Level: intermediate

  Note:
  A should be obtained by discretizing the vector valued Poisson problem with linear finite elements

  Developer Note:
  This automatically converts the matrix to `MATHYPRE` if it is not already of that type

  If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetAlphaPoissonMatrix()`

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()`
@*/
PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(A, MAT_CLASSID, 2);
  PetscCheckSameComm(pc, 1, A, 2);
  PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams

  Collective

  Input Parameters:
+ pc - the preconditioning context
- A - the matrix, or NULL to turn it off

  Level: intermediate

  Note:
  A should be obtained by discretizing the Poisson problem with linear finite elements.

  Developer Note:
  This automatically converts the matrix to `MATHYPRE` if it is not already of that type

  If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetBetaPoissonMatrix()`

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
@*/
PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (A) {
    PetscValidHeaderSpecific(A, MAT_CLASSID, 2);
    PetscCheckSameComm(pc, 1, A, 2);
  }
  PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy the edge-basis constant vectors into hypre IJ vectors stored in the PC
   context; zzo may be NULL in 2D, which also sets jac->dim accordingly */
static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  /* throw away any vector if already set */
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
  PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0]));
  PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0]));
  PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1]));
  PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1]));
  jac->dim = 2;
  if (zzo) {
    PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2]));
    PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2]));
    jac->dim++;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams

  Collective

  Input Parameters:
+ pc - the preconditioning context
. ozz - vector representing (1,0,0) (or (1,0) in 2D)
. zoz - vector representing (0,1,0) (or (0,1) in 2D)
- zzo - vector representing (0,0,1) (use NULL in 2D)

  Level: intermediate

  Developer Note:
  If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetEdgeConstantVectors()`

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
@*/
PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ozz, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(zoz, VEC_CLASSID, 3);
  if (zzo) PetscValidHeaderSpecific(zzo, VEC_CLASSID, 4);
  PetscCheckSameComm(pc, 1, ozz, 2);
  PetscCheckSameComm(pc, 1, zoz, 3);
  if (zzo) PetscCheckSameComm(pc, 1, zzo, 4);
  PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Copy the interior-node indicator vector into a hypre IJ vector and flag that a
   compatible-subspace projection will be needed (partial zero conductivity) */
static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
  PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior));
  PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior));
  jac->ams_beta_is_zero_part = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type ams

  Collective

  Input Parameters:
+ pc - the preconditioning context
- interior - vector. node is interior if its entry in the array is 1.0.

  Level: intermediate

  Note:
  This calls `HYPRE_AMSSetInteriorNodes()`

  Developer Note:
  If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetInteriorNodes()`

.seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()`
@*/
PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(interior, VEC_CLASSID, 2);
  PetscCheckSameComm(pc, 1, interior, 2);
  PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Split the interleaved coordinate array (dim entries per node) into one hypre IJ
   vector per spatial dimension, stored in jac->coords[] for use by AMS/ADS */
static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
{
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  Vec tv;
  PetscInt i;

  PetscFunctionBegin;
  /* throw away any coordinate vector if already set */
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
  jac->dim = dim;

  /* compute IJ vector for coordinates */
  PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv));
  PetscCall(VecSetType(tv, VECSTANDARD));
  PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE));
  for (i = 0; i < dim; i++) {
    PetscScalar *array;
    PetscInt j;

    PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i]));
    PetscCall(VecGetArrayWrite(tv, &array));
    /* gather the i-th component of every node's coordinates into the work vector */
    for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i];
    PetscCall(VecRestoreArrayWrite(tv, &array));
    PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i]));
  }
  PetscCall(VecDestroy(&tv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Return the string name of the hypre solver in use (continues past this block) */
static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[])
{
  PC_HYPRE *jac = (PC_HYPRE
*)pc->data; 1842 1843 PetscFunctionBegin; 1844 *name = jac->hypre_type; 1845 PetscFunctionReturn(PETSC_SUCCESS); 1846 } 1847 1848 static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[]) 1849 { 1850 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1851 PetscBool flag; 1852 1853 PetscFunctionBegin; 1854 if (jac->hypre_type) { 1855 PetscCall(PetscStrcmp(jac->hypre_type, name, &flag)); 1856 PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE preconditioner type once it has been set"); 1857 PetscFunctionReturn(PETSC_SUCCESS); 1858 } else { 1859 PetscCall(PetscStrallocpy(name, &jac->hypre_type)); 1860 } 1861 1862 jac->maxiter = PETSC_DEFAULT; 1863 jac->tol = PETSC_DEFAULT; 1864 jac->printstatistics = PetscLogPrintInfo; 1865 1866 PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag)); 1867 if (flag) { 1868 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1869 PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver); 1870 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut; 1871 pc->ops->view = PCView_HYPRE_Pilut; 1872 jac->destroy = HYPRE_ParCSRPilutDestroy; 1873 jac->setup = HYPRE_ParCSRPilutSetup; 1874 jac->solve = HYPRE_ParCSRPilutSolve; 1875 jac->factorrowsize = PETSC_DEFAULT; 1876 PetscFunctionReturn(PETSC_SUCCESS); 1877 } 1878 PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag)); 1879 if (flag) { 1880 #if defined(PETSC_USE_64BIT_INDICES) 1881 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64-bit indices"); 1882 #endif 1883 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1884 PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver); 1885 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid; 1886 pc->ops->view = PCView_HYPRE_Euclid; 1887 jac->destroy = HYPRE_EuclidDestroy; 1888 jac->setup = HYPRE_EuclidSetup; 1889 jac->solve = HYPRE_EuclidSolve; 1890 jac->factorrowsize = 
PETSC_DEFAULT; 1891 jac->eu_level = PETSC_DEFAULT; /* default */ 1892 PetscFunctionReturn(PETSC_SUCCESS); 1893 } 1894 PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag)); 1895 if (flag) { 1896 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1897 PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver); 1898 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails; 1899 pc->ops->view = PCView_HYPRE_ParaSails; 1900 jac->destroy = HYPRE_ParaSailsDestroy; 1901 jac->setup = HYPRE_ParaSailsSetup; 1902 jac->solve = HYPRE_ParaSailsSolve; 1903 /* initialize */ 1904 jac->nlevels = 1; 1905 jac->threshold = .1; 1906 jac->filter = .1; 1907 jac->loadbal = 0; 1908 if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE; 1909 else jac->logging = (int)PETSC_FALSE; 1910 1911 jac->ruse = (int)PETSC_FALSE; 1912 jac->symt = 0; 1913 PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels); 1914 PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter); 1915 PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal); 1916 PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging); 1917 PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse); 1918 PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt); 1919 PetscFunctionReturn(PETSC_SUCCESS); 1920 } 1921 PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag)); 1922 if (flag) { 1923 PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver); 1924 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_BoomerAMG; 1925 pc->ops->view = PCView_HYPRE_BoomerAMG; 1926 pc->ops->applytranspose = PCApplyTranspose_HYPRE_BoomerAMG; 1927 pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG; 1928 pc->ops->matapply = PCMatApply_HYPRE_BoomerAMG; 1929 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG)); 1930 
PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG)); 1931 jac->destroy = HYPRE_BoomerAMGDestroy; 1932 jac->setup = HYPRE_BoomerAMGSetup; 1933 jac->solve = HYPRE_BoomerAMGSolve; 1934 jac->applyrichardson = PETSC_FALSE; 1935 /* these defaults match the hypre defaults */ 1936 jac->cycletype = 1; 1937 jac->maxlevels = 25; 1938 jac->maxiter = 1; 1939 jac->tol = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */ 1940 jac->truncfactor = 0.0; 1941 jac->strongthreshold = .25; 1942 jac->maxrowsum = .9; 1943 jac->coarsentype = 6; 1944 jac->measuretype = 0; 1945 jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1; 1946 jac->smoothtype = -1; /* Not set by default */ 1947 jac->smoothnumlevels = 25; 1948 jac->eu_level = 0; 1949 jac->eu_droptolerance = 0; 1950 jac->eu_bj = 0; 1951 jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */ 1952 jac->relaxtype[2] = 9; /*G.E. 
*/ 1953 jac->relaxweight = 1.0; 1954 jac->outerrelaxweight = 1.0; 1955 jac->relaxorder = 1; 1956 jac->interptype = 0; 1957 jac->Rtype = 0; 1958 jac->Rstrongthreshold = 0.25; 1959 jac->Rfilterthreshold = 0.0; 1960 jac->Adroptype = -1; 1961 jac->Adroptol = 0.0; 1962 jac->agg_nl = 0; 1963 jac->agg_interptype = 4; 1964 jac->pmax = 0; 1965 jac->truncfactor = 0.0; 1966 jac->agg_num_paths = 1; 1967 jac->maxc = 9; 1968 jac->minc = 1; 1969 jac->nodal_coarsening = 0; 1970 jac->nodal_coarsening_diag = 0; 1971 jac->vec_interp_variant = 0; 1972 jac->vec_interp_qmax = 0; 1973 jac->vec_interp_smooth = PETSC_FALSE; 1974 jac->interp_refine = 0; 1975 jac->nodal_relax = PETSC_FALSE; 1976 jac->nodal_relax_levels = 1; 1977 jac->rap2 = 0; 1978 1979 /* GPU defaults 1980 from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options 1981 and /src/parcsr_ls/par_amg.c */ 1982 #if defined(PETSC_HAVE_HYPRE_DEVICE) 1983 jac->keeptranspose = PETSC_TRUE; 1984 jac->mod_rap2 = 1; 1985 jac->coarsentype = 8; 1986 jac->relaxorder = 0; 1987 jac->interptype = 6; 1988 jac->relaxtype[0] = 18; 1989 jac->relaxtype[1] = 18; 1990 jac->agg_interptype = 7; 1991 #else 1992 jac->keeptranspose = PETSC_FALSE; 1993 jac->mod_rap2 = 0; 1994 #endif 1995 PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype); 1996 PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels); 1997 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 1998 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 1999 PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor); 2000 PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold); 2001 PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum); 2002 PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype); 2003 PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype); 
2004 PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder); 2005 PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype); 2006 PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl); 2007 PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype); 2008 PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax); 2009 PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths); 2010 PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]); /* defaults coarse to 9 */ 2011 PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */ 2012 PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc); 2013 PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc); 2014 /* GPU */ 2015 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 2016 PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 
1 : 0); 2017 PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2); 2018 PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2); 2019 #endif 2020 2021 /* AIR */ 2022 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 2023 PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype); 2024 PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold); 2025 PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold); 2026 PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol); 2027 PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype); 2028 #endif 2029 PetscFunctionReturn(PETSC_SUCCESS); 2030 } 2031 PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag)); 2032 if (flag) { 2033 PetscCallExternal(HYPRE_AMSCreate, &jac->hsolver); 2034 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS; 2035 pc->ops->view = PCView_HYPRE_AMS; 2036 jac->destroy = HYPRE_AMSDestroy; 2037 jac->setup = HYPRE_AMSSetup; 2038 jac->solve = HYPRE_AMSSolve; 2039 jac->coords[0] = NULL; 2040 jac->coords[1] = NULL; 2041 jac->coords[2] = NULL; 2042 jac->interior = NULL; 2043 /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE AMS */ 2044 jac->as_print = 0; 2045 jac->as_max_iter = 1; /* used as a preconditioner */ 2046 jac->as_tol = 0.; /* used as a preconditioner */ 2047 jac->ams_cycle_type = 13; 2048 /* Smoothing options */ 2049 jac->as_relax_type = 2; 2050 jac->as_relax_times = 1; 2051 jac->as_relax_weight = 1.0; 2052 jac->as_omega = 1.0; 2053 /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2054 jac->as_amg_alpha_opts[0] = 10; 2055 jac->as_amg_alpha_opts[1] = 1; 2056 jac->as_amg_alpha_opts[2] = 6; 2057 jac->as_amg_alpha_opts[3] = 6; 2058 jac->as_amg_alpha_opts[4] = 4; 2059 jac->as_amg_alpha_theta = 0.25; 2060 /* Scalar Poisson AMG solver parameters: 
coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2061 jac->as_amg_beta_opts[0] = 10; 2062 jac->as_amg_beta_opts[1] = 1; 2063 jac->as_amg_beta_opts[2] = 6; 2064 jac->as_amg_beta_opts[3] = 6; 2065 jac->as_amg_beta_opts[4] = 4; 2066 jac->as_amg_beta_theta = 0.25; 2067 PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print); 2068 PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter); 2069 PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type); 2070 PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol); 2071 PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 2072 PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 2073 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 2074 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 2075 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 2076 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 2077 PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 2078 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 2079 jac->as_amg_beta_opts[2], /* AMG relax_type */ 2080 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 2081 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 2082 /* Zero conductivity */ 2083 jac->ams_beta_is_zero = PETSC_FALSE; 2084 jac->ams_beta_is_zero_part = PETSC_FALSE; 2085 PetscFunctionReturn(PETSC_SUCCESS); 2086 } 2087 PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag)); 2088 if (flag) { 2089 PetscCallExternal(HYPRE_ADSCreate, &jac->hsolver); 2090 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS; 2091 pc->ops->view = PCView_HYPRE_ADS; 2092 jac->destroy = HYPRE_ADSDestroy; 2093 jac->setup = HYPRE_ADSSetup; 2094 jac->solve = HYPRE_ADSSolve; 2095 jac->coords[0] = NULL; 2096 jac->coords[1] = NULL; 2097 jac->coords[2] = NULL; 
2098 /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE ADS */ 2099 jac->as_print = 0; 2100 jac->as_max_iter = 1; /* used as a preconditioner */ 2101 jac->as_tol = 0.; /* used as a preconditioner */ 2102 jac->ads_cycle_type = 13; 2103 /* Smoothing options */ 2104 jac->as_relax_type = 2; 2105 jac->as_relax_times = 1; 2106 jac->as_relax_weight = 1.0; 2107 jac->as_omega = 1.0; 2108 /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2109 jac->ams_cycle_type = 14; 2110 jac->as_amg_alpha_opts[0] = 10; 2111 jac->as_amg_alpha_opts[1] = 1; 2112 jac->as_amg_alpha_opts[2] = 6; 2113 jac->as_amg_alpha_opts[3] = 6; 2114 jac->as_amg_alpha_opts[4] = 4; 2115 jac->as_amg_alpha_theta = 0.25; 2116 /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2117 jac->as_amg_beta_opts[0] = 10; 2118 jac->as_amg_beta_opts[1] = 1; 2119 jac->as_amg_beta_opts[2] = 6; 2120 jac->as_amg_beta_opts[3] = 6; 2121 jac->as_amg_beta_opts[4] = 4; 2122 jac->as_amg_beta_theta = 0.25; 2123 PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print); 2124 PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter); 2125 PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type); 2126 PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol); 2127 PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 2128 PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMG coarsen type */ 2129 jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 2130 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 2131 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 2132 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 2133 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 2134 PetscCallExternal(HYPRE_ADSSetAMGOptions, 
jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 2135 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 2136 jac->as_amg_beta_opts[2], /* AMG relax_type */ 2137 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 2138 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 2139 PetscFunctionReturn(PETSC_SUCCESS); 2140 } 2141 PetscCall(PetscFree(jac->hypre_type)); 2142 2143 jac->hypre_type = NULL; 2144 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are euclid, pilut, parasails, boomeramg, ams", name); 2145 } 2146 2147 /* 2148 It only gets here if the HYPRE type has not been set before the call to 2149 ...SetFromOptions() which actually is most of the time 2150 */ 2151 PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptionsObject) 2152 { 2153 PetscInt indx; 2154 const char *type[] = {"euclid", "pilut", "parasails", "boomeramg", "ams", "ads"}; 2155 PetscBool flg; 2156 2157 PetscFunctionBegin; 2158 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options"); 2159 PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg)); 2160 if (flg) { 2161 PetscCall(PCHYPRESetType_HYPRE(pc, type[indx])); 2162 } else { 2163 PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg")); 2164 } 2165 PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject); 2166 PetscOptionsHeadEnd(); 2167 PetscFunctionReturn(PETSC_SUCCESS); 2168 } 2169 2170 /*@C 2171 PCHYPRESetType - Sets which hypre preconditioner you wish to use 2172 2173 Input Parameters: 2174 + pc - the preconditioner context 2175 - name - either euclid, pilut, parasails, boomeramg, ams, ads 2176 2177 Options Database Key: 2178 -pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads 2179 2180 Level: intermediate 2181 2182 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE` 2183 @*/ 2184 PetscErrorCode 
PCHYPRESetType(PC pc, const char name[]) 2185 { 2186 PetscFunctionBegin; 2187 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2188 PetscValidCharPointer(name, 2); 2189 PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name)); 2190 PetscFunctionReturn(PETSC_SUCCESS); 2191 } 2192 2193 /*@C 2194 PCHYPREGetType - Gets which hypre preconditioner you are using 2195 2196 Input Parameter: 2197 . pc - the preconditioner context 2198 2199 Output Parameter: 2200 . name - either euclid, pilut, parasails, boomeramg, ams, ads 2201 2202 Level: intermediate 2203 2204 .seealso: `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE` 2205 @*/ 2206 PetscErrorCode PCHYPREGetType(PC pc, const char *name[]) 2207 { 2208 PetscFunctionBegin; 2209 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2210 PetscValidPointer(name, 2); 2211 PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name)); 2212 PetscFunctionReturn(PETSC_SUCCESS); 2213 } 2214 2215 /*@C 2216 PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs 2217 2218 Logically Collective 2219 2220 Input Parameters: 2221 + pc - the hypre context 2222 - type - one of 'cusparse', 'hypre' 2223 2224 Options Database Key: 2225 . -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre 2226 2227 Level: intermediate 2228 2229 Developer Note: 2230 How the name starts with `PCMG`, should it not be `PCHYPREBoomerAMG`? 2231 2232 .seealso: `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()` 2233 @*/ 2234 PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[]) 2235 { 2236 PetscFunctionBegin; 2237 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2238 PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name)); 2239 PetscFunctionReturn(PETSC_SUCCESS); 2240 } 2241 2242 /*@C 2243 PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs 2244 2245 Not Collective 2246 2247 Input Parameter: 2248 . 
pc - the multigrid context 2249 2250 Output Parameter: 2251 . name - one of 'cusparse', 'hypre' 2252 2253 Level: intermediate 2254 2255 .seealso: `PCHYPRE`, ``PCMGGalerkinSetMatProductAlgorithm()` 2256 @*/ 2257 PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[]) 2258 { 2259 PetscFunctionBegin; 2260 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2261 PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name)); 2262 PetscFunctionReturn(PETSC_SUCCESS); 2263 } 2264 2265 /*MC 2266 PCHYPRE - Allows you to use the matrix element based preconditioners in the LLNL package hypre as PETSc `PC` 2267 2268 Options Database Keys: 2269 + -pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads 2270 . -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BOOMERAMGSetNodal()`) 2271 . -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`) 2272 - Many others, run with -pc_type hypre -pc_hypre_type XXX -help to see options for the XXX preconditioner 2273 2274 Level: intermediate 2275 2276 Notes: 2277 Apart from pc_hypre_type (for which there is `PCHYPRESetType()`), 2278 the many hypre options can ONLY be set via the options database (e.g. the command line 2279 or with `PetscOptionsSetValue()`, there are no functions to set them) 2280 2281 The options -pc_hypre_boomeramg_max_iter and -pc_hypre_boomeramg_tol refer to the number of iterations 2282 (V-cycles) and tolerance that boomeramg does EACH time it is called. So for example, if 2283 -pc_hypre_boomeramg_max_iter is set to 2 then 2-V-cycles are being used to define the preconditioner 2284 (-pc_hypre_boomeramg_tol should be set to 0.0 - the default - to strictly use a fixed number of 2285 iterations per hypre call). -ksp_max_it and -ksp_rtol STILL determine the total number of iterations 2286 and tolerance for the Krylov solver. 
For example, if -pc_hypre_boomeramg_max_iter is 2 and -ksp_max_it is 10 2287 then AT MOST twenty V-cycles of boomeramg will be called. 2288 2289 Note that the option -pc_hypre_boomeramg_relax_type_all defaults to symmetric relaxation 2290 (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry. 2291 Otherwise, you may want to use -pc_hypre_boomeramg_relax_type_all SOR/Jacobi. 2292 If you wish to use BoomerAMG WITHOUT a Krylov method use -ksp_type richardson NOT -ksp_type preonly 2293 and use -ksp_max_it to control the number of V-cycles. 2294 (see the PETSc FAQ.html at the PETSc website under the Documentation tab). 2295 2296 `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use 2297 the following two options: ``-pc_hypre_boomeramg_nodal_coarsen <n> -pc_hypre_boomeramg_vec_interp_variant <v>`` 2298 2299 See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers 2300 2301 For `PCHYPRE` type of ams or ads auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`, 2302 `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`, 2303 `PCHYPREAMSSetInteriorNodes()` 2304 2305 PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems 2306 2307 GPU Notes: 2308 To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda 2309 Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers. 
2310 2311 To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip 2312 Then pass `VECHIP` vectors to the solvers and PETSc will automatically utilize hypre's GPU solvers. 2313 2314 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`, 2315 `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`, 2316 PCHYPREAMSSetInteriorNodes() 2317 M*/ 2318 2319 PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc) 2320 { 2321 PC_HYPRE *jac; 2322 2323 PetscFunctionBegin; 2324 PetscCall(PetscNew(&jac)); 2325 2326 pc->data = jac; 2327 pc->ops->reset = PCReset_HYPRE; 2328 pc->ops->destroy = PCDestroy_HYPRE; 2329 pc->ops->setfromoptions = PCSetFromOptions_HYPRE; 2330 pc->ops->setup = PCSetUp_HYPRE; 2331 pc->ops->apply = PCApply_HYPRE; 2332 jac->comm_hypre = MPI_COMM_NULL; 2333 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE)); 2334 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE)); 2335 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE)); 2336 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE)); 2337 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE)); 2338 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE)); 2339 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE)); 2340 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE)); 2341 
PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE)); 2342 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG)); 2343 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG)); 2344 #if defined(PETSC_HAVE_HYPRE_DEVICE) 2345 #if defined(HYPRE_USING_HIP) 2346 PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP)); 2347 #endif 2348 #if defined(HYPRE_USING_CUDA) 2349 PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA)); 2350 #endif 2351 #endif 2352 PetscFunctionReturn(PETSC_SUCCESS); 2353 } 2354 2355 typedef struct { 2356 MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */ 2357 HYPRE_StructSolver hsolver; 2358 2359 /* keep copy of PFMG options used so may view them */ 2360 PetscInt its; 2361 double tol; 2362 PetscInt relax_type; 2363 PetscInt rap_type; 2364 PetscInt num_pre_relax, num_post_relax; 2365 PetscInt max_levels; 2366 PetscInt skip_relax; 2367 PetscBool print_statistics; 2368 } PC_PFMG; 2369 2370 PetscErrorCode PCDestroy_PFMG(PC pc) 2371 { 2372 PC_PFMG *ex = (PC_PFMG *)pc->data; 2373 2374 PetscFunctionBegin; 2375 if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver); 2376 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2377 PetscCall(PetscFree(pc->data)); 2378 PetscFunctionReturn(PETSC_SUCCESS); 2379 } 2380 2381 static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"}; 2382 static const char *PFMGRAPType[] = {"Galerkin", "non-Galerkin"}; 2383 2384 PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer) 2385 { 2386 PetscBool iascii; 2387 PC_PFMG *ex = (PC_PFMG *)pc->data; 2388 2389 PetscFunctionBegin; 2390 
PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2391 if (iascii) { 2392 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE PFMG preconditioning\n")); 2393 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2394 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2395 PetscCall(PetscViewerASCIIPrintf(viewer, " relax type %s\n", PFMGRelaxType[ex->relax_type])); 2396 PetscCall(PetscViewerASCIIPrintf(viewer, " RAP type %s\n", PFMGRAPType[ex->rap_type])); 2397 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2398 PetscCall(PetscViewerASCIIPrintf(viewer, " max levels %" PetscInt_FMT "\n", ex->max_levels)); 2399 PetscCall(PetscViewerASCIIPrintf(viewer, " skip relax %" PetscInt_FMT "\n", ex->skip_relax)); 2400 } 2401 PetscFunctionReturn(PETSC_SUCCESS); 2402 } 2403 2404 PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems *PetscOptionsObject) 2405 { 2406 PC_PFMG *ex = (PC_PFMG *)pc->data; 2407 2408 PetscFunctionBegin; 2409 PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options"); 2410 PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL)); 2411 PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL)); 2412 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its); 2413 PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2414 PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 2415 PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", 
"HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2416 PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 2417 2418 PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels, NULL)); 2419 PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels); 2420 2421 PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL)); 2422 PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol); 2423 PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL)); 2424 PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type); 2425 PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL)); 2426 PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type); 2427 PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. 
This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL)); 2428 PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax); 2429 PetscOptionsHeadEnd(); 2430 PetscFunctionReturn(PETSC_SUCCESS); 2431 } 2432 2433 PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y) 2434 { 2435 PC_PFMG *ex = (PC_PFMG *)pc->data; 2436 PetscScalar *yy; 2437 const PetscScalar *xx; 2438 PetscInt ilower[3], iupper[3]; 2439 HYPRE_Int hlower[3], hupper[3]; 2440 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2441 2442 PetscFunctionBegin; 2443 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2444 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 2445 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 2446 iupper[0] += ilower[0] - 1; 2447 iupper[1] += ilower[1] - 1; 2448 iupper[2] += ilower[2] - 1; 2449 hlower[0] = (HYPRE_Int)ilower[0]; 2450 hlower[1] = (HYPRE_Int)ilower[1]; 2451 hlower[2] = (HYPRE_Int)ilower[2]; 2452 hupper[0] = (HYPRE_Int)iupper[0]; 2453 hupper[1] = (HYPRE_Int)iupper[1]; 2454 hupper[2] = (HYPRE_Int)iupper[2]; 2455 2456 /* copy x values over to hypre */ 2457 PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0); 2458 PetscCall(VecGetArrayRead(x, &xx)); 2459 PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx); 2460 PetscCall(VecRestoreArrayRead(x, &xx)); 2461 PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb); 2462 PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2463 2464 /* copy solution values back to PETSc */ 2465 PetscCall(VecGetArray(y, &yy)); 2466 PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy); 2467 PetscCall(VecRestoreArray(y, &yy)); 2468 PetscFunctionReturn(PETSC_SUCCESS); 2469 } 2470 2471 static 
PetscErrorCode PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 2472 { 2473 PC_PFMG *jac = (PC_PFMG *)pc->data; 2474 HYPRE_Int oits; 2475 2476 PetscFunctionBegin; 2477 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2478 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its); 2479 PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol); 2480 2481 PetscCall(PCApply_PFMG(pc, b, y)); 2482 PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits); 2483 *outits = oits; 2484 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2485 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2486 PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol); 2487 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its); 2488 PetscFunctionReturn(PETSC_SUCCESS); 2489 } 2490 2491 PetscErrorCode PCSetUp_PFMG(PC pc) 2492 { 2493 PC_PFMG *ex = (PC_PFMG *)pc->data; 2494 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2495 PetscBool flg; 2496 2497 PetscFunctionBegin; 2498 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg)); 2499 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner"); 2500 2501 /* create the hypre solver object and set its information */ 2502 if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver); 2503 PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver); 2504 2505 // Print Hypre statistics about the solve process 2506 if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3); 2507 2508 // The hypre options must be repeated here because the StructPFMG was destroyed and recreated 2509 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its); 2510 
PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 2511 PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 2512 PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels); 2513 PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol); 2514 PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type); 2515 PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type); 2516 2517 PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2518 PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver); 2519 PetscFunctionReturn(PETSC_SUCCESS); 2520 } 2521 2522 /*MC 2523 PCPFMG - the hypre PFMG multigrid solver 2524 2525 Options Database Keys: 2526 + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner 2527 . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve 2528 . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve 2529 . -pc_pfmg_tol <tol> - tolerance of PFMG 2530 . -pc_pfmg_relax_type - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel 2531 . -pc_pfmg_rap_type - type of coarse matrix generation, one of Galerkin,non-Galerkin 2532 - -pc_pfmg_skip_relax - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations 2533 when the underlying problem is isotropic, one of 0,1 2534 2535 Level: advanced 2536 2537 Notes: 2538 This is for CELL-centered descretizations 2539 2540 See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG` 2541 2542 See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver 2543 2544 This must be used with the `MATHYPRESTRUCT` matrix type. 
2545 2546 This provides only some of the functionality of PFMG, it supports only one block per process defined by a PETSc `DMDA`. 2547 2548 .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG` 2549 M*/ 2550 2551 PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc) 2552 { 2553 PC_PFMG *ex; 2554 2555 PetscFunctionBegin; 2556 PetscCall(PetscNew(&ex)); 2557 pc->data = ex; 2558 2559 ex->its = 1; 2560 ex->tol = 1.e-8; 2561 ex->relax_type = 1; 2562 ex->rap_type = 0; 2563 ex->num_pre_relax = 1; 2564 ex->num_post_relax = 1; 2565 ex->max_levels = 0; 2566 ex->skip_relax = 0; 2567 ex->print_statistics = PETSC_FALSE; 2568 2569 pc->ops->setfromoptions = PCSetFromOptions_PFMG; 2570 pc->ops->view = PCView_PFMG; 2571 pc->ops->destroy = PCDestroy_PFMG; 2572 pc->ops->apply = PCApply_PFMG; 2573 pc->ops->applyrichardson = PCApplyRichardson_PFMG; 2574 pc->ops->setup = PCSetUp_PFMG; 2575 2576 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2577 PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver); 2578 PetscFunctionReturn(PETSC_SUCCESS); 2579 } 2580 2581 /* we know we are working with a HYPRE_SStructMatrix */ 2582 typedef struct { 2583 MPI_Comm hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */ 2584 HYPRE_SStructSolver ss_solver; 2585 2586 /* keep copy of SYSPFMG options used so may view them */ 2587 PetscInt its; 2588 double tol; 2589 PetscInt relax_type; 2590 PetscInt num_pre_relax, num_post_relax; 2591 } PC_SysPFMG; 2592 2593 PetscErrorCode PCDestroy_SysPFMG(PC pc) 2594 { 2595 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2596 2597 PetscFunctionBegin; 2598 if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver); 2599 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2600 PetscCall(PetscFree(pc->data)); 2601 PetscFunctionReturn(PETSC_SUCCESS); 2602 } 2603 2604 static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", 
"Red/Black-Gauss-Seidel"}; 2605 2606 PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer) 2607 { 2608 PetscBool iascii; 2609 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2610 2611 PetscFunctionBegin; 2612 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2613 if (iascii) { 2614 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SysPFMG preconditioning\n")); 2615 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2616 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2617 PetscCall(PetscViewerASCIIPrintf(viewer, " relax type %s\n", PFMGRelaxType[ex->relax_type])); 2618 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2619 } 2620 PetscFunctionReturn(PETSC_SUCCESS); 2621 } 2622 2623 PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems *PetscOptionsObject) 2624 { 2625 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2626 PetscBool flg = PETSC_FALSE; 2627 2628 PetscFunctionBegin; 2629 PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options"); 2630 PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL)); 2631 if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3); 2632 PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL)); 2633 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its); 2634 PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2635 PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax); 2636 PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing 
steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2637 PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, ex->ss_solver, ex->num_post_relax); 2638 2639 PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL)); 2640 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol); 2641 PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL)); 2642 PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type); 2643 PetscOptionsHeadEnd(); 2644 PetscFunctionReturn(PETSC_SUCCESS); 2645 } 2646 2647 PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y) 2648 { 2649 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2650 PetscScalar *yy; 2651 const PetscScalar *xx; 2652 PetscInt ilower[3], iupper[3]; 2653 HYPRE_Int hlower[3], hupper[3]; 2654 Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(pc->pmat->data); 2655 PetscInt ordering = mx->dofs_order; 2656 PetscInt nvars = mx->nvars; 2657 PetscInt part = 0; 2658 PetscInt size; 2659 PetscInt i; 2660 2661 PetscFunctionBegin; 2662 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2663 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 2664 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 2665 iupper[0] += ilower[0] - 1; 2666 iupper[1] += ilower[1] - 1; 2667 iupper[2] += ilower[2] - 1; 2668 hlower[0] = (HYPRE_Int)ilower[0]; 2669 hlower[1] = (HYPRE_Int)ilower[1]; 2670 hlower[2] = (HYPRE_Int)ilower[2]; 2671 hupper[0] = (HYPRE_Int)iupper[0]; 2672 hupper[1] = (HYPRE_Int)iupper[1]; 2673 hupper[2] = (HYPRE_Int)iupper[2]; 2674 2675 size = 1; 2676 for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1); 2677 2678 /* copy x 
values over to hypre for variable ordering */ 2679 if (ordering) { 2680 PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0); 2681 PetscCall(VecGetArrayRead(x, &xx)); 2682 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i))); 2683 PetscCall(VecRestoreArrayRead(x, &xx)); 2684 PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b); 2685 PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x); 2686 PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 2687 2688 /* copy solution values back to PETSc */ 2689 PetscCall(VecGetArray(y, &yy)); 2690 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i))); 2691 PetscCall(VecRestoreArray(y, &yy)); 2692 } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */ 2693 PetscScalar *z; 2694 PetscInt j, k; 2695 2696 PetscCall(PetscMalloc1(nvars * size, &z)); 2697 PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0); 2698 PetscCall(VecGetArrayRead(x, &xx)); 2699 2700 /* transform nodal to hypre's variable ordering for sys_pfmg */ 2701 for (i = 0; i < size; i++) { 2702 k = i * nvars; 2703 for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j]; 2704 } 2705 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i))); 2706 PetscCall(VecRestoreArrayRead(x, &xx)); 2707 PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b); 2708 PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 2709 2710 /* copy solution values back to PETSc */ 2711 PetscCall(VecGetArray(y, &yy)); 2712 for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, 
(HYPRE_Complex *)(z + (size * i))); 2713 /* transform hypre's variable ordering for sys_pfmg to nodal ordering */ 2714 for (i = 0; i < size; i++) { 2715 k = i * nvars; 2716 for (j = 0; j < nvars; j++) yy[k + j] = z[j * size + i]; 2717 } 2718 PetscCall(VecRestoreArray(y, &yy)); 2719 PetscCall(PetscFree(z)); 2720 } 2721 PetscFunctionReturn(PETSC_SUCCESS); 2722 } 2723 2724 static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 2725 { 2726 PC_SysPFMG *jac = (PC_SysPFMG *)pc->data; 2727 HYPRE_Int oits; 2728 2729 PetscFunctionBegin; 2730 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2731 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its); 2732 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol); 2733 PetscCall(PCApply_SysPFMG(pc, b, y)); 2734 PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits); 2735 *outits = oits; 2736 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2737 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2738 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol); 2739 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its); 2740 PetscFunctionReturn(PETSC_SUCCESS); 2741 } 2742 2743 PetscErrorCode PCSetUp_SysPFMG(PC pc) 2744 { 2745 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2746 Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(pc->pmat->data); 2747 PetscBool flg; 2748 2749 PetscFunctionBegin; 2750 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg)); 2751 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner"); 2752 2753 /* create the hypre sstruct solver object and set its information */ 2754 if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver); 2755 
PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver); 2756 PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver); 2757 PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 2758 PetscFunctionReturn(PETSC_SUCCESS); 2759 } 2760 2761 /*MC 2762 PCSYSPFMG - the hypre SysPFMG multigrid solver 2763 2764 Level: advanced 2765 2766 Options Database Keys: 2767 + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner 2768 . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid 2769 . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid 2770 . -pc_syspfmg_tol <tol> - tolerance of SysPFMG 2771 - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles 2772 2773 Notes: 2774 See `PCPFMG` for hypre's PFMG that works for a scalar PDE and `PCSMG` 2775 2776 See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver 2777 2778 This is for CELL-centered descretizations 2779 2780 This must be used with the `MATHYPRESSTRUCT` matrix type. 2781 2782 This does not give access to all the functionality of hypres SysPFMG, it supports only one part, and one block per process defined by a PETSc `DMDA`. 
2783 2784 .seealso: `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG` 2785 M*/ 2786 2787 PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc) 2788 { 2789 PC_SysPFMG *ex; 2790 2791 PetscFunctionBegin; 2792 PetscCall(PetscNew(&ex)); 2793 pc->data = ex; 2794 2795 ex->its = 1; 2796 ex->tol = 1.e-8; 2797 ex->relax_type = 1; 2798 ex->num_pre_relax = 1; 2799 ex->num_post_relax = 1; 2800 2801 pc->ops->setfromoptions = PCSetFromOptions_SysPFMG; 2802 pc->ops->view = PCView_SysPFMG; 2803 pc->ops->destroy = PCDestroy_SysPFMG; 2804 pc->ops->apply = PCApply_SysPFMG; 2805 pc->ops->applyrichardson = PCApplyRichardson_SysPFMG; 2806 pc->ops->setup = PCSetUp_SysPFMG; 2807 2808 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2809 PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver); 2810 PetscFunctionReturn(PETSC_SUCCESS); 2811 } 2812 2813 /* PC SMG */ 2814 typedef struct { 2815 MPI_Comm hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */ 2816 HYPRE_StructSolver hsolver; 2817 PetscInt its; /* keep copy of SMG options used so may view them */ 2818 double tol; 2819 PetscBool print_statistics; 2820 PetscInt num_pre_relax, num_post_relax; 2821 } PC_SMG; 2822 2823 PetscErrorCode PCDestroy_SMG(PC pc) 2824 { 2825 PC_SMG *ex = (PC_SMG *)pc->data; 2826 2827 PetscFunctionBegin; 2828 if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver); 2829 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2830 PetscCall(PetscFree(pc->data)); 2831 PetscFunctionReturn(PETSC_SUCCESS); 2832 } 2833 2834 PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer) 2835 { 2836 PetscBool iascii; 2837 PC_SMG *ex = (PC_SMG *)pc->data; 2838 2839 PetscFunctionBegin; 2840 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2841 if (iascii) { 2842 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SMG preconditioning\n")); 2843 
PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2844 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2845 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2846 } 2847 PetscFunctionReturn(PETSC_SUCCESS); 2848 } 2849 2850 PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems *PetscOptionsObject) 2851 { 2852 PC_SMG *ex = (PC_SMG *)pc->data; 2853 2854 PetscFunctionBegin; 2855 PetscOptionsHeadBegin(PetscOptionsObject, "SMG options"); 2856 2857 PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL)); 2858 PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2859 PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2860 PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL)); 2861 2862 PetscOptionsHeadEnd(); 2863 PetscFunctionReturn(PETSC_SUCCESS); 2864 } 2865 2866 PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y) 2867 { 2868 PC_SMG *ex = (PC_SMG *)pc->data; 2869 PetscScalar *yy; 2870 const PetscScalar *xx; 2871 PetscInt ilower[3], iupper[3]; 2872 HYPRE_Int hlower[3], hupper[3]; 2873 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2874 2875 PetscFunctionBegin; 2876 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2877 PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2])); 2878 /* when HYPRE_MIXEDINT is defined, sizeof(HYPRE_Int) == 32 */ 2879 iupper[0] += ilower[0] - 1; 2880 iupper[1] += ilower[1] - 1; 2881 iupper[2] += ilower[2] - 
1; 2882 hlower[0] = (HYPRE_Int)ilower[0]; 2883 hlower[1] = (HYPRE_Int)ilower[1]; 2884 hlower[2] = (HYPRE_Int)ilower[2]; 2885 hupper[0] = (HYPRE_Int)iupper[0]; 2886 hupper[1] = (HYPRE_Int)iupper[1]; 2887 hupper[2] = (HYPRE_Int)iupper[2]; 2888 2889 /* copy x values over to hypre */ 2890 PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0); 2891 PetscCall(VecGetArrayRead(x, &xx)); 2892 PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx); 2893 PetscCall(VecRestoreArrayRead(x, &xx)); 2894 PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb); 2895 PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2896 2897 /* copy solution values back to PETSc */ 2898 PetscCall(VecGetArray(y, &yy)); 2899 PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy); 2900 PetscCall(VecRestoreArray(y, &yy)); 2901 PetscFunctionReturn(PETSC_SUCCESS); 2902 } 2903 2904 static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 2905 { 2906 PC_SMG *jac = (PC_SMG *)pc->data; 2907 HYPRE_Int oits; 2908 2909 PetscFunctionBegin; 2910 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2911 PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its); 2912 PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol); 2913 2914 PetscCall(PCApply_SMG(pc, b, y)); 2915 PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits); 2916 *outits = oits; 2917 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2918 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2919 PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol); 2920 PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its); 2921 PetscFunctionReturn(PETSC_SUCCESS); 2922 } 2923 2924 PetscErrorCode PCSetUp_SMG(PC pc) 2925 { 2926 
PetscInt i, dim; 2927 PC_SMG *ex = (PC_SMG *)pc->data; 2928 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2929 PetscBool flg; 2930 DMBoundaryType p[3]; 2931 PetscInt M[3]; 2932 2933 PetscFunctionBegin; 2934 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg)); 2935 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner"); 2936 2937 PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0)); 2938 // Check if power of 2 in periodic directions 2939 for (i = 0; i < dim; i++) { 2940 if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) { 2941 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]); 2942 } 2943 } 2944 2945 /* create the hypre solver object and set its information */ 2946 if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, (ex->hsolver)); 2947 PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver); 2948 // The hypre options must be set here and not in SetFromOptions because it is created here! 2949 PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its); 2950 PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 2951 PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 2952 PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol); 2953 2954 PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2955 PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver); 2956 PetscFunctionReturn(PETSC_SUCCESS); 2957 } 2958 2959 /*MC 2960 PCSMG - the hypre (structured grid) SMG multigrid solver 2961 2962 Level: advanced 2963 2964 Options Database Keys: 2965 + -pc_smg_its <its> - number of iterations of SMG to use as preconditioner 2966 . 
-pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid 2967 . -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid 2968 - -pc_smg_tol <tol> - tolerance of SMG 2969 2970 Notes: 2971 This is for CELL-centered descretizations 2972 2973 This must be used with the `MATHYPRESTRUCT` `MatType`. 2974 2975 This does not provide all the functionality of hypre's SMG solver, it supports only one block per process defined by a PETSc `DMDA`. 2976 2977 See `PCSYSPFMG`, `PCSMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners 2978 2979 .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG` 2980 M*/ 2981 2982 PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc) 2983 { 2984 PC_SMG *ex; 2985 2986 PetscFunctionBegin; 2987 PetscCall(PetscNew(&ex)); 2988 pc->data = ex; 2989 2990 ex->its = 1; 2991 ex->tol = 1.e-8; 2992 ex->num_pre_relax = 1; 2993 ex->num_post_relax = 1; 2994 2995 pc->ops->setfromoptions = PCSetFromOptions_SMG; 2996 pc->ops->view = PCView_SMG; 2997 pc->ops->destroy = PCDestroy_SMG; 2998 pc->ops->apply = PCApply_SMG; 2999 pc->ops->applyrichardson = PCApplyRichardson_SMG; 3000 pc->ops->setup = PCSetUp_SMG; 3001 3002 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 3003 PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver); 3004 PetscFunctionReturn(PETSC_SUCCESS); 3005 } 3006