/*
   Provides an interface to the LLNL package hypre
*/

#include <petscpkg_version.h>
#include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
/* this include is needed ONLY to allow access to the private data inside the Mat object specific to hypre */
#include <petsc/private/matimpl.h>
#include <petsc/private/vecimpl.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/dm/impls/da/hypre/mhyp.h>
#include <_hypre_parcsr_ls.h>
#include <petscmathypre.h>

#if defined(PETSC_HAVE_HYPRE_DEVICE)
#include <petsc/private/deviceimpl.h>
#endif

/* registered once per run via PetscCitationsRegister() in the apply routines */
static PetscBool cite = PETSC_FALSE;
static const char hypreCitation[] = "@manual{hypre-web-page,\n title = {{\\sl hypre}: High Performance Preconditioners},\n organization = {Lawrence Livermore National Laboratory},\n note = "
                                    "{\\url{https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods}}\n}\n";

/*
   Private context (data structure) for the preconditioner.
   One context serves all hypre preconditioner flavors (Pilut, ParaSails,
   Euclid, BoomerAMG, AMS, ADS); only the fields relevant to the selected
   type are used.
*/
typedef struct {
  HYPRE_Solver hsolver; /* underlying hypre solver handle */
  Mat          hpmat;   /* MatHYPRE */

  /* dispatch table filled in when the hypre type is selected */
  HYPRE_Int (*destroy)(HYPRE_Solver);
  HYPRE_Int (*solve)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);
  HYPRE_Int (*setup)(HYPRE_Solver, HYPRE_ParCSRMatrix, HYPRE_ParVector, HYPRE_ParVector);

  MPI_Comm comm_hypre; /* duplicated communicator handed to hypre; restored in PCDestroy */
  char    *hypre_type; /* name of the selected hypre preconditioner, e.g. "boomeramg" */

  /* options for Pilut and BoomerAMG*/
  PetscInt  maxiter;
  PetscReal tol;

  /* options for Pilut */
  PetscInt factorrowsize;

  /* options for ParaSails */
  PetscInt  nlevels;
  PetscReal threshold;
  PetscReal filter;
  PetscReal loadbal;
  PetscInt  logging;
  PetscInt  ruse;
  PetscInt  symt;

  /* options for BoomerAMG */
  PetscBool printstatistics;

  /* options for BoomerAMG */
  PetscInt  cycletype;
  PetscInt  maxlevels;
  PetscReal strongthreshold;
  PetscReal maxrowsum;
  PetscInt  gridsweeps[3]; /* [0]=down, [1]=up, [2]=coarse — see -pc_hypre_boomeramg_grid_sweeps_* */
  PetscInt  coarsentype;
  PetscInt  measuretype;
  PetscInt  smoothtype;
  PetscInt  smoothnumlevels;
  PetscInt  eu_level;         /* Number of levels for ILU(k) in Euclid */
  PetscReal eu_droptolerance; /* Drop tolerance for ILU(k) in Euclid */
  PetscInt  eu_bj;            /* Defines use of Block Jacobi ILU in Euclid */
  PetscInt  relaxtype[3];
  PetscReal relaxweight;
  PetscReal outerrelaxweight;
  PetscInt  relaxorder;
  PetscReal truncfactor;
  PetscBool applyrichardson; /* when true, PCApply runs the hypre solver as a full (Richardson) iteration and does not zero x first */
  PetscInt  pmax;
  PetscInt  interptype;
  PetscInt  maxc;
  PetscInt  minc;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
  char *spgemm_type; // this is a global hypre parameter but is closely associated with BoomerAMG
#endif
  /* GPU */
  PetscBool keeptranspose;
  PetscInt  rap2;
  PetscInt  mod_rap2;

  /* AIR */
  PetscInt  Rtype;
  PetscReal Rstrongthreshold;
  PetscReal Rfilterthreshold;
  PetscInt  Adroptype;
  PetscReal Adroptol;

  PetscInt  agg_nl;
  PetscInt  agg_interptype;
  PetscInt  agg_num_paths;
  PetscBool nodal_relax;
  PetscInt  nodal_relax_levels;

  PetscInt  nodal_coarsening;
  PetscInt  nodal_coarsening_diag;
  PetscInt  vec_interp_variant;
  PetscInt  vec_interp_qmax;
  PetscBool vec_interp_smooth;
  PetscInt  interp_refine;

  /* NearNullSpace support */
  VecHYPRE_IJVector *hmnull;  /* hypre copies of the near-null-space vectors (plus optional constant) */
  HYPRE_ParVector   *phmnull; /* raw hypre handles extracted from hmnull[], passed to BoomerAMG */
  PetscInt           n_hmnull;
  Vec                hmnull_constant;

  /* options for AS (Auxiliary Space preconditioners) */
  PetscInt  as_print;
  PetscInt  as_max_iter;
  PetscReal as_tol;
  PetscInt  as_relax_type;
  PetscInt  as_relax_times;
  PetscReal as_relax_weight;
  PetscReal as_omega;
  PetscInt  as_amg_alpha_opts[5]; /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for vector Poisson (AMS) or Curl problem (ADS) */
  PetscReal as_amg_alpha_theta;   /* AMG strength for vector Poisson (AMS) or Curl problem (ADS) */
  PetscInt  as_amg_beta_opts[5];  /* AMG coarsen type, agg_levels, relax_type, interp_type, Pmax for scalar Poisson (AMS) or vector Poisson (ADS) */
  PetscReal as_amg_beta_theta;    /* AMG strength for scalar Poisson (AMS) or vector Poisson (ADS) */
  PetscInt  ams_cycle_type;
  PetscInt  ads_cycle_type;

  /* additional data */
  Mat G;             /* MatHYPRE */
  Mat C;             /* MatHYPRE */
  Mat alpha_Poisson; /* MatHYPRE */
  Mat beta_Poisson;  /* MatHYPRE */

  /* extra information for AMS */
  PetscInt          dim; /* geometrical dimension */
  VecHYPRE_IJVector coords[3];
  VecHYPRE_IJVector constants[3];
  VecHYPRE_IJVector interior;
  Mat               RT_PiFull, RT_Pi[3];
  Mat               ND_PiFull, ND_Pi[3];
  PetscBool         ams_beta_is_zero;
  PetscBool         ams_beta_is_zero_part;
  PetscInt          ams_proj_freq;
} PC_HYPRE;

/* Returns the raw hypre solver handle wrapped by this PC; the handle remains owned by the PC */
PetscErrorCode PCHYPREGetSolver(PC pc, HYPRE_Solver *hsolver) {
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  *hsolver = jac->hsolver;
  PetscFunctionReturn(0);
}

/*
    Matrices with AIJ format are created IN PLACE with using (I,J,data) from BoomerAMG. Since the data format in hypre_ParCSRMatrix
    is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after call this routine.
    It is used in PCHMG. Other users should avoid using this function.
*/
static PetscErrorCode PCGetCoarseOperators_BoomerAMG(PC pc, PetscInt *nlevels, Mat *operators[]) {
  PC_HYPRE            *jac  = (PC_HYPRE *)pc->data;
  PetscBool            same = PETSC_FALSE;
  PetscInt             num_levels, l;
  Mat                 *mattmp;
  hypre_ParCSRMatrix **A_array;

  PetscFunctionBegin;
  PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same));
  PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG ");
  num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver));
  PetscCall(PetscMalloc1(num_levels, &mattmp));
  A_array = hypre_ParAMGDataAArray((hypre_ParAMGData *)(jac->hsolver));
  /* level 0 (the finest) is skipped; coarse operators are stored coarsest-first,
     so only entries 0 .. num_levels-2 of the output array are filled */
  for (l = 1; l < num_levels; l++) {
    PetscCall(MatCreateFromParCSR(A_array[l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[num_levels - 1 - l])));
    /* We want to own the data, and HYPRE can not touch this matrix any more */
    A_array[l] = NULL;
  }
  *nlevels   = num_levels;
  *operators = mattmp;
  PetscFunctionReturn(0);
}

/*
    Matrices with AIJ format are created IN PLACE with using (I,J,data) from BoomerAMG. Since the data format in hypre_ParCSRMatrix
    is different from that used in PETSc, the original hypre_ParCSRMatrix can not be used any more after call this routine.
    It is used in PCHMG. Other users should avoid using this function.
187 */ 188 static PetscErrorCode PCGetInterpolations_BoomerAMG(PC pc, PetscInt *nlevels, Mat *interpolations[]) { 189 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 190 PetscBool same = PETSC_FALSE; 191 PetscInt num_levels, l; 192 Mat *mattmp; 193 hypre_ParCSRMatrix **P_array; 194 195 PetscFunctionBegin; 196 PetscCall(PetscStrcmp(jac->hypre_type, "boomeramg", &same)); 197 PetscCheck(same, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_NOTSAMETYPE, "Hypre type is not BoomerAMG "); 198 num_levels = hypre_ParAMGDataNumLevels((hypre_ParAMGData *)(jac->hsolver)); 199 PetscCall(PetscMalloc1(num_levels, &mattmp)); 200 P_array = hypre_ParAMGDataPArray((hypre_ParAMGData *)(jac->hsolver)); 201 for (l = 1; l < num_levels; l++) { 202 PetscCall(MatCreateFromParCSR(P_array[num_levels - 1 - l], MATAIJ, PETSC_OWN_POINTER, &(mattmp[l - 1]))); 203 /* We want to own the data, and HYPRE can not touch this matrix any more */ 204 P_array[num_levels - 1 - l] = NULL; 205 } 206 *nlevels = num_levels; 207 *interpolations = mattmp; 208 PetscFunctionReturn(0); 209 } 210 211 /* Resets (frees) Hypre's representation of the near null space */ 212 static PetscErrorCode PCHYPREResetNearNullSpace_Private(PC pc) { 213 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 214 PetscInt i; 215 216 PetscFunctionBegin; 217 for (i = 0; i < jac->n_hmnull; i++) PetscCall(VecHYPRE_IJVectorDestroy(&jac->hmnull[i])); 218 PetscCall(PetscFree(jac->hmnull)); 219 PetscCall(PetscFree(jac->phmnull)); 220 PetscCall(VecDestroy(&jac->hmnull_constant)); 221 jac->n_hmnull = 0; 222 PetscFunctionReturn(0); 223 } 224 225 static PetscErrorCode PCSetUp_HYPRE(PC pc) { 226 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 227 Mat_HYPRE *hjac; 228 HYPRE_ParCSRMatrix hmat; 229 HYPRE_ParVector bv, xv; 230 PetscBool ishypre; 231 232 PetscFunctionBegin; 233 if (!jac->hypre_type) PetscCall(PCHYPRESetType(pc, "boomeramg")); 234 235 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRE, &ishypre)); 236 if (!ishypre) { 237 PetscCall(MatDestroy(&jac->hpmat)); 
238 PetscCall(MatConvert(pc->pmat, MATHYPRE, MAT_INITIAL_MATRIX, &jac->hpmat)); 239 PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)jac->hpmat)); 240 } else { 241 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 242 PetscCall(MatDestroy(&jac->hpmat)); 243 jac->hpmat = pc->pmat; 244 } 245 /* allow debug */ 246 PetscCall(MatViewFromOptions(jac->hpmat, NULL, "-pc_hypre_mat_view")); 247 hjac = (Mat_HYPRE *)(jac->hpmat->data); 248 249 /* special case for BoomerAMG */ 250 if (jac->setup == HYPRE_BoomerAMGSetup) { 251 MatNullSpace mnull; 252 PetscBool has_const; 253 PetscInt bs, nvec, i; 254 const Vec *vecs; 255 256 PetscCall(MatGetBlockSize(pc->pmat, &bs)); 257 if (bs > 1) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); 258 PetscCall(MatGetNearNullSpace(pc->mat, &mnull)); 259 if (mnull) { 260 PetscCall(PCHYPREResetNearNullSpace_Private(pc)); 261 PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs)); 262 PetscCall(PetscMalloc1(nvec + 1, &jac->hmnull)); 263 PetscCall(PetscMalloc1(nvec + 1, &jac->phmnull)); 264 for (i = 0; i < nvec; i++) { 265 PetscCall(VecHYPRE_IJVectorCreate(vecs[i]->map, &jac->hmnull[i])); 266 PetscCall(VecHYPRE_IJVectorCopy(vecs[i], jac->hmnull[i])); 267 PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[i]->ij, (void **)&jac->phmnull[i]); 268 } 269 if (has_const) { 270 PetscCall(MatCreateVecs(pc->pmat, &jac->hmnull_constant, NULL)); 271 PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)jac->hmnull_constant)); 272 PetscCall(VecSet(jac->hmnull_constant, 1)); 273 PetscCall(VecNormalize(jac->hmnull_constant, NULL)); 274 PetscCall(VecHYPRE_IJVectorCreate(jac->hmnull_constant->map, &jac->hmnull[nvec])); 275 PetscCall(VecHYPRE_IJVectorCopy(jac->hmnull_constant, jac->hmnull[nvec])); 276 PetscCallExternal(HYPRE_IJVectorGetObject, jac->hmnull[nvec]->ij, (void **)&jac->phmnull[nvec]); 277 nvec++; 278 } 279 PetscCallExternal(HYPRE_BoomerAMGSetInterpVectors, jac->hsolver, nvec, jac->phmnull); 280 
jac->n_hmnull = nvec; 281 } 282 } 283 284 /* special case for AMS */ 285 if (jac->setup == HYPRE_AMSSetup) { 286 Mat_HYPRE *hm; 287 HYPRE_ParCSRMatrix parcsr; 288 if (!jac->coords[0] && !jac->constants[0] && !(jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) { 289 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs either the coordinate vectors via PCSetCoordinates() or the edge constant vectors via PCHYPRESetEdgeConstantVectors() or the interpolation matrix via PCHYPRESetInterpolations()"); 290 } 291 if (jac->dim) PetscCallExternal(HYPRE_AMSSetDimension, jac->hsolver, jac->dim); 292 if (jac->constants[0]) { 293 HYPRE_ParVector ozz, zoz, zzo = NULL; 294 PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[0]->ij, (void **)(&ozz)); 295 PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[1]->ij, (void **)(&zoz)); 296 if (jac->constants[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->constants[2]->ij, (void **)(&zzo)); 297 PetscCallExternal(HYPRE_AMSSetEdgeConstantVectors, jac->hsolver, ozz, zoz, zzo); 298 } 299 if (jac->coords[0]) { 300 HYPRE_ParVector coords[3]; 301 coords[0] = NULL; 302 coords[1] = NULL; 303 coords[2] = NULL; 304 if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0])); 305 if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1])); 306 if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2])); 307 PetscCallExternal(HYPRE_AMSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]); 308 } 309 PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE AMS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient"); 310 hm = (Mat_HYPRE *)(jac->G->data); 311 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 312 PetscCallExternal(HYPRE_AMSSetDiscreteGradient, jac->hsolver, parcsr); 
313 if (jac->alpha_Poisson) { 314 hm = (Mat_HYPRE *)(jac->alpha_Poisson->data); 315 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 316 PetscCallExternal(HYPRE_AMSSetAlphaPoissonMatrix, jac->hsolver, parcsr); 317 } 318 if (jac->ams_beta_is_zero) { 319 PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, NULL); 320 } else if (jac->beta_Poisson) { 321 hm = (Mat_HYPRE *)(jac->beta_Poisson->data); 322 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 323 PetscCallExternal(HYPRE_AMSSetBetaPoissonMatrix, jac->hsolver, parcsr); 324 } else if (jac->ams_beta_is_zero_part) { 325 if (jac->interior) { 326 HYPRE_ParVector interior = NULL; 327 PetscCallExternal(HYPRE_IJVectorGetObject, jac->interior->ij, (void **)(&interior)); 328 PetscCallExternal(HYPRE_AMSSetInteriorNodes, jac->hsolver, interior); 329 } else { 330 jac->ams_beta_is_zero_part = PETSC_FALSE; 331 } 332 } 333 if (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])) { 334 PetscInt i; 335 HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3]; 336 if (jac->ND_PiFull) { 337 hm = (Mat_HYPRE *)(jac->ND_PiFull->data); 338 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull)); 339 } else { 340 nd_parcsrfull = NULL; 341 } 342 for (i = 0; i < 3; ++i) { 343 if (jac->ND_Pi[i]) { 344 hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data); 345 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i])); 346 } else { 347 nd_parcsr[i] = NULL; 348 } 349 } 350 PetscCallExternal(HYPRE_AMSSetInterpolations, jac->hsolver, nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]); 351 } 352 } 353 /* special case for ADS */ 354 if (jac->setup == HYPRE_ADSSetup) { 355 Mat_HYPRE *hm; 356 HYPRE_ParCSRMatrix parcsr; 357 if (!jac->coords[0] && !((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1])))) { 358 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs either the coordinate 
vectors via PCSetCoordinates() or the interpolation matrices via PCHYPRESetInterpolations"); 359 } else PetscCheck(jac->coords[1] && jac->coords[2], PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner has been designed for three dimensional problems! For two dimensional problems, use HYPRE AMS instead"); 360 PetscCheck(jac->G, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete gradient operator via PCHYPRESetDiscreteGradient"); 361 PetscCheck(jac->C, PetscObjectComm((PetscObject)pc), PETSC_ERR_USER, "HYPRE ADS preconditioner needs the discrete curl operator via PCHYPRESetDiscreteGradient"); 362 if (jac->coords[0]) { 363 HYPRE_ParVector coords[3]; 364 coords[0] = NULL; 365 coords[1] = NULL; 366 coords[2] = NULL; 367 if (jac->coords[0]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[0]->ij, (void **)(&coords[0])); 368 if (jac->coords[1]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[1]->ij, (void **)(&coords[1])); 369 if (jac->coords[2]) PetscCallExternal(HYPRE_IJVectorGetObject, jac->coords[2]->ij, (void **)(&coords[2])); 370 PetscCallExternal(HYPRE_ADSSetCoordinateVectors, jac->hsolver, coords[0], coords[1], coords[2]); 371 } 372 hm = (Mat_HYPRE *)(jac->G->data); 373 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 374 PetscCallExternal(HYPRE_ADSSetDiscreteGradient, jac->hsolver, parcsr); 375 hm = (Mat_HYPRE *)(jac->C->data); 376 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&parcsr)); 377 PetscCallExternal(HYPRE_ADSSetDiscreteCurl, jac->hsolver, parcsr); 378 if ((jac->RT_PiFull || (jac->RT_Pi[0] && jac->RT_Pi[1])) && (jac->ND_PiFull || (jac->ND_Pi[0] && jac->ND_Pi[1]))) { 379 PetscInt i; 380 HYPRE_ParCSRMatrix rt_parcsrfull, rt_parcsr[3]; 381 HYPRE_ParCSRMatrix nd_parcsrfull, nd_parcsr[3]; 382 if (jac->RT_PiFull) { 383 hm = (Mat_HYPRE *)(jac->RT_PiFull->data); 384 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsrfull)); 
385 } else { 386 rt_parcsrfull = NULL; 387 } 388 for (i = 0; i < 3; ++i) { 389 if (jac->RT_Pi[i]) { 390 hm = (Mat_HYPRE *)(jac->RT_Pi[i]->data); 391 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&rt_parcsr[i])); 392 } else { 393 rt_parcsr[i] = NULL; 394 } 395 } 396 if (jac->ND_PiFull) { 397 hm = (Mat_HYPRE *)(jac->ND_PiFull->data); 398 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsrfull)); 399 } else { 400 nd_parcsrfull = NULL; 401 } 402 for (i = 0; i < 3; ++i) { 403 if (jac->ND_Pi[i]) { 404 hm = (Mat_HYPRE *)(jac->ND_Pi[i]->data); 405 PetscCallExternal(HYPRE_IJMatrixGetObject, hm->ij, (void **)(&nd_parcsr[i])); 406 } else { 407 nd_parcsr[i] = NULL; 408 } 409 } 410 PetscCallExternal(HYPRE_ADSSetInterpolations, jac->hsolver, rt_parcsrfull, rt_parcsr[0], rt_parcsr[1], rt_parcsr[2], nd_parcsrfull, nd_parcsr[0], nd_parcsr[1], nd_parcsr[2]); 411 } 412 } 413 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 414 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&bv); 415 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&xv); 416 PetscCallExternal(jac->setup, jac->hsolver, hmat, bv, xv); 417 PetscFunctionReturn(0); 418 } 419 420 static PetscErrorCode PCApply_HYPRE(PC pc, Vec b, Vec x) { 421 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 422 Mat_HYPRE *hjac = (Mat_HYPRE *)(jac->hpmat->data); 423 HYPRE_ParCSRMatrix hmat; 424 HYPRE_ParVector jbv, jxv; 425 426 PetscFunctionBegin; 427 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 428 if (!jac->applyrichardson) PetscCall(VecSet(x, 0.0)); 429 PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->b, b)); 430 if (jac->applyrichardson) PetscCall(VecHYPRE_IJVectorPushVec(hjac->x, x)); 431 else PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->x, x)); 432 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 433 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv); 434 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, 
(void **)&jxv);
  PetscStackCallExternalVoid(
    "Hypre solve", do {
      HYPRE_Int hierr = (*jac->solve)(jac->hsolver, hmat, jbv, jxv);
      if (hierr) {
        /* convergence failure is tolerated here; any other hypre error is fatal */
        PetscCheck(hierr == HYPRE_ERROR_CONV, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in HYPRE solver, error code %d", (int)hierr);
        hypre__global_error = 0;
      }
    } while (0));

  /* for AMS with a partially zero beta, project gradients out of the solution */
  if (jac->setup == HYPRE_AMSSetup && jac->ams_beta_is_zero_part) PetscCallExternal(HYPRE_AMSProjectOutGradients, jac->hsolver, jxv);
  PetscCall(VecHYPRE_IJVectorPopVec(hjac->x));
  PetscCall(VecHYPRE_IJVectorPopVec(hjac->b));
  PetscFunctionReturn(0);
}

/* Releases all matrices/vectors handed to hypre so the PC can be set up again from scratch */
static PetscErrorCode PCReset_HYPRE(PC pc) {
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  PetscCall(MatDestroy(&jac->hpmat));
  PetscCall(MatDestroy(&jac->G));
  PetscCall(MatDestroy(&jac->C));
  PetscCall(MatDestroy(&jac->alpha_Poisson));
  PetscCall(MatDestroy(&jac->beta_Poisson));
  PetscCall(MatDestroy(&jac->RT_PiFull));
  PetscCall(MatDestroy(&jac->RT_Pi[0]));
  PetscCall(MatDestroy(&jac->RT_Pi[1]));
  PetscCall(MatDestroy(&jac->RT_Pi[2]));
  PetscCall(MatDestroy(&jac->ND_PiFull));
  PetscCall(MatDestroy(&jac->ND_Pi[0]));
  PetscCall(MatDestroy(&jac->ND_Pi[1]));
  PetscCall(MatDestroy(&jac->ND_Pi[2]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2]));
  PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior));
  PetscCall(PCHYPREResetNearNullSpace_Private(pc));
  jac->ams_beta_is_zero      = PETSC_FALSE;
  jac->ams_beta_is_zero_part = PETSC_FALSE;
  jac->dim                   = 0;
  PetscFunctionReturn(0);
}

/* Full teardown: reset, destroy the hypre solver, free strings, restore the communicator, and
   remove all composed query/set functions registered on the PC */
static PetscErrorCode PCDestroy_HYPRE(PC pc) {
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_HYPRE(pc));
  /* destroy is only set once a hypre type has been selected */
  if (jac->destroy) PetscCallExternal(jac->destroy, jac->hsolver);
  PetscCall(PetscFree(jac->hypre_type));
#if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0)
  PetscCall(PetscFree(jac->spgemm_type));
#endif
  if (jac->comm_hypre != MPI_COMM_NULL) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre));
  PetscCall(PetscFree(pc->data));

  PetscCall(PetscObjectChangeTypeName((PetscObject)pc, 0));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetConstantEdgeVectors_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
  PetscFunctionReturn(0);
}

/* Options handler for the Pilut preconditioner: forwards any changed option straight to hypre */
static PetscErrorCode PCSetFromOptions_HYPRE_Pilut(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool flag;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Pilut Options");
  PetscCall(PetscOptionsInt("-pc_hypre_pilut_maxiter", "Number of iterations", "None", jac->maxiter, &jac->maxiter, &flag));
  if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetMaxIter, jac->hsolver, jac->maxiter);
  PetscCall(PetscOptionsReal("-pc_hypre_pilut_tol", "Drop tolerance", "None", jac->tol, &jac->tol, &flag));
  if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetDropTolerance, jac->hsolver, jac->tol);
  PetscCall(PetscOptionsInt("-pc_hypre_pilut_factorrowsize", "FactorRowSize", "None", jac->factorrowsize, &jac->factorrowsize, &flag));
  if (flag) PetscCallExternal(HYPRE_ParCSRPilutSetFactorRowSize, jac->hsolver, jac->factorrowsize);
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

/* ASCII viewer for Pilut settings; PETSC_DEFAULT-valued options are reported as "default" */
static PetscErrorCode PCView_HYPRE_Pilut(PC pc, PetscViewer viewer) {
  PC_HYPRE *jac = (PC_HYPRE *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE Pilut preconditioning\n"));
    if (jac->maxiter != PETSC_DEFAULT) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    maximum number of iterations %" PetscInt_FMT "\n", jac->maxiter));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    default maximum number of iterations \n"));
    }
    if (jac->tol != PETSC_DEFAULT) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    drop tolerance %g\n", (double)jac->tol));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    default drop tolerance \n"));
    }
    if (jac->factorrowsize != PETSC_DEFAULT) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    factor row size %" PetscInt_FMT "\n", jac->factorrowsize));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "    default factor row size \n"));
    }
  }
  PetscFunctionReturn(0);
}

555 static PetscErrorCode PCSetFromOptions_HYPRE_Euclid(PC pc, PetscOptionItems *PetscOptionsObject) { 556 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 557 PetscBool flag, eu_bj = jac->eu_bj ? PETSC_TRUE : PETSC_FALSE; 558 559 PetscFunctionBegin; 560 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE Euclid Options"); 561 PetscCall(PetscOptionsInt("-pc_hypre_euclid_level", "Factorization levels", "None", jac->eu_level, &jac->eu_level, &flag)); 562 if (flag) PetscCallExternal(HYPRE_EuclidSetLevel, jac->hsolver, jac->eu_level); 563 564 PetscCall(PetscOptionsReal("-pc_hypre_euclid_droptolerance", "Drop tolerance for ILU(k) in Euclid", "None", jac->eu_droptolerance, &jac->eu_droptolerance, &flag)); 565 if (flag) { 566 PetscMPIInt size; 567 568 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 569 PetscCheck(size == 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "hypre's Euclid does not support a parallel drop tolerance"); 570 PetscCallExternal(HYPRE_EuclidSetILUT, jac->hsolver, jac->eu_droptolerance); 571 } 572 573 PetscCall(PetscOptionsBool("-pc_hypre_euclid_bj", "Use Block Jacobi for ILU in Euclid", "None", eu_bj, &eu_bj, &flag)); 574 if (flag) { 575 jac->eu_bj = eu_bj ? 
1 : 0; 576 PetscCallExternal(HYPRE_EuclidSetBJ, jac->hsolver, jac->eu_bj); 577 } 578 PetscOptionsHeadEnd(); 579 PetscFunctionReturn(0); 580 } 581 582 static PetscErrorCode PCView_HYPRE_Euclid(PC pc, PetscViewer viewer) { 583 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 584 PetscBool iascii; 585 586 PetscFunctionBegin; 587 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 588 if (iascii) { 589 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE Euclid preconditioning\n")); 590 if (jac->eu_level != PETSC_DEFAULT) { 591 PetscCall(PetscViewerASCIIPrintf(viewer, " factorization levels %" PetscInt_FMT "\n", jac->eu_level)); 592 } else { 593 PetscCall(PetscViewerASCIIPrintf(viewer, " default factorization levels \n")); 594 } 595 PetscCall(PetscViewerASCIIPrintf(viewer, " drop tolerance %g\n", (double)jac->eu_droptolerance)); 596 PetscCall(PetscViewerASCIIPrintf(viewer, " use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj)); 597 } 598 PetscFunctionReturn(0); 599 } 600 601 static PetscErrorCode PCApplyTranspose_HYPRE_BoomerAMG(PC pc, Vec b, Vec x) { 602 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 603 Mat_HYPRE *hjac = (Mat_HYPRE *)(jac->hpmat->data); 604 HYPRE_ParCSRMatrix hmat; 605 HYPRE_ParVector jbv, jxv; 606 607 PetscFunctionBegin; 608 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 609 PetscCall(VecSet(x, 0.0)); 610 PetscCall(VecHYPRE_IJVectorPushVecRead(hjac->x, b)); 611 PetscCall(VecHYPRE_IJVectorPushVecWrite(hjac->b, x)); 612 613 PetscCallExternal(HYPRE_IJMatrixGetObject, hjac->ij, (void **)&hmat); 614 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->b->ij, (void **)&jbv); 615 PetscCallExternal(HYPRE_IJVectorGetObject, hjac->x->ij, (void **)&jxv); 616 617 PetscStackCallExternalVoid( 618 "Hypre Transpose solve", do { 619 HYPRE_Int hierr = HYPRE_BoomerAMGSolveT(jac->hsolver, hmat, jbv, jxv); 620 if (hierr) { 621 /* error code of 1 in BoomerAMG merely means convergence not achieved */ 622 PetscCheck(hierr == 1, PETSC_COMM_SELF, PETSC_ERR_LIB, 
"Error in HYPRE solver, error code %d", (int)hierr); 623 hypre__global_error = 0; 624 } 625 } while (0)); 626 627 PetscCall(VecHYPRE_IJVectorPopVec(hjac->x)); 628 PetscCall(VecHYPRE_IJVectorPopVec(hjac->b)); 629 PetscFunctionReturn(0); 630 } 631 632 static PetscErrorCode PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char name[]) { 633 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 634 PetscBool flag; 635 636 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 637 PetscFunctionBegin; 638 if (jac->spgemm_type) { 639 PetscCall(PetscStrcmp(jac->spgemm_type, name, &flag)); 640 PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE SpGEMM (really we can)"); 641 PetscFunctionReturn(0); 642 } else { 643 PetscCall(PetscStrallocpy(name, &jac->spgemm_type)); 644 } 645 PetscCall(PetscStrcmp("cusparse", jac->spgemm_type, &flag)); 646 if (flag) { 647 PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 1); 648 PetscFunctionReturn(0); 649 } 650 PetscCall(PetscStrcmp("hypre", jac->spgemm_type, &flag)); 651 if (flag) { 652 PetscCallExternal(HYPRE_SetSpGemmUseCusparse, 0); 653 PetscFunctionReturn(0); 654 } 655 jac->spgemm_type = NULL; 656 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE SpGEM type %s; Choices are cusparse, hypre", name); 657 #endif 658 } 659 660 static PetscErrorCode PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG(PC pc, const char *spgemm[]) { 661 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 662 663 PetscFunctionBegin; 664 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 665 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 666 *spgemm = jac->spgemm_type; 667 #endif 668 PetscFunctionReturn(0); 669 } 670 671 static const char *HYPREBoomerAMGCycleType[] = {"", "V", "W"}; 672 static const char *HYPREBoomerAMGCoarsenType[] = {"CLJP", "Ruge-Stueben", "", "modifiedRuge-Stueben", "", "", "Falgout", "", "PMIS", "", "HMIS"}; 673 static const char *HYPREBoomerAMGMeasureType[] = {"local", "global"}; 674 /* The following 
corresponds to HYPRE_BoomerAMGSetRelaxType which has many missing numbers in the enum */ 675 static const char *HYPREBoomerAMGSmoothType[] = {"Schwarz-smoothers", "Pilut", "ParaSails", "Euclid"}; 676 static const char *HYPREBoomerAMGRelaxType[] = {"Jacobi", "sequential-Gauss-Seidel", "seqboundary-Gauss-Seidel", "SOR/Jacobi", "backward-SOR/Jacobi", "" /* [5] hybrid chaotic Gauss-Seidel (works only with OpenMP) */, "symmetric-SOR/Jacobi", "" /* 7 */, "l1scaled-SOR/Jacobi", "Gaussian-elimination", "" /* 10 */, "" /* 11 */, "" /* 12 */, "l1-Gauss-Seidel" /* nonsymmetric */, "backward-l1-Gauss-Seidel" /* nonsymmetric */, "CG" /* non-stationary */, "Chebyshev", "FCF-Jacobi", "l1scaled-Jacobi"}; 677 static const char *HYPREBoomerAMGInterpType[] = {"classical", "", "", "direct", "multipass", "multipass-wts", "ext+i", "ext+i-cc", "standard", "standard-wts", "block", "block-wtd", "FF", "FF1", "ext", "ad-wts", "ext-mm", "ext+i-mm", "ext+e-mm"}; 678 static PetscErrorCode PCSetFromOptions_HYPRE_BoomerAMG(PC pc, PetscOptionItems *PetscOptionsObject) { 679 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 680 PetscInt bs, n, indx, level; 681 PetscBool flg, tmp_truth; 682 double tmpdbl, twodbl[2]; 683 const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"}; 684 const char *PCHYPRESpgemmTypes[] = {"cusparse", "hypre"}; 685 686 PetscFunctionBegin; 687 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE BoomerAMG Options"); 688 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_cycle_type", "Cycle type", "None", HYPREBoomerAMGCycleType + 1, 2, HYPREBoomerAMGCycleType[jac->cycletype], &indx, &flg)); 689 if (flg) { 690 jac->cycletype = indx + 1; 691 PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype); 692 } 693 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_levels", "Number of levels (of grids) allowed", "None", jac->maxlevels, &jac->maxlevels, &flg)); 694 if (flg) { 695 PetscCheck(jac->maxlevels >= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, 
"Number of levels %" PetscInt_FMT " must be at least two", jac->maxlevels); 696 PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels); 697 } 698 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_iter", "Maximum iterations used PER hypre call", "None", jac->maxiter, &jac->maxiter, &flg)); 699 if (flg) { 700 PetscCheck(jac->maxiter >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of iterations %" PetscInt_FMT " must be at least one", jac->maxiter); 701 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 702 } 703 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_tol", "Convergence tolerance PER hypre call (0.0 = use a fixed number of iterations)", "None", jac->tol, &jac->tol, &flg)); 704 if (flg) { 705 PetscCheck(jac->tol >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Tolerance %g must be greater than or equal to zero", (double)jac->tol); 706 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 707 } 708 bs = 1; 709 if (pc->pmat) PetscCall(MatGetBlockSize(pc->pmat, &bs)); 710 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_numfunctions", "Number of functions", "HYPRE_BoomerAMGSetNumFunctions", bs, &bs, &flg)); 711 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNumFunctions, jac->hsolver, bs); 712 713 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_truncfactor", "Truncation factor for interpolation (0=no truncation)", "None", jac->truncfactor, &jac->truncfactor, &flg)); 714 if (flg) { 715 PetscCheck(jac->truncfactor >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Truncation factor %g must be great than or equal zero", (double)jac->truncfactor); 716 PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor); 717 } 718 719 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_P_max", "Max elements per row for interpolation operator (0=unlimited)", "None", jac->pmax, &jac->pmax, &flg)); 720 if (flg) { 721 PetscCheck(jac->pmax >= 0, 
PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "P_max %" PetscInt_FMT " must be greater than or equal to zero", jac->pmax); 722 PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax); 723 } 724 725 PetscCall(PetscOptionsRangeInt("-pc_hypre_boomeramg_agg_nl", "Number of levels of aggressive coarsening", "None", jac->agg_nl, &jac->agg_nl, &flg, 0, jac->maxlevels)); 726 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl); 727 728 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_agg_num_paths", "Number of paths for aggressive coarsening", "None", jac->agg_num_paths, &jac->agg_num_paths, &flg)); 729 if (flg) { 730 PetscCheck(jac->agg_num_paths >= 1, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Number of paths %" PetscInt_FMT " must be greater than or equal to 1", jac->agg_num_paths); 731 PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths); 732 } 733 734 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strong_threshold", "Threshold for being strongly connected", "None", jac->strongthreshold, &jac->strongthreshold, &flg)); 735 if (flg) { 736 PetscCheck(jac->strongthreshold >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Strong threshold %g must be great than or equal zero", (double)jac->strongthreshold); 737 PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold); 738 } 739 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_max_row_sum", "Maximum row sum", "None", jac->maxrowsum, &jac->maxrowsum, &flg)); 740 if (flg) { 741 PetscCheck(jac->maxrowsum >= 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be greater than zero", (double)jac->maxrowsum); 742 PetscCheck(jac->maxrowsum <= 1.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Maximum row sum %g must be less than or equal one", (double)jac->maxrowsum); 743 PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, 
jac->hsolver, jac->maxrowsum); 744 } 745 746 /* Grid sweeps */ 747 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_all", "Number of sweeps for the up and down grid levels", "None", jac->gridsweeps[0], &indx, &flg)); 748 if (flg) { 749 PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, indx); 750 /* modify the jac structure so we can view the updated options with PC_View */ 751 jac->gridsweeps[0] = indx; 752 jac->gridsweeps[1] = indx; 753 /*defaults coarse to 1 */ 754 jac->gridsweeps[2] = 1; 755 } 756 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen", "Use a nodal based coarsening 1-6", "HYPRE_BoomerAMGSetNodal", jac->nodal_coarsening, &jac->nodal_coarsening, &flg)); 757 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodal, jac->hsolver, jac->nodal_coarsening); 758 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_coarsen_diag", "Diagonal in strength matrix for nodal based coarsening 0-2", "HYPRE_BoomerAMGSetNodalDiag", jac->nodal_coarsening_diag, &jac->nodal_coarsening_diag, &flg)); 759 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetNodalDiag, jac->hsolver, jac->nodal_coarsening_diag); 760 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_variant", "Variant of algorithm 1-3", "HYPRE_BoomerAMGSetInterpVecVariant", jac->vec_interp_variant, &jac->vec_interp_variant, &flg)); 761 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecVariant, jac->hsolver, jac->vec_interp_variant); 762 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_vec_interp_qmax", "Max elements per row for each Q", "HYPRE_BoomerAMGSetInterpVecQMax", jac->vec_interp_qmax, &jac->vec_interp_qmax, &flg)); 763 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpVecQMax, jac->hsolver, jac->vec_interp_qmax); 764 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_vec_interp_smooth", "Whether to smooth the interpolation vectors", "HYPRE_BoomerAMGSetSmoothInterpVectors", jac->vec_interp_smooth, &jac->vec_interp_smooth, &flg)); 765 if (flg) 
PetscCallExternal(HYPRE_BoomerAMGSetSmoothInterpVectors, jac->hsolver, jac->vec_interp_smooth); 766 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_interp_refine", "Preprocess the interpolation matrix through iterative weight refinement", "HYPRE_BoomerAMGSetInterpRefine", jac->interp_refine, &jac->interp_refine, &flg)); 767 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetInterpRefine, jac->hsolver, jac->interp_refine); 768 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_down", "Number of sweeps for the down cycles", "None", jac->gridsweeps[0], &indx, &flg)); 769 if (flg) { 770 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 1); 771 jac->gridsweeps[0] = indx; 772 } 773 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_up", "Number of sweeps for the up cycles", "None", jac->gridsweeps[1], &indx, &flg)); 774 if (flg) { 775 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 2); 776 jac->gridsweeps[1] = indx; 777 } 778 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_grid_sweeps_coarse", "Number of sweeps for the coarse level", "None", jac->gridsweeps[2], &indx, &flg)); 779 if (flg) { 780 PetscCallExternal(HYPRE_BoomerAMGSetCycleNumSweeps, jac->hsolver, indx, 3); 781 jac->gridsweeps[2] = indx; 782 } 783 784 /* Smooth type */ 785 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_smooth_type", "Enable more complex smoothers", "None", HYPREBoomerAMGSmoothType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGSmoothType), HYPREBoomerAMGSmoothType[0], &indx, &flg)); 786 if (flg) { 787 jac->smoothtype = indx; 788 PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, indx + 6); 789 jac->smoothnumlevels = 25; 790 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, 25); 791 } 792 793 /* Number of smoothing levels */ 794 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_smooth_num_levels", "Number of levels on which more complex smoothers are used", "None", 25, &indx, &flg)); 795 if (flg && (jac->smoothtype != 
-1)) { 796 jac->smoothnumlevels = indx; 797 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, indx); 798 } 799 800 /* Number of levels for ILU(k) for Euclid */ 801 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_eu_level", "Number of levels for ILU(k) in Euclid smoother", "None", 0, &indx, &flg)); 802 if (flg && (jac->smoothtype == 3)) { 803 jac->eu_level = indx; 804 PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, indx); 805 } 806 807 /* Filter for ILU(k) for Euclid */ 808 double droptolerance; 809 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_eu_droptolerance", "Drop tolerance for ILU(k) in Euclid smoother", "None", 0, &droptolerance, &flg)); 810 if (flg && (jac->smoothtype == 3)) { 811 jac->eu_droptolerance = droptolerance; 812 PetscCallExternal(HYPRE_BoomerAMGSetEuLevel, jac->hsolver, droptolerance); 813 } 814 815 /* Use Block Jacobi ILUT for Euclid */ 816 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_eu_bj", "Use Block Jacobi for ILU in Euclid smoother?", "None", PETSC_FALSE, &tmp_truth, &flg)); 817 if (flg && (jac->smoothtype == 3)) { 818 jac->eu_bj = tmp_truth; 819 PetscCallExternal(HYPRE_BoomerAMGSetEuBJ, jac->hsolver, jac->eu_bj); 820 } 821 822 /* Relax type */ 823 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_all", "Relax type for the up and down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 824 if (flg) { 825 jac->relaxtype[0] = jac->relaxtype[1] = indx; 826 PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, indx); 827 /* by default, coarse type set to 9 */ 828 jac->relaxtype[2] = 9; 829 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, 9, 3); 830 } 831 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_down", "Relax type for the down cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 832 if (flg) { 833 
jac->relaxtype[0] = indx; 834 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 1); 835 } 836 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_up", "Relax type for the up cycles", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[6], &indx, &flg)); 837 if (flg) { 838 jac->relaxtype[1] = indx; 839 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 2); 840 } 841 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_relax_type_coarse", "Relax type on coarse grid", "None", HYPREBoomerAMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGRelaxType), HYPREBoomerAMGRelaxType[9], &indx, &flg)); 842 if (flg) { 843 jac->relaxtype[2] = indx; 844 PetscCallExternal(HYPRE_BoomerAMGSetCycleRelaxType, jac->hsolver, indx, 3); 845 } 846 847 /* Relaxation Weight */ 848 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_relax_weight_all", "Relaxation weight for all levels (0 = hypre estimates, -k = determined with k CG steps)", "None", jac->relaxweight, &tmpdbl, &flg)); 849 if (flg) { 850 PetscCallExternal(HYPRE_BoomerAMGSetRelaxWt, jac->hsolver, tmpdbl); 851 jac->relaxweight = tmpdbl; 852 } 853 854 n = 2; 855 twodbl[0] = twodbl[1] = 1.0; 856 PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_relax_weight_level", "Set the relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg)); 857 if (flg) { 858 if (n == 2) { 859 indx = (int)PetscAbsReal(twodbl[1]); 860 PetscCallExternal(HYPRE_BoomerAMGSetLevelRelaxWt, jac->hsolver, twodbl[0], indx); 861 } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight level: you must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n); 862 } 863 864 /* Outer relaxation Weight */ 865 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_outer_relax_weight_all", "Outer relaxation weight for all levels (-k = determined with k CG steps)", "None", jac->outerrelaxweight, 
&tmpdbl, &flg)); 866 if (flg) { 867 PetscCallExternal(HYPRE_BoomerAMGSetOuterWt, jac->hsolver, tmpdbl); 868 jac->outerrelaxweight = tmpdbl; 869 } 870 871 n = 2; 872 twodbl[0] = twodbl[1] = 1.0; 873 PetscCall(PetscOptionsRealArray("-pc_hypre_boomeramg_outer_relax_weight_level", "Set the outer relaxation weight for a particular level (weight,level)", "None", twodbl, &n, &flg)); 874 if (flg) { 875 if (n == 2) { 876 indx = (int)PetscAbsReal(twodbl[1]); 877 PetscCallExternal(HYPRE_BoomerAMGSetLevelOuterWt, jac->hsolver, twodbl[0], indx); 878 } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Relax weight outer level: You must provide 2 values separated by a comma (and no space), you provided %" PetscInt_FMT, n); 879 } 880 881 /* the Relax Order */ 882 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_no_CF", "Do not use CF-relaxation", "None", PETSC_FALSE, &tmp_truth, &flg)); 883 884 if (flg && tmp_truth) { 885 jac->relaxorder = 0; 886 PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder); 887 } 888 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_measure_type", "Measure type", "None", HYPREBoomerAMGMeasureType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGMeasureType), HYPREBoomerAMGMeasureType[0], &indx, &flg)); 889 if (flg) { 890 jac->measuretype = indx; 891 PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype); 892 } 893 /* update list length 3/07 */ 894 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_coarsen_type", "Coarsen type", "None", HYPREBoomerAMGCoarsenType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGCoarsenType), HYPREBoomerAMGCoarsenType[6], &indx, &flg)); 895 if (flg) { 896 jac->coarsentype = indx; 897 PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype); 898 } 899 900 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_max_coarse_size", "Maximum size of coarsest grid", "None", jac->maxc, &jac->maxc, &flg)); 901 if (flg) 
PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc); 902 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_min_coarse_size", "Minimum size of coarsest grid", "None", jac->minc, &jac->minc, &flg)); 903 if (flg) PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc); 904 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 905 // global parameter but is closely associated with BoomerAMG 906 PetscCall(PetscOptionsEList("-pc_mg_galerkin_mat_product_algorithm", "Type of SpGEMM to use in hypre (only for now)", "PCMGGalerkinSetMatProductAlgorithm", PCHYPRESpgemmTypes, PETSC_STATIC_ARRAY_LENGTH(PCHYPRESpgemmTypes), PCHYPRESpgemmTypes[0], &indx, &flg)); 907 if (!flg) indx = 0; 908 PetscCall(PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG(pc, PCHYPRESpgemmTypes[indx])); 909 #endif 910 /* AIR */ 911 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 912 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_restriction_type", "Type of AIR method (distance 1 or 2, 0 means no AIR)", "None", jac->Rtype, &jac->Rtype, NULL)); 913 PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype); 914 if (jac->Rtype) { 915 jac->interptype = 100; /* no way we can pass this with strings... 
Set it as default as in MFEM, then users can still customize it back to a different one */ 916 917 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_strongthresholdR", "Threshold for R", "None", jac->Rstrongthreshold, &jac->Rstrongthreshold, NULL)); 918 PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold); 919 920 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_filterthresholdR", "Filter threshold for R", "None", jac->Rfilterthreshold, &jac->Rfilterthreshold, NULL)); 921 PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold); 922 923 PetscCall(PetscOptionsReal("-pc_hypre_boomeramg_Adroptol", "Defines the drop tolerance for the A-matrices from the 2nd level of AMG", "None", jac->Adroptol, &jac->Adroptol, NULL)); 924 PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol); 925 926 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_Adroptype", "Drops the entries that are not on the diagonal and smaller than its row norm: type 1: 1-norm, 2: 2-norm, -1: infinity norm", "None", jac->Adroptype, &jac->Adroptype, NULL)); 927 PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype); 928 } 929 #endif 930 931 #if PETSC_PKG_HYPRE_VERSION_LE(9, 9, 9) 932 PetscCheck(!jac->Rtype || !jac->agg_nl, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "-pc_hypre_boomeramg_restriction_type (%" PetscInt_FMT ") and -pc_hypre_boomeramg_agg_nl (%" PetscInt_FMT ")", jac->Rtype, jac->agg_nl); 933 #endif 934 935 /* new 3/07 */ 936 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_interp_type", "Interpolation type", "None", HYPREBoomerAMGInterpType, PETSC_STATIC_ARRAY_LENGTH(HYPREBoomerAMGInterpType), HYPREBoomerAMGInterpType[0], &indx, &flg)); 937 if (flg || jac->Rtype) { 938 if (flg) jac->interptype = indx; 939 PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype); 940 } 941 942 PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_statistics", "Print 
statistics", "None", &flg)); 943 if (flg) { 944 level = 3; 945 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_statistics", "Print statistics", "None", level, &level, NULL)); 946 947 jac->printstatistics = PETSC_TRUE; 948 PetscCallExternal(HYPRE_BoomerAMGSetPrintLevel, jac->hsolver, level); 949 } 950 951 PetscCall(PetscOptionsName("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", &flg)); 952 if (flg) { 953 level = 3; 954 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_print_debug", "Print debug information", "None", level, &level, NULL)); 955 956 jac->printstatistics = PETSC_TRUE; 957 PetscCallExternal(HYPRE_BoomerAMGSetDebugFlag, jac->hsolver, level); 958 } 959 960 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", PETSC_FALSE, &tmp_truth, &flg)); 961 if (flg && tmp_truth) { 962 PetscInt tmp_int; 963 PetscCall(PetscOptionsInt("-pc_hypre_boomeramg_nodal_relaxation", "Nodal relaxation via Schwarz", "None", jac->nodal_relax_levels, &tmp_int, &flg)); 964 if (flg) jac->nodal_relax_levels = tmp_int; 965 PetscCallExternal(HYPRE_BoomerAMGSetSmoothType, jac->hsolver, 6); 966 PetscCallExternal(HYPRE_BoomerAMGSetDomainType, jac->hsolver, 1); 967 PetscCallExternal(HYPRE_BoomerAMGSetOverlap, jac->hsolver, 0); 968 PetscCallExternal(HYPRE_BoomerAMGSetSmoothNumLevels, jac->hsolver, jac->nodal_relax_levels); 969 } 970 971 PetscCall(PetscOptionsBool("-pc_hypre_boomeramg_keeptranspose", "Avoid transpose matvecs in preconditioner application", "None", jac->keeptranspose, &jac->keeptranspose, NULL)); 972 PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 
1 : 0); 973 974 /* options for ParaSails solvers */ 975 PetscCall(PetscOptionsEList("-pc_hypre_boomeramg_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flg)); 976 if (flg) { 977 jac->symt = indx; 978 PetscCallExternal(HYPRE_BoomerAMGSetSym, jac->hsolver, jac->symt); 979 } 980 981 PetscOptionsHeadEnd(); 982 PetscFunctionReturn(0); 983 } 984 985 static PetscErrorCode PCApplyRichardson_HYPRE_BoomerAMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) { 986 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 987 HYPRE_Int oits; 988 989 PetscFunctionBegin; 990 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 991 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, its * jac->maxiter); 992 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, rtol); 993 jac->applyrichardson = PETSC_TRUE; 994 PetscCall(PCApply_HYPRE(pc, b, y)); 995 jac->applyrichardson = PETSC_FALSE; 996 PetscCallExternal(HYPRE_BoomerAMGGetNumIterations, jac->hsolver, &oits); 997 *outits = oits; 998 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 999 else *reason = PCRICHARDSON_CONVERGED_RTOL; 1000 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 1001 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 1002 PetscFunctionReturn(0); 1003 } 1004 1005 static PetscErrorCode PCView_HYPRE_BoomerAMG(PC pc, PetscViewer viewer) { 1006 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1007 PetscBool iascii; 1008 1009 PetscFunctionBegin; 1010 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1011 if (iascii) { 1012 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE BoomerAMG preconditioning\n")); 1013 PetscCall(PetscViewerASCIIPrintf(viewer, " Cycle type %s\n", HYPREBoomerAMGCycleType[jac->cycletype])); 1014 PetscCall(PetscViewerASCIIPrintf(viewer, " 
Maximum number of levels %" PetscInt_FMT "\n", jac->maxlevels)); 1015 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum number of iterations PER hypre call %" PetscInt_FMT "\n", jac->maxiter)); 1016 PetscCall(PetscViewerASCIIPrintf(viewer, " Convergence tolerance PER hypre call %g\n", (double)jac->tol)); 1017 PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for strong coupling %g\n", (double)jac->strongthreshold)); 1018 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation truncation factor %g\n", (double)jac->truncfactor)); 1019 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: max elements per row %" PetscInt_FMT "\n", jac->pmax)); 1020 if (jac->interp_refine) PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation: number of steps of weighted refinement %" PetscInt_FMT "\n", jac->interp_refine)); 1021 PetscCall(PetscViewerASCIIPrintf(viewer, " Number of levels of aggressive coarsening %" PetscInt_FMT "\n", jac->agg_nl)); 1022 PetscCall(PetscViewerASCIIPrintf(viewer, " Number of paths for aggressive coarsening %" PetscInt_FMT "\n", jac->agg_num_paths)); 1023 1024 PetscCall(PetscViewerASCIIPrintf(viewer, " Maximum row sums %g\n", (double)jac->maxrowsum)); 1025 1026 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps down %" PetscInt_FMT "\n", jac->gridsweeps[0])); 1027 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps up %" PetscInt_FMT "\n", jac->gridsweeps[1])); 1028 PetscCall(PetscViewerASCIIPrintf(viewer, " Sweeps on coarse %" PetscInt_FMT "\n", jac->gridsweeps[2])); 1029 1030 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax down %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[0]])); 1031 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax up %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[1]])); 1032 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax on coarse %s\n", HYPREBoomerAMGRelaxType[jac->relaxtype[2]])); 1033 1034 PetscCall(PetscViewerASCIIPrintf(viewer, " Relax weight (all) %g\n", (double)jac->relaxweight)); 1035 
PetscCall(PetscViewerASCIIPrintf(viewer, " Outer relax weight (all) %g\n", (double)jac->outerrelaxweight)); 1036 1037 if (jac->relaxorder) { 1038 PetscCall(PetscViewerASCIIPrintf(viewer, " Using CF-relaxation\n")); 1039 } else { 1040 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using CF-relaxation\n")); 1041 } 1042 if (jac->smoothtype != -1) { 1043 PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth type %s\n", HYPREBoomerAMGSmoothType[jac->smoothtype])); 1044 PetscCall(PetscViewerASCIIPrintf(viewer, " Smooth num levels %" PetscInt_FMT "\n", jac->smoothnumlevels)); 1045 } else { 1046 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using more complex smoothers.\n")); 1047 } 1048 if (jac->smoothtype == 3) { 1049 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) levels %" PetscInt_FMT "\n", jac->eu_level)); 1050 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU(k) drop tolerance %g\n", (double)jac->eu_droptolerance)); 1051 PetscCall(PetscViewerASCIIPrintf(viewer, " Euclid ILU use Block-Jacobi? %" PetscInt_FMT "\n", jac->eu_bj)); 1052 } 1053 PetscCall(PetscViewerASCIIPrintf(viewer, " Measure type %s\n", HYPREBoomerAMGMeasureType[jac->measuretype])); 1054 PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsen type %s\n", HYPREBoomerAMGCoarsenType[jac->coarsentype])); 1055 PetscCall(PetscViewerASCIIPrintf(viewer, " Interpolation type %s\n", jac->interptype != 100 ? 
HYPREBoomerAMGInterpType[jac->interptype] : "1pt")); 1056 if (jac->nodal_coarsening) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal coarsening with HYPRE_BOOMERAMGSetNodal() %" PetscInt_FMT "\n", jac->nodal_coarsening)); 1057 if (jac->vec_interp_variant) { 1058 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecVariant() %" PetscInt_FMT "\n", jac->vec_interp_variant)); 1059 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetInterpVecQMax() %" PetscInt_FMT "\n", jac->vec_interp_qmax)); 1060 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE_BoomerAMGSetSmoothInterpVectors() %d\n", jac->vec_interp_smooth)); 1061 } 1062 if (jac->nodal_relax) PetscCall(PetscViewerASCIIPrintf(viewer, " Using nodal relaxation via Schwarz smoothing on levels %" PetscInt_FMT "\n", jac->nodal_relax_levels)); 1063 #if PETSC_PKG_HYPRE_VERSION_GE(2, 23, 0) 1064 PetscCall(PetscViewerASCIIPrintf(viewer, " SpGEMM type %s\n", jac->spgemm_type)); 1065 #endif 1066 /* AIR */ 1067 if (jac->Rtype) { 1068 PetscCall(PetscViewerASCIIPrintf(viewer, " Using approximate ideal restriction type %" PetscInt_FMT "\n", jac->Rtype)); 1069 PetscCall(PetscViewerASCIIPrintf(viewer, " Threshold for R %g\n", (double)jac->Rstrongthreshold)); 1070 PetscCall(PetscViewerASCIIPrintf(viewer, " Filter for R %g\n", (double)jac->Rfilterthreshold)); 1071 PetscCall(PetscViewerASCIIPrintf(viewer, " A drop tolerance %g\n", (double)jac->Adroptol)); 1072 PetscCall(PetscViewerASCIIPrintf(viewer, " A drop type %" PetscInt_FMT "\n", jac->Adroptype)); 1073 } 1074 } 1075 PetscFunctionReturn(0); 1076 } 1077 1078 static PetscErrorCode PCSetFromOptions_HYPRE_ParaSails(PC pc, PetscOptionItems *PetscOptionsObject) { 1079 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1080 PetscInt indx; 1081 PetscBool flag; 1082 const char *symtlist[] = {"nonsymmetric", "SPD", "nonsymmetric,SPD"}; 1083 1084 PetscFunctionBegin; 1085 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ParaSails Options"); 1086 
PetscCall(PetscOptionsInt("-pc_hypre_parasails_nlevels", "Number of number of levels", "None", jac->nlevels, &jac->nlevels, 0)); 1087 PetscCall(PetscOptionsReal("-pc_hypre_parasails_thresh", "Threshold", "None", jac->threshold, &jac->threshold, &flag)); 1088 if (flag) PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels); 1089 1090 PetscCall(PetscOptionsReal("-pc_hypre_parasails_filter", "filter", "None", jac->filter, &jac->filter, &flag)); 1091 if (flag) PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter); 1092 1093 PetscCall(PetscOptionsReal("-pc_hypre_parasails_loadbal", "Load balance", "None", jac->loadbal, &jac->loadbal, &flag)); 1094 if (flag) PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal); 1095 1096 PetscCall(PetscOptionsBool("-pc_hypre_parasails_logging", "Print info to screen", "None", (PetscBool)jac->logging, (PetscBool *)&jac->logging, &flag)); 1097 if (flag) PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging); 1098 1099 PetscCall(PetscOptionsBool("-pc_hypre_parasails_reuse", "Reuse nonzero pattern in preconditioner", "None", (PetscBool)jac->ruse, (PetscBool *)&jac->ruse, &flag)); 1100 if (flag) PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse); 1101 1102 PetscCall(PetscOptionsEList("-pc_hypre_parasails_sym", "Symmetry of matrix and preconditioner", "None", symtlist, PETSC_STATIC_ARRAY_LENGTH(symtlist), symtlist[0], &indx, &flag)); 1103 if (flag) { 1104 jac->symt = indx; 1105 PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt); 1106 } 1107 1108 PetscOptionsHeadEnd(); 1109 PetscFunctionReturn(0); 1110 } 1111 1112 static PetscErrorCode PCView_HYPRE_ParaSails(PC pc, PetscViewer viewer) { 1113 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1114 PetscBool iascii; 1115 const char *symt = 0; 1116 1117 PetscFunctionBegin; 1118 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1119 if (iascii) { 1120 
PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ParaSails preconditioning\n")); 1121 PetscCall(PetscViewerASCIIPrintf(viewer, " nlevels %" PetscInt_FMT "\n", jac->nlevels)); 1122 PetscCall(PetscViewerASCIIPrintf(viewer, " threshold %g\n", (double)jac->threshold)); 1123 PetscCall(PetscViewerASCIIPrintf(viewer, " filter %g\n", (double)jac->filter)); 1124 PetscCall(PetscViewerASCIIPrintf(viewer, " load balance %g\n", (double)jac->loadbal)); 1125 PetscCall(PetscViewerASCIIPrintf(viewer, " reuse nonzero structure %s\n", PetscBools[jac->ruse])); 1126 PetscCall(PetscViewerASCIIPrintf(viewer, " print info to screen %s\n", PetscBools[jac->logging])); 1127 if (!jac->symt) symt = "nonsymmetric matrix and preconditioner"; 1128 else if (jac->symt == 1) symt = "SPD matrix and preconditioner"; 1129 else if (jac->symt == 2) symt = "nonsymmetric matrix but SPD preconditioner"; 1130 else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Unknown HYPRE ParaSails symmetric option %" PetscInt_FMT, jac->symt); 1131 PetscCall(PetscViewerASCIIPrintf(viewer, " %s\n", symt)); 1132 } 1133 PetscFunctionReturn(0); 1134 } 1135 1136 static PetscErrorCode PCSetFromOptions_HYPRE_AMS(PC pc, PetscOptionItems *PetscOptionsObject) { 1137 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1138 PetscInt n; 1139 PetscBool flag, flag2, flag3, flag4; 1140 1141 PetscFunctionBegin; 1142 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE AMS Options"); 1143 PetscCall(PetscOptionsInt("-pc_hypre_ams_print_level", "Debugging output level for AMS", "None", jac->as_print, &jac->as_print, &flag)); 1144 if (flag) PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print); 1145 PetscCall(PetscOptionsInt("-pc_hypre_ams_max_iter", "Maximum number of AMS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag)); 1146 if (flag) PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter); 1147 PetscCall(PetscOptionsInt("-pc_hypre_ams_cycle_type", "Cycle type for AMS 
multigrid", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag)); 1148 if (flag) PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type); 1149 PetscCall(PetscOptionsReal("-pc_hypre_ams_tol", "Error tolerance for AMS multigrid", "None", jac->as_tol, &jac->as_tol, &flag)); 1150 if (flag) PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol); 1151 PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_type", "Relaxation type for AMS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag)); 1152 PetscCall(PetscOptionsInt("-pc_hypre_ams_relax_times", "Number of relaxation steps for AMS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2)); 1153 PetscCall(PetscOptionsReal("-pc_hypre_ams_relax_weight", "Relaxation weight for AMS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3)); 1154 PetscCall(PetscOptionsReal("-pc_hypre_ams_omega", "SSOR coefficient for AMS smoother", "None", jac->as_omega, &jac->as_omega, &flag4)); 1155 if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 1156 PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_alpha_theta", "Threshold for strong coupling of vector Poisson AMG solver", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag)); 1157 n = 5; 1158 PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_alpha_options", "AMG options for vector Poisson", "None", jac->as_amg_alpha_opts, &n, &flag2)); 1159 if (flag || flag2) { 1160 PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 1161 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 1162 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 1163 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 1164 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 1165 } 1166 PetscCall(PetscOptionsReal("-pc_hypre_ams_amg_beta_theta", "Threshold 
for strong coupling of scalar Poisson AMG solver", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag)); 1167 n = 5; 1168 PetscCall(PetscOptionsIntArray("-pc_hypre_ams_amg_beta_options", "AMG options for scalar Poisson solver", "None", jac->as_amg_beta_opts, &n, &flag2)); 1169 if (flag || flag2) { 1170 PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 1171 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 1172 jac->as_amg_beta_opts[2], /* AMG relax_type */ 1173 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 1174 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 1175 } 1176 PetscCall(PetscOptionsInt("-pc_hypre_ams_projection_frequency", "Frequency at which a projection onto the compatible subspace for problems with zero conductivity regions is performed", "None", jac->ams_proj_freq, &jac->ams_proj_freq, &flag)); 1177 if (flag) { /* override HYPRE's default only if the options is used */ 1178 PetscCallExternal(HYPRE_AMSSetProjectionFrequency, jac->hsolver, jac->ams_proj_freq); 1179 } 1180 PetscOptionsHeadEnd(); 1181 PetscFunctionReturn(0); 1182 } 1183 1184 static PetscErrorCode PCView_HYPRE_AMS(PC pc, PetscViewer viewer) { 1185 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1186 PetscBool iascii; 1187 1188 PetscFunctionBegin; 1189 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1190 if (iascii) { 1191 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE AMS preconditioning\n")); 1192 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter)); 1193 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type)); 1194 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol)); 1195 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type)); 1196 
PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times)); 1197 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight)); 1198 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega)); 1199 if (jac->alpha_Poisson) { 1200 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (passed in by user)\n")); 1201 } else { 1202 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver (computed) \n")); 1203 } 1204 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[0])); 1205 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1])); 1206 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2])); 1207 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3])); 1208 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4])); 1209 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_alpha_theta)); 1210 if (!jac->ams_beta_is_zero) { 1211 if (jac->beta_Poisson) { 1212 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (passed in by user)\n")); 1213 } else { 1214 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver (computed) \n")); 1215 } 1216 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0])); 1217 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1])); 1218 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2])); 
1219 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3])); 1220 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4])); 1221 PetscCall(PetscViewerASCIIPrintf(viewer, " boomerAMG strength threshold %g\n", (double)jac->as_amg_beta_theta)); 1222 if (jac->ams_beta_is_zero_part) PetscCall(PetscViewerASCIIPrintf(viewer, " compatible subspace projection frequency %" PetscInt_FMT " (-1 HYPRE uses default)\n", jac->ams_proj_freq)); 1223 } else { 1224 PetscCall(PetscViewerASCIIPrintf(viewer, " scalar Poisson solver not used (zero-conductivity everywhere) \n")); 1225 } 1226 } 1227 PetscFunctionReturn(0); 1228 } 1229 1230 static PetscErrorCode PCSetFromOptions_HYPRE_ADS(PC pc, PetscOptionItems *PetscOptionsObject) { 1231 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1232 PetscInt n; 1233 PetscBool flag, flag2, flag3, flag4; 1234 1235 PetscFunctionBegin; 1236 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE ADS Options"); 1237 PetscCall(PetscOptionsInt("-pc_hypre_ads_print_level", "Debugging output level for ADS", "None", jac->as_print, &jac->as_print, &flag)); 1238 if (flag) PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print); 1239 PetscCall(PetscOptionsInt("-pc_hypre_ads_max_iter", "Maximum number of ADS multigrid iterations within PCApply", "None", jac->as_max_iter, &jac->as_max_iter, &flag)); 1240 if (flag) PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter); 1241 PetscCall(PetscOptionsInt("-pc_hypre_ads_cycle_type", "Cycle type for ADS multigrid", "None", jac->ads_cycle_type, &jac->ads_cycle_type, &flag)); 1242 if (flag) PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ads_cycle_type); 1243 PetscCall(PetscOptionsReal("-pc_hypre_ads_tol", "Error tolerance for ADS multigrid", "None", jac->as_tol, &jac->as_tol, &flag)); 1244 if (flag) PetscCallExternal(HYPRE_ADSSetTol, 
jac->hsolver, jac->as_tol); 1245 PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_type", "Relaxation type for ADS smoother", "None", jac->as_relax_type, &jac->as_relax_type, &flag)); 1246 PetscCall(PetscOptionsInt("-pc_hypre_ads_relax_times", "Number of relaxation steps for ADS smoother", "None", jac->as_relax_times, &jac->as_relax_times, &flag2)); 1247 PetscCall(PetscOptionsReal("-pc_hypre_ads_relax_weight", "Relaxation weight for ADS smoother", "None", jac->as_relax_weight, &jac->as_relax_weight, &flag3)); 1248 PetscCall(PetscOptionsReal("-pc_hypre_ads_omega", "SSOR coefficient for ADS smoother", "None", jac->as_omega, &jac->as_omega, &flag4)); 1249 if (flag || flag2 || flag3 || flag4) PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 1250 PetscCall(PetscOptionsReal("-pc_hypre_ads_ams_theta", "Threshold for strong coupling of AMS solver inside ADS", "None", jac->as_amg_alpha_theta, &jac->as_amg_alpha_theta, &flag)); 1251 n = 5; 1252 PetscCall(PetscOptionsIntArray("-pc_hypre_ads_ams_options", "AMG options for AMS solver inside ADS", "None", jac->as_amg_alpha_opts, &n, &flag2)); 1253 PetscCall(PetscOptionsInt("-pc_hypre_ads_ams_cycle_type", "Cycle type for AMS solver inside ADS", "None", jac->ams_cycle_type, &jac->ams_cycle_type, &flag3)); 1254 if (flag || flag2 || flag3) { 1255 PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMS cycle type */ 1256 jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 1257 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 1258 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 1259 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 1260 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 1261 } 1262 PetscCall(PetscOptionsReal("-pc_hypre_ads_amg_theta", "Threshold for strong coupling of vector AMG solver inside ADS", "None", jac->as_amg_beta_theta, &jac->as_amg_beta_theta, &flag)); 1263 n = 5; 1264 
PetscCall(PetscOptionsIntArray("-pc_hypre_ads_amg_options", "AMG options for vector AMG solver inside ADS", "None", jac->as_amg_beta_opts, &n, &flag2)); 1265 if (flag || flag2) { 1266 PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 1267 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 1268 jac->as_amg_beta_opts[2], /* AMG relax_type */ 1269 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 1270 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 1271 } 1272 PetscOptionsHeadEnd(); 1273 PetscFunctionReturn(0); 1274 } 1275 1276 static PetscErrorCode PCView_HYPRE_ADS(PC pc, PetscViewer viewer) { 1277 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1278 PetscBool iascii; 1279 1280 PetscFunctionBegin; 1281 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1282 if (iascii) { 1283 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE ADS preconditioning\n")); 1284 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iterations per application %" PetscInt_FMT "\n", jac->as_max_iter)); 1285 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ads_cycle_type)); 1286 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace iteration tolerance %g\n", (double)jac->as_tol)); 1287 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother type %" PetscInt_FMT "\n", jac->as_relax_type)); 1288 PetscCall(PetscViewerASCIIPrintf(viewer, " number of smoothing steps %" PetscInt_FMT "\n", jac->as_relax_times)); 1289 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother weight %g\n", (double)jac->as_relax_weight)); 1290 PetscCall(PetscViewerASCIIPrintf(viewer, " smoother omega %g\n", (double)jac->as_omega)); 1291 PetscCall(PetscViewerASCIIPrintf(viewer, " AMS solver using boomerAMG\n")); 1292 PetscCall(PetscViewerASCIIPrintf(viewer, " subspace cycle type %" PetscInt_FMT "\n", jac->ams_cycle_type)); 1293 PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", 
jac->as_amg_alpha_opts[0])); 1294 PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[1])); 1295 PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[2])); 1296 PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[3])); 1297 PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_alpha_opts[4])); 1298 PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_alpha_theta)); 1299 PetscCall(PetscViewerASCIIPrintf(viewer, " vector Poisson solver using boomerAMG\n")); 1300 PetscCall(PetscViewerASCIIPrintf(viewer, " coarsening type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[0])); 1301 PetscCall(PetscViewerASCIIPrintf(viewer, " levels of aggressive coarsening %" PetscInt_FMT "\n", jac->as_amg_beta_opts[1])); 1302 PetscCall(PetscViewerASCIIPrintf(viewer, " relaxation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[2])); 1303 PetscCall(PetscViewerASCIIPrintf(viewer, " interpolation type %" PetscInt_FMT "\n", jac->as_amg_beta_opts[3])); 1304 PetscCall(PetscViewerASCIIPrintf(viewer, " max nonzero elements in interpolation rows %" PetscInt_FMT "\n", jac->as_amg_beta_opts[4])); 1305 PetscCall(PetscViewerASCIIPrintf(viewer, " strength threshold %g\n", (double)jac->as_amg_beta_theta)); 1306 } 1307 PetscFunctionReturn(0); 1308 } 1309 1310 static PetscErrorCode PCHYPRESetDiscreteGradient_HYPRE(PC pc, Mat G) { 1311 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1312 PetscBool ishypre; 1313 1314 PetscFunctionBegin; 1315 PetscCall(PetscObjectTypeCompare((PetscObject)G, MATHYPRE, &ishypre)); 1316 if (ishypre) { 1317 PetscCall(PetscObjectReference((PetscObject)G)); 1318 PetscCall(MatDestroy(&jac->G)); 1319 jac->G = G; 1320 } else { 1321 PetscCall(MatDestroy(&jac->G)); 1322 PetscCall(MatConvert(G, MATHYPRE, MAT_INITIAL_MATRIX, 
&jac->G)); 1323 } 1324 PetscFunctionReturn(0); 1325 } 1326 1327 /*@ 1328 PCHYPRESetDiscreteGradient - Set discrete gradient matrix for `PCHYPRE` type of ams or ads 1329 1330 Collective on pc 1331 1332 Input Parameters: 1333 + pc - the preconditioning context 1334 - G - the discrete gradient 1335 1336 Level: intermediate 1337 1338 Notes: 1339 G should have as many rows as the number of edges and as many columns as the number of vertices in the mesh 1340 1341 Each row of G has 2 nonzeros, with column indexes being the global indexes of edge's endpoints: matrix entries are +1 and -1 depending on edge orientation 1342 1343 Developer Note: 1344 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1345 1346 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteCurl()` 1347 @*/ 1348 PetscErrorCode PCHYPRESetDiscreteGradient(PC pc, Mat G) { 1349 PetscFunctionBegin; 1350 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1351 PetscValidHeaderSpecific(G, MAT_CLASSID, 2); 1352 PetscCheckSameComm(pc, 1, G, 2); 1353 PetscTryMethod(pc, "PCHYPRESetDiscreteGradient_C", (PC, Mat), (pc, G)); 1354 PetscFunctionReturn(0); 1355 } 1356 1357 static PetscErrorCode PCHYPRESetDiscreteCurl_HYPRE(PC pc, Mat C) { 1358 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1359 PetscBool ishypre; 1360 1361 PetscFunctionBegin; 1362 PetscCall(PetscObjectTypeCompare((PetscObject)C, MATHYPRE, &ishypre)); 1363 if (ishypre) { 1364 PetscCall(PetscObjectReference((PetscObject)C)); 1365 PetscCall(MatDestroy(&jac->C)); 1366 jac->C = C; 1367 } else { 1368 PetscCall(MatDestroy(&jac->C)); 1369 PetscCall(MatConvert(C, MATHYPRE, MAT_INITIAL_MATRIX, &jac->C)); 1370 } 1371 PetscFunctionReturn(0); 1372 } 1373 1374 /*@ 1375 PCHYPRESetDiscreteCurl - Set discrete curl matrx for `PCHYPRE` type of ads 1376 1377 Collective on pc 1378 1379 Input Parameters: 1380 + pc - the preconditioning context 1381 - C - the discrete curl 1382 1383 Level: intermediate 1384 1385 Notes: 1386 C should have as many rows as the number of 
faces and as many columns as the number of edges in the mesh 1387 1388 Each row of G has as many nonzeros as the number of edges of a face, with column indexes being the global indexes of the corresponding edge: matrix entries are +1 and -1 depending on edge orientation with respect to the face orientation 1389 1390 Developer Note: 1391 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1392 1393 If this is only for `PCHYPRE` type of ads it should be called `PCHYPREADSSetDiscreteCurl()` 1394 1395 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()` 1396 @*/ 1397 PetscErrorCode PCHYPRESetDiscreteCurl(PC pc, Mat C) { 1398 PetscFunctionBegin; 1399 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1400 PetscValidHeaderSpecific(C, MAT_CLASSID, 2); 1401 PetscCheckSameComm(pc, 1, C, 2); 1402 PetscTryMethod(pc, "PCHYPRESetDiscreteCurl_C", (PC, Mat), (pc, C)); 1403 PetscFunctionReturn(0); 1404 } 1405 1406 static PetscErrorCode PCHYPRESetInterpolations_HYPRE(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[]) { 1407 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1408 PetscBool ishypre; 1409 PetscInt i; 1410 PetscFunctionBegin; 1411 1412 PetscCall(MatDestroy(&jac->RT_PiFull)); 1413 PetscCall(MatDestroy(&jac->ND_PiFull)); 1414 for (i = 0; i < 3; ++i) { 1415 PetscCall(MatDestroy(&jac->RT_Pi[i])); 1416 PetscCall(MatDestroy(&jac->ND_Pi[i])); 1417 } 1418 1419 jac->dim = dim; 1420 if (RT_PiFull) { 1421 PetscCall(PetscObjectTypeCompare((PetscObject)RT_PiFull, MATHYPRE, &ishypre)); 1422 if (ishypre) { 1423 PetscCall(PetscObjectReference((PetscObject)RT_PiFull)); 1424 jac->RT_PiFull = RT_PiFull; 1425 } else { 1426 PetscCall(MatConvert(RT_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_PiFull)); 1427 } 1428 } 1429 if (RT_Pi) { 1430 for (i = 0; i < dim; ++i) { 1431 if (RT_Pi[i]) { 1432 PetscCall(PetscObjectTypeCompare((PetscObject)RT_Pi[i], MATHYPRE, &ishypre)); 1433 if (ishypre) { 1434 PetscCall(PetscObjectReference((PetscObject)RT_Pi[i])); 
1435 jac->RT_Pi[i] = RT_Pi[i]; 1436 } else { 1437 PetscCall(MatConvert(RT_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->RT_Pi[i])); 1438 } 1439 } 1440 } 1441 } 1442 if (ND_PiFull) { 1443 PetscCall(PetscObjectTypeCompare((PetscObject)ND_PiFull, MATHYPRE, &ishypre)); 1444 if (ishypre) { 1445 PetscCall(PetscObjectReference((PetscObject)ND_PiFull)); 1446 jac->ND_PiFull = ND_PiFull; 1447 } else { 1448 PetscCall(MatConvert(ND_PiFull, MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_PiFull)); 1449 } 1450 } 1451 if (ND_Pi) { 1452 for (i = 0; i < dim; ++i) { 1453 if (ND_Pi[i]) { 1454 PetscCall(PetscObjectTypeCompare((PetscObject)ND_Pi[i], MATHYPRE, &ishypre)); 1455 if (ishypre) { 1456 PetscCall(PetscObjectReference((PetscObject)ND_Pi[i])); 1457 jac->ND_Pi[i] = ND_Pi[i]; 1458 } else { 1459 PetscCall(MatConvert(ND_Pi[i], MATHYPRE, MAT_INITIAL_MATRIX, &jac->ND_Pi[i])); 1460 } 1461 } 1462 } 1463 } 1464 1465 PetscFunctionReturn(0); 1466 } 1467 1468 /*@ 1469 PCHYPRESetInterpolations - Set interpolation matrices for `PCHYPRE` type of ams or ads 1470 1471 Collective on pc 1472 1473 Input Parameters: 1474 + pc - the preconditioning context 1475 - dim - the dimension of the problem, only used in AMS 1476 - RT_PiFull - Raviart-Thomas interpolation matrix 1477 - RT_Pi - x/y/z component of Raviart-Thomas interpolation matrix 1478 - ND_PiFull - Nedelec interpolation matrix 1479 - ND_Pi - x/y/z component of Nedelec interpolation matrix 1480 1481 Level: intermediate 1482 1483 Notes: 1484 For AMS, only Nedelec interpolation matrices are needed, the Raviart-Thomas interpolation matrices can be set to NULL. 1485 1486 For ADS, both type of interpolation matrices are needed. 
1487 1488 Developer Note: 1489 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1490 1491 .seealso: `PCHYPRE` 1492 @*/ 1493 PetscErrorCode PCHYPRESetInterpolations(PC pc, PetscInt dim, Mat RT_PiFull, Mat RT_Pi[], Mat ND_PiFull, Mat ND_Pi[]) { 1494 PetscInt i; 1495 1496 PetscFunctionBegin; 1497 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1498 if (RT_PiFull) { 1499 PetscValidHeaderSpecific(RT_PiFull, MAT_CLASSID, 3); 1500 PetscCheckSameComm(pc, 1, RT_PiFull, 3); 1501 } 1502 if (RT_Pi) { 1503 PetscValidPointer(RT_Pi, 4); 1504 for (i = 0; i < dim; ++i) { 1505 if (RT_Pi[i]) { 1506 PetscValidHeaderSpecific(RT_Pi[i], MAT_CLASSID, 4); 1507 PetscCheckSameComm(pc, 1, RT_Pi[i], 4); 1508 } 1509 } 1510 } 1511 if (ND_PiFull) { 1512 PetscValidHeaderSpecific(ND_PiFull, MAT_CLASSID, 5); 1513 PetscCheckSameComm(pc, 1, ND_PiFull, 5); 1514 } 1515 if (ND_Pi) { 1516 PetscValidPointer(ND_Pi, 6); 1517 for (i = 0; i < dim; ++i) { 1518 if (ND_Pi[i]) { 1519 PetscValidHeaderSpecific(ND_Pi[i], MAT_CLASSID, 6); 1520 PetscCheckSameComm(pc, 1, ND_Pi[i], 6); 1521 } 1522 } 1523 } 1524 PetscTryMethod(pc, "PCHYPRESetInterpolations_C", (PC, PetscInt, Mat, Mat[], Mat, Mat[]), (pc, dim, RT_PiFull, RT_Pi, ND_PiFull, ND_Pi)); 1525 PetscFunctionReturn(0); 1526 } 1527 1528 static PetscErrorCode PCHYPRESetPoissonMatrix_HYPRE(PC pc, Mat A, PetscBool isalpha) { 1529 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1530 PetscBool ishypre; 1531 1532 PetscFunctionBegin; 1533 PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre)); 1534 if (ishypre) { 1535 if (isalpha) { 1536 PetscCall(PetscObjectReference((PetscObject)A)); 1537 PetscCall(MatDestroy(&jac->alpha_Poisson)); 1538 jac->alpha_Poisson = A; 1539 } else { 1540 if (A) { 1541 PetscCall(PetscObjectReference((PetscObject)A)); 1542 } else { 1543 jac->ams_beta_is_zero = PETSC_TRUE; 1544 } 1545 PetscCall(MatDestroy(&jac->beta_Poisson)); 1546 jac->beta_Poisson = A; 1547 } 1548 } else { 1549 if (isalpha) { 1550 
PetscCall(MatDestroy(&jac->alpha_Poisson)); 1551 PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->alpha_Poisson)); 1552 } else { 1553 if (A) { 1554 PetscCall(MatDestroy(&jac->beta_Poisson)); 1555 PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &jac->beta_Poisson)); 1556 } else { 1557 PetscCall(MatDestroy(&jac->beta_Poisson)); 1558 jac->ams_beta_is_zero = PETSC_TRUE; 1559 } 1560 } 1561 } 1562 PetscFunctionReturn(0); 1563 } 1564 1565 /*@ 1566 PCHYPRESetAlphaPoissonMatrix - Set vector Poisson matrix for `PCHYPRE` of type ams 1567 1568 Collective on pc 1569 1570 Input Parameters: 1571 + pc - the preconditioning context 1572 - A - the matrix 1573 1574 Level: intermediate 1575 1576 Note: 1577 A should be obtained by discretizing the vector valued Poisson problem with linear finite elements 1578 1579 Developer Note: 1580 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1581 1582 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetAlphaPoissonMatrix()` 1583 1584 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetBetaPoissonMatrix()` 1585 @*/ 1586 PetscErrorCode PCHYPRESetAlphaPoissonMatrix(PC pc, Mat A) { 1587 PetscFunctionBegin; 1588 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1589 PetscValidHeaderSpecific(A, MAT_CLASSID, 2); 1590 PetscCheckSameComm(pc, 1, A, 2); 1591 PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_TRUE)); 1592 PetscFunctionReturn(0); 1593 } 1594 1595 /*@ 1596 PCHYPRESetBetaPoissonMatrix - Set Poisson matrix for `PCHYPRE` of type ams 1597 1598 Collective on pc 1599 1600 Input Parameters: 1601 + pc - the preconditioning context 1602 - A - the matrix, or NULL to turn it off 1603 1604 Level: intermediate 1605 1606 Note: 1607 A should be obtained by discretizing the Poisson problem with linear finite elements. 
1608 1609 Developer Note: 1610 This automatically converts the matrix to `MATHYPRE` if it is not already of that type 1611 1612 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSPCHYPRESetBetaPoissonMatrix()` 1613 1614 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 1615 @*/ 1616 PetscErrorCode PCHYPRESetBetaPoissonMatrix(PC pc, Mat A) { 1617 PetscFunctionBegin; 1618 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1619 if (A) { 1620 PetscValidHeaderSpecific(A, MAT_CLASSID, 2); 1621 PetscCheckSameComm(pc, 1, A, 2); 1622 } 1623 PetscTryMethod(pc, "PCHYPRESetPoissonMatrix_C", (PC, Mat, PetscBool), (pc, A, PETSC_FALSE)); 1624 PetscFunctionReturn(0); 1625 } 1626 1627 static PetscErrorCode PCHYPRESetEdgeConstantVectors_HYPRE(PC pc, Vec ozz, Vec zoz, Vec zzo) { 1628 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1629 1630 PetscFunctionBegin; 1631 /* throw away any vector if already set */ 1632 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[0])); 1633 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[1])); 1634 PetscCall(VecHYPRE_IJVectorDestroy(&jac->constants[2])); 1635 PetscCall(VecHYPRE_IJVectorCreate(ozz->map, &jac->constants[0])); 1636 PetscCall(VecHYPRE_IJVectorCopy(ozz, jac->constants[0])); 1637 PetscCall(VecHYPRE_IJVectorCreate(zoz->map, &jac->constants[1])); 1638 PetscCall(VecHYPRE_IJVectorCopy(zoz, jac->constants[1])); 1639 jac->dim = 2; 1640 if (zzo) { 1641 PetscCall(VecHYPRE_IJVectorCreate(zzo->map, &jac->constants[2])); 1642 PetscCall(VecHYPRE_IJVectorCopy(zzo, jac->constants[2])); 1643 jac->dim++; 1644 } 1645 PetscFunctionReturn(0); 1646 } 1647 1648 /*@ 1649 PCHYPRESetEdgeConstantVectors - Set the representation of the constant vector fields in the edge element basis for `PCHYPRE` of type ams 1650 1651 Collective on pc 1652 1653 Input Parameters: 1654 + pc - the preconditioning context 1655 - ozz - vector representing (1,0,0) (or (1,0) in 2D) 1656 - zoz - vector representing 
(0,1,0) (or (0,1) in 2D) 1657 - zzo - vector representing (0,0,1) (use NULL in 2D) 1658 1659 Level: intermediate 1660 1661 Developer Note: 1662 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetEdgeConstantVectors()` 1663 1664 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 1665 @*/ 1666 PetscErrorCode PCHYPRESetEdgeConstantVectors(PC pc, Vec ozz, Vec zoz, Vec zzo) { 1667 PetscFunctionBegin; 1668 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1669 PetscValidHeaderSpecific(ozz, VEC_CLASSID, 2); 1670 PetscValidHeaderSpecific(zoz, VEC_CLASSID, 3); 1671 if (zzo) PetscValidHeaderSpecific(zzo, VEC_CLASSID, 4); 1672 PetscCheckSameComm(pc, 1, ozz, 2); 1673 PetscCheckSameComm(pc, 1, zoz, 3); 1674 if (zzo) PetscCheckSameComm(pc, 1, zzo, 4); 1675 PetscTryMethod(pc, "PCHYPRESetEdgeConstantVectors_C", (PC, Vec, Vec, Vec), (pc, ozz, zoz, zzo)); 1676 PetscFunctionReturn(0); 1677 } 1678 1679 static PetscErrorCode PCHYPREAMSSetInteriorNodes_HYPRE(PC pc, Vec interior) { 1680 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1681 1682 PetscFunctionBegin; 1683 PetscCall(VecHYPRE_IJVectorDestroy(&jac->interior)); 1684 PetscCall(VecHYPRE_IJVectorCreate(interior->map, &jac->interior)); 1685 PetscCall(VecHYPRE_IJVectorCopy(interior, jac->interior)); 1686 jac->ams_beta_is_zero_part = PETSC_TRUE; 1687 PetscFunctionReturn(0); 1688 } 1689 1690 /*@ 1691 PCHYPREAMSSetInteriorNodes - Set the list of interior nodes to a zero-conductivity region for `PCHYPRE` of type ams 1692 1693 Collective on pc 1694 1695 Input Parameters: 1696 + pc - the preconditioning context 1697 - interior - vector. node is interior if its entry in the array is 1.0. 
1698 1699 Level: intermediate 1700 1701 Note: 1702 This calls `HYPRE_AMSSetInteriorNodes()` 1703 1704 Developer Note: 1705 If this is only for `PCHYPRE` type of ams it should be called `PCHYPREAMSSetInteriorNodes()` 1706 1707 .seealso: `PCHYPRE`, `PCHYPRESetDiscreteGradient()`, `PCHYPRESetDiscreteCurl()`, `PCHYPRESetAlphaPoissonMatrix()` 1708 @*/ 1709 PetscErrorCode PCHYPREAMSSetInteriorNodes(PC pc, Vec interior) { 1710 PetscFunctionBegin; 1711 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1712 PetscValidHeaderSpecific(interior, VEC_CLASSID, 2); 1713 PetscCheckSameComm(pc, 1, interior, 2); 1714 PetscTryMethod(pc, "PCHYPREAMSSetInteriorNodes_C", (PC, Vec), (pc, interior)); 1715 PetscFunctionReturn(0); 1716 } 1717 1718 static PetscErrorCode PCSetCoordinates_HYPRE(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords) { 1719 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1720 Vec tv; 1721 PetscInt i; 1722 1723 PetscFunctionBegin; 1724 /* throw away any coordinate vector if already set */ 1725 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[0])); 1726 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[1])); 1727 PetscCall(VecHYPRE_IJVectorDestroy(&jac->coords[2])); 1728 jac->dim = dim; 1729 1730 /* compute IJ vector for coordinates */ 1731 PetscCall(VecCreate(PetscObjectComm((PetscObject)pc), &tv)); 1732 PetscCall(VecSetType(tv, VECSTANDARD)); 1733 PetscCall(VecSetSizes(tv, nloc, PETSC_DECIDE)); 1734 for (i = 0; i < dim; i++) { 1735 PetscScalar *array; 1736 PetscInt j; 1737 1738 PetscCall(VecHYPRE_IJVectorCreate(tv->map, &jac->coords[i])); 1739 PetscCall(VecGetArrayWrite(tv, &array)); 1740 for (j = 0; j < nloc; j++) array[j] = coords[j * dim + i]; 1741 PetscCall(VecRestoreArrayWrite(tv, &array)); 1742 PetscCall(VecHYPRE_IJVectorCopy(tv, jac->coords[i])); 1743 } 1744 PetscCall(VecDestroy(&tv)); 1745 PetscFunctionReturn(0); 1746 } 1747 1748 static PetscErrorCode PCHYPREGetType_HYPRE(PC pc, const char *name[]) { 1749 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1750 1751 PetscFunctionBegin; 
1752 *name = jac->hypre_type; 1753 PetscFunctionReturn(0); 1754 } 1755 1756 static PetscErrorCode PCHYPRESetType_HYPRE(PC pc, const char name[]) { 1757 PC_HYPRE *jac = (PC_HYPRE *)pc->data; 1758 PetscBool flag; 1759 1760 PetscFunctionBegin; 1761 if (jac->hypre_type) { 1762 PetscCall(PetscStrcmp(jac->hypre_type, name, &flag)); 1763 PetscCheck(flag, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Cannot reset the HYPRE preconditioner type once it has been set"); 1764 PetscFunctionReturn(0); 1765 } else { 1766 PetscCall(PetscStrallocpy(name, &jac->hypre_type)); 1767 } 1768 1769 jac->maxiter = PETSC_DEFAULT; 1770 jac->tol = PETSC_DEFAULT; 1771 jac->printstatistics = PetscLogPrintInfo; 1772 1773 PetscCall(PetscStrcmp("pilut", jac->hypre_type, &flag)); 1774 if (flag) { 1775 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1776 PetscCallExternal(HYPRE_ParCSRPilutCreate, jac->comm_hypre, &jac->hsolver); 1777 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Pilut; 1778 pc->ops->view = PCView_HYPRE_Pilut; 1779 jac->destroy = HYPRE_ParCSRPilutDestroy; 1780 jac->setup = HYPRE_ParCSRPilutSetup; 1781 jac->solve = HYPRE_ParCSRPilutSolve; 1782 jac->factorrowsize = PETSC_DEFAULT; 1783 PetscFunctionReturn(0); 1784 } 1785 PetscCall(PetscStrcmp("euclid", jac->hypre_type, &flag)); 1786 if (flag) { 1787 #if defined(PETSC_USE_64BIT_INDICES) 1788 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Hypre Euclid does not support 64 bit indices"); 1789 #endif 1790 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1791 PetscCallExternal(HYPRE_EuclidCreate, jac->comm_hypre, &jac->hsolver); 1792 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_Euclid; 1793 pc->ops->view = PCView_HYPRE_Euclid; 1794 jac->destroy = HYPRE_EuclidDestroy; 1795 jac->setup = HYPRE_EuclidSetup; 1796 jac->solve = HYPRE_EuclidSolve; 1797 jac->factorrowsize = PETSC_DEFAULT; 1798 jac->eu_level = PETSC_DEFAULT; /* default */ 1799 PetscFunctionReturn(0); 
1800 } 1801 PetscCall(PetscStrcmp("parasails", jac->hypre_type, &flag)); 1802 if (flag) { 1803 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &jac->comm_hypre)); 1804 PetscCallExternal(HYPRE_ParaSailsCreate, jac->comm_hypre, &jac->hsolver); 1805 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ParaSails; 1806 pc->ops->view = PCView_HYPRE_ParaSails; 1807 jac->destroy = HYPRE_ParaSailsDestroy; 1808 jac->setup = HYPRE_ParaSailsSetup; 1809 jac->solve = HYPRE_ParaSailsSolve; 1810 /* initialize */ 1811 jac->nlevels = 1; 1812 jac->threshold = .1; 1813 jac->filter = .1; 1814 jac->loadbal = 0; 1815 if (PetscLogPrintInfo) jac->logging = (int)PETSC_TRUE; 1816 else jac->logging = (int)PETSC_FALSE; 1817 1818 jac->ruse = (int)PETSC_FALSE; 1819 jac->symt = 0; 1820 PetscCallExternal(HYPRE_ParaSailsSetParams, jac->hsolver, jac->threshold, jac->nlevels); 1821 PetscCallExternal(HYPRE_ParaSailsSetFilter, jac->hsolver, jac->filter); 1822 PetscCallExternal(HYPRE_ParaSailsSetLoadbal, jac->hsolver, jac->loadbal); 1823 PetscCallExternal(HYPRE_ParaSailsSetLogging, jac->hsolver, jac->logging); 1824 PetscCallExternal(HYPRE_ParaSailsSetReuse, jac->hsolver, jac->ruse); 1825 PetscCallExternal(HYPRE_ParaSailsSetSym, jac->hsolver, jac->symt); 1826 PetscFunctionReturn(0); 1827 } 1828 PetscCall(PetscStrcmp("boomeramg", jac->hypre_type, &flag)); 1829 if (flag) { 1830 PetscCallExternal(HYPRE_BoomerAMGCreate, &jac->hsolver); 1831 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_BoomerAMG; 1832 pc->ops->view = PCView_HYPRE_BoomerAMG; 1833 pc->ops->applytranspose = PCApplyTranspose_HYPRE_BoomerAMG; 1834 pc->ops->applyrichardson = PCApplyRichardson_HYPRE_BoomerAMG; 1835 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_BoomerAMG)); 1836 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_BoomerAMG)); 1837 jac->destroy = HYPRE_BoomerAMGDestroy; 1838 jac->setup = HYPRE_BoomerAMGSetup; 1839 
jac->solve = HYPRE_BoomerAMGSolve; 1840 jac->applyrichardson = PETSC_FALSE; 1841 /* these defaults match the hypre defaults */ 1842 jac->cycletype = 1; 1843 jac->maxlevels = 25; 1844 jac->maxiter = 1; 1845 jac->tol = 0.0; /* tolerance of zero indicates use as preconditioner (suppresses convergence errors) */ 1846 jac->truncfactor = 0.0; 1847 jac->strongthreshold = .25; 1848 jac->maxrowsum = .9; 1849 jac->coarsentype = 6; 1850 jac->measuretype = 0; 1851 jac->gridsweeps[0] = jac->gridsweeps[1] = jac->gridsweeps[2] = 1; 1852 jac->smoothtype = -1; /* Not set by default */ 1853 jac->smoothnumlevels = 25; 1854 jac->eu_level = 0; 1855 jac->eu_droptolerance = 0; 1856 jac->eu_bj = 0; 1857 jac->relaxtype[0] = jac->relaxtype[1] = 6; /* Defaults to SYMMETRIC since in PETSc we are using a PC - most likely with CG */ 1858 jac->relaxtype[2] = 9; /*G.E. */ 1859 jac->relaxweight = 1.0; 1860 jac->outerrelaxweight = 1.0; 1861 jac->relaxorder = 1; 1862 jac->interptype = 0; 1863 jac->Rtype = 0; 1864 jac->Rstrongthreshold = 0.25; 1865 jac->Rfilterthreshold = 0.0; 1866 jac->Adroptype = -1; 1867 jac->Adroptol = 0.0; 1868 jac->agg_nl = 0; 1869 jac->agg_interptype = 4; 1870 jac->pmax = 0; 1871 jac->truncfactor = 0.0; 1872 jac->agg_num_paths = 1; 1873 jac->maxc = 9; 1874 jac->minc = 1; 1875 jac->nodal_coarsening = 0; 1876 jac->nodal_coarsening_diag = 0; 1877 jac->vec_interp_variant = 0; 1878 jac->vec_interp_qmax = 0; 1879 jac->vec_interp_smooth = PETSC_FALSE; 1880 jac->interp_refine = 0; 1881 jac->nodal_relax = PETSC_FALSE; 1882 jac->nodal_relax_levels = 1; 1883 jac->rap2 = 0; 1884 1885 /* GPU defaults 1886 from https://hypre.readthedocs.io/en/latest/solvers-boomeramg.html#gpu-supported-options 1887 and /src/parcsr_ls/par_amg.c */ 1888 #if defined(PETSC_HAVE_HYPRE_DEVICE) 1889 jac->keeptranspose = PETSC_TRUE; 1890 jac->mod_rap2 = 1; 1891 jac->coarsentype = 8; 1892 jac->relaxorder = 0; 1893 jac->interptype = 6; 1894 jac->relaxtype[0] = 18; 1895 jac->relaxtype[1] = 18; 1896 jac->agg_interptype 
= 7; 1897 #else 1898 jac->keeptranspose = PETSC_FALSE; 1899 jac->mod_rap2 = 0; 1900 #endif 1901 PetscCallExternal(HYPRE_BoomerAMGSetCycleType, jac->hsolver, jac->cycletype); 1902 PetscCallExternal(HYPRE_BoomerAMGSetMaxLevels, jac->hsolver, jac->maxlevels); 1903 PetscCallExternal(HYPRE_BoomerAMGSetMaxIter, jac->hsolver, jac->maxiter); 1904 PetscCallExternal(HYPRE_BoomerAMGSetTol, jac->hsolver, jac->tol); 1905 PetscCallExternal(HYPRE_BoomerAMGSetTruncFactor, jac->hsolver, jac->truncfactor); 1906 PetscCallExternal(HYPRE_BoomerAMGSetStrongThreshold, jac->hsolver, jac->strongthreshold); 1907 PetscCallExternal(HYPRE_BoomerAMGSetMaxRowSum, jac->hsolver, jac->maxrowsum); 1908 PetscCallExternal(HYPRE_BoomerAMGSetCoarsenType, jac->hsolver, jac->coarsentype); 1909 PetscCallExternal(HYPRE_BoomerAMGSetMeasureType, jac->hsolver, jac->measuretype); 1910 PetscCallExternal(HYPRE_BoomerAMGSetRelaxOrder, jac->hsolver, jac->relaxorder); 1911 PetscCallExternal(HYPRE_BoomerAMGSetInterpType, jac->hsolver, jac->interptype); 1912 PetscCallExternal(HYPRE_BoomerAMGSetAggNumLevels, jac->hsolver, jac->agg_nl); 1913 PetscCallExternal(HYPRE_BoomerAMGSetAggInterpType, jac->hsolver, jac->agg_interptype); 1914 PetscCallExternal(HYPRE_BoomerAMGSetPMaxElmts, jac->hsolver, jac->pmax); 1915 PetscCallExternal(HYPRE_BoomerAMGSetNumPaths, jac->hsolver, jac->agg_num_paths); 1916 PetscCallExternal(HYPRE_BoomerAMGSetRelaxType, jac->hsolver, jac->relaxtype[0]); /* defaults coarse to 9 */ 1917 PetscCallExternal(HYPRE_BoomerAMGSetNumSweeps, jac->hsolver, jac->gridsweeps[0]); /* defaults coarse to 1 */ 1918 PetscCallExternal(HYPRE_BoomerAMGSetMaxCoarseSize, jac->hsolver, jac->maxc); 1919 PetscCallExternal(HYPRE_BoomerAMGSetMinCoarseSize, jac->hsolver, jac->minc); 1920 /* GPU */ 1921 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 1922 PetscCallExternal(HYPRE_BoomerAMGSetKeepTranspose, jac->hsolver, jac->keeptranspose ? 
1 : 0); 1923 PetscCallExternal(HYPRE_BoomerAMGSetRAP2, jac->hsolver, jac->rap2); 1924 PetscCallExternal(HYPRE_BoomerAMGSetModuleRAP2, jac->hsolver, jac->mod_rap2); 1925 #endif 1926 1927 /* AIR */ 1928 #if PETSC_PKG_HYPRE_VERSION_GE(2, 18, 0) 1929 PetscCallExternal(HYPRE_BoomerAMGSetRestriction, jac->hsolver, jac->Rtype); 1930 PetscCallExternal(HYPRE_BoomerAMGSetStrongThresholdR, jac->hsolver, jac->Rstrongthreshold); 1931 PetscCallExternal(HYPRE_BoomerAMGSetFilterThresholdR, jac->hsolver, jac->Rfilterthreshold); 1932 PetscCallExternal(HYPRE_BoomerAMGSetADropTol, jac->hsolver, jac->Adroptol); 1933 PetscCallExternal(HYPRE_BoomerAMGSetADropType, jac->hsolver, jac->Adroptype); 1934 #endif 1935 PetscFunctionReturn(0); 1936 } 1937 PetscCall(PetscStrcmp("ams", jac->hypre_type, &flag)); 1938 if (flag) { 1939 PetscCall(HYPRE_AMSCreate(&jac->hsolver)); 1940 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_AMS; 1941 pc->ops->view = PCView_HYPRE_AMS; 1942 jac->destroy = HYPRE_AMSDestroy; 1943 jac->setup = HYPRE_AMSSetup; 1944 jac->solve = HYPRE_AMSSolve; 1945 jac->coords[0] = NULL; 1946 jac->coords[1] = NULL; 1947 jac->coords[2] = NULL; 1948 jac->interior = NULL; 1949 /* solver parameters: these are borrowed from mfem package, and they are not the default values from HYPRE AMS */ 1950 jac->as_print = 0; 1951 jac->as_max_iter = 1; /* used as a preconditioner */ 1952 jac->as_tol = 0.; /* used as a preconditioner */ 1953 jac->ams_cycle_type = 13; 1954 /* Smoothing options */ 1955 jac->as_relax_type = 2; 1956 jac->as_relax_times = 1; 1957 jac->as_relax_weight = 1.0; 1958 jac->as_omega = 1.0; 1959 /* Vector valued Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 1960 jac->as_amg_alpha_opts[0] = 10; 1961 jac->as_amg_alpha_opts[1] = 1; 1962 jac->as_amg_alpha_opts[2] = 6; 1963 jac->as_amg_alpha_opts[3] = 6; 1964 jac->as_amg_alpha_opts[4] = 4; 1965 jac->as_amg_alpha_theta = 0.25; 1966 /* Scalar Poisson AMG solver parameters: coarsen type, 
agg_levels, relax_type, interp_type, Pmax */ 1967 jac->as_amg_beta_opts[0] = 10; 1968 jac->as_amg_beta_opts[1] = 1; 1969 jac->as_amg_beta_opts[2] = 6; 1970 jac->as_amg_beta_opts[3] = 6; 1971 jac->as_amg_beta_opts[4] = 4; 1972 jac->as_amg_beta_theta = 0.25; 1973 PetscCallExternal(HYPRE_AMSSetPrintLevel, jac->hsolver, jac->as_print); 1974 PetscCallExternal(HYPRE_AMSSetMaxIter, jac->hsolver, jac->as_max_iter); 1975 PetscCallExternal(HYPRE_AMSSetCycleType, jac->hsolver, jac->ams_cycle_type); 1976 PetscCallExternal(HYPRE_AMSSetTol, jac->hsolver, jac->as_tol); 1977 PetscCallExternal(HYPRE_AMSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 1978 PetscCallExternal(HYPRE_AMSSetAlphaAMGOptions, jac->hsolver, jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 1979 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 1980 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 1981 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 1982 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 1983 PetscCallExternal(HYPRE_AMSSetBetaAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], /* AMG coarsen type */ 1984 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 1985 jac->as_amg_beta_opts[2], /* AMG relax_type */ 1986 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 1987 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 1988 /* Zero conductivity */ 1989 jac->ams_beta_is_zero = PETSC_FALSE; 1990 jac->ams_beta_is_zero_part = PETSC_FALSE; 1991 PetscFunctionReturn(0); 1992 } 1993 PetscCall(PetscStrcmp("ads", jac->hypre_type, &flag)); 1994 if (flag) { 1995 PetscCall(HYPRE_ADSCreate(&jac->hsolver)); 1996 pc->ops->setfromoptions = PCSetFromOptions_HYPRE_ADS; 1997 pc->ops->view = PCView_HYPRE_ADS; 1998 jac->destroy = HYPRE_ADSDestroy; 1999 jac->setup = HYPRE_ADSSetup; 2000 jac->solve = HYPRE_ADSSolve; 2001 jac->coords[0] = NULL; 2002 jac->coords[1] = NULL; 2003 jac->coords[2] = NULL; 2004 /* solver parameters: these 
are borrowed from mfem package, and they are not the default values from HYPRE ADS */ 2005 jac->as_print = 0; 2006 jac->as_max_iter = 1; /* used as a preconditioner */ 2007 jac->as_tol = 0.; /* used as a preconditioner */ 2008 jac->ads_cycle_type = 13; 2009 /* Smoothing options */ 2010 jac->as_relax_type = 2; 2011 jac->as_relax_times = 1; 2012 jac->as_relax_weight = 1.0; 2013 jac->as_omega = 1.0; 2014 /* AMS solver parameters: cycle_type, coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2015 jac->ams_cycle_type = 14; 2016 jac->as_amg_alpha_opts[0] = 10; 2017 jac->as_amg_alpha_opts[1] = 1; 2018 jac->as_amg_alpha_opts[2] = 6; 2019 jac->as_amg_alpha_opts[3] = 6; 2020 jac->as_amg_alpha_opts[4] = 4; 2021 jac->as_amg_alpha_theta = 0.25; 2022 /* Vector Poisson AMG solver parameters: coarsen type, agg_levels, relax_type, interp_type, Pmax */ 2023 jac->as_amg_beta_opts[0] = 10; 2024 jac->as_amg_beta_opts[1] = 1; 2025 jac->as_amg_beta_opts[2] = 6; 2026 jac->as_amg_beta_opts[3] = 6; 2027 jac->as_amg_beta_opts[4] = 4; 2028 jac->as_amg_beta_theta = 0.25; 2029 PetscCallExternal(HYPRE_ADSSetPrintLevel, jac->hsolver, jac->as_print); 2030 PetscCallExternal(HYPRE_ADSSetMaxIter, jac->hsolver, jac->as_max_iter); 2031 PetscCallExternal(HYPRE_ADSSetCycleType, jac->hsolver, jac->ams_cycle_type); 2032 PetscCallExternal(HYPRE_ADSSetTol, jac->hsolver, jac->as_tol); 2033 PetscCallExternal(HYPRE_ADSSetSmoothingOptions, jac->hsolver, jac->as_relax_type, jac->as_relax_times, jac->as_relax_weight, jac->as_omega); 2034 PetscCallExternal(HYPRE_ADSSetAMSOptions, jac->hsolver, jac->ams_cycle_type, /* AMG coarsen type */ 2035 jac->as_amg_alpha_opts[0], /* AMG coarsen type */ 2036 jac->as_amg_alpha_opts[1], /* AMG agg_levels */ 2037 jac->as_amg_alpha_opts[2], /* AMG relax_type */ 2038 jac->as_amg_alpha_theta, jac->as_amg_alpha_opts[3], /* AMG interp_type */ 2039 jac->as_amg_alpha_opts[4]); /* AMG Pmax */ 2040 PetscCallExternal(HYPRE_ADSSetAMGOptions, jac->hsolver, jac->as_amg_beta_opts[0], 
/* AMG coarsen type */ 2041 jac->as_amg_beta_opts[1], /* AMG agg_levels */ 2042 jac->as_amg_beta_opts[2], /* AMG relax_type */ 2043 jac->as_amg_beta_theta, jac->as_amg_beta_opts[3], /* AMG interp_type */ 2044 jac->as_amg_beta_opts[4]); /* AMG Pmax */ 2045 PetscFunctionReturn(0); 2046 } 2047 PetscCall(PetscFree(jac->hypre_type)); 2048 2049 jac->hypre_type = NULL; 2050 SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown HYPRE preconditioner %s; Choices are euclid, pilut, parasails, boomeramg, ams", name); 2051 } 2052 2053 /* 2054 It only gets here if the HYPRE type has not been set before the call to 2055 ...SetFromOptions() which actually is most of the time 2056 */ 2057 PetscErrorCode PCSetFromOptions_HYPRE(PC pc, PetscOptionItems *PetscOptionsObject) { 2058 PetscInt indx; 2059 const char *type[] = {"euclid", "pilut", "parasails", "boomeramg", "ams", "ads"}; 2060 PetscBool flg; 2061 2062 PetscFunctionBegin; 2063 PetscOptionsHeadBegin(PetscOptionsObject, "HYPRE preconditioner options"); 2064 PetscCall(PetscOptionsEList("-pc_hypre_type", "HYPRE preconditioner type", "PCHYPRESetType", type, PETSC_STATIC_ARRAY_LENGTH(type), "boomeramg", &indx, &flg)); 2065 if (flg) { 2066 PetscCall(PCHYPRESetType_HYPRE(pc, type[indx])); 2067 } else { 2068 PetscCall(PCHYPRESetType_HYPRE(pc, "boomeramg")); 2069 } 2070 PetscTryTypeMethod(pc, setfromoptions, PetscOptionsObject); 2071 PetscOptionsHeadEnd(); 2072 PetscFunctionReturn(0); 2073 } 2074 2075 /*@C 2076 PCHYPRESetType - Sets which hypre preconditioner you wish to use 2077 2078 Input Parameters: 2079 + pc - the preconditioner context 2080 - name - either euclid, pilut, parasails, boomeramg, ams, ads 2081 2082 Options Database Key: 2083 -pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads 2084 2085 Level: intermediate 2086 2087 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRE` 2088 @*/ 2089 PetscErrorCode PCHYPRESetType(PC pc, const char name[]) { 2090 PetscFunctionBegin; 
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidCharPointer(name, 2);
  PetscTryMethod(pc, "PCHYPRESetType_C", (PC, const char[]), (pc, name));
  PetscFunctionReturn(0);
}

/*@C
   PCHYPREGetType - Gets which hypre preconditioner you are using

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  name - either euclid, pilut, parasails, boomeramg, ams, ads

   Level: intermediate

.seealso: `PCCreate()`, `PCHYPRESetType()`, `PCType`, `PC`, `PCHYPRE`
@*/
PetscErrorCode PCHYPREGetType(PC pc, const char *name[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(name, 2);
  /* forwards to PCHYPREGetType_HYPRE() when pc is a PCHYPRE; a no-op otherwise */
  PetscTryMethod(pc, "PCHYPREGetType_C", (PC, const char *[]), (pc, name));
  PetscFunctionReturn(0);
}

/*@C
   PCMGGalerkinSetMatProductAlgorithm - Set type of SpGEMM for hypre to use on GPUs

   Logically Collective on pc

   Input Parameters:
+  pc - the hypre context
-  type - one of 'cusparse', 'hypre'

   Options Database Key:
.  -pc_mg_galerkin_mat_product_algorithm <cusparse,hypre> - Type of SpGEMM to use in hypre

   Level: intermediate

   Developer Note:
   How the name starts with `PCMG`, should it not be `PCHYPREBoomerAMG`?

.seealso: `PCHYPRE`, `PCMGGalerkinGetMatProductAlgorithm()`
@*/
PetscErrorCode PCMGGalerkinSetMatProductAlgorithm(PC pc, const char name[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGGalerkinSetMatProductAlgorithm_C", (PC, const char[]), (pc, name));
  PetscFunctionReturn(0);
}

/*@C
   PCMGGalerkinGetMatProductAlgorithm - Get type of SpGEMM for hypre to use on GPUs

   Not Collective

   Input Parameter:
.  pc - the multigrid context

   Output Parameter:
.  name - one of 'cusparse', 'hypre'

   Level: intermediate

.seealso: `PCHYPRE`, `PCMGGalerkinSetMatProductAlgorithm()`
@*/
PetscErrorCode PCMGGalerkinGetMatProductAlgorithm(PC pc, const char *name[]) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryMethod(pc, "PCMGGalerkinGetMatProductAlgorithm_C", (PC, const char *[]), (pc, name));
  PetscFunctionReturn(0);
}

/*MC
     PCHYPRE - Allows you to use the matrix element based preconditioners in the LLNL package hypre as PETSc `PC`

   Options Database Keys:
+   -pc_hypre_type - One of euclid, pilut, parasails, boomeramg, ams, ads
.   -pc_hypre_boomeramg_nodal_coarsen <n> - where n is from 1 to 6 (see `HYPRE_BOOMERAMGSetNodal()`)
.   -pc_hypre_boomeramg_vec_interp_variant <v> - where v is from 1 to 3 (see `HYPRE_BoomerAMGSetInterpVecVariant()`)
-   Many others, run with -pc_type hypre -pc_hypre_type XXX -help to see options for the XXX preconditioner

   Level: intermediate

   Notes:
    Apart from pc_hypre_type (for which there is `PCHYPRESetType()`),
          the many hypre options can ONLY be set via the options database (e.g. the command line
          or with PetscOptionsSetValue(), there are no functions to set them)

    The options -pc_hypre_boomeramg_max_iter and -pc_hypre_boomeramg_tol refer to the number of iterations
    (V-cycles) and tolerance that boomeramg does EACH time it is called. So for example, if
    -pc_hypre_boomeramg_max_iter is set to 2 then 2-V-cycles are being used to define the preconditioner
    (-pc_hypre_boomeramg_tol should be set to 0.0 - the default - to strictly use a fixed number of
    iterations per hypre call). -ksp_max_it and -ksp_rtol STILL determine the total number of iterations
    and tolerance for the Krylov solver. For example, if -pc_hypre_boomeramg_max_iter is 2 and -ksp_max_it is 10
    then AT MOST twenty V-cycles of boomeramg will be called.
    Note that the option -pc_hypre_boomeramg_relax_type_all defaults to symmetric relaxation
    (symmetric-SOR/Jacobi), which is required for Krylov solvers like CG that expect symmetry.
    Otherwise, you may want to use -pc_hypre_boomeramg_relax_type_all SOR/Jacobi.
    If you wish to use BoomerAMG WITHOUT a Krylov method use -ksp_type richardson NOT -ksp_type preonly
    and use -ksp_max_it to control the number of V-cycles.
    (see the PETSc FAQ.html at the PETSc website under the Documentation tab).

    2007-02-03 Using HYPRE-1.11.1b, the routine HYPRE_BoomerAMGSolveT and the option
    -pc_hypre_parasails_reuse were failing with SIGSEGV. Dalcin L.

    `MatSetNearNullSpace()` - if you provide a near null space to your matrix it is ignored by hypre UNLESS you also use
    the following two options: -pc_hypre_boomeramg_nodal_coarsen <n> and -pc_hypre_boomeramg_vec_interp_variant <v>

    See `PCPFMG`, `PCSMG`, and `PCSYSPFMG` for access to hypre's other (nonalgebraic) multigrid solvers

    For `PCHYPRE` type of ams or ads auxiliary data must be provided to the preconditioner with `PCHYPRESetDiscreteGradient()`,
    `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
    `PCHYPREAMSSetInteriorNodes()`

    PETSc provides its own geometric and algebraic multigrid solvers `PCMG` and `PCGAMG`, also see `PCHMG` which is useful for certain multicomponent problems

   GPU Notes:
    To configure hypre BoomerAMG so that it can utilize NVIDIA GPUs run ./configure --download-hypre --with-cuda
    Then pass `VECCUDA` vectors and `MATAIJCUSPARSE` matrices to the solvers and PETSc will automatically utilize hypre's GPU solvers.

    To configure hypre BoomerAMG so that it can utilize AMD GPUs run ./configure --download-hypre --with-hip
    Then pass `VECHIP` vectors to the solvers and PETSc will automatically utilize hypre's GPU solvers.
.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCHYPRESetType()`, `PCPFMG`, `PCGAMG`, `PCSYSPFMG`, `PCSMG`, `PCHYPRESetDiscreteGradient()`,
          `PCHYPRESetDiscreteCurl()`, `PCHYPRESetInterpolations()`, `PCHYPRESetAlphaPoissonMatrix()`, `PCHYPRESetBetaPoissonMatrix()`, `PCHYPRESetEdgeConstantVectors()`,
          `PCHYPREAMSSetInteriorNodes()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_HYPRE(PC pc) {
  PC_HYPRE *jac;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(pc, &jac));

  /* install the generic PC entry points; the hypre-type-specific ones are filled in by PCHYPRESetType_HYPRE() */
  pc->data                = jac;
  pc->ops->reset          = PCReset_HYPRE;
  pc->ops->destroy        = PCDestroy_HYPRE;
  pc->ops->setfromoptions = PCSetFromOptions_HYPRE;
  pc->ops->setup          = PCSetUp_HYPRE;
  pc->ops->apply          = PCApply_HYPRE;
  jac->comm_hypre         = MPI_COMM_NULL;
  /* expose the PCHYPRE-specific API through composed functions so the PCHYPRESetXXX() wrappers can find them */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetType_C", PCHYPRESetType_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREGetType_C", PCHYPREGetType_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteGradient_C", PCHYPRESetDiscreteGradient_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetDiscreteCurl_C", PCHYPRESetDiscreteCurl_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetInterpolations_C", PCHYPRESetInterpolations_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetEdgeConstantVectors_C", PCHYPRESetEdgeConstantVectors_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPREAMSSetInteriorNodes_C", PCHYPREAMSSetInteriorNodes_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCHYPRESetPoissonMatrix_C", PCHYPRESetPoissonMatrix_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinSetMatProductAlgorithm_C", PCMGGalerkinSetMatProductAlgorithm_HYPRE_BoomerAMG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGalerkinGetMatProductAlgorithm_C", PCMGGalerkinGetMatProductAlgorithm_HYPRE_BoomerAMG));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  #if defined(HYPRE_USING_HIP)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  #endif
  #if defined(HYPRE_USING_CUDA)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  #endif
#endif
  PetscFunctionReturn(0);
}

/* Private context for the hypre struct-grid PFMG preconditioner */
typedef struct {
  MPI_Comm           hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
  HYPRE_StructSolver hsolver;

  /* keep copy of PFMG options used so may view them */
  PetscInt  its;
  double    tol;
  PetscInt  relax_type;
  PetscInt  rap_type;
  PetscInt  num_pre_relax, num_post_relax;
  PetscInt  max_levels;
  PetscInt  skip_relax;
  PetscBool print_statistics;
} PC_PFMG;

PetscErrorCode PCDestroy_PFMG(PC pc) {
  PC_PFMG *ex = (PC_PFMG *)pc->data;

  PetscFunctionBegin;
  if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver);
  /* return the duplicated communicator obtained in PCCreate_PFMG() */
  PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

/* human-readable names for the PFMG enum options, indexed by the hypre integer value */
static const char *PFMGRelaxType[] = {"Jacobi", "Weighted-Jacobi", "symmetric-Red/Black-Gauss-Seidel", "Red/Black-Gauss-Seidel"};
static const char *PFMGRAPType[]   = {"Galerkin", "non-Galerkin"};

PetscErrorCode PCView_PFMG(PC pc, PetscViewer viewer) {
  PetscBool iascii;
  PC_PFMG  *ex = (PC_PFMG *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "  HYPRE PFMG preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    max iterations %" PetscInt_FMT "\n", ex->its));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    tolerance %g\n", ex->tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    relax type %s\n", PFMGRelaxType[ex->relax_type]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    RAP type %s\n", PFMGRAPType[ex->rap_type]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    max levels %" PetscInt_FMT "\n", ex->max_levels));
    PetscCall(PetscViewerASCIIPrintf(viewer, "    skip relax %" PetscInt_FMT "\n", ex->skip_relax));
  }
  PetscFunctionReturn(0);
}

PetscErrorCode PCSetFromOptions_PFMG(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_PFMG *ex = (PC_PFMG *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "PFMG options");
  PetscCall(PetscOptionsBool("-pc_pfmg_print_statistics", "Print statistics", "HYPRE_StructPFMGSetPrintLevel", ex->print_statistics, &ex->print_statistics, NULL));
  /* each option is both cached (for PCView and for re-applying in PCSetUp_PFMG) and pushed to the hypre solver */
  PetscCall(PetscOptionsInt("-pc_pfmg_its", "Number of iterations of PFMG to use as preconditioner", "HYPRE_StructPFMGSetMaxIter", ex->its, &ex->its, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its);
  PetscCall(PetscOptionsInt("-pc_pfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
  PetscCall(PetscOptionsInt("-pc_pfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);

  PetscCall(PetscOptionsInt("-pc_pfmg_max_levels", "Max Levels for MG hierarchy", "HYPRE_StructPFMGSetMaxLevels", ex->max_levels, &ex->max_levels,
NULL));
  PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels);

  PetscCall(PetscOptionsReal("-pc_pfmg_tol", "Tolerance of PFMG", "HYPRE_StructPFMGSetTol", ex->tol, &ex->tol, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol);
  PetscCall(PetscOptionsEList("-pc_pfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_StructPFMGSetRelaxType", PFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(PFMGRelaxType), PFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type);
  PetscCall(PetscOptionsEList("-pc_pfmg_rap_type", "RAP type", "HYPRE_StructPFMGSetRAPType", PFMGRAPType, PETSC_STATIC_ARRAY_LENGTH(PFMGRAPType), PFMGRAPType[ex->rap_type], &ex->rap_type, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type);
  PetscCall(PetscOptionsInt("-pc_pfmg_skip_relax", "Skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations when the underlying problem is isotropic", "HYPRE_StructPFMGSetSkipRelax", ex->skip_relax, &ex->skip_relax, NULL));
  PetscCallExternal(HYPRE_StructPFMGSetSkipRelax, ex->hsolver, ex->skip_relax);
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

/* Apply the PFMG preconditioner: copy x into the hypre struct vector, run one PFMG solve, copy the result into y */
PetscErrorCode PCApply_PFMG(PC pc, Vec x, Vec y) {
  PC_PFMG           *ex = (PC_PFMG *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;
  PetscInt           ilower[3], iupper[3];
  HYPRE_Int          hlower[3], hupper[3];
  Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)(pc->pmat->data);

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  /* DMDAGetCorners returns widths in iupper; convert to inclusive upper corners */
  PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
  /* when HYPRE_MIXEDINT is defined HYPRE_Int is 32-bit while PetscInt may be 64-bit, hence the explicit casts below */
  iupper[0] += ilower[0] - 1;
  iupper[1] += ilower[1] - 1;
  iupper[2] += ilower[2] - 1;
  hlower[0] = (HYPRE_Int)ilower[0];
  hlower[1] = (HYPRE_Int)ilower[1];
  hlower[2] = (HYPRE_Int)ilower[2];
  hupper[0] = (HYPRE_Int)iupper[0];
  hupper[1] = (HYPRE_Int)iupper[1];
  hupper[2] = (HYPRE_Int)iupper[2];

  /* copy x values over to hypre */
  PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0);
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx);
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb);
  PetscCallExternal(HYPRE_StructPFMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx);

  /* copy solution values back to PETSc */
  PetscCall(VecGetArray(y, &yy));
  PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy);
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(0);
}

static PetscErrorCode
PCApplyRichardson_PFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) { 2371 PC_PFMG *jac = (PC_PFMG *)pc->data; 2372 HYPRE_Int oits; 2373 2374 PetscFunctionBegin; 2375 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2376 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, its * jac->its); 2377 PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, rtol); 2378 2379 PetscCall(PCApply_PFMG(pc, b, y)); 2380 PetscCallExternal(HYPRE_StructPFMGGetNumIterations, jac->hsolver, &oits); 2381 *outits = oits; 2382 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2383 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2384 PetscCallExternal(HYPRE_StructPFMGSetTol, jac->hsolver, jac->tol); 2385 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, jac->hsolver, jac->its); 2386 PetscFunctionReturn(0); 2387 } 2388 2389 PetscErrorCode PCSetUp_PFMG(PC pc) { 2390 PC_PFMG *ex = (PC_PFMG *)pc->data; 2391 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 2392 PetscBool flg; 2393 2394 PetscFunctionBegin; 2395 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg)); 2396 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner"); 2397 2398 /* create the hypre solver object and set its information */ 2399 if (ex->hsolver) PetscCallExternal(HYPRE_StructPFMGDestroy, ex->hsolver); 2400 PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver); 2401 2402 // Print Hypre statistics about the solve process 2403 if (ex->print_statistics) PetscCallExternal(HYPRE_StructPFMGSetPrintLevel, ex->hsolver, 3); 2404 2405 // The hypre options must be repeated here because the StructPFMG was destroyed and recreated 2406 PetscCallExternal(HYPRE_StructPFMGSetMaxIter, ex->hsolver, ex->its); 2407 PetscCallExternal(HYPRE_StructPFMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax); 
2408 PetscCallExternal(HYPRE_StructPFMGSetNumPostRelax, ex->hsolver, ex->num_post_relax); 2409 PetscCallExternal(HYPRE_StructPFMGSetMaxLevels, ex->hsolver, ex->max_levels); 2410 PetscCallExternal(HYPRE_StructPFMGSetTol, ex->hsolver, ex->tol); 2411 PetscCallExternal(HYPRE_StructPFMGSetRelaxType, ex->hsolver, ex->relax_type); 2412 PetscCallExternal(HYPRE_StructPFMGSetRAPType, ex->hsolver, ex->rap_type); 2413 2414 PetscCallExternal(HYPRE_StructPFMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2415 PetscCallExternal(HYPRE_StructPFMGSetZeroGuess, ex->hsolver); 2416 PetscFunctionReturn(0); 2417 } 2418 2419 /*MC 2420 PCPFMG - the hypre PFMG multigrid solver 2421 2422 Options Database Keys: 2423 + -pc_pfmg_its <its> - number of iterations of PFMG to use as preconditioner 2424 . -pc_pfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid solve 2425 . -pc_pfmg_num_post_relax <steps> - number of smoothing steps after coarse grid solve 2426 . -pc_pfmg_tol <tol> - tolerance of PFMG 2427 . -pc_pfmg_relax_type - relaxation type for the up and down cycles, one of Jacobi,Weighted-Jacobi,symmetric-Red/Black-Gauss-Seidel,Red/Black-Gauss-Seidel 2428 . -pc_pfmg_rap_type - type of coarse matrix generation, one of Galerkin,non-Galerkin 2429 - -pc_pfmg_skip_relax - skip relaxation on certain grids for isotropic problems. This can greatly improve efficiency by eliminating unnecessary relaxations 2430 when the underlying problem is isotropic, one of 0,1 2431 2432 Level: advanced 2433 2434 Notes: 2435 This is for CELL-centered descretizations 2436 2437 See `PCSYSPFMG` for a version suitable for systems of PDEs, and `PCSMG` 2438 2439 See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver 2440 2441 This must be used with the `MATHYPRESTRUCT` matrix type. 2442 2443 This provides only some of the functionality of PFMG, it supports only one block per process defined by a PETSc `DMDA`. 
2444 2445 .seealso: `PCMG`, `MATHYPRESTRUCT`, `PCHYPRE`, `PCGAMG`, `PCSYSPFMG`, `PCSMG` 2446 M*/ 2447 2448 PETSC_EXTERN PetscErrorCode PCCreate_PFMG(PC pc) { 2449 PC_PFMG *ex; 2450 2451 PetscFunctionBegin; 2452 PetscCall(PetscNew(&ex)); 2453 pc->data = ex; 2454 2455 ex->its = 1; 2456 ex->tol = 1.e-8; 2457 ex->relax_type = 1; 2458 ex->rap_type = 0; 2459 ex->num_pre_relax = 1; 2460 ex->num_post_relax = 1; 2461 ex->max_levels = 0; 2462 ex->skip_relax = 0; 2463 ex->print_statistics = PETSC_FALSE; 2464 2465 pc->ops->setfromoptions = PCSetFromOptions_PFMG; 2466 pc->ops->view = PCView_PFMG; 2467 pc->ops->destroy = PCDestroy_PFMG; 2468 pc->ops->apply = PCApply_PFMG; 2469 pc->ops->applyrichardson = PCApplyRichardson_PFMG; 2470 pc->ops->setup = PCSetUp_PFMG; 2471 2472 PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2473 PetscCallExternal(HYPRE_StructPFMGCreate, ex->hcomm, &ex->hsolver); 2474 PetscFunctionReturn(0); 2475 } 2476 2477 /* we know we are working with a HYPRE_SStructMatrix */ 2478 typedef struct { 2479 MPI_Comm hcomm; /* does not share comm with HYPRE_SStructMatrix because need to create solver before getting matrix */ 2480 HYPRE_SStructSolver ss_solver; 2481 2482 /* keep copy of SYSPFMG options used so may view them */ 2483 PetscInt its; 2484 double tol; 2485 PetscInt relax_type; 2486 PetscInt num_pre_relax, num_post_relax; 2487 } PC_SysPFMG; 2488 2489 PetscErrorCode PCDestroy_SysPFMG(PC pc) { 2490 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2491 2492 PetscFunctionBegin; 2493 if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver); 2494 PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm)); 2495 PetscCall(PetscFree(pc->data)); 2496 PetscFunctionReturn(0); 2497 } 2498 2499 static const char *SysPFMGRelaxType[] = {"Weighted-Jacobi", "Red/Black-Gauss-Seidel"}; 2500 2501 PetscErrorCode PCView_SysPFMG(PC pc, PetscViewer viewer) { 2502 PetscBool iascii; 2503 PC_SysPFMG *ex = (PC_SysPFMG 
*)pc->data; 2504 2505 PetscFunctionBegin; 2506 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 2507 if (iascii) { 2508 PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SysPFMG preconditioning\n")); 2509 PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its)); 2510 PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol)); 2511 PetscCall(PetscViewerASCIIPrintf(viewer, " relax type %s\n", PFMGRelaxType[ex->relax_type])); 2512 PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax)); 2513 } 2514 PetscFunctionReturn(0); 2515 } 2516 2517 PetscErrorCode PCSetFromOptions_SysPFMG(PC pc, PetscOptionItems *PetscOptionsObject) { 2518 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2519 PetscBool flg = PETSC_FALSE; 2520 2521 PetscFunctionBegin; 2522 PetscOptionsHeadBegin(PetscOptionsObject, "SysPFMG options"); 2523 PetscCall(PetscOptionsBool("-pc_syspfmg_print_statistics", "Print statistics", "HYPRE_SStructSysPFMGSetPrintLevel", flg, &flg, NULL)); 2524 if (flg) PetscCallExternal(HYPRE_SStructSysPFMGSetPrintLevel, ex->ss_solver, 3); 2525 PetscCall(PetscOptionsInt("-pc_syspfmg_its", "Number of iterations of SysPFMG to use as preconditioner", "HYPRE_SStructSysPFMGSetMaxIter", ex->its, &ex->its, NULL)); 2526 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, ex->ss_solver, ex->its); 2527 PetscCall(PetscOptionsInt("-pc_syspfmg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_SStructSysPFMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL)); 2528 PetscCallExternal(HYPRE_SStructSysPFMGSetNumPreRelax, ex->ss_solver, ex->num_pre_relax); 2529 PetscCall(PetscOptionsInt("-pc_syspfmg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_SStructSysPFMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL)); 2530 PetscCallExternal(HYPRE_SStructSysPFMGSetNumPostRelax, 
ex->ss_solver, ex->num_post_relax);

  PetscCall(PetscOptionsReal("-pc_syspfmg_tol", "Tolerance of SysPFMG", "HYPRE_SStructSysPFMGSetTol", ex->tol, &ex->tol, NULL));
  PetscCallExternal(HYPRE_SStructSysPFMGSetTol, ex->ss_solver, ex->tol);
  PetscCall(PetscOptionsEList("-pc_syspfmg_relax_type", "Relax type for the up and down cycles", "HYPRE_SStructSysPFMGSetRelaxType", SysPFMGRelaxType, PETSC_STATIC_ARRAY_LENGTH(SysPFMGRelaxType), SysPFMGRelaxType[ex->relax_type], &ex->relax_type, NULL));
  PetscCallExternal(HYPRE_SStructSysPFMGSetRelaxType, ex->ss_solver, ex->relax_type);
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

/* Apply the SysPFMG preconditioner: copy x into the hypre semi-struct vector (reordering nodal data to
   hypre's variable ordering when needed), run one SysPFMG solve, copy the result into y */
PetscErrorCode PCApply_SysPFMG(PC pc, Vec x, Vec y) {
  PC_SysPFMG        *ex = (PC_SysPFMG *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;
  PetscInt           ilower[3], iupper[3];
  HYPRE_Int          hlower[3], hupper[3];
  Mat_HYPRESStruct  *mx       = (Mat_HYPRESStruct *)(pc->pmat->data);
  PetscInt           ordering = mx->dofs_order;
  PetscInt           nvars    = mx->nvars;
  PetscInt           part     = 0;
  PetscInt           size;
  PetscInt           i;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  /* DMDAGetCorners returns widths in iupper; convert to inclusive upper corners */
  PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
  /* when HYPRE_MIXEDINT is defined HYPRE_Int is 32-bit while PetscInt may be 64-bit, hence the explicit casts below */
  iupper[0] += ilower[0] - 1;
  iupper[1] += ilower[1] - 1;
  iupper[2] += ilower[2] - 1;
  hlower[0] = (HYPRE_Int)ilower[0];
  hlower[1] = (HYPRE_Int)ilower[1];
  hlower[2] = (HYPRE_Int)ilower[2];
  hupper[0] = (HYPRE_Int)iupper[0];
  hupper[1] = (HYPRE_Int)iupper[1];
  hupper[2] = (HYPRE_Int)iupper[2];

  /* number of local grid points per variable */
  size = 1;
  for (i = 0; i < 3; i++) size *= (iupper[i] - ilower[i] + 1);

  /* copy x values over to hypre for variable ordering */
  if (ordering) {
    PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
    PetscCall(VecGetArrayRead(x, &xx));
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(xx + (size * i)));
    PetscCall(VecRestoreArrayRead(x, &xx));
    PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
    /* NOTE(review): this matvec overwrites ss_x with A*b immediately before the solve; it looks suspicious
       (the nodal-ordering branch below has no equivalent) — confirm whether it is intentional */
    PetscCallExternal(HYPRE_SStructMatrixMatvec, 1.0, mx->ss_mat, mx->ss_b, 0.0, mx->ss_x);
    PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

    /* copy solution values back to PETSc */
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(yy + (size * i)));
    PetscCall(VecRestoreArray(y, &yy));
  } else { /* nodal ordering must be mapped to variable ordering for sys_pfmg */
    PetscScalar *z;
    PetscInt     j, k;

    PetscCall(PetscMalloc1(nvars * size, &z));
    PetscCallExternal(HYPRE_SStructVectorSetConstantValues, mx->ss_b, 0.0);
    PetscCall(VecGetArrayRead(x, &xx));

    /* transform nodal to hypre's variable ordering for sys_pfmg */
    for (i = 0; i < size; i++) {
      k = i * nvars;
      for (j = 0; j < nvars; j++) z[j * size + i] = xx[k + j];
    }
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorSetBoxValues, mx->ss_b, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
    PetscCall(VecRestoreArrayRead(x, &xx));
    PetscCallExternal(HYPRE_SStructVectorAssemble, mx->ss_b);
    PetscCallExternal(HYPRE_SStructSysPFMGSolve, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x);

    /* copy solution values back to PETSc */
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < nvars; i++) PetscCallExternal(HYPRE_SStructVectorGetBoxValues, mx->ss_x, part, hlower, hupper, i, (HYPRE_Complex *)(z + (size * i)));
    /* transform hypre's variable ordering for sys_pfmg to nodal ordering */
    for (i = 0; i < size; i++) {
      k = i * nvars;
      for (j = 0; j < nvars; j++) yy[k +
j] = z[j * size + i]; 2609 } 2610 PetscCall(VecRestoreArray(y, &yy)); 2611 PetscCall(PetscFree(z)); 2612 } 2613 PetscFunctionReturn(0); 2614 } 2615 2616 static PetscErrorCode PCApplyRichardson_SysPFMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) { 2617 PC_SysPFMG *jac = (PC_SysPFMG *)pc->data; 2618 HYPRE_Int oits; 2619 2620 PetscFunctionBegin; 2621 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2622 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, its * jac->its); 2623 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, rtol); 2624 PetscCall(PCApply_SysPFMG(pc, b, y)); 2625 PetscCallExternal(HYPRE_SStructSysPFMGGetNumIterations, jac->ss_solver, &oits); 2626 *outits = oits; 2627 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2628 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2629 PetscCallExternal(HYPRE_SStructSysPFMGSetTol, jac->ss_solver, jac->tol); 2630 PetscCallExternal(HYPRE_SStructSysPFMGSetMaxIter, jac->ss_solver, jac->its); 2631 PetscFunctionReturn(0); 2632 } 2633 2634 PetscErrorCode PCSetUp_SysPFMG(PC pc) { 2635 PC_SysPFMG *ex = (PC_SysPFMG *)pc->data; 2636 Mat_HYPRESStruct *mx = (Mat_HYPRESStruct *)(pc->pmat->data); 2637 PetscBool flg; 2638 2639 PetscFunctionBegin; 2640 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESSTRUCT, &flg)); 2641 PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESSTRUCT with this preconditioner"); 2642 2643 /* create the hypre sstruct solver object and set its information */ 2644 if (ex->ss_solver) PetscCallExternal(HYPRE_SStructSysPFMGDestroy, ex->ss_solver); 2645 PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver); 2646 PetscCallExternal(HYPRE_SStructSysPFMGSetZeroGuess, ex->ss_solver); 2647 PetscCallExternal(HYPRE_SStructSysPFMGSetup, ex->ss_solver, mx->ss_mat, mx->ss_b, mx->ss_x); 2648 
PetscFunctionReturn(0); 2649 } 2650 2651 /*MC 2652 PCSYSPFMG - the hypre SysPFMG multigrid solver 2653 2654 Level: advanced 2655 2656 Options Database Keys: 2657 + -pc_syspfmg_its <its> - number of iterations of SysPFMG to use as preconditioner 2658 . -pc_syspfmg_num_pre_relax <steps> - number of smoothing steps before coarse grid 2659 . -pc_syspfmg_num_post_relax <steps> - number of smoothing steps after coarse grid 2660 . -pc_syspfmg_tol <tol> - tolerance of SysPFMG 2661 - -pc_syspfmg_relax_type <Weighted-Jacobi,Red/Black-Gauss-Seidel> - relaxation type for the up and down cycles 2662 2663 Notes: 2664 See `PCPFMG` for hypre's PFMG that works for a scalar PDE and `PCSMG` 2665 2666 See `PCHYPRE` for hypre's BoomerAMG algebraic multigrid solver 2667 2668 This is for CELL-centered descretizations 2669 2670 This must be used with the `MATHYPRESSTRUCT` matrix type. 2671 2672 This does not give access to all the functionality of hypres SysPFMG, it supports only one part, and one block per process defined by a PETSc `DMDA`. 

.seealso: `PCMG`, `MATHYPRESSTRUCT`, `PCPFMG`, `PCHYPRE`, `PCGAMG`, `PCSMG`
M*/

/*
   Create the SysPFMG preconditioner context: allocate it, set defaults,
   install the function table, and create the hypre solver object immediately
   so that options can be applied before the operator is available.
*/
PETSC_EXTERN PetscErrorCode PCCreate_SysPFMG(PC pc) {
  PC_SysPFMG *ex;

  PetscFunctionBegin;
  PetscCall(PetscNew(&ex));
  pc->data = ex;

  /* defaults: a single cycle per application with one pre- and post-relax sweep */
  ex->its            = 1;
  ex->tol            = 1.e-8;
  ex->relax_type     = 1;
  ex->num_pre_relax  = 1;
  ex->num_post_relax = 1;

  pc->ops->setfromoptions  = PCSetFromOptions_SysPFMG;
  pc->ops->view            = PCView_SysPFMG;
  pc->ops->destroy         = PCDestroy_SysPFMG;
  pc->ops->apply           = PCApply_SysPFMG;
  pc->ops->applyrichardson = PCApplyRichardson_SysPFMG;
  pc->ops->setup           = PCSetUp_SysPFMG;

  /* take a reference-counted duplicate communicator for hypre's use */
  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
  PetscCallExternal(HYPRE_SStructSysPFMGCreate, ex->hcomm, &ex->ss_solver);
  PetscFunctionReturn(0);
}

/* PC SMG */
typedef struct {
  MPI_Comm           hcomm; /* does not share comm with HYPRE_StructMatrix because need to create solver before getting matrix */
  HYPRE_StructSolver hsolver;
  PetscInt           its; /* keep copy of SMG options used so may view them */
  double             tol;
  PetscBool          print_statistics; /* NOTE(review): appears unused in the visible code -- no option sets it */
  PetscInt           num_pre_relax, num_post_relax;
} PC_SMG;

/* Destroy the SMG context: tear down the hypre solver and return the duplicated communicator. */
PetscErrorCode PCDestroy_SMG(PC pc) {
  PC_SMG *ex = (PC_SMG *)pc->data;

  PetscFunctionBegin;
  if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, ex->hsolver);
  PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

/* Print the SMG settings (iterations, tolerance, relax counts) to an ASCII viewer. */
PetscErrorCode PCView_SMG(PC pc, PetscViewer viewer) {
  PetscBool iascii;
  PC_SMG   *ex = (PC_SMG *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " HYPRE SMG preconditioning\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " max iterations %" PetscInt_FMT "\n", ex->its));
    PetscCall(PetscViewerASCIIPrintf(viewer, " tolerance %g\n", ex->tol));
    PetscCall(PetscViewerASCIIPrintf(viewer, " number pre-relax %" PetscInt_FMT " post-relax %" PetscInt_FMT "\n", ex->num_pre_relax, ex->num_post_relax));
  }
  PetscFunctionReturn(0);
}

/*
   Process the options database for SMG.

   Values are only recorded in the context here; they are applied to the hypre
   solver in PCSetUp_SMG, because that is where the solver object is
   (re)created.
*/
PetscErrorCode PCSetFromOptions_SMG(PC pc, PetscOptionItems *PetscOptionsObject) {
  PC_SMG *ex = (PC_SMG *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "SMG options");

  PetscCall(PetscOptionsInt("-pc_smg_its", "Number of iterations of SMG to use as preconditioner", "HYPRE_StructSMGSetMaxIter", ex->its, &ex->its, NULL));
  PetscCall(PetscOptionsInt("-pc_smg_num_pre_relax", "Number of smoothing steps before coarse grid", "HYPRE_StructSMGSetNumPreRelax", ex->num_pre_relax, &ex->num_pre_relax, NULL));
  PetscCall(PetscOptionsInt("-pc_smg_num_post_relax", "Number of smoothing steps after coarse grid", "HYPRE_StructSMGSetNumPostRelax", ex->num_post_relax, &ex->num_post_relax, NULL));
  PetscCall(PetscOptionsReal("-pc_smg_tol", "Tolerance of SMG", "HYPRE_StructSMGSetTol", ex->tol, &ex->tol, NULL));

  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

/*
   Apply the SMG preconditioner: copy x into hypre's structured RHS vector,
   solve, and copy the solution back into y.
*/
PetscErrorCode PCApply_SMG(PC pc, Vec x, Vec y) {
  PC_SMG            *ex = (PC_SMG *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;
  PetscInt           ilower[3], iupper[3];
  HYPRE_Int          hlower[3], hupper[3];
  Mat_HYPREStruct   *mx = (Mat_HYPREStruct *)(pc->pmat->data);

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(hypreCitation, &cite));
  PetscCall(DMDAGetCorners(mx->da, &ilower[0], &ilower[1], &ilower[2], &iupper[0], &iupper[1], &iupper[2]));
  /* convert PETSc corner/width to hypre's inclusive lower/upper box bounds;
     explicit casts below because with HYPRE_MIXEDINT HYPRE_Int is 32-bit and
     may be narrower than PetscInt */
  iupper[0] += ilower[0] - 1;
  iupper[1] += ilower[1] - 1;
  iupper[2] += ilower[2] - 1;
  hlower[0] = (HYPRE_Int)ilower[0];
  hlower[1] = (HYPRE_Int)ilower[1];
2769 hlower[2] = (HYPRE_Int)ilower[2]; 2770 hupper[0] = (HYPRE_Int)iupper[0]; 2771 hupper[1] = (HYPRE_Int)iupper[1]; 2772 hupper[2] = (HYPRE_Int)iupper[2]; 2773 2774 /* copy x values over to hypre */ 2775 PetscCallExternal(HYPRE_StructVectorSetConstantValues, mx->hb, 0.0); 2776 PetscCall(VecGetArrayRead(x, &xx)); 2777 PetscCallExternal(HYPRE_StructVectorSetBoxValues, mx->hb, hlower, hupper, (HYPRE_Complex *)xx); 2778 PetscCall(VecRestoreArrayRead(x, &xx)); 2779 PetscCallExternal(HYPRE_StructVectorAssemble, mx->hb); 2780 PetscCallExternal(HYPRE_StructSMGSolve, ex->hsolver, mx->hmat, mx->hb, mx->hx); 2781 2782 /* copy solution values back to PETSc */ 2783 PetscCall(VecGetArray(y, &yy)); 2784 PetscCallExternal(HYPRE_StructVectorGetBoxValues, mx->hx, hlower, hupper, (HYPRE_Complex *)yy); 2785 PetscCall(VecRestoreArray(y, &yy)); 2786 PetscFunctionReturn(0); 2787 } 2788 2789 static PetscErrorCode PCApplyRichardson_SMG(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) { 2790 PC_SMG *jac = (PC_SMG *)pc->data; 2791 HYPRE_Int oits; 2792 2793 PetscFunctionBegin; 2794 PetscCall(PetscCitationsRegister(hypreCitation, &cite)); 2795 PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, its * jac->its); 2796 PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, rtol); 2797 2798 PetscCall(PCApply_SMG(pc, b, y)); 2799 PetscCallExternal(HYPRE_StructSMGGetNumIterations, jac->hsolver, &oits); 2800 *outits = oits; 2801 if (oits == its) *reason = PCRICHARDSON_CONVERGED_ITS; 2802 else *reason = PCRICHARDSON_CONVERGED_RTOL; 2803 PetscCallExternal(HYPRE_StructSMGSetTol, jac->hsolver, jac->tol); 2804 PetscCallExternal(HYPRE_StructSMGSetMaxIter, jac->hsolver, jac->its); 2805 PetscFunctionReturn(0); 2806 } 2807 2808 PetscErrorCode PCSetUp_SMG(PC pc) { 2809 PetscInt i, dim; 2810 PC_SMG *ex = (PC_SMG *)pc->data; 2811 Mat_HYPREStruct *mx = (Mat_HYPREStruct *)(pc->pmat->data); 
  PetscBool       flg;
  DMBoundaryType  p[3];
  PetscInt        M[3];

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATHYPRESTRUCT, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use MATHYPRESTRUCT with this preconditioner");

  PetscCall(DMDAGetInfo(mx->da, &dim, &M[0], &M[1], &M[2], 0, 0, 0, 0, 0, &p[0], &p[1], &p[2], 0));
  // Check if power of 2 in periodic directions
  for (i = 0; i < dim; i++) {
    /* M[i] & (M[i] - 1) is zero exactly when M[i] is a power of two */
    if (((M[i] & (M[i] - 1)) != 0) && (p[i] == DM_BOUNDARY_PERIODIC)) {
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "With SMG, the number of points in a periodic direction must be a power of 2, but is here %" PetscInt_FMT ".", M[i]);
    }
  }

  /* create the hypre solver object and set its information */
  if (ex->hsolver) PetscCallExternal(HYPRE_StructSMGDestroy, (ex->hsolver));
  PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
  // The hypre options must be set here and not in SetFromOptions because it is created here!
  PetscCallExternal(HYPRE_StructSMGSetMaxIter, ex->hsolver, ex->its);
  PetscCallExternal(HYPRE_StructSMGSetNumPreRelax, ex->hsolver, ex->num_pre_relax);
  PetscCallExternal(HYPRE_StructSMGSetNumPostRelax, ex->hsolver, ex->num_post_relax);
  PetscCallExternal(HYPRE_StructSMGSetTol, ex->hsolver, ex->tol);

  /* NOTE(review): Setup is called before SetZeroGuess here, the reverse of the
     order used in PCSetUp_SysPFMG above -- confirm hypre accepts either order */
  PetscCallExternal(HYPRE_StructSMGSetup, ex->hsolver, mx->hmat, mx->hb, mx->hx);
  PetscCallExternal(HYPRE_StructSMGSetZeroGuess, ex->hsolver);
  PetscFunctionReturn(0);
}

/*MC
     PCSMG - the hypre (structured grid) SMG multigrid solver

   Level: advanced

   Options Database Keys:
+  -pc_smg_its <its> - number of iterations of SMG to use as preconditioner
.  -pc_smg_num_pre_relax <steps> - number of smoothing steps before coarse grid
.  -pc_smg_num_post_relax <steps> - number of smoothing steps after coarse grid
-  -pc_smg_tol <tol> - tolerance of SMG

   Notes:
   This is for CELL-centered discretizations

   This must be used with the `MATHYPRESTRUCT` `MatType`.

   This does not provide all the functionality of hypre's SMG solver, it supports only one block per process defined by a PETSc `DMDA`.

   See `PCSYSPFMG`, `PCSMG`, `PCPFMG`, and `PCHYPRE` for access to hypre's other preconditioners

.seealso: `PCMG`, `MATHYPRESTRUCT`, `PCPFMG`, `PCSYSPFMG`, `PCHYPRE`, `PCGAMG`
M*/

/*
   Create the SMG preconditioner context: allocate it, set defaults, install
   the function table, and create the hypre solver object immediately so that
   options can be applied before the operator is available.
*/
PETSC_EXTERN PetscErrorCode PCCreate_SMG(PC pc) {
  PC_SMG *ex;

  PetscFunctionBegin;
  PetscCall(PetscNew(&ex));
  pc->data = ex;

  /* defaults: a single cycle per application with one pre- and post-relax sweep */
  ex->its            = 1;
  ex->tol            = 1.e-8;
  ex->num_pre_relax  = 1;
  ex->num_post_relax = 1;

  pc->ops->setfromoptions  = PCSetFromOptions_SMG;
  pc->ops->view            = PCView_SMG;
  pc->ops->destroy         = PCDestroy_SMG;
  pc->ops->apply           = PCApply_SMG;
  pc->ops->applyrichardson = PCApplyRichardson_SMG;
  pc->ops->setup           = PCSetUp_SMG;

  /* take a reference-counted duplicate communicator for hypre's use */
  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)pc), &ex->hcomm));
  PetscCallExternal(HYPRE_StructSMGCreate, ex->hcomm, &ex->hsolver);
  PetscFunctionReturn(0);
}