/*
 Defines the multigrid preconditioner interface.
*/
#include <petsc/private/pcmgimpl.h> /*I "petscksp.h" I*/
#include <petsc/private/kspimpl.h>
#include <petscdm.h>
PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC, PetscBool *);

/*
 Contains the list of registered coarse space construction routines
*/
PetscFunctionList PCMGCoarseList = NULL;

/*
  PCMGMCycle_Private - Recursively runs one multiplicative multigrid cycle starting at the
  level described by *mglevelsin and descending to the coarsest level (level 0).

  Input Parameters:
+ pc         - the PCMG preconditioner
. mglevelsin - pointer to the current level's PC_MG_Levels entry; coarser levels are at
               mglevelsin - 1, mglevelsin - 2, ...
. transpose  - apply the transposed cycle (smoothers applied via KSPSolveTranspose,
               restriction/interpolation roles swapped)
. matapp     - use the Mat right-hand-side variant (KSPMatSolve on B/X) instead of Vec b/x
- reason     - if non-NULL and a tolerance has been set (mg->ttol), receives a Richardson
               convergence reason when the finest-level residual norm drops below it

  Note: the number of recursive sub-cycles taken per level is mglevels->cycles (V- or
  W-cycle), except on level 1 where a single visit to the coarse grid suffices.
*/
PetscErrorCode PCMGMCycle_Private(PC pc, PC_MG_Levels **mglevelsin, PetscBool transpose, PetscBool matapp, PCRichardsonConvergedReason *reason) {
  PC_MG        *mg = (PC_MG *)pc->data;
  PC_MG_Levels *mgc, *mglevels = *mglevelsin;
  /* on level 1 the next level down is the coarsest; one sub-cycle is enough there */
  PetscInt      cycles = (mglevels->level == 1) ? 1 : (PetscInt)mglevels->cycles;

  PetscFunctionBegin;
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (!transpose) {
    if (matapp) {
      PetscCall(KSPMatSolve(mglevels->smoothd, mglevels->B, mglevels->X)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, NULL));
    } else {
      PetscCall(KSPSolve(mglevels->smoothd, mglevels->b, mglevels->x)); /* pre-smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
  } else {
    PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
    PetscCall(KSPSolveTranspose(mglevels->smoothu, mglevels->b, mglevels->x)); /* transpose of post-smooth */
    PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
  }
  if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  if (mglevels->level) { /* not the coarsest grid */
    if (mglevels->eventresidual) PetscCall(PetscLogEventBegin(mglevels->eventresidual, 0, 0, 0, 0));
    /* lazily create the Mat residual workspace the first time the Mat variant is used */
    if (matapp && !mglevels->R) PetscCall(MatDuplicate(mglevels->B, MAT_DO_NOT_COPY_VALUES, &mglevels->R));
    if (!transpose) {
      if (matapp) PetscCall((*mglevels->matresidual)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residual)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    } else {
      if (matapp) PetscCall((*mglevels->matresidualtranspose)(mglevels->A, mglevels->B, mglevels->X, mglevels->R));
      else PetscCall((*mglevels->residualtranspose)(mglevels->A, mglevels->b, mglevels->x, mglevels->r));
    }
    if (mglevels->eventresidual) PetscCall(PetscLogEventEnd(mglevels->eventresidual, 0, 0, 0, 0));

    /* if on finest level and have convergence criteria set */
    if (mglevels->level == mglevels->levels - 1 && mg->ttol && reason) {
      PetscReal rnorm;
      PetscCall(VecNorm(mglevels->r, NORM_2, &rnorm));
      if (rnorm <= mg->ttol) {
        if (rnorm < mg->abstol) {
          *reason = PCRICHARDSON_CONVERGED_ATOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than absolute tolerance %g\n", (double)rnorm, (double)mg->abstol));
        } else {
          *reason = PCRICHARDSON_CONVERGED_RTOL;
          PetscCall(PetscInfo(pc, "Linear solver has converged. Residual norm %g is less than relative tolerance times initial residual norm %g\n", (double)rnorm, (double)mg->ttol));
        }
        /* converged: skip the coarse-grid correction and post-smooth entirely */
        PetscFunctionReturn(0);
      }
    }

    mgc = *(mglevelsin - 1); /* the next coarser level */
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatRestrict(mglevels->restrct, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->restrct, mglevels->r, mgc->b));
    } else {
      /* transposed cycle: the interpolation operator plays the role of restriction */
      if (matapp) PetscCall(MatMatRestrict(mglevels->interpolate, mglevels->R, &mgc->B));
      else PetscCall(MatRestrict(mglevels->interpolate, mglevels->r, mgc->b));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    /* zero the coarse-level initial guess before recursing */
    if (matapp) {
      if (!mgc->X) {
        PetscCall(MatDuplicate(mgc->B, MAT_DO_NOT_COPY_VALUES, &mgc->X));
      } else {
        PetscCall(MatZeroEntries(mgc->X));
      }
    } else {
      PetscCall(VecZeroEntries(mgc->x));
    }
    while (cycles--) PetscCall(PCMGMCycle_Private(pc, mglevelsin - 1, transpose, matapp, reason));
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventBegin(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) PetscCall(MatMatInterpolateAdd(mglevels->interpolate, mgc->X, mglevels->X, &mglevels->X));
      else PetscCall(MatInterpolateAdd(mglevels->interpolate, mgc->x, mglevels->x, mglevels->x));
    } else {
      /* transposed cycle: the restriction operator plays the role of interpolation */
      PetscCall(MatInterpolateAdd(mglevels->restrct, mgc->x, mglevels->x, mglevels->x));
    }
    if (mglevels->eventinterprestrict) PetscCall(PetscLogEventEnd(mglevels->eventinterprestrict, 0, 0, 0, 0));
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventBegin(mglevels->eventsmoothsolve, 0, 0, 0, 0));
    if (!transpose) {
      if (matapp) {
        PetscCall(KSPMatSolve(mglevels->smoothu, mglevels->B, mglevels->X)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, NULL));
      } else {
        PetscCall(KSPSolve(mglevels->smoothu, mglevels->b, mglevels->x)); /* post smooth */
        PetscCall(KSPCheckSolve(mglevels->smoothu, pc, mglevels->x));
      }
    } else {
      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      PetscCall(KSPSolveTranspose(mglevels->smoothd, mglevels->b, mglevels->x)); /* post smooth */
      PetscCall(KSPCheckSolve(mglevels->smoothd, pc, mglevels->x));
    }
    if (mglevels->cr) {
      PetscCheck(!matapp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Not supported");
      /* TODO Turn on copy and turn off noisy if we have an exact solution
      PetscCall(VecCopy(mglevels->x, mglevels->crx));
      PetscCall(VecCopy(mglevels->b, mglevels->crb)); */
      PetscCall(KSPSetNoisy_Private(mglevels->crx));
      PetscCall(KSPSolve(mglevels->cr, mglevels->crb, mglevels->crx)); /* compatible relaxation */
      PetscCall(KSPCheckSolve(mglevels->cr, pc, mglevels->crx));
    }
    if (mglevels->eventsmoothsolve) PetscCall(PetscLogEventEnd(mglevels->eventsmoothsolve, 0, 0, 0, 0));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode
/*
  PCApplyRichardson_MG - Uses the multigrid cycle itself as a Richardson iteration on the
  original system, running up to `its` cycles or until the finest-level residual norm meets
  the tolerances. The finest level's b/x slots alias (or copy) the caller's vectors.
*/
PCApplyRichardson_MG(PC pc, Vec b, Vec x, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool zeroguess, PetscInt *outits, PCRichardsonConvergedReason *reason) {
  PC_MG         *mg = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PC             tpc;
  PetscBool      changeu, changed;
  PetscInt       levels = mglevels[0]->levels, i;

  PetscFunctionBegin;
  /* When the DM is supplying the matrix then it will not exist until here */
  for (i = 0; i < levels; i++) {
    if (!mglevels[i]->A) {
      PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL));
      PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A));
    }
  }

  /* Ask both finest-level smoothers whether their inner PC mutates the RHS in PreSolve */
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changed));
  PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc));
  PetscCall(PCPreSolveChangeRHS(tpc, &changeu));
  if (!changed && !changeu) {
    /* safe to borrow the caller's b directly; the borrow is undone at the end of this function */
    PetscCall(VecDestroy(&mglevels[levels - 1]->b));
    mglevels[levels - 1]->b = b;
  } else { /* if the smoother changes the rhs during PreSolve, we cannot use the input vector */
    if (!mglevels[levels - 1]->b) {
      Vec *vec;

      PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
      mglevels[levels - 1]->b = *vec;
      PetscCall(PetscFree(vec));
    }
    PetscCall(VecCopy(b, mglevels[levels - 1]->b));
  }
  mglevels[levels - 1]->x = x;

  mg->rtol   = rtol;
  mg->abstol = abstol;
  mg->dtol   = dtol;
  if (rtol) {
    /* compute initial residual norm for relative convergence test */
    PetscReal rnorm;
    if (zeroguess) {
      /* with a zero guess the initial residual is just b */
      PetscCall(VecNorm(b, NORM_2, &rnorm));
    } else {
      PetscCall((*mglevels[levels - 1]->residual)(mglevels[levels - 1]->A, b, x, w));
      PetscCall(VecNorm(w, NORM_2, &rnorm));
    }
    mg->ttol = PetscMax(rtol * rnorm, abstol);
  } else if (abstol) mg->ttol = abstol;
  else mg->ttol = 0.0;

  /* since smoother is applied to full system, not just residual we need to make sure that smoothers don't
     stop prematurely due to small residual */
  for (i = 1; i < levels; i++) {
    PetscCall(KSPSetTolerances(mglevels[i]->smoothu, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    if (mglevels[i]->smoothu != mglevels[i]->smoothd) {
      /* For Richardson the initial guess is nonzero since it is solving in each cycle the original system not just applying as a preconditioner */
      PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE));
      PetscCall(KSPSetTolerances(mglevels[i]->smoothd, 0, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
    }
  }

  *reason = (PCRichardsonConvergedReason)0;
  for (i = 0; i < its; i++) {
    /* run one full multiplicative cycle starting from the finest level */
    PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, PETSC_FALSE, PETSC_FALSE, reason));
    if (*reason) break;
  }
  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = i;
  /* drop the borrowed reference to the caller's b (it was never owned by us) */
  if (!changed && !changeu) mglevels[levels - 1]->b = NULL;
  PetscFunctionReturn(0);
}

/*
  PCReset_MG - Destroys the per-level work vectors/matrices and resets the level solvers,
  but keeps the level structure itself (the PC_MG_Levels array) intact.
*/
PetscErrorCode PCReset_MG(PC pc) {
  PC_MG         *mg = (PC_MG *)pc->data;
  PC_MG_Levels **mglevels = mg->levels;
  PetscInt       i, n;

  PetscFunctionBegin;
  if (mglevels) {
    n = mglevels[0]->levels;
    /* residual/restriction workspace lives on levels 1..n-1; b/x workspace on levels 0..n-2
       (the finest level's b/x normally alias the caller's vectors) */
    for (i = 0; i < n - 1; i++) {
      PetscCall(VecDestroy(&mglevels[i + 1]->r));
      PetscCall(VecDestroy(&mglevels[i]->b));
      PetscCall(VecDestroy(&mglevels[i]->x));
      PetscCall(MatDestroy(&mglevels[i + 1]->R));
      PetscCall(MatDestroy(&mglevels[i]->B));
      PetscCall(MatDestroy(&mglevels[i]->X));
      PetscCall(VecDestroy(&mglevels[i]->crx));
      PetscCall(VecDestroy(&mglevels[i]->crb));
      PetscCall(MatDestroy(&mglevels[i + 1]->restrct));
      PetscCall(MatDestroy(&mglevels[i + 1]->interpolate));
      PetscCall(MatDestroy(&mglevels[i + 1]->inject));
      PetscCall(VecDestroy(&mglevels[i + 1]->rscale));
    }
    PetscCall(VecDestroy(&mglevels[n - 1]->crx));
    PetscCall(VecDestroy(&mglevels[n - 1]->crb));
    /* this is not null only if the smoother on the finest level
       changes the rhs during PreSolve */
    PetscCall(VecDestroy(&mglevels[n - 1]->b));
    PetscCall(MatDestroy(&mglevels[n - 1]->B));

    for (i = 0; i < n; i++) {
      PetscCall(MatDestroy(&mglevels[i]->coarseSpace));
      PetscCall(MatDestroy(&mglevels[i]->A));
      /* when smoothu aliases smoothd, resetting smoothu resets both */
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPReset(mglevels[i]->smoothd));
      PetscCall(KSPReset(mglevels[i]->smoothu));
      if (mglevels[i]->cr) PetscCall(KSPReset(mglevels[i]->cr));
    }
    mg->Nc = 0;
  }
  PetscFunctionReturn(0);
}

/* Implementing CR (compatible relaxation)

We only want to make corrections that ``do not change'' the coarse solution. What we mean by
not changing is that if I prolong my coarse solution to the fine grid and then inject that
fine solution back to the coarse grid, I get the same answer. Injection is what Brannick
calls R. We want the complementary projector to Inj, which we will call S, after Brannick,
so that Inj S = 0. Now the orthogonal projector onto the range of Inj^T is

  Inj^T (Inj Inj^T)^{-1} Inj

and if Inj is a VecScatter, as it is now in PETSc, we have

  Inj^T Inj

and

  S = I - Inj^T Inj

since

  Inj S = Inj - (Inj Inj^T) Inj = 0.

Brannick suggests

  A \to S^T A S \qquad\mathrm{and}\qquad M \to S^T M S

but I do not think his :math:`S^T S = I` is correct. Our S is an orthogonal projector, so
:math:`S^T S = S^2 = S`. We will use

  M^{-1} A \to S M^{-1} A S

In fact, since it is somewhat hard in PETSc to do the symmetric application, we will just
apply S on the left.

Check: || Inj P - I ||_F < tol
Check: In general, Inj Inj^T = I
*/

/* Context for the PCSHELL implementing the complementary projector S used by CR */
typedef struct {
  PC       mg;  /* The PCMG object */
  PetscInt l;   /* The multigrid level for this solver */
  Mat      Inj; /* The injection matrix */
  Mat      S;   /* I - Inj^T Inj */
} CRContext;

/* CRSetup_Private - Builds S = I - Inj^T Inj from the level's injection operator. */
static PetscErrorCode CRSetup_Private(PC pc) {
  CRContext *ctx;
  Mat        It;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(PCMGGetInjection(ctx->mg, ctx->l, &It));
  PetscCheck(It, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "CR requires that injection be defined for this PCMG");
  /* It maps fine->coarse as a transpose; Inj^T Inj is formed implicitly as a normal matrix */
  PetscCall(MatCreateTranspose(It, &ctx->Inj));
  PetscCall(MatCreateNormal(ctx->Inj, &ctx->S));
  /* turn Inj^T Inj into I - Inj^T Inj */
  PetscCall(MatScale(ctx->S, -1.0));
  PetscCall(MatShift(ctx->S, 1.0));
  PetscFunctionReturn(0);
}

/* CRApply_Private - Applies the projector: y = S x. */
static PetscErrorCode CRApply_Private(PC pc, Vec x, Vec y) {
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(MatMult(ctx->S, x, y));
  PetscFunctionReturn(0);
}

/* CRDestroy_Private - Frees the CR shell context and its matrices. */
static PetscErrorCode CRDestroy_Private(PC pc) {
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCShellGetContext(pc, &ctx));
  PetscCall(MatDestroy(&ctx->Inj));
  PetscCall(MatDestroy(&ctx->S));
  PetscCall(PetscFree(ctx));
  PetscCall(PCShellSetContext(pc, NULL));
  PetscFunctionReturn(0);
}

/* CreateCR_Private - Creates the PCSHELL wrapping the complementary projector S for level l. */
static PetscErrorCode CreateCR_Private(PC pc, PetscInt l, PC *cr) {
  CRContext *ctx;

  PetscFunctionBeginUser;
  PetscCall(PCCreate(PetscObjectComm((PetscObject)pc), cr));
  PetscCall(PetscObjectSetName((PetscObject)*cr, "S (complementary projector to injection)"));
  PetscCall(PetscCalloc1(1, &ctx));
  ctx->mg = pc;
  ctx->l  = l;
  PetscCall(PCSetType(*cr, PCSHELL));
  PetscCall(PCShellSetContext(*cr, ctx));
  PetscCall(PCShellSetApply(*cr, CRApply_Private));
  PetscCall(PCShellSetSetUp(*cr, CRSetup_Private));
  PetscCall(PCShellSetDestroy(*cr, CRDestroy_Private));
  PetscFunctionReturn(0);
}

/*
  PCMGSetLevels_MG - Implementation of PCMGSetLevels(): (re)allocates the per-level
  structures and creates each level's default smoother KSP.
*/
PetscErrorCode PCMGSetLevels_MG(PC pc, PetscInt levels, MPI_Comm *comms) {
  PC_MG         *mg = (PC_MG *)pc->data;
  MPI_Comm       comm;
  PC_MG_Levels **mglevels = mg->levels;
  PCMGType       mgtype = mg->am;
  PetscInt       mgctype = (PetscInt)PC_MG_CYCLE_V;
  PetscInt       i;
  PetscMPIInt    size;
  const char    *prefix;
  PC             ipc;
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, levels, 2);
  if (mg->nlevels == levels) PetscFunctionReturn(0);
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  if (mglevels) {
    /* preserve the previously selected cycle type across the rebuild */
    mgctype = mglevels[0]->cycles;
    /* changing the number of levels so free up the previous stuff */
    PetscCall(PCReset_MG(pc));
    n = mglevels[0]->levels;
    for (i = 0; i < n; i++) {
      if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd));
      PetscCall(KSPDestroy(&mglevels[i]->smoothu));
      PetscCall(KSPDestroy(&mglevels[i]->cr));
      PetscCall(PetscFree(mglevels[i]));
    }
    PetscCall(PetscFree(mg->levels));
  }

  mg->nlevels = levels;

  PetscCall(PetscMalloc1(levels, &mglevels));
  PetscCall(PetscLogObjectMemory((PetscObject)pc, levels * (sizeof(PC_MG *))));

  PetscCall(PCGetOptionsPrefix(pc, &prefix));

  mg->stageApply = 0;
  for (i = 0; i < levels; i++) {
    PetscCall(PetscNewLog(pc, &mglevels[i]));

    mglevels[i]->level               = i;
    mglevels[i]->levels              = levels;
    mglevels[i]->cycles              = mgctype;
    mg->default_smoothu              = 2;
    mg->default_smoothd              = 2;
    mglevels[i]->eventsmoothsetup    = 0;
    mglevels[i]->eventsmoothsolve    = 0;
    mglevels[i]->eventresidual       = 0;
    mglevels[i]->eventinterprestrict = 0;

    /* a per-level communicator may exclude this rank (MPI_COMM_NULL) on coarser levels */
    if (comms) comm = comms[i];
    if (comm != MPI_COMM_NULL) {
      PetscCall(KSPCreate(comm, &mglevels[i]->smoothd));
      PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->smoothd, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->smoothd, (PetscObject)pc, levels - i));
      PetscCall(KSPSetOptionsPrefix(mglevels[i]->smoothd, prefix));
      PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->smoothd, PetscMGLevelId, mglevels[i]->level));
      if (i || levels == 1) {
        /* non-coarsest levels (or a single-level hierarchy): Chebyshev/SOR smoother that
           always runs a fixed number of iterations (convergence test skipped, no norms) */
        char tprefix[128];

        PetscCall(KSPSetType(mglevels[i]->smoothd, KSPCHEBYSHEV));
        PetscCall(KSPSetConvergenceTest(mglevels[i]->smoothd, KSPConvergedSkip, NULL, NULL));
        PetscCall(KSPSetNormType(mglevels[i]->smoothd, KSP_NORM_NONE));
        PetscCall(KSPGetPC(mglevels[i]->smoothd, &ipc));
        PetscCall(PCSetType(ipc, PCSOR));
        PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd));

        PetscCall(PetscSNPrintf(tprefix, 128, "mg_levels_%d_", (int)i));
        PetscCall(KSPAppendOptionsPrefix(mglevels[i]->smoothd, tprefix));
      } else {
        PetscCall(KSPAppendOptionsPrefix(mglevels[0]->smoothd, "mg_coarse_"));

        /* coarse solve is (redundant) LU by default; set shifttype NONZERO to avoid annoying zero-pivot in LU preconditioner */
        PetscCall(KSPSetType(mglevels[0]->smoothd, KSPPREONLY));
        PetscCall(KSPGetPC(mglevels[0]->smoothd, &ipc));
        PetscCallMPI(MPI_Comm_size(comm, &size));
        if (size > 1) {
          PetscCall(PCSetType(ipc, PCREDUNDANT));
        } else {
          PetscCall(PCSetType(ipc, PCLU));
        }
        PetscCall(PCFactorSetShiftType(ipc, MAT_SHIFT_INBLOCKS));
      }
      PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)mglevels[i]->smoothd));
    }
    /* by default the up (post) smoother is the same KSP as the down (pre) smoother */
    mglevels[i]->smoothu = mglevels[i]->smoothd;
    mg->rtol             = 0.0;
    mg->abstol           = 0.0;
    mg->dtol             = 0.0;
    mg->ttol             = 0.0;
    mg->cyclesperpcapply = 1;
  }
  mg->levels = mglevels;
  PetscCall(PCMGSetType(pc, mgtype));
  PetscFunctionReturn(0);
}

/*@C
   PCMGSetLevels - Sets the number of levels to use with `PCMG`.
   Must be called before any other `PCMG` routine.

   Logically Collective on pc

   Input Parameters:
+  pc - the preconditioner context
.  levels - the number of levels
-  comms - optional communicators for each level; this is to allow solving the coarser problems
           on smaller sets of processes. For processes that are not included in the computation
           you must pass `MPI_COMM_NULL`. Use comms = NULL to specify that all processes
           should participate in each level of problem.

   Level: intermediate

   Notes:
   If the number of levels is one then the multigrid uses the -mg_levels prefix
   for setting the level options rather than the -mg_coarse prefix.

   You can free the information in comms after this routine is called.

   The array of MPI communicators must contain `MPI_COMM_NULL` for those ranks that at each level
   are not participating in the coarser solve. For example, with 2 levels and 1 and 2 ranks on
   the two levels, rank 0 in the original communicator will pass in an array of 2 communicators
   of size 2 and 1, while rank 1 in the original communicator will pass in array of 2 communicators
   the first of size 2 and the second of value `MPI_COMM_NULL` since the rank 1 does not participate
   in the coarse grid solve.

   Since each coarser level may have a new `MPI_Comm` with fewer ranks than the previous, one
   must take special care in providing the restriction and interpolation operation. We recommend
   providing these as two step operations; first perform a standard restriction or interpolation on
   the full number of ranks for that level and then use an MPI call to copy the resulting vector
   array entries (after calls to VecGetArray()) to the smaller or larger number of ranks, note in both
   cases the MPI calls must be made on the larger of the two communicators.
Traditional MPI send and 460 recieves or MPI_AlltoAllv() could be used to do the reshuffling of the vector entries. 461 462 Fortran Note: 463 Use comms = `PETSC_NULL_MPI_COMM` as the equivalent of NULL in the C interface. Note `PETSC_NULL_MPI_COMM` 464 is not `MPI_COMM_NULL`. It is more like `PETSC_NULL_INTEGER`, `PETSC_NULL_REAL` etc. 465 466 .seealso: `PCMGSetType()`, `PCMGGetLevels()` 467 @*/ 468 PetscErrorCode PCMGSetLevels(PC pc, PetscInt levels, MPI_Comm *comms) { 469 PetscFunctionBegin; 470 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 471 if (comms) PetscValidPointer(comms, 3); 472 PetscTryMethod(pc, "PCMGSetLevels_C", (PC, PetscInt, MPI_Comm *), (pc, levels, comms)); 473 PetscFunctionReturn(0); 474 } 475 476 PetscErrorCode PCDestroy_MG(PC pc) { 477 PC_MG *mg = (PC_MG *)pc->data; 478 PC_MG_Levels **mglevels = mg->levels; 479 PetscInt i, n; 480 481 PetscFunctionBegin; 482 PetscCall(PCReset_MG(pc)); 483 if (mglevels) { 484 n = mglevels[0]->levels; 485 for (i = 0; i < n; i++) { 486 if (mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPDestroy(&mglevels[i]->smoothd)); 487 PetscCall(KSPDestroy(&mglevels[i]->smoothu)); 488 PetscCall(KSPDestroy(&mglevels[i]->cr)); 489 PetscCall(PetscFree(mglevels[i])); 490 } 491 PetscCall(PetscFree(mg->levels)); 492 } 493 PetscCall(PetscFree(pc->data)); 494 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 495 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 496 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", NULL)); 497 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", NULL)); 498 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", NULL)); 499 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", NULL)); 500 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", NULL)); 501 PetscCall(PetscObjectComposeFunction((PetscObject)pc, 
"PCMGSetAdaptInterpolation_C", NULL)); 502 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", NULL)); 503 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", NULL)); 504 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", NULL)); 505 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", NULL)); 506 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", NULL)); 507 PetscFunctionReturn(0); 508 } 509 510 /* 511 PCApply_MG - Runs either an additive, multiplicative, Kaskadic 512 or full cycle of multigrid. 513 514 Note: 515 A simple wrapper which calls PCMGMCycle(),PCMGACycle(), or PCMGFCycle(). 516 */ 517 static PetscErrorCode PCApply_MG_Internal(PC pc, Vec b, Vec x, Mat B, Mat X, PetscBool transpose) { 518 PC_MG *mg = (PC_MG *)pc->data; 519 PC_MG_Levels **mglevels = mg->levels; 520 PC tpc; 521 PetscInt levels = mglevels[0]->levels, i; 522 PetscBool changeu, changed, matapp; 523 524 PetscFunctionBegin; 525 matapp = (PetscBool)(B && X); 526 if (mg->stageApply) PetscCall(PetscLogStagePush(mg->stageApply)); 527 /* When the DM is supplying the matrix then it will not exist until here */ 528 for (i = 0; i < levels; i++) { 529 if (!mglevels[i]->A) { 530 PetscCall(KSPGetOperators(mglevels[i]->smoothu, &mglevels[i]->A, NULL)); 531 PetscCall(PetscObjectReference((PetscObject)mglevels[i]->A)); 532 } 533 } 534 535 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothd, &tpc)); 536 PetscCall(PCPreSolveChangeRHS(tpc, &changed)); 537 PetscCall(KSPGetPC(mglevels[levels - 1]->smoothu, &tpc)); 538 PetscCall(PCPreSolveChangeRHS(tpc, &changeu)); 539 if (!changeu && !changed) { 540 if (matapp) { 541 PetscCall(MatDestroy(&mglevels[levels - 1]->B)); 542 mglevels[levels - 1]->B = B; 543 } else { 544 PetscCall(VecDestroy(&mglevels[levels - 1]->b)); 545 mglevels[levels - 1]->b = b; 546 } 547 } else { /* if the smoother changes the rhs during PreSolve, we 
            cannot use the input vector */
    if (matapp) {
      if (mglevels[levels - 1]->B) {
        PetscInt  N1, N2;
        PetscBool flg;

        /* discard the cached copy if the new RHS block has a different width or Mat type */
        PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &N1));
        PetscCall(MatGetSize(B, NULL, &N2));
        PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 1]->B, ((PetscObject)B)->type_name, &flg));
        if (N1 != N2 || !flg) PetscCall(MatDestroy(&mglevels[levels - 1]->B));
      }
      if (!mglevels[levels - 1]->B) {
        PetscCall(MatDuplicate(B, MAT_COPY_VALUES, &mglevels[levels - 1]->B));
      } else {
        PetscCall(MatCopy(B, mglevels[levels - 1]->B, SAME_NONZERO_PATTERN));
      }
    } else {
      if (!mglevels[levels - 1]->b) {
        Vec *vec;

        PetscCall(KSPCreateVecs(mglevels[levels - 1]->smoothd, 1, &vec, 0, NULL));
        mglevels[levels - 1]->b = *vec;
        PetscCall(PetscFree(vec));
      }
      PetscCall(VecCopy(b, mglevels[levels - 1]->b));
    }
  }
  /* finest-level solution slot always aliases the caller's output */
  if (matapp) {
    mglevels[levels - 1]->X = X;
  } else {
    mglevels[levels - 1]->x = x;
  }

  /* If coarser Xs are present, it means we have already block applied the PC at least once
     Reset operators if sizes/type do not match */
  if (matapp && levels > 1 && mglevels[levels - 2]->X) {
    PetscInt  Xc, Bc;
    PetscBool flg;

    PetscCall(MatGetSize(mglevels[levels - 2]->X, NULL, &Xc));
    PetscCall(MatGetSize(mglevels[levels - 1]->B, NULL, &Bc));
    PetscCall(PetscObjectTypeCompare((PetscObject)mglevels[levels - 2]->X, ((PetscObject)mglevels[levels - 1]->X)->type_name, &flg));
    if (Xc != Bc || !flg) {
      /* stale multi-RHS workspace from a previous apply with a different block size/type */
      PetscCall(MatDestroy(&mglevels[levels - 1]->R));
      for (i = 0; i < levels - 1; i++) {
        PetscCall(MatDestroy(&mglevels[i]->R));
        PetscCall(MatDestroy(&mglevels[i]->B));
        PetscCall(MatDestroy(&mglevels[i]->X));
      }
    }
  }

  if (mg->am == PC_MG_MULTIPLICATIVE) {
    /* multiplicative MG is applied as a preconditioner: start from a zero guess */
    if (matapp) PetscCall(MatZeroEntries(X));
    else PetscCall(VecZeroEntries(x));
    for (i = 0; i < mg->cyclesperpcapply; i++)
PetscCall(PCMGMCycle_Private(pc, mglevels + levels - 1, transpose, matapp, NULL)); 603 } else if (mg->am == PC_MG_ADDITIVE) { 604 PetscCall(PCMGACycle_Private(pc, mglevels, transpose, matapp)); 605 } else if (mg->am == PC_MG_KASKADE) { 606 PetscCall(PCMGKCycle_Private(pc, mglevels, transpose, matapp)); 607 } else { 608 PetscCall(PCMGFCycle_Private(pc, mglevels, transpose, matapp)); 609 } 610 if (mg->stageApply) PetscCall(PetscLogStagePop()); 611 if (!changeu && !changed) { 612 if (matapp) { 613 mglevels[levels - 1]->B = NULL; 614 } else { 615 mglevels[levels - 1]->b = NULL; 616 } 617 } 618 PetscFunctionReturn(0); 619 } 620 621 static PetscErrorCode PCApply_MG(PC pc, Vec b, Vec x) { 622 PetscFunctionBegin; 623 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_FALSE)); 624 PetscFunctionReturn(0); 625 } 626 627 static PetscErrorCode PCApplyTranspose_MG(PC pc, Vec b, Vec x) { 628 PetscFunctionBegin; 629 PetscCall(PCApply_MG_Internal(pc, b, x, NULL, NULL, PETSC_TRUE)); 630 PetscFunctionReturn(0); 631 } 632 633 static PetscErrorCode PCMatApply_MG(PC pc, Mat b, Mat x) { 634 PetscFunctionBegin; 635 PetscCall(PCApply_MG_Internal(pc, NULL, NULL, b, x, PETSC_FALSE)); 636 PetscFunctionReturn(0); 637 } 638 639 PetscErrorCode PCSetFromOptions_MG(PC pc, PetscOptionItems *PetscOptionsObject) { 640 PetscInt levels, cycles; 641 PetscBool flg, flg2; 642 PC_MG *mg = (PC_MG *)pc->data; 643 PC_MG_Levels **mglevels; 644 PCMGType mgtype; 645 PCMGCycleType mgctype; 646 PCMGGalerkinType gtype; 647 PCMGCoarseSpaceType coarseSpaceType; 648 649 PetscFunctionBegin; 650 levels = PetscMax(mg->nlevels, 1); 651 PetscOptionsHeadBegin(PetscOptionsObject, "Multigrid options"); 652 PetscCall(PetscOptionsInt("-pc_mg_levels", "Number of Levels", "PCMGSetLevels", levels, &levels, &flg)); 653 if (!flg && !mg->levels && pc->dm) { 654 PetscCall(DMGetRefineLevel(pc->dm, &levels)); 655 levels++; 656 mg->usedmfornumberoflevels = PETSC_TRUE; 657 } 658 PetscCall(PCMGSetLevels(pc, levels, NULL)); 659 
mglevels = mg->levels; 660 661 mgctype = (PCMGCycleType)mglevels[0]->cycles; 662 PetscCall(PetscOptionsEnum("-pc_mg_cycle_type", "V cycle or for W-cycle", "PCMGSetCycleType", PCMGCycleTypes, (PetscEnum)mgctype, (PetscEnum *)&mgctype, &flg)); 663 if (flg) PetscCall(PCMGSetCycleType(pc, mgctype)); 664 gtype = mg->galerkin; 665 PetscCall(PetscOptionsEnum("-pc_mg_galerkin", "Use Galerkin process to compute coarser operators", "PCMGSetGalerkin", PCMGGalerkinTypes, (PetscEnum)gtype, (PetscEnum *)>ype, &flg)); 666 if (flg) PetscCall(PCMGSetGalerkin(pc, gtype)); 667 coarseSpaceType = mg->coarseSpaceType; 668 PetscCall(PetscOptionsEnum("-pc_mg_adapt_interp_coarse_space", "Type of adaptive coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw", "PCMGSetAdaptCoarseSpaceType", PCMGCoarseSpaceTypes, (PetscEnum)coarseSpaceType, (PetscEnum *)&coarseSpaceType, &flg)); 669 if (flg) PetscCall(PCMGSetAdaptCoarseSpaceType(pc, coarseSpaceType)); 670 PetscCall(PetscOptionsInt("-pc_mg_adapt_interp_n", "Size of the coarse space for adaptive interpolation", "PCMGSetCoarseSpace", mg->Nc, &mg->Nc, &flg)); 671 PetscCall(PetscOptionsBool("-pc_mg_mesp_monitor", "Monitor the multilevel eigensolver", "PCMGSetAdaptInterpolation", PETSC_FALSE, &mg->mespMonitor, &flg)); 672 flg2 = PETSC_FALSE; 673 PetscCall(PetscOptionsBool("-pc_mg_adapt_cr", "Monitor coarse space quality using Compatible Relaxation (CR)", "PCMGSetAdaptCR", PETSC_FALSE, &flg2, &flg)); 674 if (flg) PetscCall(PCMGSetAdaptCR(pc, flg2)); 675 flg = PETSC_FALSE; 676 PetscCall(PetscOptionsBool("-pc_mg_distinct_smoothup", "Create separate smoothup KSP and append the prefix _up", "PCMGSetDistinctSmoothUp", PETSC_FALSE, &flg, NULL)); 677 if (flg) PetscCall(PCMGSetDistinctSmoothUp(pc)); 678 mgtype = mg->am; 679 PetscCall(PetscOptionsEnum("-pc_mg_type", "Multigrid type", "PCMGSetType", PCMGTypes, (PetscEnum)mgtype, (PetscEnum *)&mgtype, &flg)); 680 if (flg) PetscCall(PCMGSetType(pc, mgtype)); 681 if (mg->am == 
PC_MG_MULTIPLICATIVE) { 682 PetscCall(PetscOptionsInt("-pc_mg_multiplicative_cycles", "Number of cycles for each preconditioner step", "PCMGMultiplicativeSetCycles", mg->cyclesperpcapply, &cycles, &flg)); 683 if (flg) PetscCall(PCMGMultiplicativeSetCycles(pc, cycles)); 684 } 685 flg = PETSC_FALSE; 686 PetscCall(PetscOptionsBool("-pc_mg_log", "Log times for each multigrid level", "None", flg, &flg, NULL)); 687 if (flg) { 688 PetscInt i; 689 char eventname[128]; 690 691 levels = mglevels[0]->levels; 692 for (i = 0; i < levels; i++) { 693 sprintf(eventname, "MGSetup Level %d", (int)i); 694 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsetup)); 695 sprintf(eventname, "MGSmooth Level %d", (int)i); 696 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventsmoothsolve)); 697 if (i) { 698 sprintf(eventname, "MGResid Level %d", (int)i); 699 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventresidual)); 700 sprintf(eventname, "MGInterp Level %d", (int)i); 701 PetscCall(PetscLogEventRegister(eventname, ((PetscObject)pc)->classid, &mglevels[i]->eventinterprestrict)); 702 } 703 } 704 705 #if defined(PETSC_USE_LOG) 706 { 707 const char *sname = "MG Apply"; 708 PetscStageLog stageLog; 709 PetscInt st; 710 711 PetscCall(PetscLogGetStageLog(&stageLog)); 712 for (st = 0; st < stageLog->numStages; ++st) { 713 PetscBool same; 714 715 PetscCall(PetscStrcmp(stageLog->stageInfo[st].name, sname, &same)); 716 if (same) mg->stageApply = st; 717 } 718 if (!mg->stageApply) PetscCall(PetscLogStageRegister(sname, &mg->stageApply)); 719 } 720 #endif 721 } 722 PetscOptionsHeadEnd(); 723 /* Check option consistency */ 724 PetscCall(PCMGGetGalerkin(pc, >ype)); 725 PetscCall(PCMGGetAdaptInterpolation(pc, &flg)); 726 PetscCheck(!flg || !(gtype >= PC_MG_GALERKIN_NONE), PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_INCOMP, "Must use Galerkin coarse operators when adapting the 
interpolator"); 727 PetscFunctionReturn(0); 728 } 729 730 const char *const PCMGTypes[] = {"MULTIPLICATIVE", "ADDITIVE", "FULL", "KASKADE", "PCMGType", "PC_MG", NULL}; 731 const char *const PCMGCycleTypes[] = {"invalid", "v", "w", "PCMGCycleType", "PC_MG_CYCLE", NULL}; 732 const char *const PCMGGalerkinTypes[] = {"both", "pmat", "mat", "none", "external", "PCMGGalerkinType", "PC_MG_GALERKIN", NULL}; 733 const char *const PCMGCoarseSpaceTypes[] = {"none", "polynomial", "harmonic", "eigenvector", "generalized_eigenvector", "gdsw", "PCMGCoarseSpaceType", "PCMG_ADAPT_NONE", NULL}; 734 735 #include <petscdraw.h> 736 PetscErrorCode PCView_MG(PC pc, PetscViewer viewer) { 737 PC_MG *mg = (PC_MG *)pc->data; 738 PC_MG_Levels **mglevels = mg->levels; 739 PetscInt levels = mglevels ? mglevels[0]->levels : 0, i; 740 PetscBool iascii, isbinary, isdraw; 741 742 PetscFunctionBegin; 743 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 744 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 745 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 746 if (iascii) { 747 const char *cyclename = levels ? (mglevels[0]->cycles == PC_MG_CYCLE_V ? 
"v" : "w") : "unknown"; 748 PetscCall(PetscViewerASCIIPrintf(viewer, " type is %s, levels=%" PetscInt_FMT " cycles=%s\n", PCMGTypes[mg->am], levels, cyclename)); 749 if (mg->am == PC_MG_MULTIPLICATIVE) PetscCall(PetscViewerASCIIPrintf(viewer, " Cycles per PCApply=%" PetscInt_FMT "\n", mg->cyclesperpcapply)); 750 if (mg->galerkin == PC_MG_GALERKIN_BOTH) { 751 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices\n")); 752 } else if (mg->galerkin == PC_MG_GALERKIN_PMAT) { 753 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for pmat\n")); 754 } else if (mg->galerkin == PC_MG_GALERKIN_MAT) { 755 PetscCall(PetscViewerASCIIPrintf(viewer, " Using Galerkin computed coarse grid matrices for mat\n")); 756 } else if (mg->galerkin == PC_MG_GALERKIN_EXTERNAL) { 757 PetscCall(PetscViewerASCIIPrintf(viewer, " Using externally compute Galerkin coarse grid matrices\n")); 758 } else { 759 PetscCall(PetscViewerASCIIPrintf(viewer, " Not using Galerkin computed coarse grid matrices\n")); 760 } 761 if (mg->view) PetscCall((*mg->view)(pc, viewer)); 762 for (i = 0; i < levels; i++) { 763 if (i) { 764 PetscCall(PetscViewerASCIIPrintf(viewer, "Down solver (pre-smoother) on level %" PetscInt_FMT " -------------------------------\n", i)); 765 } else { 766 PetscCall(PetscViewerASCIIPrintf(viewer, "Coarse grid solver -- level %" PetscInt_FMT " -------------------------------\n", i)); 767 } 768 PetscCall(PetscViewerASCIIPushTab(viewer)); 769 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 770 PetscCall(PetscViewerASCIIPopTab(viewer)); 771 if (i && mglevels[i]->smoothd == mglevels[i]->smoothu) { 772 PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) same as down solver (pre-smoother)\n")); 773 } else if (i) { 774 PetscCall(PetscViewerASCIIPrintf(viewer, "Up solver (post-smoother) on level %" PetscInt_FMT " -------------------------------\n", i)); 775 PetscCall(PetscViewerASCIIPushTab(viewer)); 776 
PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 777 PetscCall(PetscViewerASCIIPopTab(viewer)); 778 } 779 if (i && mglevels[i]->cr) { 780 PetscCall(PetscViewerASCIIPrintf(viewer, "CR solver on level %" PetscInt_FMT " -------------------------------\n", i)); 781 PetscCall(PetscViewerASCIIPushTab(viewer)); 782 PetscCall(KSPView(mglevels[i]->cr, viewer)); 783 PetscCall(PetscViewerASCIIPopTab(viewer)); 784 } 785 } 786 } else if (isbinary) { 787 for (i = levels - 1; i >= 0; i--) { 788 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 789 if (i && mglevels[i]->smoothd != mglevels[i]->smoothu) PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 790 } 791 } else if (isdraw) { 792 PetscDraw draw; 793 PetscReal x, w, y, bottom, th; 794 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 795 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 796 PetscCall(PetscDrawStringGetSize(draw, NULL, &th)); 797 bottom = y - th; 798 for (i = levels - 1; i >= 0; i--) { 799 if (!mglevels[i]->smoothu || (mglevels[i]->smoothu == mglevels[i]->smoothd)) { 800 PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom)); 801 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 802 PetscCall(PetscDrawPopCurrentPoint(draw)); 803 } else { 804 w = 0.5 * PetscMin(1.0 - x, x); 805 PetscCall(PetscDrawPushCurrentPoint(draw, x + w, bottom)); 806 PetscCall(KSPView(mglevels[i]->smoothd, viewer)); 807 PetscCall(PetscDrawPopCurrentPoint(draw)); 808 PetscCall(PetscDrawPushCurrentPoint(draw, x - w, bottom)); 809 PetscCall(KSPView(mglevels[i]->smoothu, viewer)); 810 PetscCall(PetscDrawPopCurrentPoint(draw)); 811 } 812 PetscCall(PetscDrawGetBoundingBox(draw, NULL, &bottom, NULL, NULL)); 813 bottom -= th; 814 } 815 } 816 PetscFunctionReturn(0); 817 } 818 819 #include <petsc/private/kspimpl.h> 820 821 /* 822 Calls setup for the KSP on each level 823 */ 824 PetscErrorCode PCSetUp_MG(PC pc) { 825 PC_MG *mg = (PC_MG *)pc->data; 826 PC_MG_Levels **mglevels = mg->levels; 827 PetscInt i, n; 828 PC cpc; 829 PetscBool dump = 
PETSC_FALSE, opsset, use_amat, missinginterpolate = PETSC_FALSE; 830 Mat dA, dB; 831 Vec tvec; 832 DM *dms; 833 PetscViewer viewer = NULL; 834 PetscBool dAeqdB = PETSC_FALSE, needRestricts = PETSC_FALSE, doCR = PETSC_FALSE; 835 PetscBool adaptInterpolation = mg->adaptInterpolation; 836 837 PetscFunctionBegin; 838 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels with PCMGSetLevels() before setting up"); 839 n = mglevels[0]->levels; 840 /* FIX: Move this to PCSetFromOptions_MG? */ 841 if (mg->usedmfornumberoflevels) { 842 PetscInt levels; 843 PetscCall(DMGetRefineLevel(pc->dm, &levels)); 844 levels++; 845 if (levels > n) { /* the problem is now being solved on a finer grid */ 846 PetscCall(PCMGSetLevels(pc, levels, NULL)); 847 n = levels; 848 PetscCall(PCSetFromOptions(pc)); /* it is bad to call this here, but otherwise will never be called for the new hierarchy */ 849 mglevels = mg->levels; 850 } 851 } 852 PetscCall(KSPGetPC(mglevels[0]->smoothd, &cpc)); 853 854 /* If user did not provide fine grid operators OR operator was not updated since last global KSPSetOperators() */ 855 /* so use those from global PC */ 856 /* Is this what we always want? What if user wants to keep old one? 
*/ 857 PetscCall(KSPGetOperatorsSet(mglevels[n - 1]->smoothd, NULL, &opsset)); 858 if (opsset) { 859 Mat mmat; 860 PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, NULL, &mmat)); 861 if (mmat == pc->pmat) opsset = PETSC_FALSE; 862 } 863 864 /* Create CR solvers */ 865 PetscCall(PCMGGetAdaptCR(pc, &doCR)); 866 if (doCR) { 867 const char *prefix; 868 869 PetscCall(PCGetOptionsPrefix(pc, &prefix)); 870 for (i = 1; i < n; ++i) { 871 PC ipc, cr; 872 char crprefix[128]; 873 874 PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &mglevels[i]->cr)); 875 PetscCall(KSPSetErrorIfNotConverged(mglevels[i]->cr, PETSC_FALSE)); 876 PetscCall(PetscObjectIncrementTabLevel((PetscObject)mglevels[i]->cr, (PetscObject)pc, n - i)); 877 PetscCall(KSPSetOptionsPrefix(mglevels[i]->cr, prefix)); 878 PetscCall(PetscObjectComposedDataSetInt((PetscObject)mglevels[i]->cr, PetscMGLevelId, mglevels[i]->level)); 879 PetscCall(KSPSetType(mglevels[i]->cr, KSPCHEBYSHEV)); 880 PetscCall(KSPSetConvergenceTest(mglevels[i]->cr, KSPConvergedSkip, NULL, NULL)); 881 PetscCall(KSPSetNormType(mglevels[i]->cr, KSP_NORM_PRECONDITIONED)); 882 PetscCall(KSPGetPC(mglevels[i]->cr, &ipc)); 883 884 PetscCall(PCSetType(ipc, PCCOMPOSITE)); 885 PetscCall(PCCompositeSetType(ipc, PC_COMPOSITE_MULTIPLICATIVE)); 886 PetscCall(PCCompositeAddPCType(ipc, PCSOR)); 887 PetscCall(CreateCR_Private(pc, i, &cr)); 888 PetscCall(PCCompositeAddPC(ipc, cr)); 889 PetscCall(PCDestroy(&cr)); 890 891 PetscCall(KSPSetTolerances(mglevels[i]->cr, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, mg->default_smoothd)); 892 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE)); 893 PetscCall(PetscSNPrintf(crprefix, 128, "mg_levels_%d_cr_", (int)i)); 894 PetscCall(KSPAppendOptionsPrefix(mglevels[i]->cr, crprefix)); 895 PetscCall(PetscLogObjectParent((PetscObject)pc, (PetscObject)mglevels[i]->cr)); 896 } 897 } 898 899 if (!opsset) { 900 PetscCall(PCGetUseAmat(pc, &use_amat)); 901 if (use_amat) { 902 PetscCall(PetscInfo(pc, "Using outer 
operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n")); 903 PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->mat, pc->pmat)); 904 } else { 905 PetscCall(PetscInfo(pc, "Using matrix (pmat) operators to define finest grid operator \n because PCMGGetSmoother(pc,nlevels-1,&ksp);KSPSetOperators(ksp,...); was not called.\n")); 906 PetscCall(KSPSetOperators(mglevels[n - 1]->smoothd, pc->pmat, pc->pmat)); 907 } 908 } 909 910 for (i = n - 1; i > 0; i--) { 911 if (!(mglevels[i]->interpolate || mglevels[i]->restrct)) { 912 missinginterpolate = PETSC_TRUE; 913 break; 914 } 915 } 916 917 PetscCall(KSPGetOperators(mglevels[n - 1]->smoothd, &dA, &dB)); 918 if (dA == dB) dAeqdB = PETSC_TRUE; 919 if (mg->galerkin == PC_MG_GALERKIN_NONE || ((mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_MAT) && !dAeqdB)) { 920 needRestricts = PETSC_TRUE; /* user must compute either mat, pmat, or both so must restrict x to coarser levels */ 921 } 922 923 if (pc->dm && !pc->setupcalled) { 924 /* finest smoother also gets DM but it is not active, independent of whether galerkin==PC_MG_GALERKIN_EXTERNAL */ 925 PetscCall(KSPSetDM(mglevels[n - 1]->smoothd, pc->dm)); 926 PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothd, PETSC_FALSE)); 927 if (mglevels[n - 1]->smoothd != mglevels[n - 1]->smoothu) { 928 PetscCall(KSPSetDM(mglevels[n - 1]->smoothu, pc->dm)); 929 PetscCall(KSPSetDMActive(mglevels[n - 1]->smoothu, PETSC_FALSE)); 930 } 931 if (mglevels[n - 1]->cr) { 932 PetscCall(KSPSetDM(mglevels[n - 1]->cr, pc->dm)); 933 PetscCall(KSPSetDMActive(mglevels[n - 1]->cr, PETSC_FALSE)); 934 } 935 } 936 937 /* 938 Skipping if user has provided all interpolation/restriction needed (since DM might not be able to produce them (when coming from SNES/TS) 939 Skipping for externally managed hierarchy (such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs? 
940 */ 941 if (missinginterpolate && mg->galerkin != PC_MG_GALERKIN_EXTERNAL && !pc->setupcalled) { 942 /* first see if we can compute a coarse space */ 943 if (mg->coarseSpaceType == PCMG_ADAPT_GDSW) { 944 for (i = n - 2; i > -1; i--) { 945 if (!mglevels[i + 1]->restrct && !mglevels[i + 1]->interpolate) { 946 PetscCall(PCMGComputeCoarseSpace_Internal(pc, i + 1, mg->coarseSpaceType, mg->Nc, NULL, &mglevels[i + 1]->coarseSpace)); 947 PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->coarseSpace)); 948 } 949 } 950 } else { /* construct the interpolation from the DMs */ 951 Mat p; 952 Vec rscale; 953 PetscCall(PetscMalloc1(n, &dms)); 954 dms[n - 1] = pc->dm; 955 /* Separately create them so we do not get DMKSP interference between levels */ 956 for (i = n - 2; i > -1; i--) PetscCall(DMCoarsen(dms[i + 1], MPI_COMM_NULL, &dms[i])); 957 for (i = n - 2; i > -1; i--) { 958 DMKSP kdm; 959 PetscBool dmhasrestrict, dmhasinject; 960 PetscCall(KSPSetDM(mglevels[i]->smoothd, dms[i])); 961 if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothd, PETSC_FALSE)); 962 if (mglevels[i]->smoothd != mglevels[i]->smoothu) { 963 PetscCall(KSPSetDM(mglevels[i]->smoothu, dms[i])); 964 if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->smoothu, PETSC_FALSE)); 965 } 966 if (mglevels[i]->cr) { 967 PetscCall(KSPSetDM(mglevels[i]->cr, dms[i])); 968 if (!needRestricts) PetscCall(KSPSetDMActive(mglevels[i]->cr, PETSC_FALSE)); 969 } 970 PetscCall(DMGetDMKSPWrite(dms[i], &kdm)); 971 /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take 972 * a bitwise OR of computing the matrix, RHS, and initial iterate. 
*/ 973 kdm->ops->computerhs = NULL; 974 kdm->rhsctx = NULL; 975 if (!mglevels[i + 1]->interpolate) { 976 PetscCall(DMCreateInterpolation(dms[i], dms[i + 1], &p, &rscale)); 977 PetscCall(PCMGSetInterpolation(pc, i + 1, p)); 978 if (rscale) PetscCall(PCMGSetRScale(pc, i + 1, rscale)); 979 PetscCall(VecDestroy(&rscale)); 980 PetscCall(MatDestroy(&p)); 981 } 982 PetscCall(DMHasCreateRestriction(dms[i], &dmhasrestrict)); 983 if (dmhasrestrict && !mglevels[i + 1]->restrct) { 984 PetscCall(DMCreateRestriction(dms[i], dms[i + 1], &p)); 985 PetscCall(PCMGSetRestriction(pc, i + 1, p)); 986 PetscCall(MatDestroy(&p)); 987 } 988 PetscCall(DMHasCreateInjection(dms[i], &dmhasinject)); 989 if (dmhasinject && !mglevels[i + 1]->inject) { 990 PetscCall(DMCreateInjection(dms[i], dms[i + 1], &p)); 991 PetscCall(PCMGSetInjection(pc, i + 1, p)); 992 PetscCall(MatDestroy(&p)); 993 } 994 } 995 996 for (i = n - 2; i > -1; i--) PetscCall(DMDestroy(&dms[i])); 997 PetscCall(PetscFree(dms)); 998 } 999 } 1000 1001 if (mg->galerkin < PC_MG_GALERKIN_NONE) { 1002 Mat A, B; 1003 PetscBool doA = PETSC_FALSE, doB = PETSC_FALSE; 1004 MatReuse reuse = MAT_INITIAL_MATRIX; 1005 1006 if (mg->galerkin == PC_MG_GALERKIN_PMAT || mg->galerkin == PC_MG_GALERKIN_BOTH) doB = PETSC_TRUE; 1007 if (mg->galerkin == PC_MG_GALERKIN_MAT || (mg->galerkin == PC_MG_GALERKIN_BOTH && dA != dB)) doA = PETSC_TRUE; 1008 if (pc->setupcalled) reuse = MAT_REUSE_MATRIX; 1009 for (i = n - 2; i > -1; i--) { 1010 PetscCheck(mglevels[i + 1]->restrct || mglevels[i + 1]->interpolate, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must provide interpolation or restriction for each MG level except level 0"); 1011 if (!mglevels[i + 1]->interpolate) PetscCall(PCMGSetInterpolation(pc, i + 1, mglevels[i + 1]->restrct)); 1012 if (!mglevels[i + 1]->restrct) PetscCall(PCMGSetRestriction(pc, i + 1, mglevels[i + 1]->interpolate)); 1013 if (reuse == MAT_REUSE_MATRIX) PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, &B)); 1014 if 
(doA) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dA, mglevels[i + 1]->interpolate, reuse, 1.0, &A)); 1015 if (doB) PetscCall(MatGalerkin(mglevels[i + 1]->restrct, dB, mglevels[i + 1]->interpolate, reuse, 1.0, &B)); 1016 /* the management of the PetscObjectReference() and PetscObjecDereference() below is rather delicate */ 1017 if (!doA && dAeqdB) { 1018 if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)B)); 1019 A = B; 1020 } else if (!doA && reuse == MAT_INITIAL_MATRIX) { 1021 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &A, NULL)); 1022 PetscCall(PetscObjectReference((PetscObject)A)); 1023 } 1024 if (!doB && dAeqdB) { 1025 if (reuse == MAT_INITIAL_MATRIX) PetscCall(PetscObjectReference((PetscObject)A)); 1026 B = A; 1027 } else if (!doB && reuse == MAT_INITIAL_MATRIX) { 1028 PetscCall(KSPGetOperators(mglevels[i]->smoothd, NULL, &B)); 1029 PetscCall(PetscObjectReference((PetscObject)B)); 1030 } 1031 if (reuse == MAT_INITIAL_MATRIX) { 1032 PetscCall(KSPSetOperators(mglevels[i]->smoothd, A, B)); 1033 PetscCall(PetscObjectDereference((PetscObject)A)); 1034 PetscCall(PetscObjectDereference((PetscObject)B)); 1035 } 1036 dA = A; 1037 dB = B; 1038 } 1039 } 1040 1041 /* Adapt interpolation matrices */ 1042 if (adaptInterpolation) { 1043 for (i = 0; i < n; ++i) { 1044 if (!mglevels[i]->coarseSpace) PetscCall(PCMGComputeCoarseSpace_Internal(pc, i, mg->coarseSpaceType, mg->Nc, !i ? 
NULL : mglevels[i - 1]->coarseSpace, &mglevels[i]->coarseSpace)); 1045 if (i) PetscCall(PCMGAdaptInterpolator_Internal(pc, i, mglevels[i - 1]->smoothu, mglevels[i]->smoothu, mglevels[i - 1]->coarseSpace, mglevels[i]->coarseSpace)); 1046 } 1047 for (i = n - 2; i > -1; --i) PetscCall(PCMGRecomputeLevelOperators_Internal(pc, i)); 1048 } 1049 1050 if (needRestricts && pc->dm) { 1051 for (i = n - 2; i >= 0; i--) { 1052 DM dmfine, dmcoarse; 1053 Mat Restrict, Inject; 1054 Vec rscale; 1055 PetscCall(KSPGetDM(mglevels[i + 1]->smoothd, &dmfine)); 1056 PetscCall(KSPGetDM(mglevels[i]->smoothd, &dmcoarse)); 1057 PetscCall(PCMGGetRestriction(pc, i + 1, &Restrict)); 1058 PetscCall(PCMGGetRScale(pc, i + 1, &rscale)); 1059 PetscCall(PCMGGetInjection(pc, i + 1, &Inject)); 1060 PetscCall(DMRestrict(dmfine, Restrict, rscale, Inject, dmcoarse)); 1061 } 1062 } 1063 1064 if (!pc->setupcalled) { 1065 for (i = 0; i < n; i++) PetscCall(KSPSetFromOptions(mglevels[i]->smoothd)); 1066 for (i = 1; i < n; i++) { 1067 if (mglevels[i]->smoothu && (mglevels[i]->smoothu != mglevels[i]->smoothd)) PetscCall(KSPSetFromOptions(mglevels[i]->smoothu)); 1068 if (mglevels[i]->cr) PetscCall(KSPSetFromOptions(mglevels[i]->cr)); 1069 } 1070 /* insure that if either interpolation or restriction is set the other other one is set */ 1071 for (i = 1; i < n; i++) { 1072 PetscCall(PCMGGetInterpolation(pc, i, NULL)); 1073 PetscCall(PCMGGetRestriction(pc, i, NULL)); 1074 } 1075 for (i = 0; i < n - 1; i++) { 1076 if (!mglevels[i]->b) { 1077 Vec *vec; 1078 PetscCall(KSPCreateVecs(mglevels[i]->smoothd, 1, &vec, 0, NULL)); 1079 PetscCall(PCMGSetRhs(pc, i, *vec)); 1080 PetscCall(VecDestroy(vec)); 1081 PetscCall(PetscFree(vec)); 1082 } 1083 if (!mglevels[i]->r && i) { 1084 PetscCall(VecDuplicate(mglevels[i]->b, &tvec)); 1085 PetscCall(PCMGSetR(pc, i, tvec)); 1086 PetscCall(VecDestroy(&tvec)); 1087 } 1088 if (!mglevels[i]->x) { 1089 PetscCall(VecDuplicate(mglevels[i]->b, &tvec)); 1090 PetscCall(PCMGSetX(pc, i, tvec)); 1091 
PetscCall(VecDestroy(&tvec)); 1092 } 1093 if (doCR) { 1094 PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crx)); 1095 PetscCall(VecDuplicate(mglevels[i]->b, &mglevels[i]->crb)); 1096 PetscCall(VecZeroEntries(mglevels[i]->crb)); 1097 } 1098 } 1099 if (n != 1 && !mglevels[n - 1]->r) { 1100 /* PCMGSetR() on the finest level if user did not supply it */ 1101 Vec *vec; 1102 PetscCall(KSPCreateVecs(mglevels[n - 1]->smoothd, 1, &vec, 0, NULL)); 1103 PetscCall(PCMGSetR(pc, n - 1, *vec)); 1104 PetscCall(VecDestroy(vec)); 1105 PetscCall(PetscFree(vec)); 1106 } 1107 if (doCR) { 1108 PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crx)); 1109 PetscCall(VecDuplicate(mglevels[n - 1]->r, &mglevels[n - 1]->crb)); 1110 PetscCall(VecZeroEntries(mglevels[n - 1]->crb)); 1111 } 1112 } 1113 1114 if (pc->dm) { 1115 /* need to tell all the coarser levels to rebuild the matrix using the DM for that level */ 1116 for (i = 0; i < n - 1; i++) { 1117 if (mglevels[i]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[i]->smoothd->setupstage = KSP_SETUP_NEWMATRIX; 1118 } 1119 } 1120 // We got here (PCSetUp_MG) because the matrix has changed, which means the smoother needs to be set up again (e.g., 1121 // new diagonal for Jacobi). Setting it here allows it to be logged under PCSetUp rather than deep inside a PCApply. 
1122 if (mglevels[n - 1]->smoothd->setupstage != KSP_SETUP_NEW) mglevels[n - 1]->smoothd->setupstage = KSP_SETUP_NEWMATRIX; 1123 1124 for (i = 1; i < n; i++) { 1125 if (mglevels[i]->smoothu == mglevels[i]->smoothd || mg->am == PC_MG_FULL || mg->am == PC_MG_KASKADE || mg->cyclesperpcapply > 1) { 1126 /* if doing only down then initial guess is zero */ 1127 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothd, PETSC_TRUE)); 1128 } 1129 if (mglevels[i]->cr) PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE)); 1130 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1131 PetscCall(KSPSetUp(mglevels[i]->smoothd)); 1132 if (mglevels[i]->smoothd->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 1133 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1134 if (!mglevels[i]->residual) { 1135 Mat mat; 1136 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL)); 1137 PetscCall(PCMGSetResidual(pc, i, PCMGResidualDefault, mat)); 1138 } 1139 if (!mglevels[i]->residualtranspose) { 1140 Mat mat; 1141 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &mat, NULL)); 1142 PetscCall(PCMGSetResidualTranspose(pc, i, PCMGResidualTransposeDefault, mat)); 1143 } 1144 } 1145 for (i = 1; i < n; i++) { 1146 if (mglevels[i]->smoothu && mglevels[i]->smoothu != mglevels[i]->smoothd) { 1147 Mat downmat, downpmat; 1148 1149 /* check if operators have been set for up, if not use down operators to set them */ 1150 PetscCall(KSPGetOperatorsSet(mglevels[i]->smoothu, &opsset, NULL)); 1151 if (!opsset) { 1152 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat)); 1153 PetscCall(KSPSetOperators(mglevels[i]->smoothu, downmat, downpmat)); 1154 } 1155 1156 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->smoothu, PETSC_TRUE)); 1157 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 
1158 PetscCall(KSPSetUp(mglevels[i]->smoothu)); 1159 if (mglevels[i]->smoothu->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 1160 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1161 } 1162 if (mglevels[i]->cr) { 1163 Mat downmat, downpmat; 1164 1165 /* check if operators have been set for up, if not use down operators to set them */ 1166 PetscCall(KSPGetOperatorsSet(mglevels[i]->cr, &opsset, NULL)); 1167 if (!opsset) { 1168 PetscCall(KSPGetOperators(mglevels[i]->smoothd, &downmat, &downpmat)); 1169 PetscCall(KSPSetOperators(mglevels[i]->cr, downmat, downpmat)); 1170 } 1171 1172 PetscCall(KSPSetInitialGuessNonzero(mglevels[i]->cr, PETSC_TRUE)); 1173 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1174 PetscCall(KSPSetUp(mglevels[i]->cr)); 1175 if (mglevels[i]->cr->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 1176 if (mglevels[i]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[i]->eventsmoothsetup, 0, 0, 0, 0)); 1177 } 1178 } 1179 1180 if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventBegin(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0)); 1181 PetscCall(KSPSetUp(mglevels[0]->smoothd)); 1182 if (mglevels[0]->smoothd->reason == KSP_DIVERGED_PC_FAILED) pc->failedreason = PC_SUBPC_ERROR; 1183 if (mglevels[0]->eventsmoothsetup) PetscCall(PetscLogEventEnd(mglevels[0]->eventsmoothsetup, 0, 0, 0, 0)); 1184 1185 /* 1186 Dump the interpolation/restriction matrices plus the 1187 Jacobian/stiffness on each level. This allows MATLAB users to 1188 easily check if the Galerkin condition A_c = R A_f R^T is satisfied. 1189 1190 Only support one or the other at the same time. 
1191 */ 1192 #if defined(PETSC_USE_SOCKET_VIEWER) 1193 PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_matlab", &dump, NULL)); 1194 if (dump) viewer = PETSC_VIEWER_SOCKET_(PetscObjectComm((PetscObject)pc)); 1195 dump = PETSC_FALSE; 1196 #endif 1197 PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_mg_dump_binary", &dump, NULL)); 1198 if (dump) viewer = PETSC_VIEWER_BINARY_(PetscObjectComm((PetscObject)pc)); 1199 1200 if (viewer) { 1201 for (i = 1; i < n; i++) PetscCall(MatView(mglevels[i]->restrct, viewer)); 1202 for (i = 0; i < n; i++) { 1203 PetscCall(KSPGetPC(mglevels[i]->smoothd, &pc)); 1204 PetscCall(MatView(pc->mat, viewer)); 1205 } 1206 } 1207 PetscFunctionReturn(0); 1208 } 1209 1210 /* -------------------------------------------------------------------------------------*/ 1211 1212 PetscErrorCode PCMGGetLevels_MG(PC pc, PetscInt *levels) { 1213 PC_MG *mg = (PC_MG *)pc->data; 1214 1215 PetscFunctionBegin; 1216 *levels = mg->nlevels; 1217 PetscFunctionReturn(0); 1218 } 1219 1220 /*@ 1221 PCMGGetLevels - Gets the number of levels to use with `PCMG`. 1222 1223 Not Collective 1224 1225 Input Parameter: 1226 . pc - the preconditioner context 1227 1228 Output parameter: 1229 . levels - the number of levels 1230 1231 Level: advanced 1232 1233 .seealso: `PCMG`, `PCMGSetLevels()` 1234 @*/ 1235 PetscErrorCode PCMGGetLevels(PC pc, PetscInt *levels) { 1236 PetscFunctionBegin; 1237 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1238 PetscValidIntPointer(levels, 2); 1239 *levels = 0; 1240 PetscTryMethod(pc, "PCMGGetLevels_C", (PC, PetscInt *), (pc, levels)); 1241 PetscFunctionReturn(0); 1242 } 1243 1244 /*@ 1245 PCMGGetGridComplexity - compute operator and grid complexity of the `PCMG` hierarchy 1246 1247 Input Parameter: 1248 . 
pc - the preconditioner context 1249 1250 Output Parameters: 1251 + gc - grid complexity = sum_i(n_i) / n_0 1252 - oc - operator complexity = sum_i(nnz_i) / nnz_0 1253 1254 Level: advanced 1255 1256 Note: 1257 This is often call the operator complexity in multigrid literature 1258 1259 .seealso: `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()` 1260 @*/ 1261 PetscErrorCode PCMGGetGridComplexity(PC pc, PetscReal *gc, PetscReal *oc) { 1262 PC_MG *mg = (PC_MG *)pc->data; 1263 PC_MG_Levels **mglevels = mg->levels; 1264 PetscInt lev, N; 1265 PetscLogDouble nnz0 = 0, sgc = 0, soc = 0, n0 = 0; 1266 MatInfo info; 1267 1268 PetscFunctionBegin; 1269 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1270 if (gc) PetscValidRealPointer(gc, 2); 1271 if (oc) PetscValidRealPointer(oc, 3); 1272 if (!pc->setupcalled) { 1273 if (gc) *gc = 0; 1274 if (oc) *oc = 0; 1275 PetscFunctionReturn(0); 1276 } 1277 PetscCheck(mg->nlevels > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MG has no levels"); 1278 for (lev = 0; lev < mg->nlevels; lev++) { 1279 Mat dB; 1280 PetscCall(KSPGetOperators(mglevels[lev]->smoothd, NULL, &dB)); 1281 PetscCall(MatGetInfo(dB, MAT_GLOBAL_SUM, &info)); /* global reduction */ 1282 PetscCall(MatGetSize(dB, &N, NULL)); 1283 sgc += N; 1284 soc += info.nz_used; 1285 if (lev == mg->nlevels - 1) { 1286 nnz0 = info.nz_used; 1287 n0 = N; 1288 } 1289 } 1290 if (n0 > 0 && gc) *gc = (PetscReal)(sgc / n0); 1291 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number for grid points on finest level is not available"); 1292 if (nnz0 > 0 && oc) *oc = (PetscReal)(soc / nnz0); 1293 PetscFunctionReturn(0); 1294 } 1295 1296 /*@ 1297 PCMGSetType - Determines the form of multigrid to use: 1298 multiplicative, additive, full, or the Kaskade algorithm. 1299 1300 Logically Collective on pc 1301 1302 Input Parameters: 1303 + pc - the preconditioner context 1304 - form - multigrid form, one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE` 1305 1306 Options Database Key: 1307 . 
-pc_mg_type <form> - Sets <form>, one of multiplicative, 1308 additive, full, kaskade 1309 1310 Level: advanced 1311 1312 .seealso: `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGGetType()`, `PCMGCycleType` 1313 @*/ 1314 PetscErrorCode PCMGSetType(PC pc, PCMGType form) { 1315 PC_MG *mg = (PC_MG *)pc->data; 1316 1317 PetscFunctionBegin; 1318 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1319 PetscValidLogicalCollectiveEnum(pc, form, 2); 1320 mg->am = form; 1321 if (form == PC_MG_MULTIPLICATIVE) pc->ops->applyrichardson = PCApplyRichardson_MG; 1322 else pc->ops->applyrichardson = NULL; 1323 PetscFunctionReturn(0); 1324 } 1325 1326 /*@ 1327 PCMGGetType - Finds the form of multigrid the `PCMG` is using multiplicative, additive, full, or the Kaskade algorithm. 1328 1329 Logically Collective on pc 1330 1331 Input Parameter: 1332 . pc - the preconditioner context 1333 1334 Output Parameter: 1335 . type - one of `PC_MG_MULTIPLICATIVE`, `PC_MG_ADDITIVE`, `PC_MG_FULL`, `PC_MG_KASKADE`, `PCMGCycleType` 1336 1337 Level: advanced 1338 1339 .seealso: `PCMGType`, `PCMG`, `PCMGGetLevels()`, `PCMGSetLevels()`, `PCMGSetType()` 1340 @*/ 1341 PetscErrorCode PCMGGetType(PC pc, PCMGType *type) { 1342 PC_MG *mg = (PC_MG *)pc->data; 1343 1344 PetscFunctionBegin; 1345 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1346 *type = mg->am; 1347 PetscFunctionReturn(0); 1348 } 1349 1350 /*@ 1351 PCMGSetCycleType - Sets the type cycles to use. Use `PCMGSetCycleTypeOnLevel()` for more 1352 complicated cycling. 1353 1354 Logically Collective on pc 1355 1356 Input Parameters: 1357 + pc - the multigrid context 1358 - n - either `PC_MG_CYCLE_V` or `PC_MG_CYCLE_W` 1359 1360 Options Database Key: 1361 . 
-pc_mg_cycle_type <v,w> - provide the cycle desired 1362 1363 Level: advanced 1364 1365 .seealso: `PCMG`, `PCMGSetCycleTypeOnLevel()`, `PCMGType`, `PCMGCycleType` 1366 @*/ 1367 PetscErrorCode PCMGSetCycleType(PC pc, PCMGCycleType n) { 1368 PC_MG *mg = (PC_MG *)pc->data; 1369 PC_MG_Levels **mglevels = mg->levels; 1370 PetscInt i, levels; 1371 1372 PetscFunctionBegin; 1373 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1374 PetscValidLogicalCollectiveEnum(pc, n, 2); 1375 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1376 levels = mglevels[0]->levels; 1377 for (i = 0; i < levels; i++) mglevels[i]->cycles = n; 1378 PetscFunctionReturn(0); 1379 } 1380 1381 /*@ 1382 PCMGMultiplicativeSetCycles - Sets the number of cycles to use for each preconditioner step 1383 of multigrid when `PCMGType` is `PC_MG_MULTIPLICATIVE` 1384 1385 Logically Collective on pc 1386 1387 Input Parameters: 1388 + pc - the multigrid context 1389 - n - number of cycles (default is 1) 1390 1391 Options Database Key: 1392 . 
-pc_mg_multiplicative_cycles n - set the number of cycles 1393 1394 Level: advanced 1395 1396 Note: 1397 This is not associated with setting a v or w cycle, that is set with `PCMGSetCycleType()` 1398 1399 .seealso: `PCMGSetCycleTypeOnLevel()`, `PCMGSetCycleType()`, `PCMGCycleType`, `PCMGType` 1400 @*/ 1401 PetscErrorCode PCMGMultiplicativeSetCycles(PC pc, PetscInt n) { 1402 PC_MG *mg = (PC_MG *)pc->data; 1403 1404 PetscFunctionBegin; 1405 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1406 PetscValidLogicalCollectiveInt(pc, n, 2); 1407 mg->cyclesperpcapply = n; 1408 PetscFunctionReturn(0); 1409 } 1410 1411 PetscErrorCode PCMGSetGalerkin_MG(PC pc, PCMGGalerkinType use) { 1412 PC_MG *mg = (PC_MG *)pc->data; 1413 1414 PetscFunctionBegin; 1415 mg->galerkin = use; 1416 PetscFunctionReturn(0); 1417 } 1418 1419 /*@ 1420 PCMGSetGalerkin - Causes the coarser grid matrices to be computed from the 1421 finest grid via the Galerkin process: A_i-1 = r_i * A_i * p_i 1422 1423 Logically Collective on pc 1424 1425 Input Parameters: 1426 + pc - the multigrid context 1427 - use - one of `PC_MG_GALERKIN_BOTH`, `PC_MG_GALERKIN_PMAT`, `PC_MG_GALERKIN_MAT`, or `PC_MG_GALERKIN_NONE` 1428 1429 Options Database Key: 1430 . -pc_mg_galerkin <both,pmat,mat,none> - set the matrices to form via the Galerkin process 1431 1432 Level: intermediate 1433 1434 Note: 1435 Some codes that use `PCMG` such as `PCGAMG` use Galerkin internally while constructing the hierarchy and thus do not 1436 use the `PCMG` construction of the coarser grids. 1437 1438 .seealso: `PCMG`, `PCMGGetGalerkin()`, `PCMGGalerkinType` 1439 @*/ 1440 PetscErrorCode PCMGSetGalerkin(PC pc, PCMGGalerkinType use) { 1441 PetscFunctionBegin; 1442 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1443 PetscTryMethod(pc, "PCMGSetGalerkin_C", (PC, PCMGGalerkinType), (pc, use)); 1444 PetscFunctionReturn(0); 1445 } 1446 1447 /*@ 1448 PCMGGetGalerkin - Checks if Galerkin multigrid is being used, i.e. 
A_i-1 = r_i * A_i * p_i 1449 1450 Not Collective 1451 1452 Input Parameter: 1453 . pc - the multigrid context 1454 1455 Output Parameter: 1456 . galerkin - one of `PC_MG_GALERKIN_BOTH`,`PC_MG_GALERKIN_PMAT`,`PC_MG_GALERKIN_MAT`, `PC_MG_GALERKIN_NONE`, or `PC_MG_GALERKIN_EXTERNAL` 1457 1458 Level: intermediate 1459 1460 .seealso: `PCMG`, `PCMGSetGalerkin()`, `PCMGGalerkinType` 1461 @*/ 1462 PetscErrorCode PCMGGetGalerkin(PC pc, PCMGGalerkinType *galerkin) { 1463 PC_MG *mg = (PC_MG *)pc->data; 1464 1465 PetscFunctionBegin; 1466 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1467 *galerkin = mg->galerkin; 1468 PetscFunctionReturn(0); 1469 } 1470 1471 PetscErrorCode PCMGSetAdaptInterpolation_MG(PC pc, PetscBool adapt) { 1472 PC_MG *mg = (PC_MG *)pc->data; 1473 1474 PetscFunctionBegin; 1475 mg->adaptInterpolation = adapt; 1476 PetscFunctionReturn(0); 1477 } 1478 1479 PetscErrorCode PCMGGetAdaptInterpolation_MG(PC pc, PetscBool *adapt) { 1480 PC_MG *mg = (PC_MG *)pc->data; 1481 1482 PetscFunctionBegin; 1483 *adapt = mg->adaptInterpolation; 1484 PetscFunctionReturn(0); 1485 } 1486 1487 PetscErrorCode PCMGSetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType ctype) { 1488 PC_MG *mg = (PC_MG *)pc->data; 1489 1490 PetscFunctionBegin; 1491 mg->adaptInterpolation = ctype != PCMG_ADAPT_NONE ? 
PETSC_TRUE : PETSC_FALSE; 1492 mg->coarseSpaceType = ctype; 1493 PetscFunctionReturn(0); 1494 } 1495 1496 PetscErrorCode PCMGGetAdaptCoarseSpaceType_MG(PC pc, PCMGCoarseSpaceType *ctype) { 1497 PC_MG *mg = (PC_MG *)pc->data; 1498 1499 PetscFunctionBegin; 1500 *ctype = mg->coarseSpaceType; 1501 PetscFunctionReturn(0); 1502 } 1503 1504 PetscErrorCode PCMGSetAdaptCR_MG(PC pc, PetscBool cr) { 1505 PC_MG *mg = (PC_MG *)pc->data; 1506 1507 PetscFunctionBegin; 1508 mg->compatibleRelaxation = cr; 1509 PetscFunctionReturn(0); 1510 } 1511 1512 PetscErrorCode PCMGGetAdaptCR_MG(PC pc, PetscBool *cr) { 1513 PC_MG *mg = (PC_MG *)pc->data; 1514 1515 PetscFunctionBegin; 1516 *cr = mg->compatibleRelaxation; 1517 PetscFunctionReturn(0); 1518 } 1519 1520 /*@C 1521 PCMGSetAdaptCoarseSpaceType - Set the type of adaptive coarse space. 1522 1523 Adapts or creates the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated. 1524 1525 Logically Collective on pc 1526 1527 Input Parameters: 1528 + pc - the multigrid context 1529 - ctype - the type of coarse space 1530 1531 Options Database Keys: 1532 + -pc_mg_adapt_interp_n <int> - The number of modes to use 1533 - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: none, polynomial, harmonic, eigenvector, generalized_eigenvector, gdsw 1534 1535 Level: intermediate 1536 1537 .seealso: `PCMG`, `PCMGCoarseSpaceType`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()` 1538 @*/ 1539 PetscErrorCode PCMGSetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType ctype) { 1540 PetscFunctionBegin; 1541 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1542 PetscValidLogicalCollectiveEnum(pc, ctype, 2); 1543 PetscTryMethod(pc, "PCMGSetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType), (pc, ctype)); 1544 PetscFunctionReturn(0); 1545 } 1546 1547 /*@C 1548 PCMGGetAdaptCoarseSpaceType - Get the type of adaptive coarse space. 
1549 1550 Not Collective 1551 1552 Input Parameter: 1553 . pc - the multigrid context 1554 1555 Output Parameter: 1556 . ctype - the type of coarse space 1557 1558 Level: intermediate 1559 1560 .seealso: `PCMG`, `PCMGCoarseSpaceType`, `PCMGSetAdaptCoarseSpaceType()`, `PCMGSetGalerkin()`, `PCMGSetAdaptInterpolation()` 1561 @*/ 1562 PetscErrorCode PCMGGetAdaptCoarseSpaceType(PC pc, PCMGCoarseSpaceType *ctype) { 1563 PetscFunctionBegin; 1564 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1565 PetscValidPointer(ctype, 2); 1566 PetscUseMethod(pc, "PCMGGetAdaptCoarseSpaceType_C", (PC, PCMGCoarseSpaceType *), (pc, ctype)); 1567 PetscFunctionReturn(0); 1568 } 1569 1570 /* MATT: REMOVE? */ 1571 /*@ 1572 PCMGSetAdaptInterpolation - Adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, and thus accurately interpolated. 1573 1574 Logically Collective on pc 1575 1576 Input Parameters: 1577 + pc - the multigrid context 1578 - adapt - flag for adaptation of the interpolator 1579 1580 Options Database Keys: 1581 + -pc_mg_adapt_interp - Turn on adaptation 1582 . 
-pc_mg_adapt_interp_n <int> - The number of modes to use, should be divisible by dimension 1583 - -pc_mg_adapt_interp_coarse_space <type> - The type of coarse space: polynomial, harmonic, eigenvector, generalized_eigenvector 1584 1585 Level: intermediate 1586 1587 .seealso: `PCMG`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1588 @*/ 1589 PetscErrorCode PCMGSetAdaptInterpolation(PC pc, PetscBool adapt) { 1590 PetscFunctionBegin; 1591 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1592 PetscTryMethod(pc, "PCMGSetAdaptInterpolation_C", (PC, PetscBool), (pc, adapt)); 1593 PetscFunctionReturn(0); 1594 } 1595 1596 /*@ 1597 PCMGGetAdaptInterpolation - Get the flag to adapt the interpolator based upon a vector space which should be accurately captured by the next coarser mesh, 1598 and thus accurately interpolated. 1599 1600 Not collective 1601 1602 Input Parameter: 1603 . pc - the multigrid context 1604 1605 Output Parameter: 1606 . adapt - flag for adaptation of the interpolator 1607 1608 Level: intermediate 1609 1610 .seealso: `PCMG`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1611 @*/ 1612 PetscErrorCode PCMGGetAdaptInterpolation(PC pc, PetscBool *adapt) { 1613 PetscFunctionBegin; 1614 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1615 PetscValidBoolPointer(adapt, 2); 1616 PetscUseMethod(pc, "PCMGGetAdaptInterpolation_C", (PC, PetscBool *), (pc, adapt)); 1617 PetscFunctionReturn(0); 1618 } 1619 1620 /*@ 1621 PCMGSetAdaptCR - Monitor the coarse space quality using an auxiliary solve with compatible relaxation. 1622 1623 Logically Collective on pc 1624 1625 Input Parameters: 1626 + pc - the multigrid context 1627 - cr - flag for compatible relaxation 1628 1629 Options Database Key: 1630 . 
-pc_mg_adapt_cr - Turn on compatible relaxation 1631 1632 Level: intermediate 1633 1634 .seealso: `PCMG`, `PCMGGetAdaptCR()`, `PCMGSetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1635 @*/ 1636 PetscErrorCode PCMGSetAdaptCR(PC pc, PetscBool cr) { 1637 PetscFunctionBegin; 1638 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1639 PetscTryMethod(pc, "PCMGSetAdaptCR_C", (PC, PetscBool), (pc, cr)); 1640 PetscFunctionReturn(0); 1641 } 1642 1643 /*@ 1644 PCMGGetAdaptCR - Get the flag to monitor coarse space quality using an auxiliary solve with compatible relaxation. 1645 1646 Not collective 1647 1648 Input Parameter: 1649 . pc - the multigrid context 1650 1651 Output Parameter: 1652 . cr - flag for compatible relaxaion 1653 1654 Level: intermediate 1655 1656 .seealso: `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1657 @*/ 1658 PetscErrorCode PCMGGetAdaptCR(PC pc, PetscBool *cr) { 1659 PetscFunctionBegin; 1660 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1661 PetscValidBoolPointer(cr, 2); 1662 PetscUseMethod(pc, "PCMGGetAdaptCR_C", (PC, PetscBool *), (pc, cr)); 1663 PetscFunctionReturn(0); 1664 } 1665 1666 /*@ 1667 PCMGSetNumberSmooth - Sets the number of pre and post-smoothing steps to use 1668 on all levels. Use `PCMGDistinctSmoothUp()` to create separate up and down smoothers if you want different numbers of 1669 pre- and post-smoothing steps. 1670 1671 Logically Collective on pc 1672 1673 Input Parameters: 1674 + mg - the multigrid context 1675 - n - the number of smoothing steps 1676 1677 Options Database Key: 1678 . -mg_levels_ksp_max_it <n> - Sets number of pre and post-smoothing steps 1679 1680 Level: advanced 1681 1682 Note: 1683 This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid. 
1684 1685 .seealso: `PCMG`, `PCMGSetDistinctSmoothUp()` 1686 @*/ 1687 PetscErrorCode PCMGSetNumberSmooth(PC pc, PetscInt n) { 1688 PC_MG *mg = (PC_MG *)pc->data; 1689 PC_MG_Levels **mglevels = mg->levels; 1690 PetscInt i, levels; 1691 1692 PetscFunctionBegin; 1693 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1694 PetscValidLogicalCollectiveInt(pc, n, 2); 1695 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1696 levels = mglevels[0]->levels; 1697 1698 for (i = 1; i < levels; i++) { 1699 PetscCall(KSPSetTolerances(mglevels[i]->smoothu, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n)); 1700 PetscCall(KSPSetTolerances(mglevels[i]->smoothd, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, n)); 1701 mg->default_smoothu = n; 1702 mg->default_smoothd = n; 1703 } 1704 PetscFunctionReturn(0); 1705 } 1706 1707 /*@ 1708 PCMGSetDistinctSmoothUp - sets the up (post) smoother to be a separate `KSP` from the down (pre) smoother on all levels 1709 and adds the suffix _up to the options name 1710 1711 Logically Collective on pc 1712 1713 Input Parameter: 1714 . pc - the preconditioner context 1715 1716 Options Database Key: 1717 . -pc_mg_distinct_smoothup <bool> - use distinct smoothing objects 1718 1719 Level: advanced 1720 1721 Note: 1722 This does not set a value on the coarsest grid, since we assume that there is no separate smooth up on the coarsest grid. 
1723 1724 .seealso: `PCMG`, `PCMGSetNumberSmooth()` 1725 @*/ 1726 PetscErrorCode PCMGSetDistinctSmoothUp(PC pc) { 1727 PC_MG *mg = (PC_MG *)pc->data; 1728 PC_MG_Levels **mglevels = mg->levels; 1729 PetscInt i, levels; 1730 KSP subksp; 1731 1732 PetscFunctionBegin; 1733 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1734 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ORDER, "Must set MG levels with PCMGSetLevels() before calling"); 1735 levels = mglevels[0]->levels; 1736 1737 for (i = 1; i < levels; i++) { 1738 const char *prefix = NULL; 1739 /* make sure smoother up and down are different */ 1740 PetscCall(PCMGGetSmootherUp(pc, i, &subksp)); 1741 PetscCall(KSPGetOptionsPrefix(mglevels[i]->smoothd, &prefix)); 1742 PetscCall(KSPSetOptionsPrefix(subksp, prefix)); 1743 PetscCall(KSPAppendOptionsPrefix(subksp, "up_")); 1744 } 1745 PetscFunctionReturn(0); 1746 } 1747 1748 /* No new matrices are created, and the coarse operator matrices are the references to the original ones */ 1749 PetscErrorCode PCGetInterpolations_MG(PC pc, PetscInt *num_levels, Mat *interpolations[]) { 1750 PC_MG *mg = (PC_MG *)pc->data; 1751 PC_MG_Levels **mglevels = mg->levels; 1752 Mat *mat; 1753 PetscInt l; 1754 1755 PetscFunctionBegin; 1756 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling"); 1757 PetscCall(PetscMalloc1(mg->nlevels, &mat)); 1758 for (l = 1; l < mg->nlevels; l++) { 1759 mat[l - 1] = mglevels[l]->interpolate; 1760 PetscCall(PetscObjectReference((PetscObject)mat[l - 1])); 1761 } 1762 *num_levels = mg->nlevels; 1763 *interpolations = mat; 1764 PetscFunctionReturn(0); 1765 } 1766 1767 /* No new matrices are created, and the coarse operator matrices are the references to the original ones */ 1768 PetscErrorCode PCGetCoarseOperators_MG(PC pc, PetscInt *num_levels, Mat *coarseOperators[]) { 1769 PC_MG *mg = (PC_MG *)pc->data; 1770 PC_MG_Levels **mglevels = mg->levels; 1771 PetscInt l; 1772 Mat *mat; 1773 
1774 PetscFunctionBegin; 1775 PetscCheck(mglevels, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Must set MG levels before calling"); 1776 PetscCall(PetscMalloc1(mg->nlevels, &mat)); 1777 for (l = 0; l < mg->nlevels - 1; l++) { 1778 PetscCall(KSPGetOperators(mglevels[l]->smoothd, NULL, &(mat[l]))); 1779 PetscCall(PetscObjectReference((PetscObject)mat[l])); 1780 } 1781 *num_levels = mg->nlevels; 1782 *coarseOperators = mat; 1783 PetscFunctionReturn(0); 1784 } 1785 1786 /*@C 1787 PCMGRegisterCoarseSpaceConstructor - Adds a method to the `PCMG` package for coarse space construction. 1788 1789 Not collective 1790 1791 Input Parameters: 1792 + name - name of the constructor 1793 - function - constructor routine 1794 1795 Calling sequence for the routine: 1796 $ my_csp(PC pc, PetscInt l, DM dm, KSP smooth, PetscInt Nc, Mat initGuess, Mat *coarseSp) 1797 + pc - The PC object 1798 . l - The multigrid level, 0 is the coarse level 1799 . dm - The DM for this level 1800 . smooth - The level smoother 1801 . Nc - The size of the coarse space 1802 . initGuess - Basis for an initial guess for the space 1803 - coarseSp - A basis for the computed coarse space 1804 1805 Level: advanced 1806 1807 Developer Note: 1808 How come this is not used by `PCGAMG`? 1809 1810 .seealso: `PCMG`, `PCMGGetCoarseSpaceConstructor()`, `PCRegister()` 1811 @*/ 1812 PetscErrorCode PCMGRegisterCoarseSpaceConstructor(const char name[], PetscErrorCode (*function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *)) { 1813 PetscFunctionBegin; 1814 PetscCall(PCInitializePackage()); 1815 PetscCall(PetscFunctionListAdd(&PCMGCoarseList, name, function)); 1816 PetscFunctionReturn(0); 1817 } 1818 1819 /*@C 1820 PCMGGetCoarseSpaceConstructor - Returns the given coarse space construction method. 1821 1822 Not collective 1823 1824 Input Parameter: 1825 . name - name of the constructor 1826 1827 Output Parameter: 1828 . 
function - constructor routine 1829 1830 Level: advanced 1831 1832 .seealso: `PCMG`, `PCMGRegisterCoarseSpaceConstructor()`, `PCRegister()` 1833 @*/ 1834 PetscErrorCode PCMGGetCoarseSpaceConstructor(const char name[], PetscErrorCode (**function)(PC, PetscInt, DM, KSP, PetscInt, Mat, Mat *)) { 1835 PetscFunctionBegin; 1836 PetscCall(PetscFunctionListFind(PCMGCoarseList, name, function)); 1837 PetscFunctionReturn(0); 1838 } 1839 1840 /*MC 1841 PCMG - Use multigrid preconditioning. This preconditioner requires you provide additional 1842 information about the coarser grid matrices and restriction/interpolation operators. 1843 1844 Options Database Keys: 1845 + -pc_mg_levels <nlevels> - number of levels including finest 1846 . -pc_mg_cycle_type <v,w> - provide the cycle desired 1847 . -pc_mg_type <additive,multiplicative,full,kaskade> - multiplicative is the default 1848 . -pc_mg_log - log information about time spent on each level of the solver 1849 . -pc_mg_distinct_smoothup - configure up (after interpolation) and down (before restriction) smoothers separately (with different options prefixes) 1850 . -pc_mg_galerkin <both,pmat,mat,none> - use Galerkin process to compute coarser operators, i.e. Acoarse = R A R' 1851 . -pc_mg_multiplicative_cycles - number of cycles to use as the preconditioner (defaults to 1) 1852 . -pc_mg_dump_matlab - dumps the matrices for each level and the restriction/interpolation matrices 1853 to the Socket viewer for reading from MATLAB. 
1854 - -pc_mg_dump_binary - dumps the matrices for each level and the restriction/interpolation matrices 1855 to the binary output file called binaryoutput 1856 1857 Notes: 1858 If one uses a Krylov method such `KSPGMRES` or `KSPCG` as the smoother then one must use `KSPFGMRES`, `KSPGCR`, or `KSPRICHARDSON` as the outer Krylov method 1859 1860 When run with a single level the smoother options are used on that level NOT the coarse grid solver options 1861 1862 When run with `KSPRICHARDSON` the convergence test changes slightly if monitor is turned on. The iteration count may change slightly. This 1863 is because without monitoring the residual norm is computed WITHIN each multigrid cycle on the finest level after the pre-smoothing 1864 (because the residual has just been computed for the multigrid algorithm and is hence available for free) while with monitoring the 1865 residual is computed at the end of each cycle. 1866 1867 Level: intermediate 1868 1869 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `PCMGType`, `PCEXOTIC`, `PCGAMG`, `PCML`, `PCHYPRE` 1870 `PCMGSetLevels()`, `PCMGGetLevels()`, `PCMGSetType()`, `PCMGSetCycleType()`, 1871 `PCMGSetDistinctSmoothUp()`, `PCMGGetCoarseSolve()`, `PCMGSetResidual()`, `PCMGSetInterpolation()`, 1872 `PCMGSetRestriction()`, `PCMGGetSmoother()`, `PCMGGetSmootherUp()`, `PCMGGetSmootherDown()`, 1873 `PCMGSetCycleTypeOnLevel()`, `PCMGSetRhs()`, `PCMGSetX()`, `PCMGSetR()`, 1874 `PCMGSetAdaptCR()`, `PCMGGetAdaptInterpolation()`, `PCMGSetGalerkin()`, `PCMGGetAdaptCoarseSpaceType()`, `PCMGSetAdaptCoarseSpaceType()` 1875 M*/ 1876 1877 PETSC_EXTERN PetscErrorCode PCCreate_MG(PC pc) { 1878 PC_MG *mg; 1879 1880 PetscFunctionBegin; 1881 PetscCall(PetscNewLog(pc, &mg)); 1882 pc->data = mg; 1883 mg->nlevels = -1; 1884 mg->am = PC_MG_MULTIPLICATIVE; 1885 mg->galerkin = PC_MG_GALERKIN_NONE; 1886 mg->adaptInterpolation = PETSC_FALSE; 1887 mg->Nc = -1; 1888 mg->eigenvalue = -1; 1889 1890 pc->useAmat = PETSC_TRUE; 1891 1892 
pc->ops->apply = PCApply_MG; 1893 pc->ops->applytranspose = PCApplyTranspose_MG; 1894 pc->ops->matapply = PCMatApply_MG; 1895 pc->ops->setup = PCSetUp_MG; 1896 pc->ops->reset = PCReset_MG; 1897 pc->ops->destroy = PCDestroy_MG; 1898 pc->ops->setfromoptions = PCSetFromOptions_MG; 1899 pc->ops->view = PCView_MG; 1900 1901 PetscCall(PetscObjectComposedDataRegister(&mg->eigenvalue)); 1902 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetGalerkin_C", PCMGSetGalerkin_MG)); 1903 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetLevels_C", PCMGGetLevels_MG)); 1904 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetLevels_C", PCMGSetLevels_MG)); 1905 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetInterpolations_C", PCGetInterpolations_MG)); 1906 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGetCoarseOperators_C", PCGetCoarseOperators_MG)); 1907 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptInterpolation_C", PCMGSetAdaptInterpolation_MG)); 1908 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptInterpolation_C", PCMGGetAdaptInterpolation_MG)); 1909 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCR_C", PCMGSetAdaptCR_MG)); 1910 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCR_C", PCMGGetAdaptCR_MG)); 1911 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGSetAdaptCoarseSpaceType_C", PCMGSetAdaptCoarseSpaceType_MG)); 1912 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCMGGetAdaptCoarseSpaceType_C", PCMGGetAdaptCoarseSpaceType_MG)); 1913 PetscFunctionReturn(0); 1914 } 1915