1 /* 2 The PC (preconditioner) interface routines, callable by users. 3 */ 4 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/ 5 #include <petscdm.h> 6 7 /* Logging support */ 8 PetscClassId PC_CLASSID; 9 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplySymmetricLeft; 10 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks; 11 PetscInt PetscMGLevelId; 12 PetscLogStage PCMPIStage; 13 14 PETSC_INTERN PetscErrorCode PCGetDefaultType_Private(PC pc, const char *type[]) 15 { 16 PetscMPIInt size; 17 PetscBool hasopblock, hasopsolve, flg1, flg2, set, flg3, isnormal; 18 19 PetscFunctionBegin; 20 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 21 if (pc->pmat) { 22 PetscCall(MatHasOperation(pc->pmat, MATOP_GET_DIAGONAL_BLOCK, &hasopblock)); 23 PetscCall(MatHasOperation(pc->pmat, MATOP_SOLVE, &hasopsolve)); 24 if (size == 1) { 25 PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ICC, &flg1)); 26 PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ILU, &flg2)); 27 PetscCall(MatIsSymmetricKnown(pc->pmat, &set, &flg3)); 28 PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &isnormal, MATNORMAL, MATNORMALHERMITIAN, NULL)); 29 if (flg1 && (!flg2 || (set && flg3))) { 30 *type = PCICC; 31 } else if (flg2) { 32 *type = PCILU; 33 } else if (isnormal) { 34 *type = PCNONE; 35 } else if (hasopblock) { /* likely is a parallel matrix run on one processor */ 36 *type = PCBJACOBI; 37 } else if (hasopsolve) { 38 *type = PCMAT; 39 } else { 40 *type = PCNONE; 41 } 42 } else { 43 if (hasopblock) { 44 *type = PCBJACOBI; 45 } else if (hasopsolve) { 46 *type = PCMAT; 47 } else { 48 *type = PCNONE; 49 } 50 } 51 } else *type = NULL; 52 PetscFunctionReturn(PETSC_SUCCESS); 53 } 54 55 /* do not log solves, setup and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */ 56 PETSC_EXTERN PetscLogEvent KSP_Solve, KSP_SetUp; 57 58 static PetscErrorCode PCLogEventsDeactivatePush(void) 59 { 60 PetscFunctionBegin; 61 PetscCall(KSPInitializePackage()); 62 PetscCall(PetscLogEventDeactivatePush(KSP_Solve)); 63 PetscCall(PetscLogEventDeactivatePush(KSP_SetUp)); 64 PetscCall(PetscLogEventDeactivatePush(PC_Apply)); 65 PetscCall(PetscLogEventDeactivatePush(PC_SetUp)); 66 PetscCall(PetscLogEventDeactivatePush(PC_SetUpOnBlocks)); 67 PetscFunctionReturn(PETSC_SUCCESS); 68 } 69 70 static PetscErrorCode PCLogEventsDeactivatePop(void) 71 { 72 PetscFunctionBegin; 73 PetscCall(KSPInitializePackage()); 74 PetscCall(PetscLogEventDeactivatePop(KSP_Solve)); 75 PetscCall(PetscLogEventDeactivatePop(KSP_SetUp)); 76 PetscCall(PetscLogEventDeactivatePop(PC_Apply)); 77 PetscCall(PetscLogEventDeactivatePop(PC_SetUp)); 78 PetscCall(PetscLogEventDeactivatePop(PC_SetUpOnBlocks)); 79 PetscFunctionReturn(PETSC_SUCCESS); 80 } 81 82 /*@ 83 PCReset - Resets a `PC` context to the state it was in before `PCSetUp()` was called, and removes any allocated `Vec` and `Mat` from its data structure 84 85 Collective 86 87 Input Parameter: 88 . pc - the `PC` preconditioner context 89 90 Level: developer 91 92 Notes: 93 Any options set, including those set with `KSPSetFromOptions()` remain. 94 95 This allows a `PC` to be reused for a different sized linear system but using the same options that have been previously set in `pc` 96 97 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()` 98 @*/ 99 PetscErrorCode PCReset(PC pc) 100 { 101 PetscFunctionBegin; 102 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 103 PetscTryTypeMethod(pc, reset); 104 PetscCall(VecDestroy(&pc->diagonalscaleright)); 105 PetscCall(VecDestroy(&pc->diagonalscaleleft)); 106 PetscCall(MatDestroy(&pc->pmat)); 107 PetscCall(MatDestroy(&pc->mat)); 108 109 pc->setupcalled = 0; 110 PetscFunctionReturn(PETSC_SUCCESS); 111 } 112 113 /*@ 114 PCDestroy - Destroys `PC` context that was created with `PCCreate()`. 115 116 Collective 117 118 Input Parameter: 119 . pc - the `PC` preconditioner context 120 121 Level: developer 122 123 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()` 124 @*/ 125 PetscErrorCode PCDestroy(PC *pc) 126 { 127 PetscFunctionBegin; 128 if (!*pc) PetscFunctionReturn(PETSC_SUCCESS); 129 PetscValidHeaderSpecific(*pc, PC_CLASSID, 1); 130 if (--((PetscObject)*pc)->refct > 0) { 131 *pc = NULL; 132 PetscFunctionReturn(PETSC_SUCCESS); 133 } 134 135 PetscCall(PCReset(*pc)); 136 137 /* if memory was published with SAWs then destroy it */ 138 PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc)); 139 PetscTryTypeMethod(*pc, destroy); 140 PetscCall(DMDestroy(&(*pc)->dm)); 141 PetscCall(PetscHeaderDestroy(pc)); 142 PetscFunctionReturn(PETSC_SUCCESS); 143 } 144 145 /*@ 146 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 147 scaling as needed by certain time-stepping codes. 148 149 Logically Collective 150 151 Input Parameter: 152 . pc - the `PC` preconditioner context 153 154 Output Parameter: 155 . flag - `PETSC_TRUE` if it applies the scaling 156 157 Level: developer 158 159 Note: 160 If this returns `PETSC_TRUE` then the system solved via the Krylov method is, for left and right preconditioning, 161 162 $$ 163 \begin{align*} 164 D M A D^{-1} y = D M b \\ 165 D A M D^{-1} z = D b. 166 \end{align*} 167 $$ 168 169 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()` 170 @*/ 171 PetscErrorCode PCGetDiagonalScale(PC pc, PetscBool *flag) 172 { 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 175 PetscAssertPointer(flag, 2); 176 *flag = pc->diagonalscale; 177 PetscFunctionReturn(PETSC_SUCCESS); 178 } 179 180 /*@ 181 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 182 scaling as needed by certain time-stepping codes. 183 184 Logically Collective 185 186 Input Parameters: 187 + pc - the `PC` preconditioner context 188 - s - scaling vector 189 190 Level: intermediate 191 192 Notes: 193 The system solved via the Krylov method is, for left and right preconditioning, 194 $$ 195 \begin{align*} 196 D M A D^{-1} y = D M b \\ 197 D A M D^{-1} z = D b. 198 \end{align*} 199 $$ 200 201 `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$. 202 203 .seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()` 204 @*/ 205 PetscErrorCode PCSetDiagonalScale(PC pc, Vec s) 206 { 207 PetscFunctionBegin; 208 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 209 PetscValidHeaderSpecific(s, VEC_CLASSID, 2); 210 pc->diagonalscale = PETSC_TRUE; 211 212 PetscCall(PetscObjectReference((PetscObject)s)); 213 PetscCall(VecDestroy(&pc->diagonalscaleleft)); 214 215 pc->diagonalscaleleft = s; 216 217 PetscCall(VecDuplicate(s, &pc->diagonalscaleright)); 218 PetscCall(VecCopy(s, pc->diagonalscaleright)); 219 PetscCall(VecReciprocal(pc->diagonalscaleright)); 220 PetscFunctionReturn(PETSC_SUCCESS); 221 } 222 223 /*@ 224 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 225 226 Logically Collective 227 228 Input Parameters: 229 + pc - the `PC` preconditioner context 230 . in - input vector 231 - out - scaled vector (maybe the same as in) 232 233 Level: intermediate 234 235 Notes: 236 The system solved via the Krylov method is, for left and right preconditioning, 237 238 $$ 239 \begin{align*} 240 D M A D^{-1} y = D M b \\ 241 D A M D^{-1} z = D b. 242 \end{align*} 243 $$ 244 245 `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$. 246 247 If diagonal scaling is turned off and `in` is not `out` then `in` is copied to `out` 248 249 .seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCSetDiagonalScale()`, `PCDiagonalScaleRight()`, `MatDiagonalScale()` 250 @*/ 251 PetscErrorCode PCDiagonalScaleLeft(PC pc, Vec in, Vec out) 252 { 253 PetscFunctionBegin; 254 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 255 PetscValidHeaderSpecific(in, VEC_CLASSID, 2); 256 PetscValidHeaderSpecific(out, VEC_CLASSID, 3); 257 if (pc->diagonalscale) { 258 PetscCall(VecPointwiseMult(out, pc->diagonalscaleleft, in)); 259 } else if (in != out) { 260 PetscCall(VecCopy(in, out)); 261 } 262 PetscFunctionReturn(PETSC_SUCCESS); 263 } 264 265 /*@ 266 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 267 268 Logically Collective 269 270 Input Parameters: 271 + pc - the `PC` preconditioner context 272 . in - input vector 273 - out - scaled vector (maybe the same as in) 274 275 Level: intermediate 276 277 Notes: 278 The system solved via the Krylov method is, for left and right preconditioning, 279 280 $$ 281 \begin{align*} 282 D M A D^{-1} y = D M b \\ 283 D A M D^{-1} z = D b. 284 \end{align*} 285 $$ 286 287 `PCDiagonalScaleLeft()` scales a vector by $D$. `PCDiagonalScaleRight()` scales a vector by $D^{-1}$. 288 289 If diagonal scaling is turned off and `in` is not `out` then `in` is copied to `out` 290 291 .seealso: [](ch_ksp), `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCSetDiagonalScale()`, `MatDiagonalScale()` 292 @*/ 293 PetscErrorCode PCDiagonalScaleRight(PC pc, Vec in, Vec out) 294 { 295 PetscFunctionBegin; 296 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 297 PetscValidHeaderSpecific(in, VEC_CLASSID, 2); 298 PetscValidHeaderSpecific(out, VEC_CLASSID, 3); 299 if (pc->diagonalscale) { 300 PetscCall(VecPointwiseMult(out, pc->diagonalscaleright, in)); 301 } else if (in != out) { 302 PetscCall(VecCopy(in, out)); 303 } 304 PetscFunctionReturn(PETSC_SUCCESS); 305 } 306 307 /*@ 308 PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the 309 operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`, 310 `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat. 311 312 Logically Collective 313 314 Input Parameters: 315 + pc - the `PC` preconditioner context 316 - flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false) 317 318 Options Database Key: 319 . -pc_use_amat <true,false> - use the amat argument to `KSPSetOperators()` or `PCSetOperators()` to apply the operator 320 321 Level: intermediate 322 323 Note: 324 For the common case in which the linear system matrix and the matrix used to construct the 325 preconditioner are identical, this routine has no affect. 326 327 .seealso: [](ch_ksp), `PC`, `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`, 328 `KSPSetOperators()`, `PCSetOperators()` 329 @*/ 330 PetscErrorCode PCSetUseAmat(PC pc, PetscBool flg) 331 { 332 PetscFunctionBegin; 333 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 334 pc->useAmat = flg; 335 PetscFunctionReturn(PETSC_SUCCESS); 336 } 337 338 /*@ 339 PCSetErrorIfFailure - Causes `PC` to generate an error if a floating point exception, for example a zero pivot, is detected. 340 341 Logically Collective 342 343 Input Parameters: 344 + pc - iterative context obtained from `PCCreate()` 345 - flg - `PETSC_TRUE` indicates you want the error generated 346 347 Level: advanced 348 349 Notes: 350 Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call `KSPGetConvergedReason()` after a `KSPSolve()` 351 to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is 352 detected. 353 354 This is propagated into `KSP`s used by this `PC`, which then propagate it into `PC`s used by those `KSP`s 355 356 .seealso: [](ch_ksp), `PC`, `KSPSetErrorIfNotConverged()`, `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()` 357 @*/ 358 PetscErrorCode PCSetErrorIfFailure(PC pc, PetscBool flg) 359 { 360 PetscFunctionBegin; 361 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 362 PetscValidLogicalCollectiveBool(pc, flg, 2); 363 pc->erroriffailure = flg; 364 PetscFunctionReturn(PETSC_SUCCESS); 365 } 366 367 /*@ 368 PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the 369 operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`, 370 `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat. 371 372 Logically Collective 373 374 Input Parameter: 375 . pc - the `PC` preconditioner context 376 377 Output Parameter: 378 . flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false) 379 380 Level: intermediate 381 382 Note: 383 For the common case in which the linear system matrix and the matrix used to construct the 384 preconditioner are identical, this routine is does nothing. 385 386 .seealso: [](ch_ksp), `PC`, `PCSetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE` 387 @*/ 388 PetscErrorCode PCGetUseAmat(PC pc, PetscBool *flg) 389 { 390 PetscFunctionBegin; 391 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 392 *flg = pc->useAmat; 393 PetscFunctionReturn(PETSC_SUCCESS); 394 } 395 396 /*@ 397 PCSetKSPNestLevel - sets the amount of nesting the `KSP` that contains this `PC` has 398 399 Collective 400 401 Input Parameters: 402 + pc - the `PC` 403 - level - the nest level 404 405 Level: developer 406 407 .seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPGetNestLevel()`, `PCGetKSPNestLevel()`, `KSPSetNestLevel()` 408 @*/ 409 PetscErrorCode PCSetKSPNestLevel(PC pc, PetscInt level) 410 { 411 PetscFunctionBegin; 412 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 413 PetscValidLogicalCollectiveInt(pc, level, 2); 414 pc->kspnestlevel = level; 415 PetscFunctionReturn(PETSC_SUCCESS); 416 } 417 418 /*@ 419 PCGetKSPNestLevel - gets the amount of nesting the `KSP` that contains this `PC` has 420 421 Not Collective 422 423 Input Parameter: 424 . pc - the `PC` 425 426 Output Parameter: 427 . level - the nest level 428 429 Level: developer 430 431 .seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPSetNestLevel()`, `PCSetKSPNestLevel()`, `KSPGetNestLevel()` 432 @*/ 433 PetscErrorCode PCGetKSPNestLevel(PC pc, PetscInt *level) 434 { 435 PetscFunctionBegin; 436 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 437 PetscAssertPointer(level, 2); 438 *level = pc->kspnestlevel; 439 PetscFunctionReturn(PETSC_SUCCESS); 440 } 441 442 /*@ 443 PCCreate - Creates a preconditioner context, `PC` 444 445 Collective 446 447 Input Parameter: 448 . comm - MPI communicator 449 450 Output Parameter: 451 . newpc - location to put the `PC` preconditioner context 452 453 Level: developer 454 455 Notes: 456 This is rarely called directly by users since `KSP` manages the `PC` objects it uses. Use `KSPGetPC()` to access the `PC` used by a `KSP`. 457 458 Use `PCSetType()` or `PCSetFromOptions()` with the option `-pc_type pctype` to set the `PCType` for this `PC` 459 460 The default preconditioner type `PCType` for sparse matrices is `PCILU` or `PCICC` with 0 fill on one process and block Jacobi (`PCBJACOBI`) with `PCILU` or `PCICC` 461 in parallel. For dense matrices it is always `PCNONE`. 462 463 .seealso: [](ch_ksp), `PC`, `PCType`, `PCSetType`, `PCSetUp()`, `PCApply()`, `PCDestroy()`, `KSP`, `KSPGetPC()` 464 @*/ 465 PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc) 466 { 467 PC pc; 468 469 PetscFunctionBegin; 470 PetscAssertPointer(newpc, 2); 471 PetscCall(PCInitializePackage()); 472 473 PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView)); 474 pc->mat = NULL; 475 pc->pmat = NULL; 476 pc->setupcalled = 0; 477 pc->setfromoptionscalled = 0; 478 pc->data = NULL; 479 pc->diagonalscale = PETSC_FALSE; 480 pc->diagonalscaleleft = NULL; 481 pc->diagonalscaleright = NULL; 482 483 pc->modifysubmatrices = NULL; 484 pc->modifysubmatricesP = NULL; 485 486 *newpc = pc; 487 PetscFunctionReturn(PETSC_SUCCESS); 488 } 489 490 /*@ 491 PCApply - Applies the preconditioner to a vector. 492 493 Collective 494 495 Input Parameters: 496 + pc - the `PC` preconditioner context 497 - x - input vector 498 499 Output Parameter: 500 . y - output vector 501 502 Level: developer 503 504 .seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`, `PCApplyBAorAB()` 505 @*/ 506 PetscErrorCode PCApply(PC pc, Vec x, Vec y) 507 { 508 PetscInt m, n, mv, nv; 509 510 PetscFunctionBegin; 511 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 512 PetscValidHeaderSpecific(x, VEC_CLASSID, 2); 513 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 514 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 515 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE)); 516 /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */ 517 PetscCall(MatGetLocalSize(pc->pmat, &m, &n)); 518 PetscCall(VecGetLocalSize(x, &mv)); 519 PetscCall(VecGetLocalSize(y, &nv)); 520 /* check pmat * y = x is feasible */ 521 PetscCheck(mv == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT, m, mv); 522 PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT, n, nv); 523 PetscCall(VecSetErrorIfLocked(y, 3)); 524 525 PetscCall(PCSetUp(pc)); 526 PetscCall(VecLockReadPush(x)); 527 PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0)); 528 PetscUseTypeMethod(pc, apply, x, y); 529 PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0)); 530 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE)); 531 PetscCall(VecLockReadPop(x)); 532 PetscFunctionReturn(PETSC_SUCCESS); 533 } 534 535 static PetscErrorCode PCMatApplyTranspose_Private(PC pc, Mat X, Mat Y, PetscBool transpose) 536 { 537 Mat A; 538 Vec cy, cx; 539 PetscInt m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3; 540 PetscBool match; 541 542 PetscFunctionBegin; 543 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 544 PetscValidHeaderSpecific(X, MAT_CLASSID, 2); 545 PetscValidHeaderSpecific(Y, MAT_CLASSID, 3); 546 PetscCheckSameComm(pc, 1, X, 2); 547 PetscCheckSameComm(pc, 1, Y, 3); 548 PetscCheck(Y != X, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices"); 549 PetscCall(PCGetOperators(pc, NULL, &A)); 550 PetscCall(MatGetLocalSize(A, &m3, &n3)); 551 PetscCall(MatGetLocalSize(X, &m2, &n2)); 552 PetscCall(MatGetLocalSize(Y, &m1, &n1)); 553 PetscCall(MatGetSize(A, &M3, &N3)); 554 PetscCall(MatGetSize(X, &M2, &N2)); 555 PetscCall(MatGetSize(Y, &M1, &N1)); 556 PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1); 557 PetscCheck(m2 == m3 && M2 == M3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3); 558 PetscCheck(m1 == n3 && M1 == N3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3); 559 PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "")); 560 PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat"); 561 PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "")); 562 PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat"); 563 PetscCall(PCSetUp(pc)); 564 if (!transpose && pc->ops->matapply) { 565 PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0)); 566 PetscUseTypeMethod(pc, matapply, X, Y); 567 PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0)); 568 } else if (transpose && pc->ops->matapplytranspose) { 569 PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0)); 570 PetscUseTypeMethod(pc, matapplytranspose, X, Y); 571 PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0)); 572 } else { 573 PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name)); 574 for (n1 = 0; n1 < N1; ++n1) { 575 PetscCall(MatDenseGetColumnVecRead(X, n1, &cx)); 576 PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy)); 577 if (!transpose) PetscCall(PCApply(pc, cx, cy)); 578 else PetscCall(PCApplyTranspose(pc, cx, cy)); 579 PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy)); 580 PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx)); 581 } 582 } 583 PetscFunctionReturn(PETSC_SUCCESS); 584 } 585 586 /*@ 587 PCMatApply - Applies the preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApply()`, `Y` and `X` must be different matrices. 588 589 Collective 590 591 Input Parameters: 592 + pc - the `PC` preconditioner context 593 - X - block of input vectors 594 595 Output Parameter: 596 . Y - block of output vectors 597 598 Level: developer 599 600 .seealso: [](ch_ksp), `PC`, `PCApply()`, `KSPMatSolve()` 601 @*/ 602 PetscErrorCode PCMatApply(PC pc, Mat X, Mat Y) 603 { 604 PetscFunctionBegin; 605 PetscCall(PCMatApplyTranspose_Private(pc, X, Y, PETSC_FALSE)); 606 PetscFunctionReturn(PETSC_SUCCESS); 607 } 608 609 /*@ 610 PCMatApplyTranspose - Applies the transpose of preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApplyTranspose()`, `Y` and `X` must be different matrices. 611 612 Collective 613 614 Input Parameters: 615 + pc - the `PC` preconditioner context 616 - X - block of input vectors 617 618 Output Parameter: 619 . Y - block of output vectors 620 621 Level: developer 622 623 .seealso: [](ch_ksp), `PC`, `PCApplyTranspose()`, `KSPMatSolveTranspose()` 624 @*/ 625 PetscErrorCode PCMatApplyTranspose(PC pc, Mat X, Mat Y) 626 { 627 PetscFunctionBegin; 628 PetscCall(PCMatApplyTranspose_Private(pc, X, Y, PETSC_TRUE)); 629 PetscFunctionReturn(PETSC_SUCCESS); 630 } 631 632 /*@ 633 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 634 635 Collective 636 637 Input Parameters: 638 + pc - the `PC` preconditioner context 639 - x - input vector 640 641 Output Parameter: 642 . y - output vector 643 644 Level: developer 645 646 Note: 647 Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners. 648 649 .seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplySymmetricRight()` 650 @*/ 651 PetscErrorCode PCApplySymmetricLeft(PC pc, Vec x, Vec y) 652 { 653 PetscFunctionBegin; 654 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 655 PetscValidHeaderSpecific(x, VEC_CLASSID, 2); 656 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 657 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 658 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE)); 659 PetscCall(PCSetUp(pc)); 660 PetscCall(VecLockReadPush(x)); 661 PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft, pc, x, y, 0)); 662 PetscUseTypeMethod(pc, applysymmetricleft, x, y); 663 PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft, pc, x, y, 0)); 664 PetscCall(VecLockReadPop(x)); 665 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE)); 666 PetscFunctionReturn(PETSC_SUCCESS); 667 } 668 669 /*@ 670 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 671 672 Collective 673 674 Input Parameters: 675 + pc - the `PC` preconditioner context 676 - x - input vector 677 678 Output Parameter: 679 . y - output vector 680 681 Level: developer 682 683 Note: 684 Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners. 685 686 .seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplySymmetricLeft()` 687 @*/ 688 PetscErrorCode PCApplySymmetricRight(PC pc, Vec x, Vec y) 689 { 690 PetscFunctionBegin; 691 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 692 PetscValidHeaderSpecific(x, VEC_CLASSID, 2); 693 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 694 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 695 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE)); 696 PetscCall(PCSetUp(pc)); 697 PetscCall(VecLockReadPush(x)); 698 PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight, pc, x, y, 0)); 699 PetscUseTypeMethod(pc, applysymmetricright, x, y); 700 PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight, pc, x, y, 0)); 701 PetscCall(VecLockReadPop(x)); 702 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE)); 703 PetscFunctionReturn(PETSC_SUCCESS); 704 } 705 706 /*@ 707 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 708 709 Collective 710 711 Input Parameters: 712 + pc - the `PC` preconditioner context 713 - x - input vector 714 715 Output Parameter: 716 . y - output vector 717 718 Level: developer 719 720 Note: 721 For complex numbers this applies the non-Hermitian transpose. 722 723 Developer Note: 724 We need to implement a `PCApplyHermitianTranspose()` 725 726 .seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()` 727 @*/ 728 PetscErrorCode PCApplyTranspose(PC pc, Vec x, Vec y) 729 { 730 PetscFunctionBegin; 731 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 732 PetscValidHeaderSpecific(x, VEC_CLASSID, 2); 733 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 734 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 735 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE)); 736 PetscCall(PCSetUp(pc)); 737 PetscCall(VecLockReadPush(x)); 738 PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0)); 739 PetscUseTypeMethod(pc, applytranspose, x, y); 740 PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0)); 741 PetscCall(VecLockReadPop(x)); 742 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE)); 743 PetscFunctionReturn(PETSC_SUCCESS); 744 } 745 746 /*@ 747 PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation 748 749 Collective 750 751 Input Parameter: 752 . pc - the `PC` preconditioner context 753 754 Output Parameter: 755 . flg - `PETSC_TRUE` if a transpose operation is defined 756 757 Level: developer 758 759 .seealso: [](ch_ksp), `PC`, `PCApplyTranspose()` 760 @*/ 761 PetscErrorCode PCApplyTransposeExists(PC pc, PetscBool *flg) 762 { 763 PetscFunctionBegin; 764 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 765 PetscAssertPointer(flg, 2); 766 if (pc->ops->applytranspose) *flg = PETSC_TRUE; 767 else *flg = PETSC_FALSE; 768 PetscFunctionReturn(PETSC_SUCCESS); 769 } 770 771 /*@ 772 PCApplyBAorAB - Applies the preconditioner and operator to a vector. $y = B*A*x $ or $ y = A*B*x$. 773 774 Collective 775 776 Input Parameters: 777 + pc - the `PC` preconditioner context 778 . side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC` 779 . x - input vector 780 - work - work vector 781 782 Output Parameter: 783 . y - output vector 784 785 Level: developer 786 787 Note: 788 If the `PC` has had `PCSetDiagonalScale()` set then $ D M A D^{-1} $ for left preconditioning or $ D A M D^{-1} $ is actually applied. 789 The specific `KSPSolve()` method must also be written to handle the post-solve "correction" for the diagonal scaling. 790 791 .seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()` 792 @*/ 793 PetscErrorCode PCApplyBAorAB(PC pc, PCSide side, Vec x, Vec y, Vec work) 794 { 795 PetscFunctionBegin; 796 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 797 PetscValidLogicalCollectiveEnum(pc, side, 2); 798 PetscValidHeaderSpecific(x, VEC_CLASSID, 3); 799 PetscValidHeaderSpecific(y, VEC_CLASSID, 4); 800 PetscValidHeaderSpecific(work, VEC_CLASSID, 5); 801 PetscCheckSameComm(pc, 1, x, 3); 802 PetscCheckSameComm(pc, 1, y, 4); 803 PetscCheckSameComm(pc, 1, work, 5); 804 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 805 PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right, left, or symmetric"); 806 PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot include diagonal scaling with symmetric preconditioner application"); 807 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE)); 808 809 PetscCall(PCSetUp(pc)); 810 if (pc->diagonalscale) { 811 if (pc->ops->applyBA) { 812 Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */ 813 PetscCall(VecDuplicate(x, &work2)); 814 PetscCall(PCDiagonalScaleRight(pc, x, work2)); 815 PetscUseTypeMethod(pc, applyBA, side, work2, y, work); 816 PetscCall(PCDiagonalScaleLeft(pc, y, y)); 817 PetscCall(VecDestroy(&work2)); 818 } else if (side == PC_RIGHT) { 819 PetscCall(PCDiagonalScaleRight(pc, x, y)); 820 PetscCall(PCApply(pc, y, work)); 821 PetscCall(MatMult(pc->mat, work, y)); 822 PetscCall(PCDiagonalScaleLeft(pc, y, y)); 823 } else if (side == PC_LEFT) { 824 PetscCall(PCDiagonalScaleRight(pc, x, y)); 825 PetscCall(MatMult(pc->mat, y, work)); 826 PetscCall(PCApply(pc, work, y)); 827 PetscCall(PCDiagonalScaleLeft(pc, y, y)); 828 } else PetscCheck(side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot provide diagonal scaling with symmetric application of preconditioner"); 829 } else { 830 if (pc->ops->applyBA) { 831 PetscUseTypeMethod(pc, applyBA, side, x, y, work); 832 } else if (side == PC_RIGHT) { 833 PetscCall(PCApply(pc, x, work)); 834 PetscCall(MatMult(pc->mat, work, y)); 835 } else if (side == PC_LEFT) { 836 PetscCall(MatMult(pc->mat, x, work)); 837 PetscCall(PCApply(pc, work, y)); 838 } else if (side == PC_SYMMETRIC) { 839 /* There's an extra copy here; maybe should provide 2 work vectors instead? */ 840 PetscCall(PCApplySymmetricRight(pc, x, work)); 841 PetscCall(MatMult(pc->mat, work, y)); 842 PetscCall(VecCopy(y, work)); 843 PetscCall(PCApplySymmetricLeft(pc, work, y)); 844 } 845 } 846 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE)); 847 PetscFunctionReturn(PETSC_SUCCESS); 848 } 849 850 /*@ 851 PCApplyBAorABTranspose - Applies the transpose of the preconditioner 852 and operator to a vector. That is, applies $B^T * A^T$ with left preconditioning, 853 NOT $(B*A)^T = A^T*B^T$. 854 855 Collective 856 857 Input Parameters: 858 + pc - the `PC` preconditioner context 859 . side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC` 860 . x - input vector 861 - work - work vector 862 863 Output Parameter: 864 . y - output vector 865 866 Level: developer 867 868 Note: 869 This routine is used internally so that the same Krylov code can be used to solve $A x = b$ and $A^T x = b$, with a preconditioner 870 defined by $B^T$. This is why this has the funny form that it computes $B^T * A^T$ 871 872 .seealso: [](ch_ksp), `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()` 873 @*/ 874 PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work) 875 { 876 PetscFunctionBegin; 877 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 878 PetscValidHeaderSpecific(x, VEC_CLASSID, 3); 879 PetscValidHeaderSpecific(y, VEC_CLASSID, 4); 880 PetscValidHeaderSpecific(work, VEC_CLASSID, 5); 881 PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors"); 882 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE)); 883 if (pc->ops->applyBAtranspose) { 884 PetscUseTypeMethod(pc, applyBAtranspose, side, x, y, work); 885 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE)); 886 PetscFunctionReturn(PETSC_SUCCESS); 887 } 888 PetscCheck(side == PC_LEFT || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right or left"); 889 890 PetscCall(PCSetUp(pc)); 891 if (side == PC_RIGHT) { 892 PetscCall(PCApplyTranspose(pc, x, work)); 893 PetscCall(MatMultTranspose(pc->mat, work, y)); 894 } else if (side == PC_LEFT) { 895 PetscCall(MatMultTranspose(pc->mat, x, work)); 896 PetscCall(PCApplyTranspose(pc, work, y)); 897 } 898 /* add support for PC_SYMMETRIC */ 899 if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE)); 900 PetscFunctionReturn(PETSC_SUCCESS); 901 } 902 903 /*@ 904 PCApplyRichardsonExists - Determines whether a particular preconditioner has a 905 built-in fast application of Richardson's method. 906 907 Not Collective 908 909 Input Parameter: 910 . pc - the preconditioner 911 912 Output Parameter: 913 . exists - `PETSC_TRUE` or `PETSC_FALSE` 914 915 Level: developer 916 917 .seealso: [](ch_ksp), `PC`, `KSPRICHARDSON`, `PCApplyRichardson()` 918 @*/ 919 PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists) 920 { 921 PetscFunctionBegin; 922 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 923 PetscAssertPointer(exists, 2); 924 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 925 else *exists = PETSC_FALSE; 926 PetscFunctionReturn(PETSC_SUCCESS); 927 } 928 929 /*@ 930 PCApplyRichardson - Applies several steps of Richardson iteration with 931 the particular preconditioner. This routine is usually used by the 932 Krylov solvers and not the application code directly. 933 934 Collective 935 936 Input Parameters: 937 + pc - the `PC` preconditioner context 938 . b - the right-hand side 939 . w - one work vector 940 . rtol - relative decrease in residual norm convergence criteria 941 . abstol - absolute residual norm convergence criteria 942 . dtol - divergence residual norm increase criteria 943 . its - the number of iterations to apply. 944 - guesszero - if the input x contains nonzero initial guess 945 946 Output Parameters: 947 + outits - number of iterations actually used (for SOR this always equals its) 948 . reason - the reason the apply terminated 949 - y - the solution (also contains initial guess if guesszero is `PETSC_FALSE` 950 951 Level: developer 952 953 Notes: 954 Most preconditioners do not support this function. Use the command 955 `PCApplyRichardsonExists()` to determine if one does. 956 957 Except for the `PCMG` this routine ignores the convergence tolerances 958 and always runs for the number of iterations 959 960 .seealso: [](ch_ksp), `PC`, `PCApplyRichardsonExists()` 961 @*/ 962 PetscErrorCode PCApplyRichardson(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason) 963 { 964 PetscFunctionBegin; 965 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 966 PetscValidHeaderSpecific(b, VEC_CLASSID, 2); 967 PetscValidHeaderSpecific(y, VEC_CLASSID, 3); 968 PetscValidHeaderSpecific(w, VEC_CLASSID, 4); 969 PetscCheck(b != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "b and y must be different vectors"); 970 PetscCall(PCSetUp(pc)); 971 PetscUseTypeMethod(pc, applyrichardson, b, y, w, rtol, abstol, dtol, its, guesszero, outits, reason); 972 PetscFunctionReturn(PETSC_SUCCESS); 973 } 974 975 /*@ 976 PCSetFailedReason - Sets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail 977 978 Logically Collective 979 980 Input Parameters: 981 + pc - the `PC` preconditioner context 982 - reason - the reason it failed 983 984 Level: advanced 985 986 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason` 987 @*/ 988 PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason) 989 { 990 PetscFunctionBegin; 991 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 992 pc->failedreason = reason; 993 PetscFunctionReturn(PETSC_SUCCESS); 994 } 995 996 /*@ 997 PCGetFailedReason - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail 998 999 Not Collective 1000 1001 Input Parameter: 1002 . pc - the `PC` preconditioner context 1003 1004 Output Parameter: 1005 . reason - the reason it failed 1006 1007 Level: advanced 1008 1009 Note: 1010 After a call to `KSPCheckDot()` or `KSPCheckNorm()` inside a `KSPSolve()` or a call to `PCReduceFailedReason()` 1011 this is the maximum reason over all MPI processes in the `PC` communicator and hence logically collective. 1012 Otherwise it returns the local value. 1013 1014 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCSetFailedReason()`, `PCFailedReason` 1015 @*/ 1016 PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason) 1017 { 1018 PetscFunctionBegin; 1019 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1020 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 1021 else *reason = pc->failedreason; 1022 PetscFunctionReturn(PETSC_SUCCESS); 1023 } 1024 1025 /*@ 1026 PCReduceFailedReason - Reduce the failed reason among the MPI processes that share the `PC` 1027 1028 Collective 1029 1030 Input Parameter: 1031 . pc - the `PC` preconditioner context 1032 1033 Level: advanced 1034 1035 Note: 1036 Different MPI processes may have different reasons or no reason, see `PCGetFailedReason()`. This routine 1037 makes them have a common value (failure if any MPI process had a failure). 1038 1039 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`, `PCFailedReason` 1040 @*/ 1041 PetscErrorCode PCReduceFailedReason(PC pc) 1042 { 1043 PetscInt buf; 1044 1045 PetscFunctionBegin; 1046 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1047 buf = (PetscInt)pc->failedreason; 1048 PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, &buf, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc))); 1049 pc->failedreason = (PCFailedReason)buf; 1050 PetscFunctionReturn(PETSC_SUCCESS); 1051 } 1052 1053 /* 1054 a setupcall of 0 indicates never setup, 1055 1 indicates has been previously setup 1056 -1 indicates a PCSetUp() was attempted and failed 1057 */ 1058 /*@ 1059 PCSetUp - Prepares for the use of a preconditioner. Performs all the one-time operations needed before the preconditioner 1060 can be used with `PCApply()` 1061 1062 Collective 1063 1064 Input Parameter: 1065 . pc - the `PC` preconditioner context 1066 1067 Level: developer 1068 1069 Notes: 1070 For example, for `PCLU` this will compute the factorization. 1071 1072 This is called automatically by `KSPSetUp()` or `PCApply()` so rarely needs to be called directly. 1073 1074 For nested preconditioners, such as `PCFIELDSPLIT` or `PCBJACOBI` this may not finish the construction of the preconditioner 1075 on the inner levels, the routine `PCSetUpOnBlocks()` may compute more of the preconditioner in those situations. 1076 1077 .seealso: [](ch_ksp), `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `KSPSetUp()`, `PCSetUpOnBlocks()` 1078 @*/ 1079 PetscErrorCode PCSetUp(PC pc) 1080 { 1081 const char *def; 1082 PetscObjectState matstate, matnonzerostate; 1083 1084 PetscFunctionBegin; 1085 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1086 PetscCheck(pc->mat, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be set first"); 1087 1088 if (pc->setupcalled && pc->reusepreconditioner) { 1089 PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since reuse preconditioner is set\n")); 1090 PetscFunctionReturn(PETSC_SUCCESS); 1091 } 1092 1093 PetscCall(PetscObjectStateGet((PetscObject)pc->pmat, &matstate)); 1094 PetscCall(MatGetNonzeroState(pc->pmat, &matnonzerostate)); 1095 if (!pc->setupcalled) { 1096 //PetscCall(PetscInfo(pc, "Setting up PC for first time\n")); 1097 pc->flag = DIFFERENT_NONZERO_PATTERN; 1098 } else if (matstate == pc->matstate) PetscFunctionReturn(PETSC_SUCCESS); 1099 else { 1100 if (matnonzerostate != pc->matnonzerostate) { 1101 PetscCall(PetscInfo(pc, "Setting up PC with different nonzero pattern\n")); 1102 pc->flag = DIFFERENT_NONZERO_PATTERN; 1103 } else { 1104 //PetscCall(PetscInfo(pc, "Setting up PC with same nonzero pattern\n")); 1105 pc->flag = SAME_NONZERO_PATTERN; 1106 } 1107 } 1108 pc->matstate = matstate; 1109 pc->matnonzerostate = matnonzerostate; 1110 1111 if (!((PetscObject)pc)->type_name) { 1112 PetscCall(PCGetDefaultType_Private(pc, &def)); 1113 PetscCall(PCSetType(pc, def)); 1114 } 1115 1116 PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure)); 1117 PetscCall(MatSetErrorIfFailure(pc->mat, pc->erroriffailure)); 1118 PetscCall(PetscLogEventBegin(PC_SetUp, pc, 0, 0, 0)); 1119 if (pc->ops->setup) { 1120 PetscCall(PCLogEventsDeactivatePush()); 1121 PetscUseTypeMethod(pc, setup); 1122 PetscCall(PCLogEventsDeactivatePop()); 1123 } 1124 PetscCall(PetscLogEventEnd(PC_SetUp, pc, 0, 0, 0)); 1125 if (pc->postsetup) PetscCall((*pc->postsetup)(pc)); 1126 if (!pc->setupcalled) pc->setupcalled = 1; 1127 PetscFunctionReturn(PETSC_SUCCESS); 1128 } 1129 1130 /*@ 1131 PCSetUpOnBlocks - Sets up the preconditioner for each block in 1132 the block Jacobi, overlapping Schwarz, and fieldsplit methods. 1133 1134 Collective 1135 1136 Input Parameter: 1137 . pc - the `PC` preconditioner context 1138 1139 Level: developer 1140 1141 Notes: 1142 For nested preconditioners such as `PCBJACOBI`, `PCSetUp()` is not called on each sub-`KSP` when `PCSetUp()` is 1143 called on the outer `PC`, this routine ensures it is called. 1144 1145 It calls `PCSetUp()` if not yet called. 1146 1147 .seealso: [](ch_ksp), `PC`, `PCSetUp()`, `PCCreate()`, `PCApply()`, `PCDestroy()` 1148 @*/ 1149 PetscErrorCode PCSetUpOnBlocks(PC pc) 1150 { 1151 PetscFunctionBegin; 1152 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1153 if (!pc->setupcalled) PetscCall(PCSetUp(pc)); /* "if" to prevent -info extra prints */ 1154 if (!pc->ops->setuponblocks) PetscFunctionReturn(PETSC_SUCCESS); 1155 PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure)); 1156 PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks, pc, 0, 0, 0)); 1157 PetscCall(PCLogEventsDeactivatePush()); 1158 PetscUseTypeMethod(pc, setuponblocks); 1159 PetscCall(PCLogEventsDeactivatePop()); 1160 PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks, pc, 0, 0, 0)); 1161 PetscFunctionReturn(PETSC_SUCCESS); 1162 } 1163 1164 /*@C 1165 PCSetModifySubMatrices - Sets a user-defined routine for modifying the 1166 submatrices that arise within certain subdomain-based preconditioners such as `PCASM` 1167 1168 Logically Collective 1169 1170 Input Parameters: 1171 + pc - the `PC` preconditioner context 1172 . func - routine for modifying the submatrices 1173 - ctx - optional user-defined context (may be `NULL`) 1174 1175 Calling sequence of `func`: 1176 + pc - the `PC` preconditioner context 1177 . nsub - number of index sets 1178 . row - an array of index sets that contain the global row numbers 1179 that comprise each local submatrix 1180 . col - an array of index sets that contain the global column numbers 1181 that comprise each local submatrix 1182 . submat - array of local submatrices 1183 - ctx - optional user-defined context for private data for the 1184 user-defined func routine (may be `NULL`) 1185 1186 Level: advanced 1187 1188 Notes: 1189 The basic submatrices are extracted from the matrix used to construct the preconditioner as 1190 usual; the user can then alter these (for example, to set different boundary 1191 conditions for each submatrix) before they are used for the local solves. 1192 1193 `PCSetModifySubMatrices()` MUST be called before `KSPSetUp()` and 1194 `KSPSolve()`. 1195 1196 A routine set by `PCSetModifySubMatrices()` is currently called within 1197 the block Jacobi (`PCBJACOBI`) and additive Schwarz (`PCASM`) 1198 preconditioners. All other preconditioners ignore this routine. 1199 1200 .seealso: [](ch_ksp), `PC`, `PCBJACOBI`, `PCASM`, `PCModifySubMatrices()` 1201 @*/ 1202 PetscErrorCode PCSetModifySubMatrices(PC pc, PetscErrorCode (*func)(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx), void *ctx) 1203 { 1204 PetscFunctionBegin; 1205 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1206 pc->modifysubmatrices = func; 1207 pc->modifysubmatricesP = ctx; 1208 PetscFunctionReturn(PETSC_SUCCESS); 1209 } 1210 1211 /*@C 1212 PCModifySubMatrices - Calls an optional user-defined routine within 1213 certain preconditioners if one has been set with `PCSetModifySubMatrices()`. 1214 1215 Collective 1216 1217 Input Parameters: 1218 + pc - the `PC` preconditioner context 1219 . nsub - the number of local submatrices 1220 . row - an array of index sets that contain the global row numbers 1221 that comprise each local submatrix 1222 . col - an array of index sets that contain the global column numbers 1223 that comprise each local submatrix 1224 . submat - array of local submatrices 1225 - ctx - optional user-defined context for private data for the 1226 user-defined routine (may be `NULL`) 1227 1228 Output Parameter: 1229 . submat - array of local submatrices (the entries of which may 1230 have been modified) 1231 1232 Level: developer 1233 1234 Note: 1235 The user should NOT generally call this routine, as it will 1236 automatically be called within certain preconditioners. 1237 1238 .seealso: [](ch_ksp), `PC`, `PCSetModifySubMatrices()` 1239 @*/ 1240 PetscErrorCode PCModifySubMatrices(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx) 1241 { 1242 PetscFunctionBegin; 1243 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1244 if (!pc->modifysubmatrices) PetscFunctionReturn(PETSC_SUCCESS); 1245 PetscCall(PetscLogEventBegin(PC_ModifySubMatrices, pc, 0, 0, 0)); 1246 PetscCall((*pc->modifysubmatrices)(pc, nsub, row, col, submat, ctx)); 1247 PetscCall(PetscLogEventEnd(PC_ModifySubMatrices, pc, 0, 0, 0)); 1248 PetscFunctionReturn(PETSC_SUCCESS); 1249 } 1250 1251 /*@ 1252 PCSetOperators - Sets the matrix associated with the linear system and 1253 a (possibly) different one associated with the preconditioner. 1254 1255 Logically Collective 1256 1257 Input Parameters: 1258 + pc - the `PC` preconditioner context 1259 . Amat - the matrix that defines the linear system 1260 - Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat. 1261 1262 Level: intermediate 1263 1264 Notes: 1265 Passing a `NULL` for `Amat` or `Pmat` removes the matrix that is currently used. 1266 1267 If you wish to replace either `Amat` or `Pmat` but leave the other one untouched then 1268 first call `KSPGetOperators()` to get the one you wish to keep, call `PetscObjectReference()` 1269 on it and then pass it back in in your call to `KSPSetOperators()`. 1270 1271 More Notes about Repeated Solution of Linear Systems: 1272 PETSc does NOT reset the matrix entries of either `Amat` or `Pmat` 1273 to zero after a linear solve; the user is completely responsible for 1274 matrix assembly. See the routine `MatZeroEntries()` if desiring to 1275 zero all elements of a matrix. 1276 1277 .seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()` 1278 @*/ 1279 PetscErrorCode PCSetOperators(PC pc, Mat Amat, Mat Pmat) 1280 { 1281 PetscInt m1, n1, m2, n2; 1282 1283 PetscFunctionBegin; 1284 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1285 if (Amat) PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2); 1286 if (Pmat) PetscValidHeaderSpecific(Pmat, MAT_CLASSID, 3); 1287 if (Amat) PetscCheckSameComm(pc, 1, Amat, 2); 1288 if (Pmat) PetscCheckSameComm(pc, 1, Pmat, 3); 1289 if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) { 1290 PetscCall(MatGetLocalSize(Amat, &m1, &n1)); 1291 PetscCall(MatGetLocalSize(pc->mat, &m2, &n2)); 1292 PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1); 1293 PetscCall(MatGetLocalSize(Pmat, &m1, &n1)); 1294 PetscCall(MatGetLocalSize(pc->pmat, &m2, &n2)); 1295 PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1); 1296 } 1297 1298 if (Pmat != pc->pmat) { 1299 /* changing the operator that defines the preconditioner thus reneed to clear current states so new preconditioner is built */ 1300 pc->matnonzerostate = -1; 1301 pc->matstate = -1; 1302 } 1303 1304 /* reference first in case the matrices are the same */ 1305 if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat)); 1306 PetscCall(MatDestroy(&pc->mat)); 1307 if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat)); 1308 PetscCall(MatDestroy(&pc->pmat)); 1309 pc->mat = Amat; 1310 pc->pmat = Pmat; 1311 PetscFunctionReturn(PETSC_SUCCESS); 1312 } 1313 1314 /*@ 1315 PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner `PC` has changed. 1316 1317 Logically Collective 1318 1319 Input Parameters: 1320 + pc - the `PC` preconditioner context 1321 - flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner 1322 1323 Level: intermediate 1324 1325 Note: 1326 Normally if a matrix inside a `PC` changes the `PC` automatically updates itself using information from the changed matrix. This option 1327 prevents this. 1328 1329 .seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()` 1330 @*/ 1331 PetscErrorCode PCSetReusePreconditioner(PC pc, PetscBool flag) 1332 { 1333 PetscFunctionBegin; 1334 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1335 PetscValidLogicalCollectiveBool(pc, flag, 2); 1336 pc->reusepreconditioner = flag; 1337 PetscFunctionReturn(PETSC_SUCCESS); 1338 } 1339 1340 /*@ 1341 PCGetReusePreconditioner - Determines if the `PC` reuses the current preconditioner even if the operator in the preconditioner has changed. 1342 1343 Not Collective 1344 1345 Input Parameter: 1346 . pc - the `PC` preconditioner context 1347 1348 Output Parameter: 1349 . flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner 1350 1351 Level: intermediate 1352 1353 .seealso: [](ch_ksp), `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()` 1354 @*/ 1355 PetscErrorCode PCGetReusePreconditioner(PC pc, PetscBool *flag) 1356 { 1357 PetscFunctionBegin; 1358 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1359 PetscAssertPointer(flag, 2); 1360 *flag = pc->reusepreconditioner; 1361 PetscFunctionReturn(PETSC_SUCCESS); 1362 } 1363 1364 /*@ 1365 PCGetOperators - Gets the matrix associated with the linear system and 1366 possibly a different one which is used to construct the preconditioner. 1367 1368 Not Collective, though parallel `Mat`s are returned if `pc` is parallel 1369 1370 Input Parameter: 1371 . pc - the `PC` preconditioner context 1372 1373 Output Parameters: 1374 + Amat - the matrix defining the linear system 1375 - Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat. 1376 1377 Level: intermediate 1378 1379 Note: 1380 Does not increase the reference count of the matrices, so you should not destroy them 1381 1382 Alternative usage: If the operators have NOT been set with `KSPSetOperators()`/`PCSetOperators()` then the operators 1383 are created in `PC` and returned to the user. In this case, if both operators 1384 mat and pmat are requested, two DIFFERENT operators will be returned. If 1385 only one is requested both operators in the PC will be the same (i.e. as 1386 if one had called `KSPSetOperators()`/`PCSetOperators()` with the same argument for both Mats). 1387 The user must set the sizes of the returned matrices and their type etc just 1388 as if the user created them with `MatCreate()`. For example, 1389 1390 .vb 1391 KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to 1392 set size, type, etc of Amat 1393 1394 MatCreate(comm,&mat); 1395 KSP/PCSetOperators(ksp/pc,Amat,Amat); 1396 PetscObjectDereference((PetscObject)mat); 1397 set size, type, etc of Amat 1398 .ve 1399 1400 and 1401 1402 .vb 1403 KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to 1404 set size, type, etc of Amat and Pmat 1405 1406 MatCreate(comm,&Amat); 1407 MatCreate(comm,&Pmat); 1408 KSP/PCSetOperators(ksp/pc,Amat,Pmat); 1409 PetscObjectDereference((PetscObject)Amat); 1410 PetscObjectDereference((PetscObject)Pmat); 1411 set size, type, etc of Amat and Pmat 1412 .ve 1413 1414 The rationale for this support is so that when creating a `TS`, `SNES`, or `KSP` the hierarchy 1415 of underlying objects (i.e. `SNES`, `KSP`, `PC`, `Mat`) and their lifespans can be completely 1416 managed by the top most level object (i.e. the `TS`, `SNES`, or `KSP`). Another way to look 1417 at this is when you create a `SNES` you do not NEED to create a `KSP` and attach it to 1418 the `SNES` object (the `SNES` object manages it for you). Similarly when you create a KSP 1419 you do not need to attach a `PC` to it (the `KSP` object manages the `PC` object for you). 1420 Thus, why should YOU have to create the `Mat` and attach it to the `SNES`/`KSP`/`PC`, when 1421 it can be created for you? 1422 1423 .seealso: [](ch_ksp), `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()` 1424 @*/ 1425 PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat) 1426 { 1427 PetscFunctionBegin; 1428 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1429 if (Amat) { 1430 if (!pc->mat) { 1431 if (pc->pmat && !Pmat) { /* Pmat has been set, but user did not request it, so use for Amat */ 1432 pc->mat = pc->pmat; 1433 PetscCall(PetscObjectReference((PetscObject)pc->mat)); 1434 } else { /* both Amat and Pmat are empty */ 1435 PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->mat)); 1436 if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */ 1437 pc->pmat = pc->mat; 1438 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 1439 } 1440 } 1441 } 1442 *Amat = pc->mat; 1443 } 1444 if (Pmat) { 1445 if (!pc->pmat) { 1446 if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */ 1447 pc->pmat = pc->mat; 1448 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 1449 } else { 1450 PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->pmat)); 1451 if (!Amat) { /* user did NOT request Amat, so make same as Pmat */ 1452 pc->mat = pc->pmat; 1453 PetscCall(PetscObjectReference((PetscObject)pc->mat)); 1454 } 1455 } 1456 } 1457 *Pmat = pc->pmat; 1458 } 1459 PetscFunctionReturn(PETSC_SUCCESS); 1460 } 1461 1462 /*@ 1463 PCGetOperatorsSet - Determines if the matrix associated with the linear system and 1464 possibly a different one associated with the preconditioner have been set in the `PC`. 1465 1466 Not Collective, though the results on all processes should be the same 1467 1468 Input Parameter: 1469 . pc - the `PC` preconditioner context 1470 1471 Output Parameters: 1472 + mat - the matrix associated with the linear system was set 1473 - pmat - matrix associated with the preconditioner was set, usually the same 1474 1475 Level: intermediate 1476 1477 .seealso: [](ch_ksp), `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()` 1478 @*/ 1479 PetscErrorCode PCGetOperatorsSet(PC pc, PetscBool *mat, PetscBool *pmat) 1480 { 1481 PetscFunctionBegin; 1482 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1483 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1484 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1485 PetscFunctionReturn(PETSC_SUCCESS); 1486 } 1487 1488 /*@ 1489 PCFactorGetMatrix - Gets the factored matrix from the 1490 preconditioner context. This routine is valid only for the `PCLU`, 1491 `PCILU`, `PCCHOLESKY`, and `PCICC` methods. 1492 1493 Not Collective though `mat` is parallel if `pc` is parallel 1494 1495 Input Parameter: 1496 . pc - the `PC` preconditioner context 1497 1498 Output Parameters: 1499 . mat - the factored matrix 1500 1501 Level: advanced 1502 1503 Note: 1504 Does not increase the reference count for `mat` so DO NOT destroy it 1505 1506 .seealso: [](ch_ksp), `PC`, `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC` 1507 @*/ 1508 PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat) 1509 { 1510 PetscFunctionBegin; 1511 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1512 PetscAssertPointer(mat, 2); 1513 PetscCall(PCFactorSetUpMatSolverType(pc)); 1514 PetscUseTypeMethod(pc, getfactoredmatrix, mat); 1515 PetscFunctionReturn(PETSC_SUCCESS); 1516 } 1517 1518 /*@ 1519 PCSetOptionsPrefix - Sets the prefix used for searching for all 1520 `PC` options in the database. 1521 1522 Logically Collective 1523 1524 Input Parameters: 1525 + pc - the `PC` preconditioner context 1526 - prefix - the prefix string to prepend to all `PC` option requests 1527 1528 Note: 1529 A hyphen (-) must NOT be given at the beginning of the prefix name. 1530 The first character of all runtime options is AUTOMATICALLY the 1531 hyphen. 1532 1533 Level: advanced 1534 1535 .seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()` 1536 @*/ 1537 PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[]) 1538 { 1539 PetscFunctionBegin; 1540 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1541 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc, prefix)); 1542 PetscFunctionReturn(PETSC_SUCCESS); 1543 } 1544 1545 /*@ 1546 PCAppendOptionsPrefix - Appends to the prefix used for searching for all 1547 `PC` options in the database. 1548 1549 Logically Collective 1550 1551 Input Parameters: 1552 + pc - the `PC` preconditioner context 1553 - prefix - the prefix string to prepend to all `PC` option requests 1554 1555 Note: 1556 A hyphen (-) must NOT be given at the beginning of the prefix name. 1557 The first character of all runtime options is AUTOMATICALLY the 1558 hyphen. 1559 1560 Level: advanced 1561 1562 .seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()` 1563 @*/ 1564 PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[]) 1565 { 1566 PetscFunctionBegin; 1567 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1568 PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc, prefix)); 1569 PetscFunctionReturn(PETSC_SUCCESS); 1570 } 1571 1572 /*@ 1573 PCGetOptionsPrefix - Gets the prefix used for searching for all 1574 PC options in the database. 1575 1576 Not Collective 1577 1578 Input Parameter: 1579 . pc - the `PC` preconditioner context 1580 1581 Output Parameter: 1582 . prefix - pointer to the prefix string used, is returned 1583 1584 Level: advanced 1585 1586 .seealso: [](ch_ksp), `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()` 1587 @*/ 1588 PetscErrorCode PCGetOptionsPrefix(PC pc, const char *prefix[]) 1589 { 1590 PetscFunctionBegin; 1591 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1592 PetscAssertPointer(prefix, 2); 1593 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, prefix)); 1594 PetscFunctionReturn(PETSC_SUCCESS); 1595 } 1596 1597 /* 1598 Indicates the right-hand side will be changed by KSPSolve(), this occurs for a few 1599 preconditioners including BDDC and Eisentat that transform the equations before applying 1600 the Krylov methods 1601 */ 1602 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc, PetscBool *change) 1603 { 1604 PetscFunctionBegin; 1605 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1606 PetscAssertPointer(change, 2); 1607 *change = PETSC_FALSE; 1608 PetscTryMethod(pc, "PCPreSolveChangeRHS_C", (PC, PetscBool *), (pc, change)); 1609 PetscFunctionReturn(PETSC_SUCCESS); 1610 } 1611 1612 /*@ 1613 PCPreSolve - Optional pre-solve phase, intended for any preconditioner-specific actions that must be performed before 1614 the iterative solve itself. Used in conjunction with `PCPostSolve()` 1615 1616 Collective 1617 1618 Input Parameters: 1619 + pc - the `PC` preconditioner context 1620 - ksp - the Krylov subspace context 1621 1622 Level: developer 1623 1624 Notes: 1625 `KSPSolve()` calls this directly, so is rarely called by the user. 1626 1627 Certain preconditioners, such as the `PCType` of `PCEISENSTAT`, change the formulation of the linear system to be solved iteratively. 1628 This function performs that transformation. `PCPostSolve()` then transforms the system back to its original form after the solve. 1629 `PCPostSolve()` also transforms the resulting solution of the transformed system to the solution of the original problem. 1630 1631 `KSPSetPostSolve()` provides an alternative way to provide such transformations. 1632 1633 .seealso: [](ch_ksp), `PC`, `PCPostSolve()`, `KSP`, `PCSetPostSetUp()`, `KSPSetPreSolve()`, `KSPSetPostSolve()` 1634 @*/ 1635 PetscErrorCode PCPreSolve(PC pc, KSP ksp) 1636 { 1637 Vec x, rhs; 1638 1639 PetscFunctionBegin; 1640 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1641 PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2); 1642 pc->presolvedone++; 1643 PetscCheck(pc->presolvedone <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot embed PCPreSolve() more than twice"); 1644 PetscCall(KSPGetSolution(ksp, &x)); 1645 PetscCall(KSPGetRhs(ksp, &rhs)); 1646 PetscTryTypeMethod(pc, presolve, ksp, rhs, x); 1647 PetscFunctionReturn(PETSC_SUCCESS); 1648 } 1649 1650 /*@C 1651 PCSetPostSetUp - Sets function called at the end of `PCSetUp()` to adjust the computed preconditioner 1652 1653 Logically Collective 1654 1655 Input Parameters: 1656 + pc - the preconditioner object 1657 - postsetup - the function to call after `PCSetUp()` 1658 1659 Calling sequence of `postsetup`: 1660 . pc - the `PC` context 1661 1662 Level: developer 1663 1664 .seealso: [](ch_ksp), `PC`, `PCSetUp()` 1665 @*/ 1666 PetscErrorCode PCSetPostSetUp(PC pc, PetscErrorCode (*postsetup)(PC pc)) 1667 { 1668 PetscFunctionBegin; 1669 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1670 pc->postsetup = postsetup; 1671 PetscFunctionReturn(PETSC_SUCCESS); 1672 } 1673 1674 /*@ 1675 PCPostSolve - Optional post-solve phase, intended for any 1676 preconditioner-specific actions that must be performed after 1677 the iterative solve itself. 1678 1679 Collective 1680 1681 Input Parameters: 1682 + pc - the `PC` preconditioner context 1683 - ksp - the `KSP` Krylov subspace context 1684 1685 Example Usage: 1686 .vb 1687 PCPreSolve(pc,ksp); 1688 KSPSolve(ksp,b,x); 1689 PCPostSolve(pc,ksp); 1690 .ve 1691 1692 Level: developer 1693 1694 Note: 1695 `KSPSolve()` calls this routine directly, so it is rarely called by the user. 1696 1697 .seealso: [](ch_ksp), `PC`, `PCSetPreSolve()`, `KSPSetPostSolve()`, `KSPSetPreSolve()`, `PCPreSolve()`, `KSPSolve()` 1698 @*/ 1699 PetscErrorCode PCPostSolve(PC pc, KSP ksp) 1700 { 1701 Vec x, rhs; 1702 1703 PetscFunctionBegin; 1704 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1705 PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2); 1706 pc->presolvedone--; 1707 PetscCall(KSPGetSolution(ksp, &x)); 1708 PetscCall(KSPGetRhs(ksp, &rhs)); 1709 PetscTryTypeMethod(pc, postsolve, ksp, rhs, x); 1710 PetscFunctionReturn(PETSC_SUCCESS); 1711 } 1712 1713 /*@ 1714 PCLoad - Loads a `PC` that has been stored in binary with `PCView()`. 1715 1716 Collective 1717 1718 Input Parameters: 1719 + newdm - the newly loaded `PC`, this needs to have been created with `PCCreate()` or 1720 some related function before a call to `PCLoad()`. 1721 - viewer - binary file viewer `PETSCVIEWERBINARY`, obtained from `PetscViewerBinaryOpen()` 1722 1723 Level: intermediate 1724 1725 Note: 1726 The type is determined by the data in the file, any `PCType` set into the `PC` before this call is ignored. 1727 1728 .seealso: [](ch_ksp), `PC`, `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()`, `PETSCVIEWERBINARY` 1729 @*/ 1730 PetscErrorCode PCLoad(PC newdm, PetscViewer viewer) 1731 { 1732 PetscBool isbinary; 1733 PetscInt classid; 1734 char type[256]; 1735 1736 PetscFunctionBegin; 1737 PetscValidHeaderSpecific(newdm, PC_CLASSID, 1); 1738 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1739 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 1740 PetscCheck(isbinary, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 1741 1742 PetscCall(PetscViewerBinaryRead(viewer, &classid, 1, NULL, PETSC_INT)); 1743 PetscCheck(classid == PC_FILE_CLASSID, PetscObjectComm((PetscObject)newdm), PETSC_ERR_ARG_WRONG, "Not PC next in file"); 1744 PetscCall(PetscViewerBinaryRead(viewer, type, 256, NULL, PETSC_CHAR)); 1745 PetscCall(PCSetType(newdm, type)); 1746 PetscTryTypeMethod(newdm, load, viewer); 1747 PetscFunctionReturn(PETSC_SUCCESS); 1748 } 1749 1750 #include <petscdraw.h> 1751 #if defined(PETSC_HAVE_SAWS) 1752 #include <petscviewersaws.h> 1753 #endif 1754 1755 /*@ 1756 PCViewFromOptions - View (print or provide information about) the `PC`, based on options in the options database 1757 1758 Collective 1759 1760 Input Parameters: 1761 + A - the `PC` context 1762 . obj - Optional object that provides the options prefix 1763 - name - command line option name 1764 1765 Level: developer 1766 1767 .seealso: [](ch_ksp), `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()` 1768 @*/ 1769 PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[]) 1770 { 1771 PetscFunctionBegin; 1772 PetscValidHeaderSpecific(A, PC_CLASSID, 1); 1773 PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name)); 1774 PetscFunctionReturn(PETSC_SUCCESS); 1775 } 1776 1777 /*@ 1778 PCView - Prints information about the `PC` 1779 1780 Collective 1781 1782 Input Parameters: 1783 + pc - the `PC` preconditioner context 1784 - viewer - optional `PetscViewer` visualization context 1785 1786 Level: intermediate 1787 1788 Notes: 1789 The available visualization contexts include 1790 + `PETSC_VIEWER_STDOUT_SELF` - standard output (default) 1791 - `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard 1792 output where only the first processor opens 1793 the file. All other processors send their 1794 data to the first processor to print. 1795 1796 The user can open an alternative visualization contexts with 1797 `PetscViewerASCIIOpen()` (output to a specified file). 1798 1799 .seealso: [](ch_ksp), `PC`, `PetscViewer`, `PetscViewerType`, `KSPView()`, `PetscViewerASCIIOpen()` 1800 @*/ 1801 PetscErrorCode PCView(PC pc, PetscViewer viewer) 1802 { 1803 PCType cstr; 1804 PetscViewerFormat format; 1805 PetscBool iascii, isstring, isbinary, isdraw, pop = PETSC_FALSE; 1806 #if defined(PETSC_HAVE_SAWS) 1807 PetscBool issaws; 1808 #endif 1809 1810 PetscFunctionBegin; 1811 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1812 if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer)); 1813 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1814 PetscCheckSameComm(pc, 1, viewer, 2); 1815 1816 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1817 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring)); 1818 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 1819 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 1820 #if defined(PETSC_HAVE_SAWS) 1821 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws)); 1822 #endif 1823 1824 if (iascii) { 1825 PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc, viewer)); 1826 if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, " PC has not been set up so information may be incomplete\n")); 1827 PetscCall(PetscViewerASCIIPushTab(viewer)); 1828 PetscTryTypeMethod(pc, view, viewer); 1829 PetscCall(PetscViewerASCIIPopTab(viewer)); 1830 if (pc->mat) { 1831 PetscCall(PetscViewerGetFormat(viewer, &format)); 1832 if (format != PETSC_VIEWER_ASCII_INFO_DETAIL) { 1833 PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO)); 1834 pop = PETSC_TRUE; 1835 } 1836 if (pc->pmat == pc->mat) { 1837 PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix = precond matrix:\n")); 1838 PetscCall(PetscViewerASCIIPushTab(viewer)); 1839 PetscCall(MatView(pc->mat, viewer)); 1840 PetscCall(PetscViewerASCIIPopTab(viewer)); 1841 } else { 1842 if (pc->pmat) { 1843 PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix followed by preconditioner matrix:\n")); 1844 } else { 1845 PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix:\n")); 1846 } 1847 PetscCall(PetscViewerASCIIPushTab(viewer)); 1848 PetscCall(MatView(pc->mat, viewer)); 1849 if (pc->pmat) PetscCall(MatView(pc->pmat, viewer)); 1850 PetscCall(PetscViewerASCIIPopTab(viewer)); 1851 } 1852 if (pop) PetscCall(PetscViewerPopFormat(viewer)); 1853 } 1854 } else if (isstring) { 1855 PetscCall(PCGetType(pc, &cstr)); 1856 PetscCall(PetscViewerStringSPrintf(viewer, " PCType: %-7.7s", cstr)); 1857 PetscTryTypeMethod(pc, view, viewer); 1858 if (pc->mat) PetscCall(MatView(pc->mat, viewer)); 1859 if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer)); 1860 } else if (isbinary) { 1861 PetscInt classid = PC_FILE_CLASSID; 1862 MPI_Comm comm; 1863 PetscMPIInt rank; 1864 char type[256]; 1865 1866 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm)); 1867 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 1868 if (rank == 0) { 1869 PetscCall(PetscViewerBinaryWrite(viewer, &classid, 1, PETSC_INT)); 1870 PetscCall(PetscStrncpy(type, ((PetscObject)pc)->type_name, 256)); 1871 PetscCall(PetscViewerBinaryWrite(viewer, type, 256, PETSC_CHAR)); 1872 } 1873 PetscTryTypeMethod(pc, view, viewer); 1874 } else if (isdraw) { 1875 PetscDraw draw; 1876 char str[25]; 1877 PetscReal x, y, bottom, h; 1878 PetscInt n; 1879 1880 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 1881 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 1882 if (pc->mat) { 1883 PetscCall(MatGetSize(pc->mat, &n, NULL)); 1884 PetscCall(PetscSNPrintf(str, 25, "PC: %s (%" PetscInt_FMT ")", ((PetscObject)pc)->type_name, n)); 1885 } else { 1886 PetscCall(PetscSNPrintf(str, 25, "PC: %s", ((PetscObject)pc)->type_name)); 1887 } 1888 PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h)); 1889 bottom = y - h; 1890 PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom)); 1891 PetscTryTypeMethod(pc, view, viewer); 1892 PetscCall(PetscDrawPopCurrentPoint(draw)); 1893 #if defined(PETSC_HAVE_SAWS) 1894 } else if (issaws) { 1895 PetscMPIInt rank; 1896 1897 PetscCall(PetscObjectName((PetscObject)pc)); 1898 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank)); 1899 if (!((PetscObject)pc)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)pc, viewer)); 1900 if (pc->mat) PetscCall(MatView(pc->mat, viewer)); 1901 if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer)); 1902 #endif 1903 } 1904 PetscFunctionReturn(PETSC_SUCCESS); 1905 } 1906 1907 /*@C 1908 PCRegister - Adds a method (`PCType`) to the PETSc preconditioner package. 1909 1910 Not collective. No Fortran Support 1911 1912 Input Parameters: 1913 + sname - name of a new user-defined solver 1914 - function - routine to create the method context which will be stored in a `PC` when `PCSetType()` is called 1915 1916 Example Usage: 1917 .vb 1918 PCRegister("my_solver", MySolverCreate); 1919 .ve 1920 1921 Then, your solver can be chosen with the procedural interface via 1922 .vb 1923 PCSetType(pc, "my_solver") 1924 .ve 1925 or at runtime via the option 1926 .vb 1927 -pc_type my_solver 1928 .ve 1929 1930 Level: advanced 1931 1932 Note: 1933 A simpler alternative to using `PCRegister()` for an application specific preconditioner is to use a `PC` of `PCType` `PCSHELL` and 1934 provide your customizations with `PCShellSetContext()` and `PCShellSetApply()` 1935 1936 `PCRegister()` may be called multiple times to add several user-defined preconditioners. 1937 1938 .seealso: [](ch_ksp), `PC`, `PCType`, `PCRegisterAll()`, `PCSetType()`, `PCShellSetContext()`, `PCShellSetApply()`, `PCSHELL` 1939 @*/ 1940 PetscErrorCode PCRegister(const char sname[], PetscErrorCode (*function)(PC)) 1941 { 1942 PetscFunctionBegin; 1943 PetscCall(PCInitializePackage()); 1944 PetscCall(PetscFunctionListAdd(&PCList, sname, function)); 1945 PetscFunctionReturn(PETSC_SUCCESS); 1946 } 1947 1948 static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y) 1949 { 1950 PC pc; 1951 1952 PetscFunctionBegin; 1953 PetscCall(MatShellGetContext(A, &pc)); 1954 PetscCall(PCApply(pc, X, Y)); 1955 PetscFunctionReturn(PETSC_SUCCESS); 1956 } 1957 1958 /*@ 1959 PCComputeOperator - Computes the explicit preconditioned operator as a matrix `Mat`. 1960 1961 Collective 1962 1963 Input Parameters: 1964 + pc - the `PC` preconditioner object 1965 - mattype - the `MatType` to be used for the operator 1966 1967 Output Parameter: 1968 . mat - the explicit preconditioned operator 1969 1970 Level: advanced 1971 1972 Note: 1973 This computation is done by applying the operators to columns of the identity matrix. 1974 This routine is costly in general, and is recommended for use only with relatively small systems. 1975 Currently, this routine uses a dense matrix format when `mattype` == `NULL` 1976 1977 Developer Note: 1978 This should be called `PCCreateExplicitOperator()` 1979 1980 .seealso: [](ch_ksp), `PC`, `KSPComputeOperator()`, `MatType` 1981 @*/ 1982 PetscErrorCode PCComputeOperator(PC pc, MatType mattype, Mat *mat) 1983 { 1984 PetscInt N, M, m, n; 1985 Mat A, Apc; 1986 1987 PetscFunctionBegin; 1988 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1989 PetscAssertPointer(mat, 3); 1990 PetscCall(PCGetOperators(pc, &A, NULL)); 1991 PetscCall(MatGetLocalSize(A, &m, &n)); 1992 PetscCall(MatGetSize(A, &M, &N)); 1993 PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc), m, n, M, N, pc, &Apc)); 1994 PetscCall(MatShellSetOperation(Apc, MATOP_MULT, (void (*)(void))MatMult_PC)); 1995 PetscCall(MatComputeOperator(Apc, mattype, mat)); 1996 PetscCall(MatDestroy(&Apc)); 1997 PetscFunctionReturn(PETSC_SUCCESS); 1998 } 1999 2000 /*@ 2001 PCSetCoordinates - sets the coordinates of all the nodes (degrees of freedom in the vector) on the local process 2002 2003 Collective 2004 2005 Input Parameters: 2006 + pc - the `PC` preconditioner context 2007 . dim - the dimension of the coordinates 1, 2, or 3 2008 . nloc - the blocked size of the coordinates array 2009 - coords - the coordinates array 2010 2011 Level: intermediate 2012 2013 Notes: 2014 `coords` is an array of the dim coordinates for the nodes on 2015 the local processor, of size `dim`*`nloc`. 2016 If there are 108 equations (dofs) on a processor 2017 for a 3d displacement finite element discretization of elasticity (so 2018 that there are nloc = 36 = 108/3 nodes) then the array must have 108 2019 double precision values (ie, 3 * 36). These x y z coordinates 2020 should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x, 2021 ... , N-1.z ]. 2022 2023 The information provided here can be used by some preconditioners, such as `PCGAMG`, to produce a better preconditioner. 2024 See also `MatSetNearNullSpace()`. 2025 2026 .seealso: [](ch_ksp), `PC`, `MatSetNearNullSpace()` 2027 @*/ 2028 PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[]) 2029 { 2030 PetscFunctionBegin; 2031 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2032 PetscValidLogicalCollectiveInt(pc, dim, 2); 2033 PetscTryMethod(pc, "PCSetCoordinates_C", (PC, PetscInt, PetscInt, PetscReal[]), (pc, dim, nloc, coords)); 2034 PetscFunctionReturn(PETSC_SUCCESS); 2035 } 2036 2037 /*@ 2038 PCGetInterpolations - Gets interpolation matrices for all levels (except level 0) 2039 2040 Logically Collective 2041 2042 Input Parameter: 2043 . pc - the precondition context 2044 2045 Output Parameters: 2046 + num_levels - the number of levels 2047 - interpolations - the interpolation matrices (size of `num_levels`-1) 2048 2049 Level: advanced 2050 2051 Developer Note: 2052 Why is this here instead of in `PCMG` etc? 2053 2054 .seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()` 2055 @*/ 2056 PetscErrorCode PCGetInterpolations(PC pc, PetscInt *num_levels, Mat *interpolations[]) 2057 { 2058 PetscFunctionBegin; 2059 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2060 PetscAssertPointer(num_levels, 2); 2061 PetscAssertPointer(interpolations, 3); 2062 PetscUseMethod(pc, "PCGetInterpolations_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, interpolations)); 2063 PetscFunctionReturn(PETSC_SUCCESS); 2064 } 2065 2066 /*@ 2067 PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level) 2068 2069 Logically Collective 2070 2071 Input Parameter: 2072 . pc - the precondition context 2073 2074 Output Parameters: 2075 + num_levels - the number of levels 2076 - coarseOperators - the coarse operator matrices (size of `num_levels`-1) 2077 2078 Level: advanced 2079 2080 Developer Note: 2081 Why is this here instead of in `PCMG` etc? 2082 2083 .seealso: [](ch_ksp), `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()` 2084 @*/ 2085 PetscErrorCode PCGetCoarseOperators(PC pc, PetscInt *num_levels, Mat *coarseOperators[]) 2086 { 2087 PetscFunctionBegin; 2088 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2089 PetscAssertPointer(num_levels, 2); 2090 PetscAssertPointer(coarseOperators, 3); 2091 PetscUseMethod(pc, "PCGetCoarseOperators_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, coarseOperators)); 2092 PetscFunctionReturn(PETSC_SUCCESS); 2093 } 2094