/*
    The PC (preconditioner) interface routines, callable by users.
*/
#include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/
#include <petscdm.h>

/* Logging support */
PetscClassId  PC_CLASSID;
PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
PetscInt      PetscMGLevelId;
PetscLogStage PCMPIStage;

/*
  PCGetDefaultType_Private - Selects a default `PCType` string based on the properties of the
  matrix used to build the preconditioner (the Pmat) and the size of the communicator.
*/
PetscErrorCode PCGetDefaultType_Private(PC pc, const char *type[])
{
  PetscMPIInt size;
  PetscBool   hasop, flg1, flg2, set, flg3, isnormal;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
  if (pc->pmat) {
    /* can the matrix provide a diagonal block? (needed by PCBJACOBI) */
    PetscCall(MatHasOperation(pc->pmat, MATOP_GET_DIAGONAL_BLOCK, &hasop));
    if (size == 1) {
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ICC, &flg1));
      PetscCall(MatGetFactorAvailable(pc->pmat, "petsc", MAT_FACTOR_ILU, &flg2));
      PetscCall(MatIsSymmetricKnown(pc->pmat, &set, &flg3));
      PetscCall(PetscObjectTypeCompareAny((PetscObject)pc->pmat, &isnormal, MATNORMAL, MATNORMALHERMITIAN, NULL));
      /* prefer ICC when it is available and either ILU is not or the matrix is known symmetric */
      if (flg1 && (!flg2 || (set && flg3))) {
        *type = PCICC;
      } else if (flg2) {
        *type = PCILU;
      } else if (isnormal) {
        *type = PCNONE;
      } else if (hasop) { /* likely is a parallel matrix run on one processor */
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    } else {
      if (hasop) {
        *type = PCBJACOBI;
      } else {
        *type = PCNONE;
      }
    }
  } else {
    /* no Pmat yet: choose from the communicator size alone */
    if (size == 1) {
      *type = PCILU;
    } else {
      *type = PCBJACOBI;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated `Vec`s and `Mat`s

  Collective

  Input Parameter:
.
pc - the preconditioner context

  Level: developer

  Note:
  This allows a `PC` to be reused for a different sized linear system but using the same options that have been previously set in the PC

.seealso: `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCReset(PC pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscTryTypeMethod(pc, reset); /* implementation-specific reset, if the PC type provides one */
  PetscCall(VecDestroy(&pc->diagonalscaleright));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));
  PetscCall(MatDestroy(&pc->pmat));
  PetscCall(MatDestroy(&pc->mat));

  pc->setupcalled = 0; /* next PCSetUp() will rebuild everything from scratch */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCDestroy - Destroys `PC` context that was created with `PCCreate()`.

  Collective

  Input Parameter:
. pc - the preconditioner context

  Level: developer

.seealso: `PC`, `PCCreate()`, `PCSetUp()`
@*/
PetscErrorCode PCDestroy(PC *pc)
{
  PetscFunctionBegin;
  if (!*pc) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific((*pc), PC_CLASSID, 1);
  /* if other references remain, only clear the caller's pointer */
  if (--((PetscObject)(*pc))->refct > 0) {
    *pc = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PCReset(*pc));

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*pc));
  PetscTryTypeMethod((*pc), destroy);
  PetscCall(DMDestroy(&(*pc)->dm));
  PetscCall(PetscHeaderDestroy(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
  scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
.
flag - `PETSC_TRUE` if it applies the scaling

  Level: developer

  Note:
  If this returns `PETSC_TRUE` then the system solved via the Krylov method is
.vb
  D M A D^{-1} y = D M b  for left preconditioning or
  D A M D^{-1} z = D b  for right preconditioning
.ve

.seealso: `PC`, `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCSetDiagonalScale()`
@*/
PetscErrorCode PCGetDiagonalScale(PC pc, PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flag, 2);
  *flag = pc->diagonalscale;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right
  scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc - the preconditioner context
- s  - scaling vector

  Level: intermediate

  Notes:
  The system solved via the Krylov method is
.vb
  D M A D^{-1} y = D M b  for left preconditioning or
  D A M D^{-1} z = D b  for right preconditioning
.ve

  `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}.
.seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
@*/
PetscErrorCode PCSetDiagonalScale(PC pc, Vec s)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(s, VEC_CLASSID, 2);
  pc->diagonalscale = PETSC_TRUE;

  /* keep a reference to s as the left scaling D */
  PetscCall(PetscObjectReference((PetscObject)s));
  PetscCall(VecDestroy(&pc->diagonalscaleleft));

  pc->diagonalscaleleft = s;

  /* the right scaling is D^{-1}, stored in its own vector */
  PetscCall(VecDuplicate(s, &pc->diagonalscaleright));
  PetscCall(VecCopy(s, pc->diagonalscaleright));
  PetscCall(VecReciprocal(pc->diagonalscaleright));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - input vector
- out - scaled vector (maybe the same as in)

  Level: intermediate

  Notes:
  The system solved via the Krylov method is
.vb
  D M A D^{-1} y = D M b  for left preconditioning or
  D A M D^{-1} z = D b  for right preconditioning
.ve

  `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}.
  If diagonal scaling is turned off and in is not out then in is copied to out

.seealso: `PCCreate()`, `PCSetUp()`, `PCSetDiagonalScale()`, `PCDiagonalScaleRight()`, `PCGetDiagonalScale()`
@*/
PetscErrorCode PCDiagonalScaleLeft(PC pc, Vec in, Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
  if (pc->diagonalscale) {
    /* out = D .* in */
    PetscCall(VecPointwiseMult(out, pc->diagonalscaleleft, in));
  } else if (in != out) {
    PetscCall(VecCopy(in, out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
. in  - input vector
- out - scaled vector (maybe the same as in)

  Level: intermediate

  Notes:
  The system solved via the Krylov method is
.vb
  D M A D^{-1} y = D M b  for left preconditioning or
  D A M D^{-1} z = D b  for right preconditioning
.ve

  `PCDiagonalScaleLeft()` scales a vector by D. `PCDiagonalScaleRight()` scales a vector by D^{-1}.
  If diagonal scaling is turned off and in is not out then in is copied to out

.seealso: `PCCreate()`, `PCSetUp()`, `PCDiagonalScaleLeft()`, `PCSetDiagonalScale()`, `PCGetDiagonalScale()`
@*/
PetscErrorCode PCDiagonalScaleRight(PC pc, Vec in, Vec out)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(in, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(out, VEC_CLASSID, 3);
  if (pc->diagonalscale) {
    /* out = D^{-1} .* in (diagonalscaleright holds the reciprocal of the scaling vector) */
    PetscCall(VecPointwiseMult(out, pc->diagonalscaleright, in));
  } else if (in != out) {
    PetscCall(VecCopy(in, out));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
  operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
  `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.

  Logically Collective

  Input Parameters:
+ pc  - the preconditioner context
- flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false)

  Options Database Key:
. -pc_use_amat <true,false> - use the amat to apply the operator

  Level: intermediate

  Note:
  For the common case in which the linear system matrix and the matrix used to construct the
  preconditioner are identical, this routine does nothing.

.seealso: `PC`, `PCGetUseAmat()`, `PCBJACOBI`, `PCMG`, `PCFIELDSPLIT`, `PCCOMPOSITE`
@*/
PetscErrorCode PCSetUseAmat(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->useAmat = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetErrorIfFailure - Causes `PC` to generate an error if a FPE, for example a zero pivot, is detected.
  Logically Collective

  Input Parameters:
+ pc  - iterative context obtained from PCCreate()
- flg - `PETSC_TRUE` indicates you want the error generated

  Level: advanced

  Notes:
  Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call `KSPGetConvergedReason()` after a `KSPSolve()`
  to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
  detected.

  This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

.seealso: `PC`, `KSPSetErrorIfNotConverged()`, `PCGetInitialGuessNonzero()`, `PCSetInitialGuessKnoll()`, `PCGetInitialGuessKnoll()`
@*/
PetscErrorCode PCSetErrorIfFailure(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  pc->erroriffailure = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
  operator during the preconditioning process it applies the Amat provided to `TSSetRHSJacobian()`,
  `TSSetIJacobian()`, `SNESSetJacobian()`, `KSPSetOperators()` or `PCSetOperators()` not the Pmat.

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flg - `PETSC_TRUE` to use the Amat, `PETSC_FALSE` to use the Pmat (default is false)

  Level: intermediate

  Note:
  For the common case in which the linear system matrix and the matrix used to construct the
  preconditioner are identical, this routine does nothing.
346 347 .seealso: `PC`, `PCSetUseAmat()`, `PCBJACOBI`, `PGMG`, `PCFIELDSPLIT`, `PCCOMPOSITE` 348 @*/ 349 PetscErrorCode PCGetUseAmat(PC pc, PetscBool *flg) 350 { 351 PetscFunctionBegin; 352 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 353 *flg = pc->useAmat; 354 PetscFunctionReturn(PETSC_SUCCESS); 355 } 356 357 /*@ 358 PCSetKSPNestLevel - sets the amount of nesting the `KSP` that contains this `PC` has 359 360 Collective 361 362 Input Parameters: 363 + pc - the `PC` 364 - level - the nest level 365 366 Level: developer 367 368 .seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPGetNestLevel()`, `PCGetKSPNestLevel()`, `KSPSetNestLevel()` 369 @*/ 370 PetscErrorCode PCSetKSPNestLevel(PC pc, PetscInt level) 371 { 372 PetscFunctionBegin; 373 pc->kspnestlevel = level; 374 PetscFunctionReturn(PETSC_SUCCESS); 375 } 376 377 /*@ 378 PCGetKSPNestLevel - gets the amount of nesting the `KSP` that contains this `PC` has 379 380 Collective 381 382 Input Parameter: 383 . pc - the `PC` 384 385 Output Parameter: 386 . level - the nest level 387 388 Level: developer 389 390 .seealso: [](ch_ksp), `KSPSetUp()`, `KSPSolve()`, `KSPDestroy()`, `KSP`, `KSPGMRES`, `KSPType`, `KSPSetNestLevel()`, `PCSetKSPNestLevel()`, `KSPGetNestLevel()` 391 @*/ 392 PetscErrorCode PCGetKSPNestLevel(PC pc, PetscInt *level) 393 { 394 PetscFunctionBegin; 395 *level = pc->kspnestlevel; 396 PetscFunctionReturn(PETSC_SUCCESS); 397 } 398 399 /*@ 400 PCCreate - Creates a preconditioner context, `PC` 401 402 Collective 403 404 Input Parameter: 405 . comm - MPI communicator 406 407 Output Parameter: 408 . pc - location to put the preconditioner context 409 410 Level: developer 411 412 Note: 413 The default preconditioner for sparse matrices is `PCILU` or `PCICC` with 0 fill on one process and block Jacobi (`PCBJACOBI`) with `PCILU` or `PCICC` 414 in parallel. For dense matrices it is always `PCNONE`. 
.seealso: `PC`, `PCSetUp()`, `PCApply()`, `PCDestroy()`
@*/
PetscErrorCode PCCreate(MPI_Comm comm, PC *newpc)
{
  PC pc;

  PetscFunctionBegin;
  PetscAssertPointer(newpc, 2);
  *newpc = NULL;
  PetscCall(PCInitializePackage());

  PetscCall(PetscHeaderCreate(pc, PC_CLASSID, "PC", "Preconditioner", "PC", comm, PCDestroy, PCView));

  /* initialize all state to "empty"; the type and operators are set later */
  pc->mat                  = NULL;
  pc->pmat                 = NULL;
  pc->setupcalled          = 0;
  pc->setfromoptionscalled = 0;
  pc->data                 = NULL;
  pc->diagonalscale        = PETSC_FALSE;
  pc->diagonalscaleleft    = NULL;
  pc->diagonalscaleright   = NULL;

  pc->modifysubmatrices  = NULL;
  pc->modifysubmatricesP = NULL;

  *newpc = pc;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApply - Applies the preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

.seealso: `PC`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApply(PC pc, Vec x, Vec y)
{
  PetscInt m, n, mv, nv;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
  PetscCall(MatGetLocalSize(pc->pmat, &m, &n));
  PetscCall(VecGetLocalSize(x, &mv));
  PetscCall(VecGetLocalSize(y, &nv));
  /* check pmat * y = x is feasible */
  PetscCheck(mv == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local rows %" PetscInt_FMT " does not equal input vector size %" PetscInt_FMT, m, mv);
  PetscCheck(nv == n,
PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Preconditioner number of local columns %" PetscInt_FMT " does not equal output vector size %" PetscInt_FMT, n, nv);
  PetscCall(VecSetErrorIfLocked(y, 3));

  PetscCall(PCSetUp(pc));
  /* x is read-only while the type-specific apply runs */
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, apply, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscCall(VecLockReadPop(x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCMatApply - Applies the preconditioner to multiple vectors stored as a `MATDENSE`. Like `PCApply()`, Y and X must be different matrices.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- X  - block of input vectors

  Output Parameter:
. Y - block of output vectors

  Level: developer

.seealso: `PC`, `PCApply()`, `KSPMatSolve()`
@*/
PetscErrorCode PCMatApply(PC pc, Mat X, Mat Y)
{
  Mat       A;
  Vec       cy, cx;
  PetscInt  m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
  PetscBool match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(X, MAT_CLASSID, 2);
  PetscValidHeaderSpecific(Y, MAT_CLASSID, 3);
  PetscCheckSameComm(pc, 1, X, 2);
  PetscCheckSameComm(pc, 1, Y, 3);
  PetscCheck(Y != X, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
  PetscCall(PCGetOperators(pc, NULL, &A));
  /* gather local and global sizes of Pmat (3), input block (2), and output block (1) */
  PetscCall(MatGetLocalSize(A, &m3, &n3));
  PetscCall(MatGetLocalSize(X, &m2, &n2));
  PetscCall(MatGetLocalSize(Y, &m1, &n1));
  PetscCall(MatGetSize(A, &M3, &N3));
  PetscCall(MatGetSize(X, &M2, &N2));
  PetscCall(MatGetSize(Y, &M1, &N1));
  PetscCheck(n1 == n2 && N1 == N2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%" PetscInt_FMT ",%"
PetscInt_FMT ") and block of output vectors (n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")", n2, N2, n1, N1);
  PetscCheck(m2 == m3 && M2 == M3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m2, M2, m3, M3, n3, N3);
  PetscCheck(m1 == n3 && M1 == N3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%" PetscInt_FMT ",%" PetscInt_FMT ") and Pmat (m,M)x(n,N) = (%" PetscInt_FMT ",%" PetscInt_FMT ")x(%" PetscInt_FMT ",%" PetscInt_FMT ")", m1, M1, m3, M3, n3, N3);
  /* both blocks of vectors must be stored as dense matrices */
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
  PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, ""));
  PetscCheck(match, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
  PetscCall(PCSetUp(pc));
  if (pc->ops->matapply) {
    PetscCall(PetscLogEventBegin(PC_MatApply, pc, X, Y, 0));
    PetscUseTypeMethod(pc, matapply, X, Y);
    PetscCall(PetscLogEventEnd(PC_MatApply, pc, X, Y, 0));
  } else {
    /* no block implementation for this PC type: fall back to one PCApply() per column */
    PetscCall(PetscInfo(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name));
    for (n1 = 0; n1 < N1; ++n1) {
      PetscCall(MatDenseGetColumnVecRead(X, n1, &cx));
      PetscCall(MatDenseGetColumnVecWrite(Y, n1, &cy));
      PetscCall(PCApply(pc, cx, cy));
      PetscCall(MatDenseRestoreColumnVecWrite(Y, n1, &cy));
      PetscCall(MatDenseRestoreColumnVecRead(X, n1, &cx));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.
  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.

.seealso: `PC`, `PCApply()`, `PCApplySymmetricRight()`
@*/
PetscErrorCode PCApplySymmetricLeft(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricleft, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricLeft, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  Currently, this routine is implemented only for `PCICC` and `PCJACOBI` preconditioners.
.seealso: `PC`, `PCApply()`, `PCApplySymmetricLeft()`
@*/
PetscErrorCode PCApplySymmetricRight(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  PetscCall(PetscLogEventBegin(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscUseTypeMethod(pc, applysymmetricright, x, y);
  PetscCall(PetscLogEventEnd(PC_ApplySymmetricRight, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyTranspose - Applies the transpose of preconditioner to a vector.

  Collective

  Input Parameters:
+ pc - the preconditioner context
- x  - input vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  For complex numbers this applies the non-Hermitian transpose.
  Developer Notes:
  We need to implement a `PCApplyHermitianTranspose()`

.seealso: `PC`, `PCApply()`, `PCApplyBAorAB()`, `PCApplyBAorABTranspose()`, `PCApplyTransposeExists()`
@*/
PetscErrorCode PCApplyTranspose(PC pc, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
  PetscCall(PCSetUp(pc));
  PetscCall(VecLockReadPush(x));
  /* logged under the same event as the forward apply */
  PetscCall(PetscLogEventBegin(PC_Apply, pc, x, y, 0));
  PetscUseTypeMethod(pc, applytranspose, x, y);
  PetscCall(PetscLogEventEnd(PC_Apply, pc, x, y, 0));
  PetscCall(VecLockReadPop(x));
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. flg - `PETSC_TRUE` if a transpose operation is defined

  Level: developer

.seealso: `PC`, `PCApplyTranspose()`
@*/
PetscErrorCode PCApplyTransposeExists(PC pc, PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(flg, 2);
  /* purely local query of the type's function table */
  if (pc->ops->applytranspose) *flg = PETSC_TRUE;
  else *flg = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

  Collective

  Input Parameters:
+ pc   - the preconditioner context
. side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
.
x - input vector
- work - work vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  If the `PC` has had `PCSetDiagonalScale()` set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the
  specific `KSPSolve()` method must also be written to handle the post-solve "correction" for the diagonal scaling.

.seealso: `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorABTranspose()`
@*/
PetscErrorCode PCApplyBAorAB(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, side, 2);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheckSameComm(pc, 1, x, 3);
  PetscCheckSameComm(pc, 1, y, 4);
  PetscCheckSameComm(pc, 1, work, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  PetscCheck(side == PC_LEFT || side == PC_SYMMETRIC || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right, left, or symmetric");
  PetscCheck(!pc->diagonalscale || side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot include diagonal scaling with symmetric preconditioner application");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));

  PetscCall(PCSetUp(pc));
  if (pc->diagonalscale) {
    if (pc->ops->applyBA) {
      Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
      PetscCall(VecDuplicate(x, &work2));
      PetscCall(PCDiagonalScaleRight(pc, x, work2));
      PetscUseTypeMethod(pc, applyBA, side, work2, y, work);
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
      PetscCall(VecDestroy(&work2));
    } else if (side == PC_RIGHT) {
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(PCApply(pc, y, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else if (side == PC_LEFT) {
      PetscCall(PCDiagonalScaleRight(pc, x, y));
      PetscCall(MatMult(pc->mat, y, work));
      PetscCall(PCApply(pc, work, y));
      PetscCall(PCDiagonalScaleLeft(pc, y, y));
    } else PetscCheck(side != PC_SYMMETRIC, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot provide diagonal scaling with symmetric application of preconditioner");
  } else {
    /* no diagonal scaling: either the type provides a fused B*A apply, or compose it */
    if (pc->ops->applyBA) {
      PetscUseTypeMethod(pc, applyBA, side, x, y, work);
    } else if (side == PC_RIGHT) {
      PetscCall(PCApply(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
    } else if (side == PC_LEFT) {
      PetscCall(MatMult(pc->mat, x, work));
      PetscCall(PCApply(pc, work, y));
    } else if (side == PC_SYMMETRIC) {
      /* There's an extra copy here; maybe should provide 2 work vectors instead? */
      PetscCall(PCApplySymmetricRight(pc, x, work));
      PetscCall(MatMult(pc->mat, work, y));
      PetscCall(VecCopy(y, work));
      PetscCall(PCApplySymmetricLeft(pc, work, y));
    }
  }
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyBAorABTranspose - Applies the transpose of the preconditioner
  and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
  NOT tr(B*A) = tr(A)*tr(B).

  Collective

  Input Parameters:
+ pc   - the preconditioner context
. side - indicates the preconditioner side, one of `PC_LEFT`, `PC_RIGHT`, or `PC_SYMMETRIC`
. x    - input vector
- work - work vector

  Output Parameter:
. y - output vector

  Level: developer

  Note:
  This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
  defined by B'.
  This is why this has the funny form that it computes tr(B) * tr(A)

.seealso: `PC`, `PCApply()`, `PCApplyTranspose()`, `PCApplyBAorAB()`
@*/
PetscErrorCode PCApplyBAorABTranspose(PC pc, PCSide side, Vec x, Vec y, Vec work)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(x, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 4);
  PetscValidHeaderSpecific(work, VEC_CLASSID, 5);
  PetscCheck(x != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "x and y must be different vectors");
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(x, 3, PETSC_TRUE));
  /* a fused transpose apply provided by the PC type takes precedence */
  if (pc->ops->applyBAtranspose) {
    PetscUseTypeMethod(pc, applyBAtranspose, side, x, y, work);
    if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(side == PC_LEFT || side == PC_RIGHT, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_OUTOFRANGE, "Side must be right or left");

  PetscCall(PCSetUp(pc));
  if (side == PC_RIGHT) {
    PetscCall(PCApplyTranspose(pc, x, work));
    PetscCall(MatMultTranspose(pc->mat, work, y));
  } else if (side == PC_LEFT) {
    PetscCall(MatMultTranspose(pc->mat, x, work));
    PetscCall(PCApplyTranspose(pc, work, y));
  }
  /* add support for PC_SYMMETRIC */
  if (pc->erroriffailure) PetscCall(VecValidValues_Internal(y, 4, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyRichardsonExists - Determines whether a particular preconditioner has a
  built-in fast application of Richardson's method.

  Not Collective

  Input Parameter:
. pc - the preconditioner

  Output Parameter:
.
exists - `PETSC_TRUE` or `PETSC_FALSE`

  Level: developer

.seealso: `PC`, `PCRICHARDSON`, `PCApplyRichardson()`
@*/
PetscErrorCode PCApplyRichardsonExists(PC pc, PetscBool *exists)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(exists, 2);
  /* local query of the type's function table */
  if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
  else *exists = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCApplyRichardson - Applies several steps of Richardson iteration with
  the particular preconditioner. This routine is usually used by the
  Krylov solvers and not the application code directly.

  Collective

  Input Parameters:
+ pc        - the preconditioner context
. b         - the right hand side
. w         - one work vector
. rtol      - relative decrease in residual norm convergence criteria
. abstol    - absolute residual norm convergence criteria
. dtol      - divergence residual norm increase criteria
. its       - the number of iterations to apply.
- guesszero - if the input x contains nonzero initial guess

  Output Parameters:
+ outits - number of iterations actually used (for SOR this always equals its)
. reason - the reason the apply terminated
- y      - the solution (also contains initial guess if guesszero is `PETSC_FALSE`)

  Level: developer

  Notes:
  Most preconditioners do not support this function. Use the command
  `PCApplyRichardsonExists()` to determine if one does.
  Except for the `PCMG` this routine ignores the convergence tolerances
  and always runs for the number of iterations

.seealso: `PC`, `PCApplyRichardsonExists()`
@*/
PetscErrorCode PCApplyRichardson(PC pc, Vec b, Vec y, Vec w, PetscReal rtol, PetscReal abstol, PetscReal dtol, PetscInt its, PetscBool guesszero, PetscInt *outits, PCRichardsonConvergedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(b, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(y, VEC_CLASSID, 3);
  PetscValidHeaderSpecific(w, VEC_CLASSID, 4);
  PetscCheck(b != y, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "b and y must be different vectors");
  PetscCall(PCSetUp(pc));
  PetscUseTypeMethod(pc, applyrichardson, b, y, w, rtol, abstol, dtol, its, guesszero, outits, reason);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetFailedReason - Sets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail

  Logically Collective

  Input Parameters:
+ pc     - the preconditioner context
- reason - the reason it failed

  Level: advanced

.seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCFailedReason`
@*/
PetscErrorCode PCSetFailedReason(PC pc, PCFailedReason reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  pc->failedreason = reason;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetFailedReason - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. reason - the reason it failed

  Level: advanced

  Note:
  This is the maximum over reason over all ranks in the PC communicator. It is only valid after
  a call `KSPCheckDot()` or `KSPCheckNorm()` inside a `KSPSolve()` or `PCReduceFailedReason()`.
  It is not valid immediately after a `PCSetUp()` or `PCApply()`, then use `PCGetFailedReasonRank()`

.seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReasonRank()`, `PCSetFailedReason()`
@*/
PetscErrorCode PCGetFailedReason(PC pc, PCFailedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* a negative setupcalled encodes a setup failure (see PCSetUp()); otherwise report the stored reason */
  if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
  else *reason = pc->failedreason;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetFailedReasonRank - Gets the reason a `PCSetUp()` failed or `PC_NOERROR` if it did not fail on this MPI rank

  Not Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. reason - the reason it failed

  Level: advanced

  Note:
  Different ranks may have different reasons or no reason, see `PCGetFailedReason()`

.seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()`, `PCReduceFailedReason()`
@*/
PetscErrorCode PCGetFailedReasonRank(PC pc, PCFailedReason *reason)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
  else *reason = pc->failedreason;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCReduceFailedReason - Reduce the failed reason among the MPI processes that share the `PC`

  Collective

  Input Parameter:
. pc - the preconditioner context

  Level: advanced

  Note:
  Different MPI ranks may have different reasons or no reason, see `PCGetFailedReason()`. This routine
  makes them have a common value (failure if any MPI process had a failure).
985 986 .seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()`, `PCGetFailedReason()`, `PCSetFailedReason()` 987 @*/ 988 PetscErrorCode PCReduceFailedReason(PC pc) 989 { 990 PetscInt buf; 991 992 PetscFunctionBegin; 993 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 994 buf = (PetscInt)pc->failedreason; 995 PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &buf, 1, MPIU_INT, MPI_MAX, PetscObjectComm((PetscObject)pc))); 996 pc->failedreason = (PCFailedReason)buf; 997 PetscFunctionReturn(PETSC_SUCCESS); 998 } 999 1000 /* Next line needed to deactivate KSP_Solve logging */ 1001 #include <petsc/private/kspimpl.h> 1002 1003 /* 1004 a setupcall of 0 indicates never setup, 1005 1 indicates has been previously setup 1006 -1 indicates a PCSetUp() was attempted and failed 1007 */ 1008 /*@ 1009 PCSetUp - Prepares for the use of a preconditioner. 1010 1011 Collective 1012 1013 Input Parameter: 1014 . pc - the preconditioner context 1015 1016 Level: developer 1017 1018 .seealso: `PC`, `PCCreate()`, `PCApply()`, `PCDestroy()` 1019 @*/ 1020 PetscErrorCode PCSetUp(PC pc) 1021 { 1022 const char *def; 1023 PetscObjectState matstate, matnonzerostate; 1024 1025 PetscFunctionBegin; 1026 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1027 PetscCheck(pc->mat, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be set first"); 1028 1029 if (pc->setupcalled && pc->reusepreconditioner) { 1030 PetscCall(PetscInfo(pc, "Leaving PC with identical preconditioner since reuse preconditioner is set\n")); 1031 PetscFunctionReturn(PETSC_SUCCESS); 1032 } 1033 1034 PetscCall(PetscObjectStateGet((PetscObject)pc->pmat, &matstate)); 1035 PetscCall(MatGetNonzeroState(pc->pmat, &matnonzerostate)); 1036 if (!pc->setupcalled) { 1037 PetscCall(PetscInfo(pc, "Setting up PC for first time\n")); 1038 pc->flag = DIFFERENT_NONZERO_PATTERN; 1039 } else if (matstate == pc->matstate) PetscFunctionReturn(PETSC_SUCCESS); 1040 else { 1041 if (matnonzerostate != pc->matnonzerostate) { 1042 PetscCall(PetscInfo(pc, 
"Setting up PC with different nonzero pattern\n")); 1043 pc->flag = DIFFERENT_NONZERO_PATTERN; 1044 } else { 1045 PetscCall(PetscInfo(pc, "Setting up PC with same nonzero pattern\n")); 1046 pc->flag = SAME_NONZERO_PATTERN; 1047 } 1048 } 1049 pc->matstate = matstate; 1050 pc->matnonzerostate = matnonzerostate; 1051 1052 if (!((PetscObject)pc)->type_name) { 1053 PetscCall(PCGetDefaultType_Private(pc, &def)); 1054 PetscCall(PCSetType(pc, def)); 1055 } 1056 1057 PetscCall(MatSetErrorIfFailure(pc->pmat, pc->erroriffailure)); 1058 PetscCall(MatSetErrorIfFailure(pc->mat, pc->erroriffailure)); 1059 PetscCall(PetscLogEventBegin(PC_SetUp, pc, 0, 0, 0)); 1060 if (pc->ops->setup) { 1061 /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */ 1062 PetscCall(KSPInitializePackage()); 1063 PetscCall(PetscLogEventDeactivatePush(KSP_Solve)); 1064 PetscCall(PetscLogEventDeactivatePush(PC_Apply)); 1065 PetscUseTypeMethod(pc, setup); 1066 PetscCall(PetscLogEventDeactivatePop(KSP_Solve)); 1067 PetscCall(PetscLogEventDeactivatePop(PC_Apply)); 1068 } 1069 PetscCall(PetscLogEventEnd(PC_SetUp, pc, 0, 0, 0)); 1070 if (!pc->setupcalled) pc->setupcalled = 1; 1071 PetscFunctionReturn(PETSC_SUCCESS); 1072 } 1073 1074 /*@ 1075 PCSetUpOnBlocks - Sets up the preconditioner for each block in 1076 the block Jacobi, block Gauss-Seidel, and overlapping Schwarz 1077 methods. 1078 1079 Collective 1080 1081 Input Parameter: 1082 . pc - the preconditioner context 1083 1084 Level: developer 1085 1086 Note: 1087 For nested preconditioners such as `PCBJACOBI` `PCSetUp()` is not called on each sub-`KSP` when `PCSetUp()` is 1088 called on the outer `PC`, this routine ensures it is called. 
1089 1090 .seealso: `PC`, `PCSetUp()`, `PCCreate()`, `PCApply()`, `PCDestroy()` 1091 @*/ 1092 PetscErrorCode PCSetUpOnBlocks(PC pc) 1093 { 1094 PetscFunctionBegin; 1095 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1096 if (!pc->ops->setuponblocks) PetscFunctionReturn(PETSC_SUCCESS); 1097 PetscCall(PetscLogEventBegin(PC_SetUpOnBlocks, pc, 0, 0, 0)); 1098 PetscUseTypeMethod(pc, setuponblocks); 1099 PetscCall(PetscLogEventEnd(PC_SetUpOnBlocks, pc, 0, 0, 0)); 1100 PetscFunctionReturn(PETSC_SUCCESS); 1101 } 1102 1103 /*@C 1104 PCSetModifySubMatrices - Sets a user-defined routine for modifying the 1105 submatrices that arise within certain subdomain-based preconditioners. 1106 The basic submatrices are extracted from the preconditioner matrix as 1107 usual; the user can then alter these (for example, to set different boundary 1108 conditions for each submatrix) before they are used for the local solves. 1109 1110 Logically Collective 1111 1112 Input Parameters: 1113 + pc - the preconditioner context 1114 . func - routine for modifying the submatrices 1115 - ctx - optional user-defined context (may be null) 1116 1117 Calling sequence of `func`: 1118 $ PetscErrorCode func(PC pc, PetscInt nsub, IS *row, IS *col, Mat *submat, void *ctx); 1119 + pc - the preconditioner context 1120 . row - an array of index sets that contain the global row numbers 1121 that comprise each local submatrix 1122 . col - an array of index sets that contain the global column numbers 1123 that comprise each local submatrix 1124 . submat - array of local submatrices 1125 - ctx - optional user-defined context for private data for the 1126 user-defined func routine (may be null) 1127 1128 Level: advanced 1129 1130 Notes: 1131 `PCSetModifySubMatrices()` MUST be called before `KSPSetUp()` and 1132 `KSPSolve()`. 1133 1134 A routine set by `PCSetModifySubMatrices()` is currently called within 1135 the block Jacobi (`PCBJACOBI`) and additive Schwarz (`PCASM`) 1136 preconditioners. 
All other preconditioners ignore this routine. 1137 1138 .seealso: `PC`, `PCBJACOBI`, `PCASM`, `PCModifySubMatrices()` 1139 @*/ 1140 PetscErrorCode PCSetModifySubMatrices(PC pc, PetscErrorCode (*func)(PC, PetscInt, const IS[], const IS[], Mat[], void *), void *ctx) 1141 { 1142 PetscFunctionBegin; 1143 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1144 pc->modifysubmatrices = func; 1145 pc->modifysubmatricesP = ctx; 1146 PetscFunctionReturn(PETSC_SUCCESS); 1147 } 1148 1149 /*@C 1150 PCModifySubMatrices - Calls an optional user-defined routine within 1151 certain preconditioners if one has been set with `PCSetModifySubMatrices()`. 1152 1153 Collective 1154 1155 Input Parameters: 1156 + pc - the preconditioner context 1157 . nsub - the number of local submatrices 1158 . row - an array of index sets that contain the global row numbers 1159 that comprise each local submatrix 1160 . col - an array of index sets that contain the global column numbers 1161 that comprise each local submatrix 1162 . submat - array of local submatrices 1163 - ctx - optional user-defined context for private data for the 1164 user-defined routine (may be null) 1165 1166 Output Parameter: 1167 . submat - array of local submatrices (the entries of which may 1168 have been modified) 1169 1170 Level: developer 1171 1172 Notes: 1173 The user should NOT generally call this routine, as it will 1174 automatically be called within certain preconditioners (currently 1175 block Jacobi, additive Schwarz) if set. 1176 1177 The basic submatrices are extracted from the preconditioner matrix 1178 as usual; the user can then alter these (for example, to set different 1179 boundary conditions for each submatrix) before they are used for the 1180 local solves. 
1181 1182 .seealso: `PC`, `PCSetModifySubMatrices()` 1183 @*/ 1184 PetscErrorCode PCModifySubMatrices(PC pc, PetscInt nsub, const IS row[], const IS col[], Mat submat[], void *ctx) 1185 { 1186 PetscFunctionBegin; 1187 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1188 if (!pc->modifysubmatrices) PetscFunctionReturn(PETSC_SUCCESS); 1189 PetscCall(PetscLogEventBegin(PC_ModifySubMatrices, pc, 0, 0, 0)); 1190 PetscCall((*pc->modifysubmatrices)(pc, nsub, row, col, submat, ctx)); 1191 PetscCall(PetscLogEventEnd(PC_ModifySubMatrices, pc, 0, 0, 0)); 1192 PetscFunctionReturn(PETSC_SUCCESS); 1193 } 1194 1195 /*@ 1196 PCSetOperators - Sets the matrix associated with the linear system and 1197 a (possibly) different one associated with the preconditioner. 1198 1199 Logically Collective 1200 1201 Input Parameters: 1202 + pc - the preconditioner context 1203 . Amat - the matrix that defines the linear system 1204 - Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat. 1205 1206 Level: intermediate 1207 1208 Notes: 1209 Passing a `NULL` for `Amat` or `Pmat` removes the matrix that is currently used. 1210 1211 If you wish to replace either `Amat` or `Pmat` but leave the other one untouched then 1212 first call `KSPGetOperators()` to get the one you wish to keep, call `PetscObjectReference()` 1213 on it and then pass it back in in your call to `KSPSetOperators()`. 1214 1215 More Notes about Repeated Solution of Linear Systems: 1216 PETSc does NOT reset the matrix entries of either `Amat` or `Pmat` 1217 to zero after a linear solve; the user is completely responsible for 1218 matrix assembly. See the routine `MatZeroEntries()` if desiring to 1219 zero all elements of a matrix. 

.seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`
@*/
PetscErrorCode PCSetOperators(PC pc, Mat Amat, Mat Pmat)
{
  PetscInt m1, n1, m2, n2;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (Amat) PetscValidHeaderSpecific(Amat, MAT_CLASSID, 2);
  if (Pmat) PetscValidHeaderSpecific(Pmat, MAT_CLASSID, 3);
  if (Amat) PetscCheckSameComm(pc, 1, Amat, 2);
  if (Pmat) PetscCheckSameComm(pc, 1, Pmat, 3);
  /* once the PC has been set up the local sizes of the operators may not change */
  if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
    PetscCall(MatGetLocalSize(Amat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->mat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Amat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
    PetscCall(MatGetLocalSize(Pmat, &m1, &n1));
    PetscCall(MatGetLocalSize(pc->pmat, &m2, &n2));
    PetscCheck(m1 == m2 && n1 == n2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot change local size of Pmat after use old sizes %" PetscInt_FMT " %" PetscInt_FMT " new sizes %" PetscInt_FMT " %" PetscInt_FMT, m2, n2, m1, n1);
  }

  if (Pmat != pc->pmat) {
    /* the operator that defines the preconditioner is changing, so clear the cached states to force a rebuild of the preconditioner */
    pc->matnonzerostate = -1;
    pc->matstate        = -1;
  }

  /* reference first in case the matrices are the same */
  if (Amat) PetscCall(PetscObjectReference((PetscObject)Amat));
  PetscCall(MatDestroy(&pc->mat));
  if (Pmat) PetscCall(PetscObjectReference((PetscObject)Pmat));
  PetscCall(MatDestroy(&pc->pmat));
  pc->mat  = Amat;
  pc->pmat = Pmat;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.
1260 1261 Logically Collective 1262 1263 Input Parameters: 1264 + pc - the preconditioner context 1265 - flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner 1266 1267 Level: intermediate 1268 1269 Note: 1270 Normally if a matrix inside a `PC` changes the `PC` automatically updates itself using information from the changed matrix. This option 1271 prevents this. 1272 1273 .seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCGetReusePreconditioner()`, `KSPSetReusePreconditioner()` 1274 @*/ 1275 PetscErrorCode PCSetReusePreconditioner(PC pc, PetscBool flag) 1276 { 1277 PetscFunctionBegin; 1278 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1279 PetscValidLogicalCollectiveBool(pc, flag, 2); 1280 pc->reusepreconditioner = flag; 1281 PetscFunctionReturn(PETSC_SUCCESS); 1282 } 1283 1284 /*@ 1285 PCGetReusePreconditioner - Determines if the `PC` reuses the current preconditioner even if the operator in the preconditioner has changed. 1286 1287 Not Collective 1288 1289 Input Parameter: 1290 . pc - the preconditioner context 1291 1292 Output Parameter: 1293 . flag - `PETSC_TRUE` do not compute a new preconditioner, `PETSC_FALSE` do compute a new preconditioner 1294 1295 Level: intermediate 1296 1297 .seealso: `PC`, `PCGetOperators()`, `MatZeroEntries()`, `PCSetReusePreconditioner()` 1298 @*/ 1299 PetscErrorCode PCGetReusePreconditioner(PC pc, PetscBool *flag) 1300 { 1301 PetscFunctionBegin; 1302 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1303 PetscAssertPointer(flag, 2); 1304 *flag = pc->reusepreconditioner; 1305 PetscFunctionReturn(PETSC_SUCCESS); 1306 } 1307 1308 /*@ 1309 PCGetOperators - Gets the matrix associated with the linear system and 1310 possibly a different one associated with the preconditioner. 1311 1312 Not Collective, though parallel mats are returned if pc is parallel 1313 1314 Input Parameter: 1315 . 
pc - the preconditioner context 1316 1317 Output Parameters: 1318 + Amat - the matrix defining the linear system 1319 - Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat. 1320 1321 Level: intermediate 1322 1323 Note: 1324 Does not increase the reference count of the matrices, so you should not destroy them 1325 1326 Alternative usage: If the operators have NOT been set with `KSPSetOperators()`/`PCSetOperators()` then the operators 1327 are created in `PC` and returned to the user. In this case, if both operators 1328 mat and pmat are requested, two DIFFERENT operators will be returned. If 1329 only one is requested both operators in the PC will be the same (i.e. as 1330 if one had called `KSPSetOperators()`/`PCSetOperators()` with the same argument for both Mats). 1331 The user must set the sizes of the returned matrices and their type etc just 1332 as if the user created them with `MatCreate()`. For example, 1333 1334 .vb 1335 KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to 1336 set size, type, etc of Amat 1337 1338 MatCreate(comm,&mat); 1339 KSP/PCSetOperators(ksp/pc,Amat,Amat); 1340 PetscObjectDereference((PetscObject)mat); 1341 set size, type, etc of Amat 1342 .ve 1343 1344 and 1345 1346 .vb 1347 KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to 1348 set size, type, etc of Amat and Pmat 1349 1350 MatCreate(comm,&Amat); 1351 MatCreate(comm,&Pmat); 1352 KSP/PCSetOperators(ksp/pc,Amat,Pmat); 1353 PetscObjectDereference((PetscObject)Amat); 1354 PetscObjectDereference((PetscObject)Pmat); 1355 set size, type, etc of Amat and Pmat 1356 .ve 1357 1358 The rationale for this support is so that when creating a `TS`, `SNES`, or `KSP` the hierarchy 1359 of underlying objects (i.e. `SNES`, `KSP`, `PC`, `Mat`) and their lifespans can be completely 1360 managed by the top most level object (i.e. the `TS`, `SNES`, or `KSP`). 
Another way to look 1361 at this is when you create a `SNES` you do not NEED to create a `KSP` and attach it to 1362 the `SNES` object (the `SNES` object manages it for you). Similarly when you create a KSP 1363 you do not need to attach a `PC` to it (the `KSP` object manages the `PC` object for you). 1364 Thus, why should YOU have to create the `Mat` and attach it to the `SNES`/`KSP`/`PC`, when 1365 it can be created for you? 1366 1367 .seealso: `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperatorsSet()` 1368 @*/ 1369 PetscErrorCode PCGetOperators(PC pc, Mat *Amat, Mat *Pmat) 1370 { 1371 PetscFunctionBegin; 1372 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1373 if (Amat) { 1374 if (!pc->mat) { 1375 if (pc->pmat && !Pmat) { /* Pmat has been set, but user did not request it, so use for Amat */ 1376 pc->mat = pc->pmat; 1377 PetscCall(PetscObjectReference((PetscObject)pc->mat)); 1378 } else { /* both Amat and Pmat are empty */ 1379 PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->mat)); 1380 if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */ 1381 pc->pmat = pc->mat; 1382 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 1383 } 1384 } 1385 } 1386 *Amat = pc->mat; 1387 } 1388 if (Pmat) { 1389 if (!pc->pmat) { 1390 if (pc->mat && !Amat) { /* Amat has been set but was not requested, so use for pmat */ 1391 pc->pmat = pc->mat; 1392 PetscCall(PetscObjectReference((PetscObject)pc->pmat)); 1393 } else { 1394 PetscCall(MatCreate(PetscObjectComm((PetscObject)pc), &pc->pmat)); 1395 if (!Amat) { /* user did NOT request Amat, so make same as Pmat */ 1396 pc->mat = pc->pmat; 1397 PetscCall(PetscObjectReference((PetscObject)pc->mat)); 1398 } 1399 } 1400 } 1401 *Pmat = pc->pmat; 1402 } 1403 PetscFunctionReturn(PETSC_SUCCESS); 1404 } 1405 1406 /*@C 1407 PCGetOperatorsSet - Determines if the matrix associated with the linear system and 1408 possibly a different one associated with the preconditioner have been set in the 
`PC`. 1409 1410 Not Collective, though the results on all processes should be the same 1411 1412 Input Parameter: 1413 . pc - the preconditioner context 1414 1415 Output Parameters: 1416 + mat - the matrix associated with the linear system was set 1417 - pmat - matrix associated with the preconditioner was set, usually the same 1418 1419 Level: intermediate 1420 1421 .seealso: `PC`, `PCSetOperators()`, `KSPGetOperators()`, `KSPSetOperators()`, `PCGetOperators()` 1422 @*/ 1423 PetscErrorCode PCGetOperatorsSet(PC pc, PetscBool *mat, PetscBool *pmat) 1424 { 1425 PetscFunctionBegin; 1426 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1427 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1428 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1429 PetscFunctionReturn(PETSC_SUCCESS); 1430 } 1431 1432 /*@ 1433 PCFactorGetMatrix - Gets the factored matrix from the 1434 preconditioner context. This routine is valid only for the `PCLU`, 1435 `PCILU`, `PCCHOLESKY`, and `PCICC` methods. 1436 1437 Not Collective though mat is parallel if pc is parallel 1438 1439 Input Parameter: 1440 . pc - the preconditioner context 1441 1442 Output Parameters: 1443 . mat - the factored matrix 1444 1445 Level: advanced 1446 1447 Note: 1448 Does not increase the reference count for the matrix so DO NOT destroy it 1449 1450 .seealso: `PC`, `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC` 1451 @*/ 1452 PetscErrorCode PCFactorGetMatrix(PC pc, Mat *mat) 1453 { 1454 PetscFunctionBegin; 1455 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1456 PetscAssertPointer(mat, 2); 1457 PetscCall(PCFactorSetUpMatSolverType(pc)); 1458 PetscUseTypeMethod(pc, getfactoredmatrix, mat); 1459 PetscFunctionReturn(PETSC_SUCCESS); 1460 } 1461 1462 /*@C 1463 PCSetOptionsPrefix - Sets the prefix used for searching for all 1464 `PC` options in the database. 
1465 1466 Logically Collective 1467 1468 Input Parameters: 1469 + pc - the preconditioner context 1470 - prefix - the prefix string to prepend to all `PC` option requests 1471 1472 Note: 1473 A hyphen (-) must NOT be given at the beginning of the prefix name. 1474 The first character of all runtime options is AUTOMATICALLY the 1475 hyphen. 1476 1477 Level: advanced 1478 1479 .seealso: `PC`, `PCSetFromOptions`, `PCAppendOptionsPrefix()`, `PCGetOptionsPrefix()` 1480 @*/ 1481 PetscErrorCode PCSetOptionsPrefix(PC pc, const char prefix[]) 1482 { 1483 PetscFunctionBegin; 1484 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1485 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc, prefix)); 1486 PetscFunctionReturn(PETSC_SUCCESS); 1487 } 1488 1489 /*@C 1490 PCAppendOptionsPrefix - Appends to the prefix used for searching for all 1491 `PC` options in the database. 1492 1493 Logically Collective 1494 1495 Input Parameters: 1496 + pc - the preconditioner context 1497 - prefix - the prefix string to prepend to all `PC` option requests 1498 1499 Note: 1500 A hyphen (-) must NOT be given at the beginning of the prefix name. 1501 The first character of all runtime options is AUTOMATICALLY the 1502 hyphen. 1503 1504 Level: advanced 1505 1506 .seealso: `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCGetOptionsPrefix()` 1507 @*/ 1508 PetscErrorCode PCAppendOptionsPrefix(PC pc, const char prefix[]) 1509 { 1510 PetscFunctionBegin; 1511 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1512 PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)pc, prefix)); 1513 PetscFunctionReturn(PETSC_SUCCESS); 1514 } 1515 1516 /*@C 1517 PCGetOptionsPrefix - Gets the prefix used for searching for all 1518 PC options in the database. 1519 1520 Not Collective 1521 1522 Input Parameter: 1523 . pc - the preconditioner context 1524 1525 Output Parameter: 1526 . 
prefix - pointer to the prefix string used, is returned 1527 1528 Fortran Notes: 1529 The user should pass in a string 'prefix' of 1530 sufficient length to hold the prefix. 1531 1532 Level: advanced 1533 1534 .seealso: `PC`, `PCSetFromOptions`, `PCSetOptionsPrefix()`, `PCAppendOptionsPrefix()` 1535 @*/ 1536 PetscErrorCode PCGetOptionsPrefix(PC pc, const char *prefix[]) 1537 { 1538 PetscFunctionBegin; 1539 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1540 PetscAssertPointer(prefix, 2); 1541 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, prefix)); 1542 PetscFunctionReturn(PETSC_SUCCESS); 1543 } 1544 1545 /* 1546 Indicates the right hand side will be changed by KSPSolve(), this occurs for a few 1547 preconditioners including BDDC and Eisentat that transform the equations before applying 1548 the Krylov methods 1549 */ 1550 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc, PetscBool *change) 1551 { 1552 PetscFunctionBegin; 1553 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1554 PetscAssertPointer(change, 2); 1555 *change = PETSC_FALSE; 1556 PetscTryMethod(pc, "PCPreSolveChangeRHS_C", (PC, PetscBool *), (pc, change)); 1557 PetscFunctionReturn(PETSC_SUCCESS); 1558 } 1559 1560 /*@ 1561 PCPreSolve - Optional pre-solve phase, intended for any 1562 preconditioner-specific actions that must be performed before 1563 the iterative solve itself. 1564 1565 Collective 1566 1567 Input Parameters: 1568 + pc - the preconditioner context 1569 - ksp - the Krylov subspace context 1570 1571 Level: developer 1572 1573 Example Usage: 1574 .vb 1575 PCPreSolve(pc,ksp); 1576 KSPSolve(ksp,b,x); 1577 PCPostSolve(pc,ksp); 1578 .ve 1579 1580 Notes: 1581 The pre-solve phase is distinct from the `PCSetUp()` phase. 1582 1583 `KSPSolve()` calls this directly, so is rarely called by the user. 
1584 1585 .seealso: `PC`, `PCPostSolve()` 1586 @*/ 1587 PetscErrorCode PCPreSolve(PC pc, KSP ksp) 1588 { 1589 Vec x, rhs; 1590 1591 PetscFunctionBegin; 1592 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1593 PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2); 1594 pc->presolvedone++; 1595 PetscCheck(pc->presolvedone <= 2, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot embed PCPreSolve() more than twice"); 1596 PetscCall(KSPGetSolution(ksp, &x)); 1597 PetscCall(KSPGetRhs(ksp, &rhs)); 1598 1599 if (pc->ops->presolve) PetscUseTypeMethod(pc, presolve, ksp, rhs, x); 1600 else if (pc->presolve) PetscCall((pc->presolve)(pc, ksp)); 1601 PetscFunctionReturn(PETSC_SUCCESS); 1602 } 1603 1604 /*@C 1605 PCSetPreSolve - Sets function used by `PCPreSolve()` which is intended for any 1606 preconditioner-specific actions that must be performed before 1607 the iterative solve itself. 1608 1609 Logically Collective 1610 1611 Input Parameters: 1612 + pc - the preconditioner object 1613 - presolve - the function to call before the solve 1614 1615 Calling sequence of `presolve`: 1616 $ PetscErrorCode presolve(PC pc, KSP ksp) 1617 + pc - the `PC` context 1618 - ksp - the `KSP` context 1619 1620 Level: developer 1621 1622 .seealso: `PC`, `PCSetUp()`, `PCPreSolve()` 1623 @*/ 1624 PetscErrorCode PCSetPreSolve(PC pc, PetscErrorCode (*presolve)(PC, KSP)) 1625 { 1626 PetscFunctionBegin; 1627 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1628 pc->presolve = presolve; 1629 PetscFunctionReturn(PETSC_SUCCESS); 1630 } 1631 1632 /*@ 1633 PCPostSolve - Optional post-solve phase, intended for any 1634 preconditioner-specific actions that must be performed after 1635 the iterative solve itself. 

  Collective

  Input Parameters:
+ pc  - the preconditioner context
- ksp - the Krylov subspace context

  Example Usage:
.vb
  PCPreSolve(pc,ksp);
  KSPSolve(ksp,b,x);
  PCPostSolve(pc,ksp);
.ve

  Note:
  `KSPSolve()` calls this routine directly, so it is rarely called by the user.

  Level: developer

.seealso: `PC`, `PCSetPreSolve()`, `PCPreSolve()`, `KSPSolve()`
@*/
PetscErrorCode PCPostSolve(PC pc, KSP ksp)
{
  Vec x, rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(ksp, KSP_CLASSID, 2);
  /* undo the nesting count incremented by the matching PCPreSolve() */
  pc->presolvedone--;
  PetscCall(KSPGetSolution(ksp, &x));
  PetscCall(KSPGetRhs(ksp, &rhs));
  PetscTryTypeMethod(pc, postsolve, ksp, rhs, x);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCLoad - Loads a `PC` that has been stored in binary with `PCView()`.

  Collective

  Input Parameters:
+ newdm  - the newly loaded `PC`, this needs to have been created with `PCCreate()` or
           some related function before a call to `PCLoad()`.
- viewer - binary file viewer, obtained from `PetscViewerBinaryOpen()`

  Level: intermediate

  Note:
  The type is determined by the data in the file, any type set into the PC before this call is ignored.
1685 1686 .seealso: `PC`, `PetscViewerBinaryOpen()`, `PCView()`, `MatLoad()`, `VecLoad()` 1687 @*/ 1688 PetscErrorCode PCLoad(PC newdm, PetscViewer viewer) 1689 { 1690 PetscBool isbinary; 1691 PetscInt classid; 1692 char type[256]; 1693 1694 PetscFunctionBegin; 1695 PetscValidHeaderSpecific(newdm, PC_CLASSID, 1); 1696 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1697 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 1698 PetscCheck(isbinary, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 1699 1700 PetscCall(PetscViewerBinaryRead(viewer, &classid, 1, NULL, PETSC_INT)); 1701 PetscCheck(classid == PC_FILE_CLASSID, PetscObjectComm((PetscObject)newdm), PETSC_ERR_ARG_WRONG, "Not PC next in file"); 1702 PetscCall(PetscViewerBinaryRead(viewer, type, 256, NULL, PETSC_CHAR)); 1703 PetscCall(PCSetType(newdm, type)); 1704 PetscTryTypeMethod(newdm, load, viewer); 1705 PetscFunctionReturn(PETSC_SUCCESS); 1706 } 1707 1708 #include <petscdraw.h> 1709 #if defined(PETSC_HAVE_SAWS) 1710 #include <petscviewersaws.h> 1711 #endif 1712 1713 /*@C 1714 PCViewFromOptions - View from the `PC` based on options in the database 1715 1716 Collective 1717 1718 Input Parameters: 1719 + A - the `PC` context 1720 . 
obj - Optional object that provides the options prefix 1721 - name - command line option 1722 1723 Level: intermediate 1724 1725 .seealso: `PC`, `PCView`, `PetscObjectViewFromOptions()`, `PCCreate()` 1726 @*/ 1727 PetscErrorCode PCViewFromOptions(PC A, PetscObject obj, const char name[]) 1728 { 1729 PetscFunctionBegin; 1730 PetscValidHeaderSpecific(A, PC_CLASSID, 1); 1731 PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name)); 1732 PetscFunctionReturn(PETSC_SUCCESS); 1733 } 1734 1735 /*@C 1736 PCView - Prints information about the `PC` 1737 1738 Collective 1739 1740 Input Parameters: 1741 + pc - the `PC` context 1742 - viewer - optional visualization context 1743 1744 Level: developer 1745 1746 Notes: 1747 The available visualization contexts include 1748 + `PETSC_VIEWER_STDOUT_SELF` - standard output (default) 1749 - `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard 1750 output where only the first processor opens 1751 the file. All other processors send their 1752 data to the first processor to print. 1753 1754 The user can open an alternative visualization contexts with 1755 `PetscViewerASCIIOpen()` (output to a specified file). 
1756 1757 .seealso: `PC`, `PetscViewer`, `KSPView()`, `PetscViewerASCIIOpen()` 1758 @*/ 1759 PetscErrorCode PCView(PC pc, PetscViewer viewer) 1760 { 1761 PCType cstr; 1762 PetscBool iascii, isstring, isbinary, isdraw; 1763 #if defined(PETSC_HAVE_SAWS) 1764 PetscBool issaws; 1765 #endif 1766 1767 PetscFunctionBegin; 1768 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1769 if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &viewer)); 1770 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 1771 PetscCheckSameComm(pc, 1, viewer, 2); 1772 1773 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii)); 1774 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring)); 1775 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 1776 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw)); 1777 #if defined(PETSC_HAVE_SAWS) 1778 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws)); 1779 #endif 1780 1781 if (iascii) { 1782 PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)pc, viewer)); 1783 if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, " PC has not been set up so information may be incomplete\n")); 1784 PetscCall(PetscViewerASCIIPushTab(viewer)); 1785 PetscTryTypeMethod(pc, view, viewer); 1786 PetscCall(PetscViewerASCIIPopTab(viewer)); 1787 if (pc->mat) { 1788 PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_INFO)); 1789 if (pc->pmat == pc->mat) { 1790 PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix = precond matrix:\n")); 1791 PetscCall(PetscViewerASCIIPushTab(viewer)); 1792 PetscCall(MatView(pc->mat, viewer)); 1793 PetscCall(PetscViewerASCIIPopTab(viewer)); 1794 } else { 1795 if (pc->pmat) { 1796 PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix followed by preconditioner matrix:\n")); 1797 } else { 1798 
PetscCall(PetscViewerASCIIPrintf(viewer, " linear system matrix:\n")); 1799 } 1800 PetscCall(PetscViewerASCIIPushTab(viewer)); 1801 PetscCall(MatView(pc->mat, viewer)); 1802 if (pc->pmat) PetscCall(MatView(pc->pmat, viewer)); 1803 PetscCall(PetscViewerASCIIPopTab(viewer)); 1804 } 1805 PetscCall(PetscViewerPopFormat(viewer)); 1806 } 1807 } else if (isstring) { 1808 PetscCall(PCGetType(pc, &cstr)); 1809 PetscCall(PetscViewerStringSPrintf(viewer, " PCType: %-7.7s", cstr)); 1810 PetscTryTypeMethod(pc, view, viewer); 1811 if (pc->mat) PetscCall(MatView(pc->mat, viewer)); 1812 if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer)); 1813 } else if (isbinary) { 1814 PetscInt classid = PC_FILE_CLASSID; 1815 MPI_Comm comm; 1816 PetscMPIInt rank; 1817 char type[256]; 1818 1819 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm)); 1820 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 1821 if (rank == 0) { 1822 PetscCall(PetscViewerBinaryWrite(viewer, &classid, 1, PETSC_INT)); 1823 PetscCall(PetscStrncpy(type, ((PetscObject)pc)->type_name, 256)); 1824 PetscCall(PetscViewerBinaryWrite(viewer, type, 256, PETSC_CHAR)); 1825 } 1826 PetscTryTypeMethod(pc, view, viewer); 1827 } else if (isdraw) { 1828 PetscDraw draw; 1829 char str[25]; 1830 PetscReal x, y, bottom, h; 1831 PetscInt n; 1832 1833 PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw)); 1834 PetscCall(PetscDrawGetCurrentPoint(draw, &x, &y)); 1835 if (pc->mat) { 1836 PetscCall(MatGetSize(pc->mat, &n, NULL)); 1837 PetscCall(PetscSNPrintf(str, 25, "PC: %s (%" PetscInt_FMT ")", ((PetscObject)pc)->type_name, n)); 1838 } else { 1839 PetscCall(PetscSNPrintf(str, 25, "PC: %s", ((PetscObject)pc)->type_name)); 1840 } 1841 PetscCall(PetscDrawStringBoxed(draw, x, y, PETSC_DRAW_RED, PETSC_DRAW_BLACK, str, NULL, &h)); 1842 bottom = y - h; 1843 PetscCall(PetscDrawPushCurrentPoint(draw, x, bottom)); 1844 PetscTryTypeMethod(pc, view, viewer); 1845 PetscCall(PetscDrawPopCurrentPoint(draw)); 1846 #if defined(PETSC_HAVE_SAWS) 1847 
} else if (issaws) {
    PetscMPIInt rank;

    PetscCall(PetscObjectName((PetscObject)pc));
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
    /* publish the PC to SAWs only once (guarded by amsmem) and only from rank 0 */
    if (!((PetscObject)pc)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)pc, viewer));
    if (pc->mat) PetscCall(MatView(pc->mat, viewer));
    if (pc->pmat && pc->pmat != pc->mat) PetscCall(MatView(pc->pmat, viewer));
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCRegister - Adds a method to the preconditioner package.

  Not collective

  Input Parameters:
+ sname - name of a new user-defined solver
- function - routine to create method context

  Example Usage:
.vb
  PCRegister("my_solver", MySolverCreate);
.ve

  Then, your solver can be chosen with the procedural interface via
$ PCSetType(pc, "my_solver")
  or at runtime via the option
$ -pc_type my_solver

  Level: advanced

  Note:
  `PCRegister()` may be called multiple times to add several user-defined preconditioners.

.seealso: `PC`, `PCType`, `PCRegisterAll()`
@*/
PetscErrorCode PCRegister(const char sname[], PetscErrorCode (*function)(PC))
{
  PetscFunctionBegin;
  /* ensure PCList and the rest of the PC package state exist before registering */
  PetscCall(PCInitializePackage());
  PetscCall(PetscFunctionListAdd(&PCList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* MatMult callback for a shell matrix whose context is a PC: Y = application of the
   preconditioner to X. Used by PCComputeOperator() below to build the explicit operator. */
static PetscErrorCode MatMult_PC(Mat A, Vec X, Vec Y)
{
  PC pc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(A, &pc));
  PetscCall(PCApply(pc, X, Y));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCComputeOperator - Computes the explicit preconditioned operator.

  Collective

  Input Parameters:
+ pc - the preconditioner object
- mattype - the matrix type to be used for the operator

  Output Parameter:
. 
mat - the explicit preconditioned operator

  Level: advanced

  Note:
  This computation is done by applying the operators to columns of the identity matrix.
  This routine is costly in general, and is recommended for use only with relatively small systems.
  Currently, this routine uses a dense matrix format when mattype == NULL

.seealso: `PC`, `KSPComputeOperator()`, `MatType`
@*/
PetscErrorCode PCComputeOperator(PC pc, MatType mattype, Mat *mat)
{
  PetscInt N, M, m, n;
  Mat      A, Apc;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(mat, 3);
  /* size the shell operator like the PC's system matrix */
  PetscCall(PCGetOperators(pc, &A, NULL));
  PetscCall(MatGetLocalSize(A, &m, &n));
  PetscCall(MatGetSize(A, &M, &N));
  /* wrap the PC in a shell matrix whose MatMult applies the preconditioner, then let
     MatComputeOperator() form the dense/explicit representation column by column */
  PetscCall(MatCreateShell(PetscObjectComm((PetscObject)pc), m, n, M, N, pc, &Apc));
  PetscCall(MatShellSetOperation(Apc, MATOP_MULT, (void (*)(void))MatMult_PC));
  PetscCall(MatComputeOperator(Apc, mattype, mat));
  PetscCall(MatDestroy(&Apc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCSetCoordinates - sets the coordinates of all the nodes on the local process

  Collective

  Input Parameters:
+ pc - the solver context
. dim - the dimension of the coordinates 1, 2, or 3
. nloc - the blocked size of the coordinates array
- coords - the coordinates array

  Level: intermediate

  Note:
  `coords` is an array of the dim coordinates for the nodes on
  the local processor, of size `dim`*`nloc`.
  If there are 108 equations on a processor
  for a displacement finite element discretization of elasticity (so
  that there are nloc = 36 = 108/3 nodes) then the array must have 108
  double precision values (ie, 3 * 36). These x y z coordinates
  should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
  ... , N-1.z ].

.seealso: `PC`, `MatSetNearNullSpace()`
@*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, dim, 2);
  /* dispatch to the function composed on this PC under "PCSetCoordinates_C";
     PetscTryMethod is a no-op when the implementation does not provide one */
  PetscTryMethod(pc, "PCSetCoordinates_C", (PC, PetscInt, PetscInt, PetscReal[]), (pc, dim, nloc, coords));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ num_levels - the number of levels
- interpolations - the interpolation matrices (size of num_levels-1)

  Level: advanced

  Developer Notes:
  Why is this here instead of in `PCMG` etc?

.seealso: `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetInterpolation()`, `PCGetCoarseOperators()`
@*/
PetscErrorCode PCGetInterpolations(PC pc, PetscInt *num_levels, Mat *interpolations[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(num_levels, 2);
  PetscAssertPointer(interpolations, 3);
  /* dispatch to the function composed on this PC under "PCGetInterpolations_C" */
  PetscUseMethod(pc, "PCGetInterpolations_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, interpolations));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

  Logically Collective

  Input Parameter:
. pc - the preconditioner context

  Output Parameters:
+ num_levels - the number of levels
- coarseOperators - the coarse operator matrices (size of num_levels-1)

  Level: advanced

  Developer Notes:
  Why is this here instead of in `PCMG` etc?

.seealso: `PC`, `PCMG`, `PCMGGetRestriction()`, `PCMGSetInterpolation()`, `PCMGGetRScale()`, `PCMGGetInterpolation()`, `PCGetInterpolations()`
@*/
PetscErrorCode PCGetCoarseOperators(PC pc, PetscInt *num_levels, Mat *coarseOperators[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(num_levels, 2);
  PetscAssertPointer(coarseOperators, 3);
  /* dispatch to the function composed on this PC under "PCGetCoarseOperators_C" */
  PetscUseMethod(pc, "PCGetCoarseOperators_C", (PC, PetscInt *, Mat *[]), (pc, num_levels, coarseOperators));
  PetscFunctionReturn(PETSC_SUCCESS);
}