1 2 /* 3 The PC (preconditioner) interface routines, callable by users. 4 */ 5 #include <petsc/private/pcimpl.h> /*I "petscksp.h" I*/ 6 #include <petscdm.h> 7 8 /* Logging support */ 9 PetscClassId PC_CLASSID; 10 PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft; 11 PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks; 12 PetscInt PetscMGLevelId; 13 14 PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[]) 15 { 16 PetscErrorCode ierr; 17 PetscMPIInt size; 18 PetscBool hasop,flg1,flg2,set,flg3; 19 20 PetscFunctionBegin; 21 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRMPI(ierr); 22 if (pc->pmat) { 23 ierr = MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 24 if (size == 1) { 25 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);CHKERRQ(ierr); 26 ierr = MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);CHKERRQ(ierr); 27 ierr = MatIsSymmetricKnown(pc->pmat,&set,&flg3);CHKERRQ(ierr); 28 if (flg1 && (!flg2 || (set && flg3))) { 29 *type = PCICC; 30 } else if (flg2) { 31 *type = PCILU; 32 } else if (hasop) { /* likely is a parallel matrix run on one processor */ 33 *type = PCBJACOBI; 34 } else { 35 *type = PCNONE; 36 } 37 } else { 38 if (hasop) { 39 *type = PCBJACOBI; 40 } else { 41 *type = PCNONE; 42 } 43 } 44 } else { 45 if (size == 1) { 46 *type = PCILU; 47 } else { 48 *type = PCBJACOBI; 49 } 50 } 51 PetscFunctionReturn(0); 52 } 53 54 /*@ 55 PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats 56 57 Collective on PC 58 59 Input Parameter: 60 . 
pc - the preconditioner context 61 62 Level: developer 63 64 Notes: 65 This allows a PC to be reused for a different sized linear system but using the same options that have been previously set in the PC 66 67 .seealso: PCCreate(), PCSetUp() 68 @*/ 69 PetscErrorCode PCReset(PC pc) 70 { 71 PetscErrorCode ierr; 72 73 PetscFunctionBegin; 74 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 75 if (pc->ops->reset) { 76 ierr = (*pc->ops->reset)(pc);CHKERRQ(ierr); 77 } 78 ierr = VecDestroy(&pc->diagonalscaleright);CHKERRQ(ierr); 79 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 80 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 81 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 82 83 pc->setupcalled = 0; 84 PetscFunctionReturn(0); 85 } 86 87 /*@C 88 PCDestroy - Destroys PC context that was created with PCCreate(). 89 90 Collective on PC 91 92 Input Parameter: 93 . pc - the preconditioner context 94 95 Level: developer 96 97 .seealso: PCCreate(), PCSetUp() 98 @*/ 99 PetscErrorCode PCDestroy(PC *pc) 100 { 101 PetscErrorCode ierr; 102 103 PetscFunctionBegin; 104 if (!*pc) PetscFunctionReturn(0); 105 PetscValidHeaderSpecific((*pc),PC_CLASSID,1); 106 if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; PetscFunctionReturn(0);} 107 108 ierr = PCReset(*pc);CHKERRQ(ierr); 109 110 /* if memory was published with SAWs then destroy it */ 111 ierr = PetscObjectSAWsViewOff((PetscObject)*pc);CHKERRQ(ierr); 112 if ((*pc)->ops->destroy) {ierr = (*(*pc)->ops->destroy)((*pc));CHKERRQ(ierr);} 113 ierr = DMDestroy(&(*pc)->dm);CHKERRQ(ierr); 114 ierr = PetscHeaderDestroy(pc);CHKERRQ(ierr); 115 PetscFunctionReturn(0); 116 } 117 118 /*@C 119 PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right 120 scaling as needed by certain time-stepping codes. 121 122 Logically Collective on PC 123 124 Input Parameter: 125 . pc - the preconditioner context 126 127 Output Parameter: 128 . 
flag - PETSC_TRUE if it applies the scaling 129 130 Level: developer 131 132 Notes: 133 If this returns PETSC_TRUE then the system solved via the Krylov method is 134 $ D M A D^{-1} y = D M b for left preconditioning or 135 $ D A M D^{-1} z = D b for right preconditioning 136 137 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale() 138 @*/ 139 PetscErrorCode PCGetDiagonalScale(PC pc,PetscBool *flag) 140 { 141 PetscFunctionBegin; 142 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 143 PetscValidBoolPointer(flag,2); 144 *flag = pc->diagonalscale; 145 PetscFunctionReturn(0); 146 } 147 148 /*@ 149 PCSetDiagonalScale - Indicates the left scaling to use to apply an additional left and right 150 scaling as needed by certain time-stepping codes. 151 152 Logically Collective on PC 153 154 Input Parameters: 155 + pc - the preconditioner context 156 - s - scaling vector 157 158 Level: intermediate 159 160 Notes: 161 The system solved via the Krylov method is 162 $ D M A D^{-1} y = D M b for left preconditioning or 163 $ D A M D^{-1} z = D b for right preconditioning 164 165 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
166 167 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale() 168 @*/ 169 PetscErrorCode PCSetDiagonalScale(PC pc,Vec s) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 175 PetscValidHeaderSpecific(s,VEC_CLASSID,2); 176 pc->diagonalscale = PETSC_TRUE; 177 178 ierr = PetscObjectReference((PetscObject)s);CHKERRQ(ierr); 179 ierr = VecDestroy(&pc->diagonalscaleleft);CHKERRQ(ierr); 180 181 pc->diagonalscaleleft = s; 182 183 ierr = VecDuplicate(s,&pc->diagonalscaleright);CHKERRQ(ierr); 184 ierr = VecCopy(s,pc->diagonalscaleright);CHKERRQ(ierr); 185 ierr = VecReciprocal(pc->diagonalscaleright);CHKERRQ(ierr); 186 PetscFunctionReturn(0); 187 } 188 189 /*@ 190 PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes. 191 192 Logically Collective on PC 193 194 Input Parameters: 195 + pc - the preconditioner context 196 . in - input vector 197 - out - scaled vector (maybe the same as in) 198 199 Level: intermediate 200 201 Notes: 202 The system solved via the Krylov method is 203 $ D M A D^{-1} y = D M b for left preconditioning or 204 $ D A M D^{-1} z = D b for right preconditioning 205 206 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
207 208 If diagonal scaling is turned off and in is not out then in is copied to out 209 210 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale() 211 @*/ 212 PetscErrorCode PCDiagonalScaleLeft(PC pc,Vec in,Vec out) 213 { 214 PetscErrorCode ierr; 215 216 PetscFunctionBegin; 217 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 218 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 219 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 220 if (pc->diagonalscale) { 221 ierr = VecPointwiseMult(out,pc->diagonalscaleleft,in);CHKERRQ(ierr); 222 } else if (in != out) { 223 ierr = VecCopy(in,out);CHKERRQ(ierr); 224 } 225 PetscFunctionReturn(0); 226 } 227 228 /*@ 229 PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes. 230 231 Logically Collective on PC 232 233 Input Parameters: 234 + pc - the preconditioner context 235 . in - input vector 236 - out - scaled vector (maybe the same as in) 237 238 Level: intermediate 239 240 Notes: 241 The system solved via the Krylov method is 242 $ D M A D^{-1} y = D M b for left preconditioning or 243 $ D A M D^{-1} z = D b for right preconditioning 244 245 PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}. 
246 247 If diagonal scaling is turned off and in is not out then in is copied to out 248 249 .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale() 250 @*/ 251 PetscErrorCode PCDiagonalScaleRight(PC pc,Vec in,Vec out) 252 { 253 PetscErrorCode ierr; 254 255 PetscFunctionBegin; 256 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 257 PetscValidHeaderSpecific(in,VEC_CLASSID,2); 258 PetscValidHeaderSpecific(out,VEC_CLASSID,3); 259 if (pc->diagonalscale) { 260 ierr = VecPointwiseMult(out,pc->diagonalscaleright,in);CHKERRQ(ierr); 261 } else if (in != out) { 262 ierr = VecCopy(in,out);CHKERRQ(ierr); 263 } 264 PetscFunctionReturn(0); 265 } 266 267 /*@ 268 PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the 269 operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(), 270 TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat. 271 272 Logically Collective on PC 273 274 Input Parameters: 275 + pc - the preconditioner context 276 - flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false) 277 278 Options Database Key: 279 . -pc_use_amat <true,false> 280 281 Notes: 282 For the common case in which the linear system matrix and the matrix used to construct the 283 preconditioner are identical, this routine is does nothing. 284 285 Level: intermediate 286 287 .seealso: PCGetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 288 @*/ 289 PetscErrorCode PCSetUseAmat(PC pc,PetscBool flg) 290 { 291 PetscFunctionBegin; 292 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 293 pc->useAmat = flg; 294 PetscFunctionReturn(0); 295 } 296 297 /*@ 298 PCSetErrorIfFailure - Causes PC to generate an error if a FPE, for example a zero pivot, is detected. 
299 300 Logically Collective on PC 301 302 Input Parameters: 303 + pc - iterative context obtained from PCCreate() 304 - flg - PETSC_TRUE indicates you want the error generated 305 306 Level: advanced 307 308 Notes: 309 Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner, you can call KSPGetConvergedReason() after a KSPSolve() 310 to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is 311 detected. 312 313 This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs 314 315 .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll() 316 @*/ 317 PetscErrorCode PCSetErrorIfFailure(PC pc,PetscBool flg) 318 { 319 PetscFunctionBegin; 320 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 321 PetscValidLogicalCollectiveBool(pc,flg,2); 322 pc->erroriffailure = flg; 323 PetscFunctionReturn(0); 324 } 325 326 /*@ 327 PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the 328 operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(), 329 TSSetIJacobian(), SNESSetJacobian(), KSPSetOperator() or PCSetOperator() not the Pmat. 330 331 Logically Collective on PC 332 333 Input Parameter: 334 . pc - the preconditioner context 335 336 Output Parameter: 337 . flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false) 338 339 Notes: 340 For the common case in which the linear system matrix and the matrix used to construct the 341 preconditioner are identical, this routine is does nothing. 
342 343 Level: intermediate 344 345 .seealso: PCSetUseAmat(), PCBJACOBI, PGMG, PCFIELDSPLIT, PCCOMPOSITE 346 @*/ 347 PetscErrorCode PCGetUseAmat(PC pc,PetscBool *flg) 348 { 349 PetscFunctionBegin; 350 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 351 *flg = pc->useAmat; 352 PetscFunctionReturn(0); 353 } 354 355 /*@ 356 PCCreate - Creates a preconditioner context. 357 358 Collective 359 360 Input Parameter: 361 . comm - MPI communicator 362 363 Output Parameter: 364 . pc - location to put the preconditioner context 365 366 Notes: 367 The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC 368 in parallel. For dense matrices it is always PCNONE. 369 370 Level: developer 371 372 .seealso: PCSetUp(), PCApply(), PCDestroy() 373 @*/ 374 PetscErrorCode PCCreate(MPI_Comm comm,PC *newpc) 375 { 376 PC pc; 377 PetscErrorCode ierr; 378 379 PetscFunctionBegin; 380 PetscValidPointer(newpc,2); 381 *newpc = NULL; 382 ierr = PCInitializePackage();CHKERRQ(ierr); 383 384 ierr = PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);CHKERRQ(ierr); 385 386 pc->mat = NULL; 387 pc->pmat = NULL; 388 pc->setupcalled = 0; 389 pc->setfromoptionscalled = 0; 390 pc->data = NULL; 391 pc->diagonalscale = PETSC_FALSE; 392 pc->diagonalscaleleft = NULL; 393 pc->diagonalscaleright = NULL; 394 395 pc->modifysubmatrices = NULL; 396 pc->modifysubmatricesP = NULL; 397 398 *newpc = pc; 399 PetscFunctionReturn(0); 400 401 } 402 403 /* -------------------------------------------------------------------------------*/ 404 405 /*@ 406 PCApply - Applies the preconditioner to a vector. 407 408 Collective on PC 409 410 Input Parameters: 411 + pc - the preconditioner context 412 - x - input vector 413 414 Output Parameter: 415 . 
y - output vector 416 417 Level: developer 418 419 .seealso: PCApplyTranspose(), PCApplyBAorAB() 420 @*/ 421 PetscErrorCode PCApply(PC pc,Vec x,Vec y) 422 { 423 PetscErrorCode ierr; 424 PetscInt m,n,mv,nv; 425 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 428 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 429 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 430 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 431 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 432 /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */ 433 ierr = MatGetLocalSize(pc->pmat,&m,&n);CHKERRQ(ierr); 434 ierr = VecGetLocalSize(x,&mv);CHKERRQ(ierr); 435 ierr = VecGetLocalSize(y,&nv);CHKERRQ(ierr); 436 /* check pmat * y = x is feasible */ 437 if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal input vector size %D",m,mv); 438 if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal output vector size %D",n,nv); 439 ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr); 440 441 ierr = PCSetUp(pc);CHKERRQ(ierr); 442 if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply"); 443 ierr = VecLockReadPush(x);CHKERRQ(ierr); 444 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 445 ierr = (*pc->ops->apply)(pc,x,y);CHKERRQ(ierr); 446 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 447 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 448 ierr = VecLockReadPop(x);CHKERRQ(ierr); 449 PetscFunctionReturn(0); 450 } 451 452 /*@ 453 PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices. 
454 455 Collective on PC 456 457 Input Parameters: 458 + pc - the preconditioner context 459 - X - block of input vectors 460 461 Output Parameter: 462 . Y - block of output vectors 463 464 Level: developer 465 466 .seealso: PCApply(), KSPMatSolve() 467 @*/ 468 PetscErrorCode PCMatApply(PC pc,Mat X,Mat Y) 469 { 470 Mat A; 471 Vec cy, cx; 472 PetscInt m1, M1, m2, M2, n1, N1, n2, N2; 473 PetscBool match; 474 PetscErrorCode ierr; 475 476 PetscFunctionBegin; 477 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 478 PetscValidHeaderSpecific(X, MAT_CLASSID, 2); 479 PetscValidHeaderSpecific(Y, MAT_CLASSID, 3); 480 PetscCheckSameComm(pc, 1, X, 2); 481 PetscCheckSameComm(pc, 1, Y, 3); 482 if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices"); 483 ierr = PCGetOperators(pc, NULL, &A);CHKERRQ(ierr); 484 ierr = MatGetLocalSize(A, &m1, NULL);CHKERRQ(ierr); 485 ierr = MatGetLocalSize(Y, &m2, &n2);CHKERRQ(ierr); 486 ierr = MatGetSize(A, &M1, NULL);CHKERRQ(ierr); 487 ierr = MatGetSize(X, &M2, &N2);CHKERRQ(ierr); 488 if (m1 != m2 || M1 != M2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot use a block of input vectors with (m2,M2) = (%D,%D) for a preconditioner with (m1,M1) = (%D,%D)", m2, M2, m1, M1); 489 ierr = MatGetLocalSize(Y, &m1, &n1);CHKERRQ(ierr); 490 ierr = MatGetSize(Y, &M1, &N1);CHKERRQ(ierr); 491 if (m1 != m2 || M1 != M2 || n1 != n2 || N1 != N2) SETERRQ8(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible block of input vectors (m2,M2)x(n2,N2) = (%D,%D)x(%D,%D) and output vectors (m1,M1)x(n1,N1) = (%D,%D)x(%D,%D)", m2, M2, n2, N2, m1, M1, n1, N1); 492 ierr = PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr); 493 if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat"); 494 ierr = PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");CHKERRQ(ierr); 495 if (!match) 
SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat"); 496 ierr = PCSetUp(pc);CHKERRQ(ierr); 497 if (pc->ops->matapply) { 498 ierr = PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr); 499 ierr = (*pc->ops->matapply)(pc, X, Y);CHKERRQ(ierr); 500 ierr = PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);CHKERRQ(ierr); 501 } else { 502 ierr = PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);CHKERRQ(ierr); 503 for (n2 = 0; n2 < N2; ++n2) { 504 ierr = MatDenseGetColumnVecRead(X, n2, &cx);CHKERRQ(ierr); 505 ierr = MatDenseGetColumnVecWrite(Y, n2, &cy);CHKERRQ(ierr); 506 ierr = PCApply(pc, cx, cy);CHKERRQ(ierr); 507 ierr = MatDenseRestoreColumnVecWrite(Y, n2, &cy);CHKERRQ(ierr); 508 ierr = MatDenseRestoreColumnVecRead(X, n2, &cx);CHKERRQ(ierr); 509 } 510 } 511 PetscFunctionReturn(0); 512 } 513 514 /*@ 515 PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector. 516 517 Collective on PC 518 519 Input Parameters: 520 + pc - the preconditioner context 521 - x - input vector 522 523 Output Parameter: 524 . y - output vector 525 526 Notes: 527 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
528 529 Level: developer 530 531 .seealso: PCApply(), PCApplySymmetricRight() 532 @*/ 533 PetscErrorCode PCApplySymmetricLeft(PC pc,Vec x,Vec y) 534 { 535 PetscErrorCode ierr; 536 537 PetscFunctionBegin; 538 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 539 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 540 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 541 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 542 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 543 ierr = PCSetUp(pc);CHKERRQ(ierr); 544 if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 545 ierr = VecLockReadPush(x);CHKERRQ(ierr); 546 ierr = PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 547 ierr = (*pc->ops->applysymmetricleft)(pc,x,y);CHKERRQ(ierr); 548 ierr = PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);CHKERRQ(ierr); 549 ierr = VecLockReadPop(x);CHKERRQ(ierr); 550 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 551 PetscFunctionReturn(0); 552 } 553 554 /*@ 555 PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector. 556 557 Collective on PC 558 559 Input Parameters: 560 + pc - the preconditioner context 561 - x - input vector 562 563 Output Parameter: 564 . y - output vector 565 566 Level: developer 567 568 Notes: 569 Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners. 
570 571 .seealso: PCApply(), PCApplySymmetricLeft() 572 @*/ 573 PetscErrorCode PCApplySymmetricRight(PC pc,Vec x,Vec y) 574 { 575 PetscErrorCode ierr; 576 577 PetscFunctionBegin; 578 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 579 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 580 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 581 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 582 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 583 ierr = PCSetUp(pc);CHKERRQ(ierr); 584 if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply"); 585 ierr = VecLockReadPush(x);CHKERRQ(ierr); 586 ierr = PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 587 ierr = (*pc->ops->applysymmetricright)(pc,x,y);CHKERRQ(ierr); 588 ierr = PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);CHKERRQ(ierr); 589 ierr = VecLockReadPop(x);CHKERRQ(ierr); 590 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 591 PetscFunctionReturn(0); 592 } 593 594 /*@ 595 PCApplyTranspose - Applies the transpose of preconditioner to a vector. 596 597 Collective on PC 598 599 Input Parameters: 600 + pc - the preconditioner context 601 - x - input vector 602 603 Output Parameter: 604 . y - output vector 605 606 Notes: 607 For complex numbers this applies the non-Hermitian transpose. 
608 609 Developer Notes: 610 We need to implement a PCApplyHermitianTranspose() 611 612 Level: developer 613 614 .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists() 615 @*/ 616 PetscErrorCode PCApplyTranspose(PC pc,Vec x,Vec y) 617 { 618 PetscErrorCode ierr; 619 620 PetscFunctionBegin; 621 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 622 PetscValidHeaderSpecific(x,VEC_CLASSID,2); 623 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 624 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 625 if (pc->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);} 626 ierr = PCSetUp(pc);CHKERRQ(ierr); 627 if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose"); 628 ierr = VecLockReadPush(x);CHKERRQ(ierr); 629 ierr = PetscLogEventBegin(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 630 ierr = (*pc->ops->applytranspose)(pc,x,y);CHKERRQ(ierr); 631 ierr = PetscLogEventEnd(PC_Apply,pc,x,y,0);CHKERRQ(ierr); 632 ierr = VecLockReadPop(x);CHKERRQ(ierr); 633 if (pc->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);} 634 PetscFunctionReturn(0); 635 } 636 637 /*@ 638 PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation 639 640 Collective on PC 641 642 Input Parameters: 643 . pc - the preconditioner context 644 645 Output Parameter: 646 . flg - PETSC_TRUE if a transpose operation is defined 647 648 Level: developer 649 650 .seealso: PCApplyTranspose() 651 @*/ 652 PetscErrorCode PCApplyTransposeExists(PC pc,PetscBool *flg) 653 { 654 PetscFunctionBegin; 655 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 656 PetscValidBoolPointer(flg,2); 657 if (pc->ops->applytranspose) *flg = PETSC_TRUE; 658 else *flg = PETSC_FALSE; 659 PetscFunctionReturn(0); 660 } 661 662 /*@ 663 PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x. 
664 665 Collective on PC 666 667 Input Parameters: 668 + pc - the preconditioner context 669 . side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 670 . x - input vector 671 - work - work vector 672 673 Output Parameter: 674 . y - output vector 675 676 Level: developer 677 678 Notes: 679 If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} is actually applied. Note that the 680 specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling. 681 682 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose() 683 @*/ 684 PetscErrorCode PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work) 685 { 686 PetscErrorCode ierr; 687 688 PetscFunctionBegin; 689 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 690 PetscValidLogicalCollectiveEnum(pc,side,2); 691 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 692 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 693 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 694 PetscCheckSameComm(pc,1,x,3); 695 PetscCheckSameComm(pc,1,y,4); 696 PetscCheckSameComm(pc,1,work,5); 697 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 698 if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric"); 699 if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application"); 700 if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);} 701 702 ierr = PCSetUp(pc);CHKERRQ(ierr); 703 if (pc->diagonalscale) { 704 if (pc->ops->applyBA) { 705 Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */ 706 ierr = VecDuplicate(x,&work2);CHKERRQ(ierr); 707 ierr = 
PCDiagonalScaleRight(pc,x,work2);CHKERRQ(ierr); 708 ierr = (*pc->ops->applyBA)(pc,side,work2,y,work);CHKERRQ(ierr); 709 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 710 ierr = VecDestroy(&work2);CHKERRQ(ierr); 711 } else if (side == PC_RIGHT) { 712 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 713 ierr = PCApply(pc,y,work);CHKERRQ(ierr); 714 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 715 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 716 } else if (side == PC_LEFT) { 717 ierr = PCDiagonalScaleRight(pc,x,y);CHKERRQ(ierr); 718 ierr = MatMult(pc->mat,y,work);CHKERRQ(ierr); 719 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 720 ierr = PCDiagonalScaleLeft(pc,y,y);CHKERRQ(ierr); 721 } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner"); 722 } else { 723 if (pc->ops->applyBA) { 724 ierr = (*pc->ops->applyBA)(pc,side,x,y,work);CHKERRQ(ierr); 725 } else if (side == PC_RIGHT) { 726 ierr = PCApply(pc,x,work);CHKERRQ(ierr); 727 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 728 } else if (side == PC_LEFT) { 729 ierr = MatMult(pc->mat,x,work);CHKERRQ(ierr); 730 ierr = PCApply(pc,work,y);CHKERRQ(ierr); 731 } else if (side == PC_SYMMETRIC) { 732 /* There's an extra copy here; maybe should provide 2 work vectors instead? */ 733 ierr = PCApplySymmetricRight(pc,x,work);CHKERRQ(ierr); 734 ierr = MatMult(pc->mat,work,y);CHKERRQ(ierr); 735 ierr = VecCopy(y,work);CHKERRQ(ierr); 736 ierr = PCApplySymmetricLeft(pc,work,y);CHKERRQ(ierr); 737 } 738 } 739 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 740 PetscFunctionReturn(0); 741 } 742 743 /*@ 744 PCApplyBAorABTranspose - Applies the transpose of the preconditioner 745 and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning, 746 NOT tr(B*A) = tr(A)*tr(B). 747 748 Collective on PC 749 750 Input Parameters: 751 + pc - the preconditioner context 752 . 
side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC 753 . x - input vector 754 - work - work vector 755 756 Output Parameter: 757 . y - output vector 758 759 Notes: 760 this routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner 761 defined by B'. This is why this has the funny form that it computes tr(B) * tr(A) 762 763 Level: developer 764 765 .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB() 766 @*/ 767 PetscErrorCode PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work) 768 { 769 PetscErrorCode ierr; 770 771 PetscFunctionBegin; 772 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 773 PetscValidHeaderSpecific(x,VEC_CLASSID,3); 774 PetscValidHeaderSpecific(y,VEC_CLASSID,4); 775 PetscValidHeaderSpecific(work,VEC_CLASSID,5); 776 if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors"); 777 if (pc->erroriffailure) {ierr = VecValidValues(x,3,PETSC_TRUE);CHKERRQ(ierr);} 778 if (pc->ops->applyBAtranspose) { 779 ierr = (*pc->ops->applyBAtranspose)(pc,side,x,y,work);CHKERRQ(ierr); 780 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 781 PetscFunctionReturn(0); 782 } 783 if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left"); 784 785 ierr = PCSetUp(pc);CHKERRQ(ierr); 786 if (side == PC_RIGHT) { 787 ierr = PCApplyTranspose(pc,x,work);CHKERRQ(ierr); 788 ierr = MatMultTranspose(pc->mat,work,y);CHKERRQ(ierr); 789 } else if (side == PC_LEFT) { 790 ierr = MatMultTranspose(pc->mat,x,work);CHKERRQ(ierr); 791 ierr = PCApplyTranspose(pc,work,y);CHKERRQ(ierr); 792 } 793 /* add support for PC_SYMMETRIC */ 794 if (pc->erroriffailure) {ierr = VecValidValues(y,4,PETSC_FALSE);CHKERRQ(ierr);} 795 PetscFunctionReturn(0); 796 } 797 798 /* -------------------------------------------------------------------------------*/ 799 
800 /*@ 801 PCApplyRichardsonExists - Determines whether a particular preconditioner has a 802 built-in fast application of Richardson's method. 803 804 Not Collective 805 806 Input Parameter: 807 . pc - the preconditioner 808 809 Output Parameter: 810 . exists - PETSC_TRUE or PETSC_FALSE 811 812 Level: developer 813 814 .seealso: PCApplyRichardson() 815 @*/ 816 PetscErrorCode PCApplyRichardsonExists(PC pc,PetscBool *exists) 817 { 818 PetscFunctionBegin; 819 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 820 PetscValidPointer(exists,2); 821 if (pc->ops->applyrichardson) *exists = PETSC_TRUE; 822 else *exists = PETSC_FALSE; 823 PetscFunctionReturn(0); 824 } 825 826 /*@ 827 PCApplyRichardson - Applies several steps of Richardson iteration with 828 the particular preconditioner. This routine is usually used by the 829 Krylov solvers and not the application code directly. 830 831 Collective on PC 832 833 Input Parameters: 834 + pc - the preconditioner context 835 . b - the right hand side 836 . w - one work vector 837 . rtol - relative decrease in residual norm convergence criteria 838 . abstol - absolute residual norm convergence criteria 839 . dtol - divergence residual norm increase criteria 840 . its - the number of iterations to apply. 841 - guesszero - if the input x contains nonzero initial guess 842 843 Output Parameter: 844 + outits - number of iterations actually used (for SOR this always equals its) 845 . reason - the reason the apply terminated 846 - y - the solution (also contains initial guess if guesszero is PETSC_FALSE 847 848 Notes: 849 Most preconditioners do not support this function. Use the command 850 PCApplyRichardsonExists() to determine if one does. 
851 852 Except for the multigrid PC this routine ignores the convergence tolerances 853 and always runs for the number of iterations 854 855 Level: developer 856 857 .seealso: PCApplyRichardsonExists() 858 @*/ 859 PetscErrorCode PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason) 860 { 861 PetscErrorCode ierr; 862 863 PetscFunctionBegin; 864 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 865 PetscValidHeaderSpecific(b,VEC_CLASSID,2); 866 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 867 PetscValidHeaderSpecific(w,VEC_CLASSID,4); 868 if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors"); 869 ierr = PCSetUp(pc);CHKERRQ(ierr); 870 if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson"); 871 ierr = (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);CHKERRQ(ierr); 872 PetscFunctionReturn(0); 873 } 874 875 /*@ 876 PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 877 878 Logically Collective on PC 879 880 Input Parameter: 881 + pc - the preconditioner context 882 - reason - the reason it failedx 883 884 Level: advanced 885 886 .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason 887 @*/ 888 PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason) 889 { 890 PetscFunctionBegin; 891 pc->failedreason = reason; 892 PetscFunctionReturn(0); 893 } 894 895 /*@ 896 PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail 897 898 Logically Collective on PC 899 900 Input Parameter: 901 . pc - the preconditioner context 902 903 Output Parameter: 904 . reason - the reason it failed 905 906 Level: advanced 907 908 Notes: This is the maximum over reason over all ranks in the PC communicator. 
It is only valid after 909 a call KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp() 910 or PCApply(), then use PCGetFailedReasonRank() 911 912 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason() 913 @*/ 914 PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason) 915 { 916 PetscFunctionBegin; 917 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 918 else *reason = pc->failedreason; 919 PetscFunctionReturn(0); 920 } 921 922 /*@ 923 PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank 924 925 Not Collective on PC 926 927 Input Parameter: 928 . pc - the preconditioner context 929 930 Output Parameter: 931 . reason - the reason it failed 932 933 Notes: 934 Different ranks may have different reasons or no reason, see PCGetFailedReason() 935 936 Level: advanced 937 938 .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason() 939 @*/ 940 PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason) 941 { 942 PetscFunctionBegin; 943 if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled; 944 else *reason = pc->failedreason; 945 PetscFunctionReturn(0); 946 } 947 948 /* Next line needed to deactivate KSP_Solve logging */ 949 #include <petsc/private/kspimpl.h> 950 951 /* 952 a setupcall of 0 indicates never setup, 953 1 indicates has been previously setup 954 -1 indicates a PCSetUp() was attempted and failed 955 */ 956 /*@ 957 PCSetUp - Prepares for the use of a preconditioner. 958 959 Collective on PC 960 961 Input Parameter: 962 . 
pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy()
@*/
PetscErrorCode PCSetUp(PC pc)
{
  PetscErrorCode   ierr;
  const char       *def;
  PetscObjectState matstate, matnonzerostate;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

  if (pc->setupcalled && pc->reusepreconditioner) {
    /* user explicitly asked to keep the existing preconditioner regardless of operator changes */
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* compare the preconditioner matrix state against the state recorded at the last setup to decide
     between a full setup, a setup reusing the nonzero pattern, or skipping setup entirely */
  ierr = PetscObjectStateGet((PetscObject)pc->pmat,&matstate);CHKERRQ(ierr);
  ierr = MatGetNonzeroState(pc->pmat,&matnonzerostate);CHKERRQ(ierr);
  if (!pc->setupcalled) {
    ierr = PetscInfo(pc,"Setting up PC for first time\n");CHKERRQ(ierr);
    pc->flag = DIFFERENT_NONZERO_PATTERN;
  } else if (matstate == pc->matstate) {
    /* matrix values unchanged since the previous setup: nothing to rebuild */
    ierr = PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");CHKERRQ(ierr);
    PetscFunctionReturn(0);
  } else {
    if (matnonzerostate > pc->matnonzerostate) {
      ierr = PetscInfo(pc,"Setting up PC with different nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = DIFFERENT_NONZERO_PATTERN;
    } else {
      /* values changed but the sparsity pattern did not; implementations may reuse symbolic work */
      ierr = PetscInfo(pc,"Setting up PC with same nonzero pattern\n");CHKERRQ(ierr);
      pc->flag = SAME_NONZERO_PATTERN;
    }
  }
  /* remember the states just handled so the next call can detect changes */
  pc->matstate        = matstate;
  pc->matnonzerostate = matnonzerostate;

  /* pick a sensible default type if the user never chose one (see PCGetDefaultType_Private()) */
  if (!((PetscObject)pc)->type_name) {
    ierr = PCGetDefaultType_Private(pc,&def);CHKERRQ(ierr);
    ierr = PCSetType(pc,def);CHKERRQ(ierr);
  }

  ierr = MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = MatSetErrorIfFailure(pc->mat,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  if (pc->ops->setup) {
    /* do not log solves and applications of preconditioners while constructing preconditioners;
       perhaps they should be logged separately from the regular solves */
    ierr = PetscLogEventDeactivatePush(KSP_Solve);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePush(PC_Apply);CHKERRQ(ierr);
    ierr = (*pc->ops->setup)(pc);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePop(KSP_Solve);CHKERRQ(ierr);
    ierr = PetscLogEventDeactivatePop(PC_Apply);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(PC_SetUp,pc,0,0,0);CHKERRQ(ierr);
  /* only transition 0 -> 1; a failed setup may have stored a negative value that must be preserved */
  if (!pc->setupcalled) pc->setupcalled = 1;
  PetscFunctionReturn(0);
}

/*@
   PCSetUpOnBlocks - Sets up the preconditioner for each block in
   the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
   methods.

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Level: developer

.seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
@*/
PetscErrorCode PCSetUpOnBlocks(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* only subdomain-based implementations provide this operation; for all others this is a no-op */
  if (!pc->ops->setuponblocks) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  ierr = (*pc->ops->setuponblocks)(pc);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCSetModifySubMatrices - Sets a user-defined routine for modifying the
   submatrices that arise within certain subdomain-based preconditioners.
   The basic submatrices are extracted from the preconditioner matrix as
   usual; the user can then alter these (for example, to set different boundary
   conditions for each submatrix) before they are used for the local solves.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
.
func - routine for modifying the submatrices 1063 - ctx - optional user-defined context (may be null) 1064 1065 Calling sequence of func: 1066 $ func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx); 1067 1068 + row - an array of index sets that contain the global row numbers 1069 that comprise each local submatrix 1070 . col - an array of index sets that contain the global column numbers 1071 that comprise each local submatrix 1072 . submat - array of local submatrices 1073 - ctx - optional user-defined context for private data for the 1074 user-defined func routine (may be null) 1075 1076 Notes: 1077 PCSetModifySubMatrices() MUST be called before KSPSetUp() and 1078 KSPSolve(). 1079 1080 A routine set by PCSetModifySubMatrices() is currently called within 1081 the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM) 1082 preconditioners. All other preconditioners ignore this routine. 1083 1084 Level: advanced 1085 1086 .seealso: PCModifySubMatrices() 1087 @*/ 1088 PetscErrorCode PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx) 1089 { 1090 PetscFunctionBegin; 1091 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1092 pc->modifysubmatrices = func; 1093 pc->modifysubmatricesP = ctx; 1094 PetscFunctionReturn(0); 1095 } 1096 1097 /*@C 1098 PCModifySubMatrices - Calls an optional user-defined routine within 1099 certain preconditioners if one has been set with PCSetModifySubMatrices(). 1100 1101 Collective on PC 1102 1103 Input Parameters: 1104 + pc - the preconditioner context 1105 . nsub - the number of local submatrices 1106 . row - an array of index sets that contain the global row numbers 1107 that comprise each local submatrix 1108 . col - an array of index sets that contain the global column numbers 1109 that comprise each local submatrix 1110 . 
submat - array of local submatrices 1111 - ctx - optional user-defined context for private data for the 1112 user-defined routine (may be null) 1113 1114 Output Parameter: 1115 . submat - array of local submatrices (the entries of which may 1116 have been modified) 1117 1118 Notes: 1119 The user should NOT generally call this routine, as it will 1120 automatically be called within certain preconditioners (currently 1121 block Jacobi, additive Schwarz) if set. 1122 1123 The basic submatrices are extracted from the preconditioner matrix 1124 as usual; the user can then alter these (for example, to set different 1125 boundary conditions for each submatrix) before they are used for the 1126 local solves. 1127 1128 Level: developer 1129 1130 .seealso: PCSetModifySubMatrices() 1131 @*/ 1132 PetscErrorCode PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx) 1133 { 1134 PetscErrorCode ierr; 1135 1136 PetscFunctionBegin; 1137 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1138 if (!pc->modifysubmatrices) PetscFunctionReturn(0); 1139 ierr = PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr); 1140 ierr = (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);CHKERRQ(ierr); 1141 ierr = PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);CHKERRQ(ierr); 1142 PetscFunctionReturn(0); 1143 } 1144 1145 /*@ 1146 PCSetOperators - Sets the matrix associated with the linear system and 1147 a (possibly) different one associated with the preconditioner. 1148 1149 Logically Collective on PC 1150 1151 Input Parameters: 1152 + pc - the preconditioner context 1153 . Amat - the matrix that defines the linear system 1154 - Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat. 1155 1156 Notes: 1157 Passing a NULL for Amat or Pmat removes the matrix that is currently used. 
1158 1159 If you wish to replace either Amat or Pmat but leave the other one untouched then 1160 first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference() 1161 on it and then pass it back in in your call to KSPSetOperators(). 1162 1163 More Notes about Repeated Solution of Linear Systems: 1164 PETSc does NOT reset the matrix entries of either Amat or Pmat 1165 to zero after a linear solve; the user is completely responsible for 1166 matrix assembly. See the routine MatZeroEntries() if desiring to 1167 zero all elements of a matrix. 1168 1169 Level: intermediate 1170 1171 .seealso: PCGetOperators(), MatZeroEntries() 1172 @*/ 1173 PetscErrorCode PCSetOperators(PC pc,Mat Amat,Mat Pmat) 1174 { 1175 PetscErrorCode ierr; 1176 PetscInt m1,n1,m2,n2; 1177 1178 PetscFunctionBegin; 1179 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1180 if (Amat) PetscValidHeaderSpecific(Amat,MAT_CLASSID,2); 1181 if (Pmat) PetscValidHeaderSpecific(Pmat,MAT_CLASSID,3); 1182 if (Amat) PetscCheckSameComm(pc,1,Amat,2); 1183 if (Pmat) PetscCheckSameComm(pc,1,Pmat,3); 1184 if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) { 1185 ierr = MatGetLocalSize(Amat,&m1,&n1);CHKERRQ(ierr); 1186 ierr = MatGetLocalSize(pc->mat,&m2,&n2);CHKERRQ(ierr); 1187 if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1); 1188 ierr = MatGetLocalSize(Pmat,&m1,&n1);CHKERRQ(ierr); 1189 ierr = MatGetLocalSize(pc->pmat,&m2,&n2);CHKERRQ(ierr); 1190 if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1); 1191 } 1192 1193 if (Pmat != pc->pmat) { 1194 /* changing the operator that defines the preconditioner thus reneed to clear current states so new preconditioner is built */ 1195 pc->matnonzerostate = -1; 1196 pc->matstate = -1; 1197 } 1198 1199 /* reference first in case the matrices 
are the same */ 1200 if (Amat) {ierr = PetscObjectReference((PetscObject)Amat);CHKERRQ(ierr);} 1201 ierr = MatDestroy(&pc->mat);CHKERRQ(ierr); 1202 if (Pmat) {ierr = PetscObjectReference((PetscObject)Pmat);CHKERRQ(ierr);} 1203 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 1204 pc->mat = Amat; 1205 pc->pmat = Pmat; 1206 PetscFunctionReturn(0); 1207 } 1208 1209 /*@ 1210 PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed. 1211 1212 Logically Collective on PC 1213 1214 Input Parameters: 1215 + pc - the preconditioner context 1216 - flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner 1217 1218 Level: intermediate 1219 1220 .seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner() 1221 @*/ 1222 PetscErrorCode PCSetReusePreconditioner(PC pc,PetscBool flag) 1223 { 1224 PetscFunctionBegin; 1225 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1226 PetscValidLogicalCollectiveBool(pc,flag,2); 1227 pc->reusepreconditioner = flag; 1228 PetscFunctionReturn(0); 1229 } 1230 1231 /*@ 1232 PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed. 1233 1234 Not Collective 1235 1236 Input Parameter: 1237 . pc - the preconditioner context 1238 1239 Output Parameter: 1240 . 
flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

   Level: intermediate

.seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
@*/
PetscErrorCode PCGetReusePreconditioner(PC pc,PetscBool *flag)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(flag,2);
  *flag = pc->reusepreconditioner;
  PetscFunctionReturn(0);
}

/*@
   PCGetOperators - Gets the matrix associated with the linear system and
   possibly a different one associated with the preconditioner.

   Not collective, though parallel Mats are returned if the PC is parallel

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  Amat - the matrix defining the linear system
-  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

   Level: intermediate

   Notes:
   Does not increase the reference count of the matrices, so you should not destroy them

   Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
      are created in PC and returned to the user. In this case, if both operators
      mat and pmat are requested, two DIFFERENT operators will be returned. If
      only one is requested both operators in the PC will be the same (i.e. as
      if one had called KSP/PCSetOperators() with the same argument for both Mats).
      The user must set the sizes of the returned matrices and their type etc just
      as if the user created them with MatCreate(). For example,

$         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
$           set size, type, etc of Amat

$         MatCreate(comm,&mat);
$         KSP/PCSetOperators(ksp/pc,Amat,Amat);
$         PetscObjectDereference((PetscObject)mat);
$           set size, type, etc of Amat

     and

$         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
$           set size, type, etc of Amat and Pmat

$         MatCreate(comm,&Amat);
$         MatCreate(comm,&Pmat);
$         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
$         PetscObjectDereference((PetscObject)Amat);
$         PetscObjectDereference((PetscObject)Pmat);
$           set size, type, etc of Amat and Pmat

   The rationale for this support is so that when creating a TS, SNES, or KSP the hierarchy
   of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
   managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
   at this is when you create a SNES you do not NEED to create a KSP and attach it to
   the SNES object (the SNES object manages it for you). Similarly when you create a KSP
   you do not need to attach a PC to it (the KSP object manages the PC object for you).
   Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
   it can be created for you?
.seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
@*/
PetscErrorCode PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* lazily create or alias the operators so that the combinations described in the Notes hold:
     requesting only one operator makes both internal slots alias the same Mat; requesting both
     creates two distinct empty Mats */
  if (Amat) {
    if (!pc->mat) {
      if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
        pc->mat = pc->pmat;
        ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
      } else {                  /* both Amat and Pmat are empty */
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);CHKERRQ(ierr);
        if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
          pc->pmat = pc->mat;
          ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
        }
      }
    }
    *Amat = pc->mat;
  }
  if (Pmat) {
    if (!pc->pmat) {
      if (pc->mat && !Amat) {  /* Amat has been set but was not requested, so use for pmat */
        pc->pmat = pc->mat;
        ierr = PetscObjectReference((PetscObject)pc->pmat);CHKERRQ(ierr);
      } else {
        ierr = MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);CHKERRQ(ierr);
        if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
          pc->mat = pc->pmat;
          ierr = PetscObjectReference((PetscObject)pc->mat);CHKERRQ(ierr);
        }
      }
    }
    *Pmat = pc->pmat;
  }
  PetscFunctionReturn(0);
}

/*@C
   PCGetOperatorsSet - Determines if the matrix associated with the linear system and
   possibly a different one associated with the preconditioner have been set in the PC.

   Not collective, though the results on all processes should be the same

   Input Parameter:
.
pc - the preconditioner context 1359 1360 Output Parameters: 1361 + mat - the matrix associated with the linear system was set 1362 - pmat - matrix associated with the preconditioner was set, usually the same 1363 1364 Level: intermediate 1365 1366 .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators() 1367 @*/ 1368 PetscErrorCode PCGetOperatorsSet(PC pc,PetscBool *mat,PetscBool *pmat) 1369 { 1370 PetscFunctionBegin; 1371 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1372 if (mat) *mat = (pc->mat) ? PETSC_TRUE : PETSC_FALSE; 1373 if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE; 1374 PetscFunctionReturn(0); 1375 } 1376 1377 /*@ 1378 PCFactorGetMatrix - Gets the factored matrix from the 1379 preconditioner context. This routine is valid only for the LU, 1380 incomplete LU, Cholesky, and incomplete Cholesky methods. 1381 1382 Not Collective on PC though Mat is parallel if PC is parallel 1383 1384 Input Parameters: 1385 . pc - the preconditioner context 1386 1387 Output parameters: 1388 . mat - the factored matrix 1389 1390 Level: advanced 1391 1392 Notes: 1393 Does not increase the reference count for the matrix so DO NOT destroy it 1394 1395 @*/ 1396 PetscErrorCode PCFactorGetMatrix(PC pc,Mat *mat) 1397 { 1398 PetscErrorCode ierr; 1399 1400 PetscFunctionBegin; 1401 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1402 PetscValidPointer(mat,2); 1403 if (pc->ops->getfactoredmatrix) { 1404 ierr = (*pc->ops->getfactoredmatrix)(pc,mat);CHKERRQ(ierr); 1405 } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix"); 1406 PetscFunctionReturn(0); 1407 } 1408 1409 /*@C 1410 PCSetOptionsPrefix - Sets the prefix used for searching for all 1411 PC options in the database. 
1412 1413 Logically Collective on PC 1414 1415 Input Parameters: 1416 + pc - the preconditioner context 1417 - prefix - the prefix string to prepend to all PC option requests 1418 1419 Notes: 1420 A hyphen (-) must NOT be given at the beginning of the prefix name. 1421 The first character of all runtime options is AUTOMATICALLY the 1422 hyphen. 1423 1424 Level: advanced 1425 1426 .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix() 1427 @*/ 1428 PetscErrorCode PCSetOptionsPrefix(PC pc,const char prefix[]) 1429 { 1430 PetscErrorCode ierr; 1431 1432 PetscFunctionBegin; 1433 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1434 ierr = PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1435 PetscFunctionReturn(0); 1436 } 1437 1438 /*@C 1439 PCAppendOptionsPrefix - Appends to the prefix used for searching for all 1440 PC options in the database. 1441 1442 Logically Collective on PC 1443 1444 Input Parameters: 1445 + pc - the preconditioner context 1446 - prefix - the prefix string to prepend to all PC option requests 1447 1448 Notes: 1449 A hyphen (-) must NOT be given at the beginning of the prefix name. 1450 The first character of all runtime options is AUTOMATICALLY the 1451 hyphen. 1452 1453 Level: advanced 1454 1455 .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix() 1456 @*/ 1457 PetscErrorCode PCAppendOptionsPrefix(PC pc,const char prefix[]) 1458 { 1459 PetscErrorCode ierr; 1460 1461 PetscFunctionBegin; 1462 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1463 ierr = PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1464 PetscFunctionReturn(0); 1465 } 1466 1467 /*@C 1468 PCGetOptionsPrefix - Gets the prefix used for searching for all 1469 PC options in the database. 1470 1471 Not Collective 1472 1473 Input Parameters: 1474 . pc - the preconditioner context 1475 1476 Output Parameters: 1477 . 
prefix - pointer to the prefix string used, is returned 1478 1479 Notes: 1480 On the fortran side, the user should pass in a string 'prifix' of 1481 sufficient length to hold the prefix. 1482 1483 Level: advanced 1484 1485 .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix() 1486 @*/ 1487 PetscErrorCode PCGetOptionsPrefix(PC pc,const char *prefix[]) 1488 { 1489 PetscErrorCode ierr; 1490 1491 PetscFunctionBegin; 1492 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1493 PetscValidPointer(prefix,2); 1494 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);CHKERRQ(ierr); 1495 PetscFunctionReturn(0); 1496 } 1497 1498 /* 1499 Indicates the right hand side will be changed by KSPSolve(), this occurs for a few 1500 preconditioners including BDDC and Eisentat that transform the equations before applying 1501 the Krylov methods 1502 */ 1503 PETSC_INTERN PetscErrorCode PCPreSolveChangeRHS(PC pc,PetscBool *change) 1504 { 1505 PetscErrorCode ierr; 1506 1507 PetscFunctionBegin; 1508 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1509 PetscValidPointer(change,2); 1510 *change = PETSC_FALSE; 1511 ierr = PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));CHKERRQ(ierr); 1512 PetscFunctionReturn(0); 1513 } 1514 1515 /*@ 1516 PCPreSolve - Optional pre-solve phase, intended for any 1517 preconditioner-specific actions that must be performed before 1518 the iterative solve itself. 1519 1520 Collective on PC 1521 1522 Input Parameters: 1523 + pc - the preconditioner context 1524 - ksp - the Krylov subspace context 1525 1526 Level: developer 1527 1528 Sample of Usage: 1529 .vb 1530 PCPreSolve(pc,ksp); 1531 KSPSolve(ksp,b,x); 1532 PCPostSolve(pc,ksp); 1533 .ve 1534 1535 Notes: 1536 The pre-solve phase is distinct from the PCSetUp() phase. 1537 1538 KSPSolve() calls this directly, so is rarely called by the user. 
.seealso: PCPostSolve()
@*/
PetscErrorCode PCPreSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  /* track nesting depth; the matching PCPostSolve() decrements it.  At most two levels are allowed */
  pc->presolvedone++;
  if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
  /* NOTE(review): fetched unconditionally, even when no presolve op exists -- presumably the KSP
     getters have a lazy-creation side effect; confirm before moving these inside the branch below */
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);

  if (pc->ops->presolve) {
    ierr = (*pc->ops->presolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCPostSolve - Optional post-solve phase, intended for any
   preconditioner-specific actions that must be performed after
   the iterative solve itself.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  ksp - the Krylov subspace context

   Sample of Usage:
.vb
    PCPreSolve(pc,ksp);
    KSPSolve(ksp,b,x);
    PCPostSolve(pc,ksp);
.ve

   Note:
   KSPSolve() calls this routine directly, so it is rarely called by the user.

   Level: developer

.seealso: PCPreSolve(), KSPSolve()
@*/
PetscErrorCode PCPostSolve(PC pc,KSP ksp)
{
  PetscErrorCode ierr;
  Vec            x,rhs;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,2);
  pc->presolvedone--;   /* undo the increment made by the matching PCPreSolve() */
  ierr = KSPGetSolution(ksp,&x);CHKERRQ(ierr);
  ierr = KSPGetRhs(ksp,&rhs);CHKERRQ(ierr);
  if (pc->ops->postsolve) {
    ierr = (*pc->ops->postsolve)(pc,ksp,rhs,x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   PCLoad - Loads a PC that has been stored in binary with PCView().
   Collective on PetscViewer

   Input Parameters:
+  newdm - the newly loaded PC, this needs to have been created with PCCreate() or
           some related function before a call to PCLoad().
-  viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

   Level: intermediate

   Notes:
   The type is determined by the data in the file, any type set into the PC before this call is ignored.

   Notes for advanced users:
   Most users should not need to know the details of the binary storage
   format, since PCLoad() and PCView() completely hide these details.
   But for anyone who's interested, the standard binary matrix storage
   format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
@*/
PetscErrorCode PCLoad(PC newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,PC_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* only binary viewers are supported as a load source */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* header layout (written by PCView() on a binary viewer): the classid, then the type name as 256 chars */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);CHKERRQ(ierr);
  if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);CHKERRQ(ierr);
  ierr = PCSetType(newdm, type);CHKERRQ(ierr);
  /* let the concrete implementation read its own data, if it provides a load operation */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#if defined(PETSC_HAVE_SAWS)
#include <petscviewersaws.h>
#endif

/*@C
   PCViewFromOptions - View from Options

   Collective on PC

   Input Parameters:
+  A - the PC context
.  obj - Optional object
-  name - command line option

   Level: intermediate
.seealso: PC, PCView, PetscObjectViewFromOptions(), PCCreate()
@*/
PetscErrorCode PCViewFromOptions(PC A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,PC_CLASSID,1);
  /* delegate to the generic PetscObject options-driven viewing */
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCView - Prints the PC data structure.

   Collective on PC

   Input Parameters:
+  PC - the PC context
-  viewer - optional visualization context

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
      output where only the first processor opens
      the file.  All other processors send their
      data to the first processor to print.

   The user can open an alternative visualization contexts with
   PetscViewerASCIIOpen() (output to a specified file).
   Level: developer

.seealso: KSPView(), PetscViewerASCIIOpen()
@*/
PetscErrorCode PCView(PC pc,PetscViewer viewer)
{
  PCType         cstr;
  PetscErrorCode ierr;
  PetscBool      iascii,isstring,isbinary,isdraw;
#if defined(PETSC_HAVE_SAWS)
  PetscBool      issaws;
#endif

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* default to stdout on the PC's communicator when no viewer is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(pc,1,viewer,2);

  /* dispatch on the concrete viewer type */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);CHKERRQ(ierr);
#endif

  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);CHKERRQ(ierr);
    if (!pc->setupcalled) {
      ierr = PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n");CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      /* type-specific viewing, indented one tab under the PC header */
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
      if (pc->pmat == pc->mat) {
        ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      } else {
        if (pc->pmat) {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");CHKERRQ(ierr);
        } else {
          ierr = PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");CHKERRQ(ierr);
        }
        ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
        ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);
        if (pc->pmat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
        ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    }
  } else if (isstring) {
    ierr = PCGetType(pc,&cstr);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);CHKERRQ(ierr);
    if (pc->ops->view) {ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);}
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
  } else if (isbinary) {
    PetscInt    classid = PC_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    /* only the first rank writes the classid/type header; PCLoad() reads the same layout */
    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    if (!rank) {
      ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)pc)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
    }
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
  } else if (isdraw) {
    PetscDraw draw;
    char      str[25];
    PetscReal x,y,bottom,h;
    PetscInt  n;

    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawGetCurrentPoint(draw,&x,&y);CHKERRQ(ierr);
    /* label the box with the type name, and the global size when an operator is attached */
    if (pc->mat) {
      ierr = MatGetSize(pc->mat,&n,NULL);CHKERRQ(ierr);
      ierr = PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);CHKERRQ(ierr);
    } else {
      ierr = PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);CHKERRQ(ierr);
    }
    ierr = PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);CHKERRQ(ierr);
    bottom = y - h;
    /* draw any type-specific picture below the label */
    ierr = PetscDrawPushCurrentPoint(draw,x,bottom);CHKERRQ(ierr);
    if (pc->ops->view) {
      ierr = (*pc->ops->view)(pc,viewer);CHKERRQ(ierr);
    }
    ierr = PetscDrawPopCurrentPoint(draw);CHKERRQ(ierr);
#if defined(PETSC_HAVE_SAWS)
  } else if (issaws) {
    PetscMPIInt rank;

    ierr = PetscObjectName((PetscObject)pc);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
    /* publish to SAWs only once, from the first rank */
    if (!((PetscObject)pc)->amsmem && !rank) {
      ierr = PetscObjectViewSAWs((PetscObject)pc,viewer);CHKERRQ(ierr);
    }
    if (pc->mat) {ierr = MatView(pc->mat,viewer);CHKERRQ(ierr);}
    if (pc->pmat && pc->pmat != pc->mat) {ierr = MatView(pc->pmat,viewer);CHKERRQ(ierr);}
#endif
  }
  PetscFunctionReturn(0);
}

/*@C
   PCRegister - Adds a method to the preconditioner package.

   Not collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Notes:
   PCRegister() may be called multiple times to add several user-defined preconditioners.

   Sample usage:
.vb
   PCRegister("my_solver", MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     PCSetType(pc,"my_solver")
   or at runtime via the option
$     -pc_type my_solver

   Level: advanced

.seealso: PCRegisterAll()
@*/
PetscErrorCode PCRegister(const char sname[],PetscErrorCode (*function)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* ensure PCList and the PC class exist before registering into the function list */
  ierr = PCInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&PCList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* MATOP_MULT callback for a MATSHELL whose context is a PC: applies the
   preconditioner to X via PCApply(), storing the result in Y. Used by
   PCComputeOperator() below to treat the preconditioner as a matrix. */
static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
{
  PC             pc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,&pc);CHKERRQ(ierr);
  ierr = PCApply(pc,X,Y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCComputeOperator - Computes the explicit preconditioned operator.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner object
-  mattype - the matrix type to be used for the operator

   Output Parameter:
.  mat - the explicit preconditioned operator

   Notes:
   This computation is done by applying the operators to columns of the identity matrix.
   This routine is costly in general, and is recommended for use only with relatively small systems.
   Currently, this routine uses a dense matrix format when mattype == NULL

   Level: advanced

.seealso: KSPComputeOperator(), MatType

@*/
PetscErrorCode PCComputeOperator(PC pc,MatType mattype,Mat *mat)
{
  PetscErrorCode ierr;
  PetscInt       N,M,m,n;
  Mat            A,Apc;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(mat,3);
  /* size the shell matrix to match the system matrix the PC was built from */
  ierr = PCGetOperators(pc,&A,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr);
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  /* wrap the PC in a MATSHELL whose MatMult applies the preconditioner, then
     form the operator explicitly by applying it to columns of the identity */
  ierr = MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);CHKERRQ(ierr);
  ierr = MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);CHKERRQ(ierr);
  ierr = MatComputeOperator(Apc,mattype,mat);CHKERRQ(ierr);
  ierr = MatDestroy(&Apc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCSetCoordinates - sets the coordinates of all the nodes on the local process

   Collective on PC

   Input Parameters:
+  pc - the solver context
.  dim - the dimension of the coordinates 1, 2, or 3
.  nloc - the blocked size of the coordinates array
-  coords - the coordinates array

   Level: intermediate

   Notes:
   coords is an array of the dim coordinates for the nodes on
   the local processor, of size dim*nloc.
   If there are 108 equations on a processor
   for a displacement finite element discretization of elasticity (so
   that there are nloc = 36 = 108/3 nodes) then the array must have 108
   double precision values (ie, 3 * 36). These x y z coordinates
   should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
   ... , N-1.z ].

.seealso: MatSetNearNullSpace()
@*/
PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,dim,2);
  /* dispatched to the PC implementation only if it composed a PCSetCoordinates_C
     method (e.g. GAMG); otherwise silently a no-op, per PetscTryMethod() semantics */
  ierr = PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  num_levels - the number of levels
-  interpolations - the interpolation matrices (size of num_levels-1)

   Level: advanced

.keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level

.seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
@*/
PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(num_levels,2);
  PetscValidPointer(interpolations,3);
  /* PetscUseMethod() errors if the PC type did not compose PCGetInterpolations_C,
     i.e. this is only supported by multigrid-like preconditioners */
  ierr = PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

   Logically Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  num_levels - the number of levels
-  coarseOperators - the coarse operator matrices (size of num_levels-1)

   Level: advanced

.keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level

.seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
@*/
PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(num_levels,2);
  PetscValidPointer(coarseOperators,3);
  /* errors (rather than no-ops) if the PC type does not provide this method */
  ierr = PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}