#include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/

/* Private context for the LSC (Least Squares Commutator) preconditioner.
   The outer operator pc->mat must be a MATSCHURCOMPLEMENT; writing the Schur
   complement as S = A11 - A10 inv(A00) A01, this PC approximates inv(S) by
   inv(L) A10 A00 A01 inv(L) with L = A10 A01 (see the manual page below). */
typedef struct {
  PetscBool allocated;  /* PETSC_TRUE once kspL and the work vectors have been created */
  PetscBool scalediag;  /* scale by 1/diag(A00pre); set by -pc_lsc_scale_diag */
  KSP       kspL;       /* inner solver for L, option prefix "<pc prefix>lsc_" */
  Vec       scale;      /* reciprocal of diag(A00pre); only allocated when scalediag is set */
  Vec       x0, y0, x1; /* work vectors: x0,y0 live in the A00 row/col spaces, x1 in the Schur space */
  Mat       L;          /* keep a copy to reuse when obtained with L = A10*A01 */
} PC_LSC;

/* Lazily create the inner KSP and work vectors for the LSC context.
   Idempotent: returns immediately once lsc->allocated is set. The inner KSP
   inherits the PC's prefix with "lsc_" appended and defaults to KSPPREONLY. */
static PetscErrorCode PCLSCAllocate_Private(PC pc)
{
  PC_LSC *lsc = (PC_LSC *)pc->data;
  Mat     A;

  PetscFunctionBegin;
  if (lsc->allocated) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)pc), &lsc->kspL));
  PetscCall(KSPSetNestLevel(lsc->kspL, pc->kspnestlevel));
  PetscCall(KSPSetErrorIfNotConverged(lsc->kspL, pc->erroriffailure));
  PetscCall(PetscObjectIncrementTabLevel((PetscObject)lsc->kspL, (PetscObject)pc, 1));
  PetscCall(KSPSetType(lsc->kspL, KSPPREONLY));
  PetscCall(KSPSetOptionsPrefix(lsc->kspL, ((PetscObject)pc)->prefix));
  PetscCall(KSPAppendOptionsPrefix(lsc->kspL, "lsc_"));
  /* x0/y0 are sized by the A00 block, x1 by the Schur (pressure) block */
  PetscCall(MatSchurComplementGetSubMatrices(pc->mat, &A, NULL, NULL, NULL, NULL));
  PetscCall(MatCreateVecs(A, &lsc->x0, &lsc->y0));
  PetscCall(MatCreateVecs(pc->pmat, &lsc->x1, NULL));
  if (lsc->scalediag) PetscCall(VecDuplicate(lsc->x0, &lsc->scale));
  lsc->allocated = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Set up the LSC preconditioner: locate (or build) the "Laplacian" L and its
   preconditioning matrix Lp, optionally compute the diagonal scaling, and hand
   the operators to the inner KSP.
   L is taken from a composed object "LSC_L" on pc->mat (falling back to
   pc->pmat); Lp from "LSC_Lp" on pc->pmat (falling back to pc->mat). If no
   user-supplied L is found, L = A10*A01 is formed (and later reused in place)
   from the Schur complement's sub-matrices, and Lp is set to the same matrix. */
static PetscErrorCode PCSetUp_LSC(PC pc)
{
  PC_LSC *lsc = (PC_LSC *)pc->data;
  Mat     L, Lp, B, C;

  PetscFunctionBegin;
  PetscCall(PCLSCAllocate_Private(pc));
  PetscCall(PetscObjectQuery((PetscObject)pc->mat, "LSC_L", (PetscObject *)&L));
  if (!L) PetscCall(PetscObjectQuery((PetscObject)pc->pmat, "LSC_L", (PetscObject *)&L));
  PetscCall(PetscObjectQuery((PetscObject)pc->pmat, "LSC_Lp", (PetscObject *)&Lp));
  if (!Lp) PetscCall(PetscObjectQuery((PetscObject)pc->mat, "LSC_Lp", (PetscObject *)&Lp));
  /* NOTE(review): if the user composed "LSC_L" but not "LSC_Lp", Lp stays NULL
     here and is passed as-is to KSPSetOperators() below — presumably relying on
     KSPSetOperators' NULL-argument semantics; confirm against the KSP docs. */
  if (!L) {
    /* B = A01, C = A10; build (or refresh in place) L = A10*A01 */
    PetscCall(MatSchurComplementGetSubMatrices(pc->mat, NULL, NULL, &B, &C, NULL));
    if (!lsc->L) {
      PetscCall(MatMatMult(C, B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &lsc->L));
    } else {
      PetscCall(MatMatMult(C, B, MAT_REUSE_MATRIX, PETSC_DEFAULT, &lsc->L));
    }
    Lp = L = lsc->L;
  }
  if (lsc->scale) {
    Mat Ap;
    PetscCall(MatSchurComplementGetSubMatrices(pc->mat, NULL, &Ap, NULL, NULL, NULL));
    PetscCall(MatGetDiagonal(Ap, lsc->scale)); /* Should be the mass matrix, but we don't have plumbing for that yet */
    PetscCall(VecReciprocal(lsc->scale));
  }
  PetscCall(KSPSetOperators(lsc->kspL, L, Lp));
  PetscCall(KSPSetFromOptions(lsc->kspL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Apply the LSC preconditioner:
     y = inv(L) A10 D A00 D A01 inv(L) x
   where A = A00, B = A01, C = A10 below, and D = diag(lsc->scale) (identity
   when scaling is disabled). Each inner solve is checked via KSPCheckSolve. */
static PetscErrorCode PCApply_LSC(PC pc, Vec x, Vec y)
{
  PC_LSC *lsc = (PC_LSC *)pc->data;
  Mat     A, B, C;

  PetscFunctionBegin;
  PetscCall(MatSchurComplementGetSubMatrices(pc->mat, &A, NULL, &B, &C, NULL));
  PetscCall(KSPSolve(lsc->kspL, x, lsc->x1));       /* x1 = inv(L) x */
  PetscCall(KSPCheckSolve(lsc->kspL, pc, lsc->x1));
  PetscCall(MatMult(B, lsc->x1, lsc->x0));          /* x0 = A01 x1 */
  if (lsc->scale) PetscCall(VecPointwiseMult(lsc->x0, lsc->x0, lsc->scale));
  PetscCall(MatMult(A, lsc->x0, lsc->y0));          /* y0 = A00 x0 */
  if (lsc->scale) PetscCall(VecPointwiseMult(lsc->y0, lsc->y0, lsc->scale));
  PetscCall(MatMult(C, lsc->y0, lsc->x1));          /* x1 = A10 y0 */
  PetscCall(KSPSolve(lsc->kspL, lsc->x1, y));       /* y = inv(L) x1 */
  PetscCall(KSPCheckSolve(lsc->kspL, pc, y));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release all objects created during setup; the context struct itself survives,
   so the PC can be set up again (allocated is not reset here — the destroyed
   handles are NULLed by the PETSc destroy routines, and PCSetUp_LSC recreates
   lsc->L on demand). */
static PetscErrorCode PCReset_LSC(PC pc)
{
  PC_LSC *lsc = (PC_LSC *)pc->data;

  PetscFunctionBegin;
  PetscCall(VecDestroy(&lsc->x0));
  PetscCall(VecDestroy(&lsc->y0));
  PetscCall(VecDestroy(&lsc->x1));
  PetscCall(VecDestroy(&lsc->scale));
  PetscCall(KSPDestroy(&lsc->kspL));
  PetscCall(MatDestroy(&lsc->L));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroy the PC: reset (frees the inner objects) then free the context. */
static PetscErrorCode PCDestroy_LSC(PC pc)
{
  PetscFunctionBegin;
  PetscCall(PCReset_LSC(pc));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Process the options database for this PC (currently only -pc_lsc_scale_diag). */
static PetscErrorCode PCSetFromOptions_LSC(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_LSC *lsc = (PC_LSC *)pc->data;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "LSC options");
  {
    PetscCall(PetscOptionsBool("-pc_lsc_scale_diag", "Use diagonal of velocity block (A) for scaling", "None", lsc->scalediag, &lsc->scalediag, NULL));
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* View the PC state: for ASCII viewers, display the inner KSP (or a notice
   that it has not been created yet). */
static PetscErrorCode PCView_LSC(PC pc, PetscViewer viewer)
{
  PC_LSC   *jac = (PC_LSC *)pc->data;
  PetscBool iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPushTab(viewer));
    if (jac->kspL) {
      PetscCall(KSPView(jac->kspL, viewer));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "PCLSC KSP object not yet created, hence cannot display"));
    }
    PetscCall(PetscViewerASCIIPopTab(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
     PCLSC - Preconditioning for Schur complements, based on Least Squares Commutators

   Options Database Key:
.    -pc_lsc_scale_diag - Use the diagonal of A for scaling

   Level: intermediate

   Notes:
   This preconditioner will normally be used with `PCFIELDSPLIT` to precondition the Schur complement, but
   it can be used for any Schur complement system.  Consider the Schur complement

.vb
   S = A11 - A10 inv(A00) A01
.ve

   `PCLSC` currently doesn't do anything with A11, so let's assume it is 0.  The idea is that a good approximation to
   inv(S) is given by

.vb
   inv(A10 A01) A10 A00 A01 inv(A10 A01)
.ve

   The product A10 A01 can be computed for you, but you can provide it (this is
   usually more efficient anyway).  In the case of incompressible flow, A10 A01 is a Laplacian; call it L.  The current
   interface is to hang L and a preconditioning matrix Lp on the preconditioning matrix.

   If you had called `KSPSetOperators`(ksp,S,Sp), S should have type `MATSCHURCOMPLEMENT` and Sp can be any type you
   like (`PCLSC` doesn't use it directly) but should have matrices composed with it, under the names "LSC_L" and "LSC_Lp".
   For example, you might have setup code like this

.vb
   PetscObjectCompose((PetscObject)Sp,"LSC_L",(PetscObject)L);
   PetscObjectCompose((PetscObject)Sp,"LSC_Lp",(PetscObject)Lp);
.ve

   And then your Jacobian assembly would look like

.vb
   PetscObjectQuery((PetscObject)Sp,"LSC_L",(PetscObject*)&L);
   PetscObjectQuery((PetscObject)Sp,"LSC_Lp",(PetscObject*)&Lp);
   if (L) { assemble L }
   if (Lp) { assemble Lp }
.ve

   With this, you should be able to choose LSC preconditioning, using e.g. ML's algebraic multigrid to solve with L

.vb
   -fieldsplit_1_pc_type lsc -fieldsplit_1_lsc_pc_type ml
.ve

   Since we do not use the values in Sp, you can still put an assembled matrix there to use normal preconditioners.

   References:
+  * - Elman, Howle, Shadid, Shuttleworth, and Tuminaro, Block preconditioners based on approximate commutators, 2006.
-  * - Silvester, Elman, Kay, Wathen, Efficient preconditioning of the linearized Navier Stokes equations for incompressible flow, 2001.

.seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `Block_Preconditioners`, `PCFIELDSPLIT`,
          `PCFieldSplitGetSubKSP()`, `PCFieldSplitSetFields()`, `PCFieldSplitSetType()`, `PCFieldSplitSetIS()`, `PCFieldSplitSetSchurPre()`,
          `MatCreateSchurComplement()`, `MatSchurComplementSetSubMatrices()`, `MatSchurComplementUpdateSubMatrices()`,
          `MatSchurComplementSetAinvType()`, `MatGetSchurComplement()`
M*/

/* Create the PCLSC implementation: allocate the context and install the
   function table (apply/setup/reset/destroy/setfromoptions/view). */
PETSC_EXTERN PetscErrorCode PCCreate_LSC(PC pc)
{
  PC_LSC *lsc;

  PetscFunctionBegin;
  PetscCall(PetscNew(&lsc));
  pc->data = (void *)lsc;

  pc->ops->apply           = PCApply_LSC;
  pc->ops->applytranspose  = NULL;
  pc->ops->setup           = PCSetUp_LSC;
  pc->ops->reset           = PCReset_LSC;
  pc->ops->destroy         = PCDestroy_LSC;
  pc->ops->setfromoptions  = PCSetFromOptions_LSC;
  pc->ops->view            = PCView_LSC;
  pc->ops->applyrichardson = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}