/*
   Partial differential equation

         d   d u
         --  ---  = 1,    0 < x < 1,
         dx  dx

   with boundary conditions

         u = 0 for x = 0, x = 1

   This uses multigrid to solve the linear system.

   Demonstrates how to build a DMSHELL for managing multigrid. The DMSHELL simply wraps a
   1D DMDA, which is used to construct all the needed PETSc objects.

*/

static char help[] = "Solves 1D constant coefficient Laplacian using DMSHELL and multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

static PetscErrorCode ComputeMatrix(KSP,Mat,Mat,void*);
static PetscErrorCode ComputeRHS(KSP,Vec,void*);
static PetscErrorCode CreateMatrix(DM,Mat*);
static PetscErrorCode CreateGlobalVector(DM,Vec*);
static PetscErrorCode CreateLocalVector(DM,Vec*);
static PetscErrorCode Refine(DM,MPI_Comm,DM*);
static PetscErrorCode Coarsen(DM,MPI_Comm,DM*);
static PetscErrorCode CreateInterpolation(DM,DM,Mat*,Vec*);
static PetscErrorCode CreateRestriction(DM,DM,Mat*);

/* Wrap a DMDA in a DMSHELL and register the callbacks PCMG needs to build a hierarchy */
static PetscErrorCode MyDMShellCreate(MPI_Comm comm,DM da,DM *shell)
{
  PetscCall(DMShellCreate(comm,shell));
  PetscCall(DMShellSetContext(*shell,da));
  PetscCall(DMShellSetCreateMatrix(*shell,CreateMatrix));
  PetscCall(DMShellSetCreateGlobalVector(*shell,CreateGlobalVector));
  PetscCall(DMShellSetCreateLocalVector(*shell,CreateLocalVector));
  PetscCall(DMShellSetRefine(*shell,Refine));
  PetscCall(DMShellSetCoarsen(*shell,Coarsen));
  PetscCall(DMShellSetCreateInterpolation(*shell,CreateInterpolation));
  PetscCall(DMShellSetCreateRestriction(*shell,CreateRestriction));
  return 0;
}

int main(int argc,char **argv)
{
  KSP      ksp;
  DM       da,shell;
  PetscInt levels;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,129,1,1,0,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(MyDMShellCreate(PETSC_COMM_WORLD,da,&shell));
  /* these two lines are not needed but allow PCMG to automatically know how many multigrid levels the user wants */
  PetscCall(DMGetRefineLevel(da,&levels));
  PetscCall(DMSetRefineLevel(shell,levels));

  PetscCall(KSPSetDM(ksp,shell));
  PetscCall(KSPSetComputeRHS(ksp,ComputeRHS,NULL));
  PetscCall(KSPSetComputeOperators(ksp,ComputeMatrix,NULL));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp,NULL,NULL));

  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&shell));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

static PetscErrorCode CreateMatrix(DM shell,Mat *A)
{
  DM da;

  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMCreateMatrix(da,A));
  return 0;
}

static PetscErrorCode CreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  DM da1,da2;

  PetscCall(DMShellGetContext(dm1,&da1));
  PetscCall(DMShellGetContext(dm2,&da2));
  PetscCall(DMCreateInterpolation(da1,da2,mat,vec));
  return 0;
}

/* Restriction is taken as the transpose of the DMDA interpolation between the two levels */
static PetscErrorCode CreateRestriction(DM dm1,DM dm2,Mat *mat)
{
  DM  da1,da2;
  Mat tmat;

  PetscCall(DMShellGetContext(dm1,&da1));
  PetscCall(DMShellGetContext(dm2,&da2));
  PetscCall(DMCreateInterpolation(da1,da2,&tmat,NULL));
  PetscCall(MatTranspose(tmat,MAT_INITIAL_MATRIX,mat));
  PetscCall(MatDestroy(&tmat));
  return 0;
}
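
/*
   The two vector creators below delegate to the DMDA stored in the shell's context and then
   re-tag the result with VecSetDM(), presumably so that the vectors are associated with the
   DMSHELL rather than the underlying DMDA when PETSc later queries a vector for its DM.
*/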
static PetscErrorCode CreateGlobalVector(DM shell,Vec *x)
{
  DM da;

  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMCreateGlobalVector(da,x));
  PetscCall(VecSetDM(*x,shell));
  return 0;
}

static PetscErrorCode CreateLocalVector(DM shell,Vec *x)
{
  DM da;

  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMCreateLocalVector(da,x));
  PetscCall(VecSetDM(*x,shell));
  return 0;
}

static PetscErrorCode Refine(DM shell,MPI_Comm comm,DM *dmnew)
{
  DM da,dafine;

  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMRefine(da,comm,&dafine));
  PetscCall(MyDMShellCreate(PetscObjectComm((PetscObject)shell),dafine,dmnew));
  return 0;
}

static PetscErrorCode Coarsen(DM shell,MPI_Comm comm,DM *dmnew)
{
  DM da,dacoarse;

  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMCoarsen(da,comm,&dacoarse));
  PetscCall(MyDMShellCreate(PetscObjectComm((PetscObject)shell),dacoarse,dmnew));
  /* discard an "extra" reference count to dacoarse */
  PetscCall(DMDestroy(&dacoarse));
  return 0;
}

static PetscErrorCode ComputeRHS(KSP ksp,Vec b,void *ctx)
{
  PetscInt    mx,idx[2];
  PetscScalar h,v[2];
  DM          da,shell;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp,&shell));
  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMDAGetInfo(da,0,&mx,0,0,0,0,0,0,0,0,0,0,0));
  h = 1.0/(mx-1);
  PetscCall(VecSet(b,h));
  /* enforce the Dirichlet boundary conditions u = 0 at x = 0 and x = 1 */
  idx[0] = 0; idx[1] = mx-1;
  v[0] = v[1] = 0.0;
  PetscCall(VecSetValues(b,2,idx,v,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));
  PetscFunctionReturn(0);
}

static PetscErrorCode ComputeMatrix(KSP ksp,Mat J,Mat jac,void *ctx)
{
  PetscInt    i,mx,xm,xs;
  PetscScalar v[3],h;
  MatStencil  row,col[3];
  DM          da,shell;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp,&shell));
  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMDAGetInfo(da,0,&mx,0,0,0,0,0,0,0,0,0,0,0));
  PetscCall(DMDAGetCorners(da,&xs,0,0,&xm,0,0));
  h = 1.0/(mx-1);

  for (i=xs; i<xs+xm; i++) {
    row.i = i;
    if (i==0 || i==mx-1) {
      v[0] = 2.0/h;
      PetscCall(MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES));
    } else {
      v[0] = -1.0/h; col[0].i = i-1;
      v[1] =  2.0/h; col[1].i = row.i;
      v[2] = -1.0/h; col[2].i = i+1;
      PetscCall(MatSetValuesStencil(jac,1,&row,3,col,v,INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}

/*TEST

   test:
      nsize: 4
      args: -ksp_monitor -pc_type mg -da_refine 3

TEST*/
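
/*
   Example run matching the test above (the executable name "ex" is illustrative and depends
   on how this file is built):

       mpiexec -n 4 ./ex -ksp_monitor -pc_type mg -da_refine 3

   -da_refine 3 refines the DMDA before the solve; with -pc_type mg, PCMG can then use the
   DMSHELL coarsen, interpolation, and restriction callbacks above to build the multigrid
   hierarchy.
*/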