/*
   Partial differential equation

      d   d u = 1,  0 < x < 1,
      --  --
      dx  dx

   with boundary conditions

      u = 0 for x = 0, x = 1

   This uses multigrid to solve the linear system.

   Demonstrates how to build a DMSHELL for managing multigrid. The DMSHELL simply creates a
   DMDA1d to construct all the needed PETSc objects.

*/

static char help[] = "Solves 1D constant coefficient Laplacian using DMSHELL and multigrid.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>
#include <petscksp.h>

static PetscErrorCode ComputeMatrix(KSP,Mat,Mat,void*);
static PetscErrorCode ComputeRHS(KSP,Vec,void*);
static PetscErrorCode CreateMatrix(DM,Mat*);
static PetscErrorCode CreateGlobalVector(DM,Vec*);
static PetscErrorCode CreateLocalVector(DM,Vec*);
static PetscErrorCode Refine(DM,MPI_Comm,DM*);
static PetscErrorCode Coarsen(DM,MPI_Comm,DM*);
static PetscErrorCode CreateInterpolation(DM,DM,Mat*,Vec*);
static PetscErrorCode CreateRestriction(DM,DM,Mat*);

/* Wrap the given DMDA in a DMSHELL: store the DMDA as the shell's context and register
   the hooks PCMG needs, i.e. matrix/vector creation, refine/coarsen, and
   interpolation/restriction */
static PetscErrorCode MyDMShellCreate(MPI_Comm comm,DM da,DM *shell)
{
  PetscCall(DMShellCreate(comm,shell));
  PetscCall(DMShellSetContext(*shell,da));
  PetscCall(DMShellSetCreateMatrix(*shell,CreateMatrix));
  PetscCall(DMShellSetCreateGlobalVector(*shell,CreateGlobalVector));
  PetscCall(DMShellSetCreateLocalVector(*shell,CreateLocalVector));
  PetscCall(DMShellSetRefine(*shell,Refine));
  PetscCall(DMShellSetCoarsen(*shell,Coarsen));
  PetscCall(DMShellSetCreateInterpolation(*shell,CreateInterpolation));
  PetscCall(DMShellSetCreateRestriction(*shell,CreateRestriction));
  return 0;
}

int main(int argc,char **argv)
{
  KSP      ksp;
  DM       da,shell;
  PetscInt levels;

  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(KSPCreate(PETSC_COMM_WORLD,&ksp));
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,129,1,1,0,&da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(MyDMShellCreate(PETSC_COMM_WORLD,da,&shell));
  /* these two lines are not needed but allow PCMG to automatically know how many multigrid levels the user wants */
  PetscCall(DMGetRefineLevel(da,&levels));
  PetscCall(DMSetRefineLevel(shell,levels));

  PetscCall(KSPSetDM(ksp,shell));
  PetscCall(KSPSetComputeRHS(ksp,ComputeRHS,NULL));
  PetscCall(KSPSetComputeOperators(ksp,ComputeMatrix,NULL));
  PetscCall(KSPSetFromOptions(ksp));
  PetscCall(KSPSolve(ksp,NULL,NULL));

  PetscCall(KSPDestroy(&ksp));
  PetscCall(DMDestroy(&shell));
  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}

static PetscErrorCode CreateMatrix(DM shell,Mat *A)
{
  DM da;

  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMCreateMatrix(da,A));
  return 0;
}

static PetscErrorCode CreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  DM da1,da2;

  PetscCall(DMShellGetContext(dm1,&da1));
  PetscCall(DMShellGetContext(dm2,&da2));
  PetscCall(DMCreateInterpolation(da1,da2,mat,vec));
  return 0;
}

static PetscErrorCode CreateRestriction(DM dm1,DM dm2,Mat *mat)
{
  DM  da1,da2;
  Mat tmat;

  PetscCall(DMShellGetContext(dm1,&da1));
  PetscCall(DMShellGetContext(dm2,&da2));
  /* restriction is built as the transpose of the DMDA interpolation */
  PetscCall(DMCreateInterpolation(da1,da2,&tmat,NULL));
  PetscCall(MatTranspose(tmat,MAT_INITIAL_MATRIX,mat));
  PetscCall(MatDestroy(&tmat));
  return 0;
}

static PetscErrorCode CreateGlobalVector(DM shell,Vec *x)
{
  DM da;
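  /* create the global vector from the DMDA stored as the shell's context, then mark
     it with VecSetDM() so downstream code treats it as the shell's vector */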
  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMCreateGlobalVector(da,x));
  PetscCall(VecSetDM(*x,shell));
  return 0;
}

static PetscErrorCode CreateLocalVector(DM shell,Vec *x)
{
  DM da;

  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMCreateLocalVector(da,x));
  /* as with the global vector, associate the shell (not the DMDA) with the result */
  PetscCall(VecSetDM(*x,shell));
  return 0;
}

static PetscErrorCode Refine(DM shell,MPI_Comm comm,DM *dmnew)
{
  DM da,dafine;

  /* refine the underlying DMDA, then wrap the refined DMDA in a new DMSHELL */
  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMRefine(da,comm,&dafine));
  PetscCall(MyDMShellCreate(PetscObjectComm((PetscObject)shell),dafine,dmnew));
  return 0;
}

static PetscErrorCode Coarsen(DM shell,MPI_Comm comm,DM *dmnew)
{
  DM da,dacoarse;

  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMCoarsen(da,comm,&dacoarse));
  PetscCall(MyDMShellCreate(PetscObjectComm((PetscObject)shell),dacoarse,dmnew));
  /* discard an "extra" reference count to dacoarse */
  PetscCall(DMDestroy(&dacoarse));
  return 0;
}

static PetscErrorCode ComputeRHS(KSP ksp,Vec b,void *ctx)
{
  PetscInt    mx,idx[2];
  PetscScalar h,v[2];
  DM          da,shell;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp,&shell));
  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMDAGetInfo(da,0,&mx,0,0,0,0,0,0,0,0,0,0,0));
  h = 1.0/(mx-1);
  PetscCall(VecSet(b,h));
  /* zero the right-hand side at the two Dirichlet boundary nodes */
  idx[0] = 0; idx[1] = mx-1;
  v[0] = v[1] = 0.0;
  PetscCall(VecSetValues(b,2,idx,v,INSERT_VALUES));
  PetscCall(VecAssemblyBegin(b));
  PetscCall(VecAssemblyEnd(b));
  PetscFunctionReturn(0);
}

static PetscErrorCode ComputeMatrix(KSP ksp,Mat J,Mat jac,void *ctx)
{
  PetscInt    i,mx,xm,xs;
  PetscScalar v[3],h;
  MatStencil  row,col[3];
  DM          da,shell;

  PetscFunctionBeginUser;
  PetscCall(KSPGetDM(ksp,&shell));
  PetscCall(DMShellGetContext(shell,&da));
  PetscCall(DMDAGetInfo(da,0,&mx,0,0,0,0,0,0,0,0,0,0,0));
  PetscCall(DMDAGetCorners(da,&xs,0,0,&xm,0,0));
  h = 1.0/(mx-1);

  for (i=xs; i<xs+xm; i++) {
    row.i = i;
    if (i==0 || i==mx-1) {
      /* Dirichlet boundary rows: diagonal entry only */
      v[0] = 2.0/h;
      PetscCall(MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES));
    } else {
      /* interior rows: standard three-point stencil (-1, 2, -1)/h */
      v[0] = -1.0/h; col[0].i = i-1;
      v[1] =  2.0/h; col[1].i = row.i;
      v[2] = -1.0/h; col[2].i = i+1;
      PetscCall(MatSetValuesStencil(jac,1,&row,3,col,v,INSERT_VALUES));
    }
  }
  PetscCall(MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}

/*TEST

   test:
      nsize: 4
      args: -ksp_monitor -pc_type mg -da_refine 3

TEST*/
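/*
   Example invocation matching the test above (the executable name ex65 is an
   assumption based on PETSc tutorial naming; substitute whatever the build produces):

      mpiexec -n 4 ./ex65 -ksp_monitor -pc_type mg -da_refine 3

   -da_refine 3 is consumed by DMSetFromOptions() and refines the DMDA three times,
   so the DMGetRefineLevel()/DMSetRefineLevel() calls in main() let PCMG deduce that
   four multigrid levels are available.
*/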