static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";

/*
  See ex5.c for details on the equation.
  This code demonstrates the TSAdjoint interface to a system of time-dependent partial differential equations.
  It computes the sensitivity of a component in the final solution, located at the center of the 2D domain, w.r.t. the initial conditions.
  The user does not need to provide any additional functions; the required functions in the original simulation are reused in the adjoint run.

  Runtime options:
    -forwardonly  - run the forward simulation without the adjoint
    -implicitform - provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian will be used
    -aijpc        - set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL)
*/

#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

/* Matrix-free support */
typedef struct {
  PetscReal time;
  Vec       U;
  Vec       Udot;
  PetscReal shift;
  AppCtx    *appctx;
  TS        ts;
} MCtx;

/*
   User-defined routines
*/
PetscErrorCode InitialConditions(DM,Vec);
PetscErrorCode RHSJacobianShell(TS,PetscReal,Vec,Mat,Mat,void*);
PetscErrorCode IJacobianShell(TS,PetscReal,Vec,Vec,PetscReal,Mat,Mat,void*);

/* Set the terminal condition for the adjoint variable: a unit impulse in both
   components at the grid point closest to the physical location (x,y) */
PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
{
  PetscInt i,j,Mx,My,xs,ys,xm,ym;
  Field    **l;

  PetscFunctionBegin;
  CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  /* locate the global i index for x and j index for y */
  i = (PetscInt)(x*(Mx-1));
  j = (PetscInt)(y*(My-1));
  CHKERRQ(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
    /* the i,j vertex is on this process */
    CHKERRQ(DMDAVecGetArray(da,lambda,&l));
    l[j][i].u = 1.0;
    l[j][i].v = 1.0;
    CHKERRQ(DMDAVecRestoreArray(da,lambda,&l));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MyRHSMatMultTranspose(Mat A_shell,Vec X,Vec Y)
{
  MCtx        *mctx;
  AppCtx      *appctx;
  DM          da;
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   hx,hy,sx,sy;
  PetscScalar uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field       **u,**x,**y;
  Vec         localX;

  PetscFunctionBeginUser;
  CHKERRQ(MatShellGetContext(A_shell,&mctx));
  appctx = mctx->appctx;
  CHKERRQ(TSGetDM(mctx->ts,&da));
  CHKERRQ(DMGetLocalVector(da,&localX));
  CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
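  /* Apply the transpose of the RHS Jacobian matrix-free: Y = J^T X, where J is
     the Jacobian of the reaction-diffusion right-hand side linearized at the
     state saved in mctx->U by RHSJacobianShell(). The diffusion stencil is
     symmetric; only the reaction coupling terms change under transposition. */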
  /* Scatter ghost points to the local vector using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  CHKERRQ(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  CHKERRQ(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));

  /* Get pointers to vector data */
  CHKERRQ(DMDAVecGetArrayRead(da,localX,&x));
  CHKERRQ(DMDAVecGetArrayRead(da,mctx->U,&u));
  CHKERRQ(DMDAVecGetArray(da,Y,&y));

  /* Get local grid boundaries */
  CHKERRQ(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
      uyy = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      y[j][i].u = appctx->D1*(uxx + uyy) - ucb*(vc*vc+appctx->gamma) + vc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) - 2.0*uc*vc*ucb + (2.0*uc*vc-appctx->gamma-appctx->kappa)*vcb;
    }
  }
  CHKERRQ(DMDAVecRestoreArrayRead(da,localX,&x));
  CHKERRQ(DMDAVecRestoreArrayRead(da,mctx->U,&u));
  CHKERRQ(DMDAVecRestoreArray(da,Y,&y));
  CHKERRQ(DMRestoreLocalVector(da,&localX));
  PetscFunctionReturn(0);
}

static PetscErrorCode MyIMatMultTranspose(Mat A_shell,Vec X,Vec Y)
{
  MCtx        *mctx;
  AppCtx      *appctx;
  DM          da;
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   hx,hy,sx,sy;
  PetscScalar uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field       **u,**x,**y;
  Vec         localX;

  PetscFunctionBeginUser;
  CHKERRQ(MatShellGetContext(A_shell,&mctx));
  appctx = mctx->appctx;
  CHKERRQ(TSGetDM(mctx->ts,&da));
  CHKERRQ(DMGetLocalVector(da,&localX));
  CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
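  /* Apply the transpose of the IJacobian matrix-free: Y = (shift*I - J)^T X.
     With the implicit form F(t,U,Udot) = Udot - RHSFunction(t,U), the IJacobian
     shift*dF/dUdot + dF/dU reduces to shift*I - J, where J is the RHS Jacobian
     at the state saved in mctx->U and shift was stashed by IJacobianShell(). */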
  /* Scatter ghost points to the local vector using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  CHKERRQ(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  CHKERRQ(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));

  /* Get pointers to vector data */
  CHKERRQ(DMDAVecGetArrayRead(da,localX,&x));
  CHKERRQ(DMDAVecGetArrayRead(da,mctx->U,&u));
  CHKERRQ(DMDAVecGetArray(da,Y,&y));

  /* Get local grid boundaries */
  CHKERRQ(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
      uyy = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      /* y = J^T x, as in MyRHSMatMultTranspose() ... */
      y[j][i].u = appctx->D1*(uxx + uyy) - ucb*(vc*vc+appctx->gamma) + vc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) - 2.0*uc*vc*ucb + (2.0*uc*vc-appctx->gamma-appctx->kappa)*vcb;
      /* ... then y = shift*x - y */
      y[j][i].u = mctx->shift*ucb - y[j][i].u;
      y[j][i].v = mctx->shift*vcb - y[j][i].v;
    }
  }
  CHKERRQ(DMDAVecRestoreArrayRead(da,localX,&x));
  CHKERRQ(DMDAVecRestoreArrayRead(da,mctx->U,&u));
  CHKERRQ(DMDAVecRestoreArray(da,Y,&y));
  CHKERRQ(DMRestoreLocalVector(da,&localX));
  PetscFunctionReturn(0);
}

static PetscErrorCode MyIMatMult(Mat A_shell,Vec X,Vec Y)
{
  MCtx        *mctx;
  AppCtx      *appctx;
  DM          da;
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   hx,hy,sx,sy;
  PetscScalar uc,uxx,uyy,vc,vxx,vyy,ucb,vcb;
  Field       **u,**x,**y;
  Vec         localX;

  PetscFunctionBeginUser;
  CHKERRQ(MatShellGetContext(A_shell,&mctx));
  appctx = mctx->appctx;
  CHKERRQ(TSGetDM(mctx->ts,&da));
  CHKERRQ(DMGetLocalVector(da,&localX));
  CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
  hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
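  /* Apply the IJacobian matrix-free: Y = (shift*I - J) X. This non-transposed
     action is what the implicit integrator's linear solves need in the forward
     run; it differs from MyIMatMultTranspose() only in the off-diagonal
     reaction coupling terms. */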
  /* Scatter ghost points to the local vector using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit. */
  CHKERRQ(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
  CHKERRQ(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));

  /* Get pointers to vector data */
  CHKERRQ(DMDAVecGetArrayRead(da,localX,&x));
  CHKERRQ(DMDAVecGetArrayRead(da,mctx->U,&u));
  CHKERRQ(DMDAVecGetArray(da,Y,&y));

  /* Get local grid boundaries */
  CHKERRQ(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /* Compute function over the locally owned part of the grid */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc  = u[j][i].u;
      ucb = x[j][i].u;
      uxx = (-2.0*ucb + x[j][i-1].u + x[j][i+1].u)*sx;
      uyy = (-2.0*ucb + x[j-1][i].u + x[j+1][i].u)*sy;
      vc  = u[j][i].v;
      vcb = x[j][i].v;
      vxx = (-2.0*vcb + x[j][i-1].v + x[j][i+1].v)*sx;
      vyy = (-2.0*vcb + x[j-1][i].v + x[j+1][i].v)*sy;
      /* y = J x ... */
      y[j][i].u = appctx->D1*(uxx + uyy) - (vc*vc+appctx->gamma)*ucb - 2.0*uc*vc*vcb;
      y[j][i].v = appctx->D2*(vxx + vyy) + vc*vc*ucb + (2.0*uc*vc-appctx->gamma-appctx->kappa)*vcb;
      /* ... then y = shift*x - y */
      y[j][i].u = mctx->shift*ucb - y[j][i].u;
      y[j][i].v = mctx->shift*vcb - y[j][i].v;
    }
  }
  CHKERRQ(DMDAVecRestoreArrayRead(da,localX,&x));
  CHKERRQ(DMDAVecRestoreArrayRead(da,mctx->U,&u));
  CHKERRQ(DMDAVecRestoreArray(da,Y,&y));
  CHKERRQ(DMRestoreLocalVector(da,&localX));
  PetscFunctionReturn(0);
}

int main(int argc,char **argv)
{
  TS             ts;   /* ODE integrator */
  Vec            x;    /* solution */
  PetscErrorCode ierr;
  DM             da;
  AppCtx         appctx;
  MCtx           mctx;
  Vec            lambda[1];
  PetscBool      forwardonly = PETSC_FALSE,implicitform = PETSC_TRUE,mf = PETSC_FALSE;
  PetscLogDouble v1,v2;
#if defined(PETSC_USE_LOG)
  PetscLogStage  stage;
#endif

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL));
  appctx.aijpc = PETSC_FALSE;
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL));
  CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-mf",&mf,NULL));

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  CHKERRQ(PetscLogStageRegister("MyAdjoint",&stage));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,64,64,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
  CHKERRQ(DMSetFromOptions(da));
  CHKERRQ(DMSetUp(da));
  CHKERRQ(DMDASetFieldName(da,0,"u"));
  CHKERRQ(DMDASetFieldName(da,1,"v"));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Extract global vectors from DMDA; then duplicate for remaining
     vectors that are the same types
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(DMCreateGlobalVector(da,&x));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(TSCreate(PETSC_COMM_WORLD,&ts));
  CHKERRQ(TSSetDM(ts,da));
  CHKERRQ(TSSetProblemType(ts,TS_NONLINEAR));
  CHKERRQ(TSSetEquationType(ts,TS_EQ_ODE_EXPLICIT)); /* fewer Jacobian evaluations when the adjoint of backward Euler is used; otherwise no effect */
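  /* Choose between the explicit formulation (TSRK driven by RHSFunction and
     RHSJacobian) and the implicit formulation (TSCN driven by IFunction and
     IJacobian). The adjoint run reuses whichever set of callbacks is given. */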
  if (!implicitform) {
    CHKERRQ(TSSetType(ts,TSRK));
    CHKERRQ(TSSetRHSFunction(ts,NULL,RHSFunction,&appctx));
    CHKERRQ(TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx));
  } else {
    CHKERRQ(TSSetType(ts,TSCN));
    CHKERRQ(TSSetIFunction(ts,NULL,IFunction,&appctx));
    if (appctx.aijpc) {
      Mat A,B;

      CHKERRQ(DMSetMatType(da,MATSELL));
      CHKERRQ(DMCreateMatrix(da,&A));
      CHKERRQ(MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B));
      /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
      CHKERRQ(TSSetIJacobian(ts,A,B,IJacobian,&appctx));
      CHKERRQ(MatDestroy(&A));
      CHKERRQ(MatDestroy(&B));
    } else {
      CHKERRQ(TSSetIJacobian(ts,NULL,NULL,IJacobian,&appctx));
    }
  }

  if (mf) {
    PetscInt xm,ym,Mx,My,dof;

    mctx.ts     = ts;
    mctx.appctx = &appctx;
    CHKERRQ(VecDuplicate(x,&mctx.U));
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Create matrix-free context
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,&dof,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
    CHKERRQ(DMDAGetCorners(da,NULL,NULL,NULL,&xm,&ym,NULL));
    CHKERRQ(MatCreateShell(PETSC_COMM_WORLD,dof*xm*ym,PETSC_DETERMINE,dof*Mx*My,dof*Mx*My,&mctx,&appctx.A));
    CHKERRQ(MatShellSetOperation(appctx.A,MATOP_MULT_TRANSPOSE,(void (*)(void))MyRHSMatMultTranspose));
    if (!implicitform) { /* for explicit methods only */
      CHKERRQ(TSSetRHSJacobian(ts,appctx.A,appctx.A,RHSJacobianShell,&appctx));
    } else {
      /* CHKERRQ(VecDuplicate(appctx.U,&mctx.Udot)); */
      CHKERRQ(MatShellSetOperation(appctx.A,MATOP_MULT,(void (*)(void))MyIMatMult));
      CHKERRQ(MatShellSetOperation(appctx.A,MATOP_MULT_TRANSPOSE,(void (*)(void))MyIMatMultTranspose));
      CHKERRQ(TSSetIJacobian(ts,appctx.A,appctx.A,IJacobianShell,&appctx));
    }
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(InitialConditions(da,x));
  CHKERRQ(TSSetSolution(ts,x));

  /*
     Have the TS save its trajectory so that TSAdjointSolve() may be used
  */
  if (!forwardonly) CHKERRQ(TSSetSaveTrajectory(ts));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(TSSetMaxTime(ts,200.0));
  CHKERRQ(TSSetTimeStep(ts,0.5));
  CHKERRQ(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_MATCHSTEP));
  CHKERRQ(TSSetFromOptions(ts));

  CHKERRQ(PetscTime(&v1));
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(TSSolve(ts,x));
  if (!forwardonly) {
    /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Start the adjoint model
       - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
    CHKERRQ(VecDuplicate(x,&lambda[0]));
    /* Reset initial conditions for the adjoint integration */
    CHKERRQ(InitializeLambda(da,lambda[0],0.5,0.5));
    CHKERRQ(TSSetCostGradients(ts,1,lambda,NULL));
    CHKERRQ(PetscLogStagePush(stage));
    CHKERRQ(TSAdjointSolve(ts));
    CHKERRQ(PetscLogStagePop());
    CHKERRQ(VecDestroy(&lambda[0]));
  }
  CHKERRQ(PetscTime(&v2));
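  /* v2-v1 is the wall-clock time of the forward solve plus, unless -forwardonly
     is set, the adjoint solve; it is printed for quick timing comparisons. */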
  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD," %.3lf ",v2-v1));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  CHKERRQ(VecDestroy(&x));
  CHKERRQ(TSDestroy(&ts));
  CHKERRQ(DMDestroy(&da));
  if (mf) {
    CHKERRQ(VecDestroy(&mctx.U));
    /* CHKERRQ(VecDestroy(&mctx.Udot)); */
    CHKERRQ(MatDestroy(&appctx.A));
  }
  ierr = PetscFinalize();
  return ierr;
}

/* ------------------------------------------------------------------- */
/* Shell "Jacobian evaluation": record the current state so the matrix-free
   multiply routines can linearize about it */
PetscErrorCode RHSJacobianShell(TS ts,PetscReal t,Vec U,Mat A,Mat BB,void *ctx)
{
  MCtx *mctx;

  PetscFunctionBegin;
  CHKERRQ(MatShellGetContext(A,&mctx));
  CHKERRQ(VecCopy(U,mctx->U));
  PetscFunctionReturn(0);
}

/* Record the current state and the shift parameter for the matrix-free
   IJacobian multiply routines */
PetscErrorCode IJacobianShell(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat BB,void *ctx)
{
  MCtx *mctx;

  PetscFunctionBegin;
  CHKERRQ(MatShellGetContext(A,&mctx));
  CHKERRQ(VecCopy(U,mctx->U));
  /* CHKERRQ(VecCopy(Udot,mctx->Udot)); */
  mctx->shift = a;
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  PetscFunctionBegin;
  CHKERRQ(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  /*
     Get pointers to vector data
  */
  CHKERRQ(DMDAVecGetArray(da,U,&u));

  /*
     Get local grid boundaries
  */
  CHKERRQ(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  CHKERRQ(DMDAVecRestoreArray(da,U,&u));
  PetscFunctionReturn(0);
}

/*TEST

   build:
      depends: reaction_diffusion.c
      requires: !complex !single

   test:
      requires: cams
      args: -mf -ts_max_steps 10 -ts_monitor -ts_adjoint_monitor -ts_trajectory_type memory -ts_trajectory_solution_only 0 -ts_trajectory_max_units_ram 6 -ts_trajectory_memory_type cams

TEST*/