xref: /petsc/src/ts/tutorials/autodiff/adr_ex5adj_mf.cxx (revision ebead697dbf761eb322f829370bbe90b3bd93fa3)
1 static char help[] = "Demonstrates automatic, matrix-free Jacobian generation using ADOL-C for a time-dependent PDE in 2d, solved using implicit timestepping.\n";
2 
3 /*
4    REQUIRES configuration of PETSc with option --download-adolc.
5 
6    For documentation on ADOL-C, see
7      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
8 */
9 /* ------------------------------------------------------------------------
10   See ../advection-diffusion-reaction/ex5 for a description of the problem
11   ------------------------------------------------------------------------- */
12 
13 #include <petscdmda.h>
14 #include <petscts.h>
15 #include "adolc-utils/init.cxx"
16 #include "adolc-utils/matfree.cxx"
17 #include <adolc/adolc.h>
18 
19 /* (Passive) field for the two variables */
20 typedef struct {
21   PetscScalar u,v;
22 } Field;
23 
24 /* Active field for the two variables */
25 typedef struct {
26   adouble u,v;
27 } AField;
28 
29 /* Application context */
30 typedef struct {
31   PetscReal D1,D2,gamma,kappa;
32   AField    **u_a,**f_a;
33   AdolcCtx  *adctx; /* Automatic differentation support */
34 } AppCtx;
35 
36 extern PetscErrorCode InitialConditions(DM da,Vec U);
37 extern PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y);
38 extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr);
39 extern PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr);
40 extern PetscErrorCode IJacobianMatFree(TS ts,PetscReal t,Vec X,Vec Xdot,PetscReal a,Mat A_shell,Mat B,void *ctx);
41 
42 int main(int argc,char **argv)
43 {
44   TS             ts;                  /* ODE integrator */
45   Vec            x,r;                 /* solution, residual */
46   DM             da;
47   AppCtx         appctx;              /* Application context */
48   AdolcMatCtx    matctx;              /* Matrix (free) context */
49   Vec            lambda[1];
50   PetscBool      forwardonly=PETSC_FALSE;
51   Mat            A;                   /* (Matrix free) Jacobian matrix */
52   PetscInt       gxm,gym;
53 
54   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
55      Initialize program
56      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
57   PetscFunctionBeginUser;
58   PetscCall(PetscInitialize(&argc,&argv,NULL,help));
59   PetscCall(PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL));
60   PetscFunctionBeginUser;
61   appctx.D1    = 8.0e-5;
62   appctx.D2    = 4.0e-5;
63   appctx.gamma = .024;
64   appctx.kappa = .06;
65   PetscCall(PetscLogEventRegister("df/dx forward",MAT_CLASSID,&matctx.event1));
66   PetscCall(PetscLogEventRegister("df/d(xdot) forward",MAT_CLASSID,&matctx.event2));
67   PetscCall(PetscLogEventRegister("df/dx reverse",MAT_CLASSID,&matctx.event3));
68   PetscCall(PetscLogEventRegister("df/d(xdot) reverse",MAT_CLASSID,&matctx.event4));
69 
70   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
71      Create distributed array (DMDA) to manage parallel grid and vectors
72   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
73   PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da));
74   PetscCall(DMSetFromOptions(da));
75   PetscCall(DMSetUp(da));
76   PetscCall(DMDASetFieldName(da,0,"u"));
77   PetscCall(DMDASetFieldName(da,1,"v"));
78 
79   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
80      Extract global vectors from DMDA; then duplicate for remaining
81      vectors that are the same types
82    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
83   PetscCall(DMCreateGlobalVector(da,&x));
84   PetscCall(VecDuplicate(x,&r));
85 
86   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
87     Create matrix free context and specify usage of PETSc-ADOL-C drivers
88     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
89   PetscCall(DMSetMatType(da,MATSHELL));
90   PetscCall(DMCreateMatrix(da,&A));
91   PetscCall(MatShellSetContext(A,&matctx));
92   PetscCall(MatShellSetOperation(A,MATOP_MULT,(void (*)(void))PetscAdolcIJacobianVectorProductIDMass));
93   PetscCall(MatShellSetOperation(A,MATOP_MULT_TRANSPOSE,(void (*)(void))PetscAdolcIJacobianTransposeVectorProductIDMass));
94   PetscCall(VecDuplicate(x,&matctx.X));
95   PetscCall(VecDuplicate(x,&matctx.Xdot));
96   PetscCall(DMGetLocalVector(da,&matctx.localX0));
97 
98   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
99      Create timestepping solver context
100      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
101   PetscCall(TSCreate(PETSC_COMM_WORLD,&ts));
102   PetscCall(TSSetType(ts,TSCN));
103   PetscCall(TSSetDM(ts,da));
104   PetscCall(TSSetProblemType(ts,TS_NONLINEAR));
105   PetscCall(DMDATSSetIFunctionLocal(da,INSERT_VALUES,(DMDATSIFunctionLocal)IFunctionLocalPassive,&appctx));
106 
107   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
108     Some data required for matrix-free context
109      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
110   PetscCall(DMDAGetGhostCorners(da,NULL,NULL,NULL,&gxm,&gym,NULL));
111   matctx.m = 2*gxm*gym;matctx.n = 2*gxm*gym; /* Number of dependent and independent variables */
112   matctx.flg = PETSC_FALSE;                  /* Flag for reverse mode */
113   matctx.tag1 = 1;                           /* Tape identifier */
114 
115   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
116      Trace function just once
117    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
118   PetscCall(PetscNew(&appctx.adctx));
119   PetscCall(IFunctionActive(ts,1.,x,matctx.Xdot,r,&appctx));
120   PetscCall(PetscFree(appctx.adctx));
121 
122   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
123      Set Jacobian. In this case, IJacobian simply acts to pass context
124      information to the matrix-free Jacobian vector product.
125    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
126   PetscCall(TSSetIJacobian(ts,A,A,IJacobianMatFree,&appctx));
127 
128   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
129      Set initial conditions
130    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
131   PetscCall(InitialConditions(da,x));
132   PetscCall(TSSetSolution(ts,x));
133 
134   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
135     Have the TS save its trajectory so that TSAdjointSolve() may be used
136     and set solver options
137    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
138   if (!forwardonly) {
139     PetscCall(TSSetSaveTrajectory(ts));
140     PetscCall(TSSetMaxTime(ts,200.0));
141     PetscCall(TSSetTimeStep(ts,0.5));
142   } else {
143     PetscCall(TSSetMaxTime(ts,2000.0));
144     PetscCall(TSSetTimeStep(ts,10));
145   }
146   PetscCall(TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER));
147   PetscCall(TSSetFromOptions(ts));
148 
149   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
150      Solve ODE system
151      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
152   PetscCall(TSSolve(ts,x));
153   if (!forwardonly) {
154     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
155        Start the Adjoint model
156        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
157     PetscCall(VecDuplicate(x,&lambda[0]));
158     /*   Reset initial conditions for the adjoint integration */
159     PetscCall(InitializeLambda(da,lambda[0],0.5,0.5));
160     PetscCall(TSSetCostGradients(ts,1,lambda,NULL));
161     PetscCall(TSAdjointSolve(ts));
162     PetscCall(VecDestroy(&lambda[0]));
163   }
164 
165   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
166      Free work space.  All PETSc objects should be destroyed when they
167      are no longer needed.
168    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
169   PetscCall(DMRestoreLocalVector(da,&matctx.localX0));
170   PetscCall(VecDestroy(&r));
171   PetscCall(VecDestroy(&matctx.X));
172   PetscCall(VecDestroy(&matctx.Xdot));
173   PetscCall(MatDestroy(&A));
174   PetscCall(VecDestroy(&x));
175   PetscCall(TSDestroy(&ts));
176   PetscCall(DMDestroy(&da));
177 
178   PetscCall(PetscFinalize());
179   return 0;
180 }
181 
182 PetscErrorCode InitialConditions(DM da,Vec U)
183 {
184   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
185   Field          **u;
186   PetscReal      hx,hy,x,y;
187 
188   PetscFunctionBegin;
189   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
190 
191   hx = 2.5/(PetscReal)Mx;
192   hy = 2.5/(PetscReal)My;
193 
194   /*
195      Get pointers to vector data
196   */
197   PetscCall(DMDAVecGetArray(da,U,&u));
198 
199   /*
200      Get local grid boundaries
201   */
202   PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
203 
204   /*
205      Compute function over the locally owned part of the grid
206   */
207   for (j=ys; j<ys+ym; j++) {
208     y = j*hy;
209     for (i=xs; i<xs+xm; i++) {
210       x = i*hx;
211       if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
212       else u[j][i].v = 0.0;
213 
214       u[j][i].u = 1.0 - 2.0*u[j][i].v;
215     }
216   }
217 
218   /*
219      Restore vectors
220   */
221   PetscCall(DMDAVecRestoreArray(da,U,&u));
222   PetscFunctionReturn(0);
223 }
224 
225 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
226 {
227    PetscInt i,j,Mx,My,xs,ys,xm,ym;
228    Field **l;
229 
230    PetscFunctionBegin;
231    PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
232    /* locate the global i index for x and j index for y */
233    i = (PetscInt)(x*(Mx-1));
234    j = (PetscInt)(y*(My-1));
235    PetscCall(DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL));
236 
237    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
238      /* the i,j vertex is on this process */
239      PetscCall(DMDAVecGetArray(da,lambda,&l));
240      l[j][i].u = 1.0;
241      l[j][i].v = 1.0;
242      PetscCall(DMDAVecRestoreArray(da,lambda,&l));
243    }
244    PetscFunctionReturn(0);
245 }
246 
247 PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr)
248 {
249   AppCtx         *appctx = (AppCtx*)ptr;
250   PetscInt       i,j,xs,ys,xm,ym;
251   PetscReal      hx,hy,sx,sy;
252   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
253 
254   PetscFunctionBegin;
255   hx = 2.50/(PetscReal)(info->mx); sx = 1.0/(hx*hx);
256   hy = 2.50/(PetscReal)(info->my); sy = 1.0/(hy*hy);
257 
258   /* Get local grid boundaries */
259   xs = info->xs; xm = info->xm; ys = info->ys; ym = info->ym;
260 
261   /* Compute function over the locally owned part of the grid */
262   for (j=ys; j<ys+ym; j++) {
263     for (i=xs; i<xs+xm; i++) {
264       uc        = u[j][i].u;
265       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
266       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
267       vc        = u[j][i].v;
268       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
269       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
270       f[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
271       f[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
272     }
273   }
274   PetscCall(PetscLogFlops(16.0*xm*ym));
275   PetscFunctionReturn(0);
276 }
277 
/*
  Active twin of IFunctionLocalPassive: evaluates the same residual using
  ADOL-C active variables (adouble) so that the computation is recorded on
  ADOL-C tape 1 (matctx.tag1 in main).  Called exactly once from main; the
  resulting tape is then replayed by the matrix-free Jacobian drivers.

  NOTE(review): udot is read passively (as PetscScalar) inside the active
  section, so the tape presumably encodes the residual as a function of U
  only, with the shift/Udot contribution handled separately by the
  *IDMass drivers in adolc-utils/matfree.cxx -- confirm there.
*/
PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
{
  AppCtx         *appctx = (AppCtx*)ptr;
  DM             da;
  DMDALocalInfo  info;
  Field          **u,**f,**udot;
  Vec            localU;
  PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscReal      hx,hy,sx,sy;
  adouble        uc,uxx,uyy,vc,vxx,vyy;   /* active stencil temporaries */
  AField         **f_a,*f_c,**u_a,*u_c;   /* active counterparts of f and u */
  PetscScalar    dummy;                   /* sink for ghost-point dependents */

  PetscFunctionBegin;
  PetscCall(TSGetDM(ts,&da));
  PetscCall(DMDAGetLocalInfo(da,&info));
  PetscCall(DMGetLocalVector(da,&localU));
  /* Mesh spacing on the 2.5 x 2.5 domain; sx,sy are inverse squared spacings */
  hx = 2.50/(PetscReal)(info.mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(info.my); sy = 1.0/(hy*hy);
  /* Owned (xs..,ys..) and ghosted (gxs..,gys..) local grid extents */
  xs = info.xs; xm = info.xm; gxs = info.gxs; gxm = info.gxm;
  ys = info.ys; ym = info.ym; gys = info.gys; gym = info.gym;

  /*
     Scatter ghost points to local vector,using the 2-step process
        DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU));
  PetscCall(DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU));

  /*
     Get pointers to vector data
  */
  PetscCall(DMDAVecGetArrayRead(da,localU,&u));
  PetscCall(DMDAVecGetArray(da,F,&f));
  PetscCall(DMDAVecGetArrayRead(da,Udot,&udot));

  /*
    Create contiguous 1-arrays of AFields

    NOTE: Memory for ADOL-C active variables (such as adouble and AField)
          cannot be allocated using PetscMalloc, as this does not call the
          relevant class constructor. Instead, we use the C++ keyword `new`.
  */
  u_c = new AField[info.gxm*info.gym];
  f_c = new AField[info.gxm*info.gym];

  /* Create corresponding 2-arrays of AFields */
  u_a = new AField*[info.gym];
  f_a = new AField*[info.gym];

  /* Align indices between array types to endow 2d array with ghost points.
     After this call u_a/f_a are indexable with GLOBAL (ghosted) indices,
     i.e. u_a[gys..gys+gym-1][gxs..gxs+gxm-1], like the DMDA arrays above. */
  PetscCall(GiveGhostPoints(da,u_c,&u_a));
  PetscCall(GiveGhostPoints(da,f_c,&f_a));

  trace_on(1);  /* Start of active section on tape 1 */

  /*
    Mark independence

    NOTE: Ghost points are marked as independent, in place of the points they represent on
          other processors / on other boundaries.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      u_a[j][i].u <<= u[j][i].u;
      u_a[j][i].v <<= u[j][i].v;
    }
  }

  /* Compute function over the locally owned part of the grid.
     Mirrors IFunctionLocalPassive exactly, but with active arithmetic. */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u_a[j][i].u;
      uxx       = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
      vc        = u_a[j][i].v;
      vxx       = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
      f_a[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
      f_a[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
    }
  }

  /*
    Mark dependence

    NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
          the Jacobian later.  Ghost rows/columns were never assigned in the loop above, so their
          dependents are routed into `dummy` rather than into F.
  */
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm; i++) {
      if ((i < xs) || (i >= xs+xm) || (j < ys) || (j >= ys+ym)) {
        f_a[j][i].u >>= dummy;
        f_a[j][i].v >>= dummy;
      } else {
        f_a[j][i].u >>= f[j][i].u;
        f_a[j][i].v >>= f[j][i].v;
      }
    }
  }
  trace_off();  /* End of active section */
  PetscCall(PetscLogFlops(16.0*xm*ym));

  /* Restore vectors */
  PetscCall(DMDAVecRestoreArray(da,F,&f));
  PetscCall(DMDAVecRestoreArrayRead(da,localU,&u));
  PetscCall(DMDAVecRestoreArrayRead(da,Udot,&udot));

  PetscCall(DMRestoreLocalVector(da,&localU));

  /* Destroy AFields appropriately: undo the ghost-index offset applied by
     GiveGhostPoints so delete[] receives the pointers originally returned
     by new[] */
  f_a += info.gys;
  u_a += info.gys;
  delete[] f_a;
  delete[] u_a;
  delete[] f_c;
  delete[] u_c;
  PetscFunctionReturn(0);
}
399 
400 /*
401   Simply acts to pass TS information to the AdolcMatCtx
402 */
403 PetscErrorCode IJacobianMatFree(TS ts,PetscReal t,Vec X,Vec Xdot,PetscReal a,Mat A_shell,Mat B,void *ctx)
404 {
405   AdolcMatCtx       *mctx;
406   DM                da;
407 
408   PetscFunctionBeginUser;
409   PetscCall(MatShellGetContext(A_shell,&mctx));
410 
411   mctx->time  = t;
412   mctx->shift = a;
413   if (mctx->ts != ts) mctx->ts = ts;
414   PetscCall(VecCopy(X,mctx->X));
415   PetscCall(VecCopy(Xdot,mctx->Xdot));
416   PetscCall(TSGetDM(ts,&da));
417   PetscCall(DMGlobalToLocalBegin(da,mctx->X,INSERT_VALUES,mctx->localX0));
418   PetscCall(DMGlobalToLocalEnd(da,mctx->X,INSERT_VALUES,mctx->localX0));
419   PetscFunctionReturn(0);
420 }
421 
422 /*TEST
423 
424   build:
425     requires: double !complex adolc
426 
427   test:
428     suffix: 1
429     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
430     output_file: output/adr_ex5adj_mf_1.out
431 
432   test:
433     suffix: 2
434     nsize: 4
435     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
436     output_file: output/adr_ex5adj_mf_2.out
437 
438 TEST*/
439