1 2 static char help[] ="Solves a simple data assimilation problem with one dimensional Burger's equation using TSAdjoint\n\n"; 3 4 /* 5 6 Not yet tested in parallel 7 8 */ 9 /* 10 Concepts: TS^time-dependent nonlinear problems 11 Concepts: TS^Burger's equation 12 Concepts: adjoints 13 Processors: n 14 */ 15 16 /* ------------------------------------------------------------------------ 17 18 This program uses the one-dimensional Burger's equation 19 u_t = mu*u_xx - u u_x, 20 on the domain 0 <= x <= 1, with periodic boundary conditions 21 22 to demonstrate solving a data assimilation problem of finding the initial conditions 23 to produce a given solution at a fixed time. 24 25 The operators are discretized with the spectral element method 26 27 See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO 28 by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution 29 used 30 31 ------------------------------------------------------------------------- */ 32 33 #include <petsctao.h> 34 #include <petscts.h> 35 #include <petscdt.h> 36 #include <petscdraw.h> 37 #include <petscdmda.h> 38 39 /* 40 User-defined application context - contains data needed by the 41 application-provided call-back routines. 
42 */ 43 44 typedef struct { 45 PetscInt n; /* number of nodes */ 46 PetscReal *nodes; /* GLL nodes */ 47 PetscReal *weights; /* GLL weights */ 48 } PetscGLL; 49 50 typedef struct { 51 PetscInt N; /* grid points per elements*/ 52 PetscInt E; /* number of elements */ 53 PetscReal tol_L2,tol_max; /* error norms */ 54 PetscInt steps; /* number of timesteps */ 55 PetscReal Tend; /* endtime */ 56 PetscReal mu; /* viscosity */ 57 PetscReal L; /* total length of domain */ 58 PetscReal Le; 59 PetscReal Tadj; 60 } PetscParam; 61 62 typedef struct { 63 Vec obj; /* desired end state */ 64 Vec grid; /* total grid */ 65 Vec grad; 66 Vec ic; 67 Vec curr_sol; 68 Vec true_solution; /* actual initial conditions for the final solution */ 69 } PetscData; 70 71 typedef struct { 72 Vec grid; /* total grid */ 73 Vec mass; /* mass matrix for total integration */ 74 Mat stiff; /* stifness matrix */ 75 Mat keptstiff; 76 Mat grad; 77 PetscGLL gll; 78 } PetscSEMOperators; 79 80 typedef struct { 81 DM da; /* distributed array data structure */ 82 PetscSEMOperators SEMop; 83 PetscParam param; 84 PetscData dat; 85 TS ts; 86 PetscReal initial_dt; 87 } AppCtx; 88 89 /* 90 User-defined routines 91 */ 92 extern PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*); 93 extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*); 94 extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*); 95 extern PetscErrorCode InitialConditions(Vec,AppCtx*); 96 extern PetscErrorCode TrueSolution(Vec,AppCtx*); 97 extern PetscErrorCode ComputeObjective(PetscReal,Vec,AppCtx*); 98 extern PetscErrorCode MonitorError(Tao,void*); 99 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*); 100 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*); 101 102 int main(int argc,char **argv) 103 { 104 AppCtx appctx; /* user-defined application context */ 105 Tao tao; 106 Vec u; /* approximate solution vector */ 107 PetscErrorCode ierr; 108 PetscInt i, xs, xm, 
ind, j, lenglob; 109 PetscReal x, *wrk_ptr1, *wrk_ptr2; 110 MatNullSpace nsp; 111 PetscMPIInt size; 112 113 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 114 Initialize program and set problem parameters 115 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 116 PetscFunctionBegin; 117 118 ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr; 119 120 /*initialize parameters */ 121 appctx.param.N = 10; /* order of the spectral element */ 122 appctx.param.E = 10; /* number of elements */ 123 appctx.param.L = 4.0; /* length of the domain */ 124 appctx.param.mu = 0.01; /* diffusion coefficient */ 125 appctx.initial_dt = 5e-3; 126 appctx.param.steps = PETSC_MAX_INT; 127 appctx.param.Tend = 4; 128 129 ierr = PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL);CHKERRQ(ierr); 130 ierr = PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL);CHKERRQ(ierr); 131 ierr = PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL);CHKERRQ(ierr); 132 ierr = PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL);CHKERRQ(ierr); 133 appctx.param.Le = appctx.param.L/appctx.param.E; 134 135 ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr); 136 if (appctx.param.E % size) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes"); 137 138 139 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 140 Create GLL data structures 141 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 142 ierr = PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights);CHKERRQ(ierr); 143 ierr = PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);CHKERRQ(ierr); 144 appctx.SEMop.gll.n = appctx.param.N; 145 lenglob = appctx.param.E*(appctx.param.N-1); 146 147 /* 148 Create distributed array (DMDA) to manage 
parallel grid and vectors 149 and to set up the ghost point communication pattern. There are E*(Nl-1)+1 150 total grid values spread equally among all the processors, except first and last 151 */ 152 153 ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da);CHKERRQ(ierr); 154 ierr = DMSetFromOptions(appctx.da);CHKERRQ(ierr); 155 ierr = DMSetUp(appctx.da);CHKERRQ(ierr); 156 157 /* 158 Extract global and local vectors from DMDA; we use these to store the 159 approximate solution. Then duplicate these for remaining vectors that 160 have the same types. 161 */ 162 163 ierr = DMCreateGlobalVector(appctx.da,&u);CHKERRQ(ierr); 164 ierr = VecDuplicate(u,&appctx.dat.ic);CHKERRQ(ierr); 165 ierr = VecDuplicate(u,&appctx.dat.true_solution);CHKERRQ(ierr); 166 ierr = VecDuplicate(u,&appctx.dat.obj);CHKERRQ(ierr); 167 ierr = VecDuplicate(u,&appctx.SEMop.grid);CHKERRQ(ierr); 168 ierr = VecDuplicate(u,&appctx.SEMop.mass);CHKERRQ(ierr); 169 ierr = VecDuplicate(u,&appctx.dat.curr_sol);CHKERRQ(ierr); 170 171 ierr = DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);CHKERRQ(ierr); 172 ierr = DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);CHKERRQ(ierr); 173 ierr = DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);CHKERRQ(ierr); 174 175 /* Compute function over the locally owned part of the grid */ 176 177 xs=xs/(appctx.param.N-1); 178 xm=xm/(appctx.param.N-1); 179 180 /* 181 Build total grid and mass over entire mesh (multi-elemental) 182 */ 183 184 for (i=xs; i<xs+xm; i++) { 185 for (j=0; j<appctx.param.N-1; j++) { 186 x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i; 187 ind=i*(appctx.param.N-1)+j; 188 wrk_ptr1[ind]=x; 189 wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j]; 190 if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j]; 191 } 192 } 193 ierr = DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);CHKERRQ(ierr); 194 ierr = 
DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);CHKERRQ(ierr); 195 196 197 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 198 Create matrix data structure; set matrix evaluation routine. 199 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 200 ierr = DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE);CHKERRQ(ierr); 201 ierr = DMCreateMatrix(appctx.da,&appctx.SEMop.stiff);CHKERRQ(ierr); 202 ierr = DMCreateMatrix(appctx.da,&appctx.SEMop.grad);CHKERRQ(ierr); 203 /* 204 For linear problems with a time-dependent f(u,t) in the equation 205 u_t = f(u,t), the user provides the discretized right-hand-side 206 as a time-dependent matrix. 207 */ 208 ierr = RHSMatrixLaplaciangllDM(appctx.ts,0.0,u,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx);CHKERRQ(ierr); 209 ierr = RHSMatrixAdvectiongllDM(appctx.ts,0.0,u,appctx.SEMop.grad,appctx.SEMop.grad,&appctx);CHKERRQ(ierr); 210 /* 211 For linear problems with a time-dependent f(u,t) in the equation 212 u_t = f(u,t), the user provides the discretized right-hand-side 213 as a time-dependent matrix. 
214 */ 215 216 ierr = MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff);CHKERRQ(ierr); 217 218 /* attach the null space to the matrix, this probably is not needed but does no harm */ 219 ierr = MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);CHKERRQ(ierr); 220 ierr = MatSetNullSpace(appctx.SEMop.stiff,nsp);CHKERRQ(ierr); 221 ierr = MatSetNullSpace(appctx.SEMop.keptstiff,nsp);CHKERRQ(ierr); 222 ierr = MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL);CHKERRQ(ierr); 223 ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr); 224 /* attach the null space to the matrix, this probably is not needed but does no harm */ 225 ierr = MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);CHKERRQ(ierr); 226 ierr = MatSetNullSpace(appctx.SEMop.grad,nsp);CHKERRQ(ierr); 227 ierr = MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL);CHKERRQ(ierr); 228 ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr); 229 230 /* Create the TS solver that solves the ODE and its adjoint; set its options */ 231 ierr = TSCreate(PETSC_COMM_WORLD,&appctx.ts);CHKERRQ(ierr); 232 ierr = TSSetProblemType(appctx.ts,TS_NONLINEAR);CHKERRQ(ierr); 233 ierr = TSSetType(appctx.ts,TSRK);CHKERRQ(ierr); 234 ierr = TSSetDM(appctx.ts,appctx.da);CHKERRQ(ierr); 235 ierr = TSSetTime(appctx.ts,0.0);CHKERRQ(ierr); 236 ierr = TSSetTimeStep(appctx.ts,appctx.initial_dt);CHKERRQ(ierr); 237 ierr = TSSetMaxSteps(appctx.ts,appctx.param.steps);CHKERRQ(ierr); 238 ierr = TSSetMaxTime(appctx.ts,appctx.param.Tend);CHKERRQ(ierr); 239 ierr = TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr); 240 ierr = TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL);CHKERRQ(ierr); 241 ierr = TSSetFromOptions(appctx.ts);CHKERRQ(ierr); 242 /* Need to save initial timestep user may have set with -ts_dt so it can be reset for each new TSSolve() */ 243 ierr = TSGetTimeStep(appctx.ts,&appctx.initial_dt);CHKERRQ(ierr); 244 ierr = TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);CHKERRQ(ierr); 245 ierr = 
TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx);CHKERRQ(ierr); 246 247 /* Set Objective and Initial conditions for the problem and compute Objective function (evolution of true_solution to final time */ 248 ierr = InitialConditions(appctx.dat.ic,&appctx);CHKERRQ(ierr); 249 ierr = TrueSolution(appctx.dat.true_solution,&appctx);CHKERRQ(ierr); 250 ierr = ComputeObjective(appctx.param.Tend,appctx.dat.obj,&appctx);CHKERRQ(ierr); 251 252 ierr = TSSetSaveTrajectory(appctx.ts);CHKERRQ(ierr); 253 ierr = TSSetFromOptions(appctx.ts);CHKERRQ(ierr); 254 255 /* Create TAO solver and set desired solution method */ 256 ierr = TaoCreate(PETSC_COMM_WORLD,&tao);CHKERRQ(ierr); 257 ierr = TaoSetMonitor(tao,MonitorError,&appctx,NULL);CHKERRQ(ierr); 258 ierr = TaoSetType(tao,TAOBQNLS);CHKERRQ(ierr); 259 ierr = TaoSetInitialVector(tao,appctx.dat.ic);CHKERRQ(ierr); 260 /* Set routine for function and gradient evaluation */ 261 ierr = TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,(void *)&appctx);CHKERRQ(ierr); 262 /* Check for any TAO command line options */ 263 ierr = TaoSetTolerances(tao,1e-8,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr); 264 ierr = TaoSetFromOptions(tao);CHKERRQ(ierr); 265 ierr = TaoSolve(tao);CHKERRQ(ierr); 266 267 ierr = TaoDestroy(&tao);CHKERRQ(ierr); 268 ierr = MatDestroy(&appctx.SEMop.stiff);CHKERRQ(ierr); 269 ierr = MatDestroy(&appctx.SEMop.keptstiff);CHKERRQ(ierr); 270 ierr = MatDestroy(&appctx.SEMop.grad);CHKERRQ(ierr); 271 ierr = VecDestroy(&u);CHKERRQ(ierr); 272 ierr = VecDestroy(&appctx.dat.ic);CHKERRQ(ierr); 273 ierr = VecDestroy(&appctx.dat.true_solution);CHKERRQ(ierr); 274 ierr = VecDestroy(&appctx.dat.obj);CHKERRQ(ierr); 275 ierr = VecDestroy(&appctx.SEMop.grid);CHKERRQ(ierr); 276 ierr = VecDestroy(&appctx.SEMop.mass);CHKERRQ(ierr); 277 ierr = VecDestroy(&appctx.dat.curr_sol);CHKERRQ(ierr); 278 ierr = PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);CHKERRQ(ierr); 279 ierr = 
DMDestroy(&appctx.da);CHKERRQ(ierr); 280 ierr = TSDestroy(&appctx.ts);CHKERRQ(ierr); 281 282 /* 283 Always call PetscFinalize() before exiting a program. This routine 284 - finalizes the PETSc libraries as well as MPI 285 - provides summary and diagnostic information if certain runtime 286 options are chosen (e.g., -log_summary). 287 */ 288 ierr = PetscFinalize(); 289 return ierr; 290 } 291 292 /* --------------------------------------------------------------------- */ 293 /* 294 InitialConditions - Computes the initial conditions for the Tao optimization solve (these are also initial conditions for the first TSSolve() 295 296 The routine TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function 297 298 Input Parameter: 299 u - uninitialized solution vector (global) 300 appctx - user-defined application context 301 302 Output Parameter: 303 u - vector with solution at initial time (global) 304 */ 305 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx) 306 { 307 PetscScalar *s; 308 const PetscScalar *xg; 309 PetscErrorCode ierr; 310 PetscInt i,xs,xn; 311 312 PetscFunctionBegin; 313 ierr = DMDAVecGetArray(appctx->da,u,&s);CHKERRQ(ierr); 314 ierr = DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr); 315 ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr); 316 for (i=xs; i<xs+xn; i++) { 317 s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])/(2.0+PetscCosScalar(PETSC_PI*xg[i]))+0.25*PetscExpReal(-4.0*PetscPowReal(xg[i]-2.0,2.0)); 318 } 319 ierr = DMDAVecRestoreArray(appctx->da,u,&s);CHKERRQ(ierr); 320 ierr = DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr); 321 PetscFunctionReturn(0); 322 } 323 324 /* 325 TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function. 
326 327 InitialConditions() computes the initial conditions for the begining of the Tao iterations 328 329 Input Parameter: 330 u - uninitialized solution vector (global) 331 appctx - user-defined application context 332 333 Output Parameter: 334 u - vector with solution at initial time (global) 335 */ 336 PetscErrorCode TrueSolution(Vec u,AppCtx *appctx) 337 { 338 PetscScalar *s; 339 const PetscScalar *xg; 340 PetscErrorCode ierr; 341 PetscInt i,xs,xn; 342 343 PetscFunctionBegin; 344 ierr = DMDAVecGetArray(appctx->da,u,&s);CHKERRQ(ierr); 345 ierr = DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr); 346 ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr); 347 for (i=xs; i<xs+xn; i++) { 348 s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])/(2.0+PetscCosScalar(PETSC_PI*xg[i])); 349 } 350 ierr = DMDAVecRestoreArray(appctx->da,u,&s);CHKERRQ(ierr); 351 ierr = DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr); 352 PetscFunctionReturn(0); 353 } 354 /* --------------------------------------------------------------------- */ 355 /* 356 Sets the desired profile for the final end time 357 358 Input Parameters: 359 t - final time 360 obj - vector storing the desired profile 361 appctx - user-defined application context 362 363 */ 364 PetscErrorCode ComputeObjective(PetscReal t,Vec obj,AppCtx *appctx) 365 { 366 PetscScalar *s; 367 const PetscScalar *xg; 368 PetscErrorCode ierr; 369 PetscInt i, xs,xn; 370 371 PetscFunctionBegin; 372 ierr = DMDAVecGetArray(appctx->da,obj,&s);CHKERRQ(ierr); 373 ierr = DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr); 374 ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr); 375 for (i=xs; i<xs+xn; i++) { 376 s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpScalar(-PETSC_PI*PETSC_PI*t*appctx->param.mu)\ 377 
/(2.0+PetscExpScalar(-PETSC_PI*PETSC_PI*t*appctx->param.mu)*PetscCosScalar(PETSC_PI*xg[i])); 378 } 379 ierr = DMDAVecRestoreArray(appctx->da,obj,&s);CHKERRQ(ierr); 380 ierr = DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr); 381 PetscFunctionReturn(0); 382 } 383 384 PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx) 385 { 386 PetscErrorCode ierr; 387 AppCtx *appctx = (AppCtx*)ctx; 388 389 PetscFunctionBegin; 390 ierr = MatMult(appctx->SEMop.grad,globalin,globalout);CHKERRQ(ierr); /* grad u */ 391 ierr = VecPointwiseMult(globalout,globalin,globalout);CHKERRQ(ierr); /* u grad u */ 392 ierr = VecScale(globalout, -1.0);CHKERRQ(ierr); 393 ierr = MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout);CHKERRQ(ierr); 394 PetscFunctionReturn(0); 395 } 396 397 /* 398 399 K is the discretiziation of the Laplacian 400 G is the discretization of the gradient 401 402 Computes Jacobian of K u + diag(u) G u which is given by 403 K + diag(u)G + diag(Gu) 404 */ 405 PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx) 406 { 407 PetscErrorCode ierr; 408 AppCtx *appctx = (AppCtx*)ctx; 409 Vec Gglobalin; 410 411 PetscFunctionBegin; 412 /* A = diag(u) G */ 413 414 ierr = MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 415 ierr = MatDiagonalScale(A,globalin,NULL);CHKERRQ(ierr); 416 417 /* A = A + diag(Gu) */ 418 ierr = VecDuplicate(globalin,&Gglobalin);CHKERRQ(ierr); 419 ierr = MatMult(appctx->SEMop.grad,globalin,Gglobalin);CHKERRQ(ierr); 420 ierr = MatDiagonalSet(A,Gglobalin,ADD_VALUES);CHKERRQ(ierr); 421 ierr = VecDestroy(&Gglobalin);CHKERRQ(ierr); 422 423 /* A = K - A */ 424 ierr = MatScale(A,-1.0);CHKERRQ(ierr); 425 ierr = MatAXPY(A,1.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 426 PetscFunctionReturn(0); 427 } 428 429 /* --------------------------------------------------------------------- */ 430 431 /* 432 RHSMatrixLaplacian - User-provided 
routine to compute the right-hand-side 433 matrix for the heat equation. 434 435 Input Parameters: 436 ts - the TS context 437 t - current time (ignored) 438 X - current solution (ignored) 439 dummy - optional user-defined context, as set by TSetRHSJacobian() 440 441 Output Parameters: 442 AA - Jacobian matrix 443 BB - optionally different matrix from which the preconditioner is built 444 str - flag indicating matrix structure 445 446 */ 447 PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx) 448 { 449 PetscReal **temp; 450 PetscReal vv; 451 AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */ 452 PetscErrorCode ierr; 453 PetscInt i,xs,xn,l,j; 454 PetscInt *rowsDM; 455 456 PetscFunctionBegin; 457 /* 458 Creates the element stiffness matrix for the given gll 459 */ 460 ierr = PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr); 461 /* workarround for clang analyzer warning: Division by zero */ 462 if (appctx->param.N <= 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1"); 463 464 /* scale by the size of the element */ 465 for (i=0; i<appctx->param.N; i++) { 466 vv=-appctx->param.mu*2.0/appctx->param.Le; 467 for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv; 468 } 469 470 ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr); 471 ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr); 472 473 xs = xs/(appctx->param.N-1); 474 xn = xn/(appctx->param.N-1); 475 476 ierr = PetscMalloc1(appctx->param.N,&rowsDM);CHKERRQ(ierr); 477 /* 478 loop over local elements 479 */ 480 for (j=xs; j<xs+xn; j++) { 481 for (l=0; l<appctx->param.N; l++) { 482 rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l; 483 } 484 ierr = MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);CHKERRQ(ierr); 485 } 486 ierr = 
PetscFree(rowsDM);CHKERRQ(ierr); 487 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 488 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 489 ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr); 490 ierr = MatDiagonalScale(A,appctx->SEMop.mass,0);CHKERRQ(ierr); 491 ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr); 492 493 ierr = PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr); 494 PetscFunctionReturn(0); 495 } 496 497 /* 498 RHSMatrixAdvection - User-provided routine to compute the right-hand-side 499 matrix for the Advection equation. 500 501 Input Parameters: 502 ts - the TS context 503 t - current time 504 global_in - global input vector 505 dummy - optional user-defined context, as set by TSetRHSJacobian() 506 507 Output Parameters: 508 AA - Jacobian matrix 509 BB - optionally different preconditioning matrix 510 str - flag indicating matrix structure 511 512 */ 513 PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx) 514 { 515 PetscReal **temp; 516 AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */ 517 PetscErrorCode ierr; 518 PetscInt xs,xn,l,j; 519 PetscInt *rowsDM; 520 521 PetscFunctionBegin; 522 /* 523 Creates the advection matrix for the given gll 524 */ 525 ierr = PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr); 526 ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr); 527 528 ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr); 529 530 xs = xs/(appctx->param.N-1); 531 xn = xn/(appctx->param.N-1); 532 533 ierr = PetscMalloc1(appctx->param.N,&rowsDM);CHKERRQ(ierr); 534 for (j=xs; j<xs+xn; j++) { 535 for (l=0; l<appctx->param.N; l++) { 536 rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l; 537 } 538 ierr = 
MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);CHKERRQ(ierr); 539 } 540 ierr = PetscFree(rowsDM);CHKERRQ(ierr); 541 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 542 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 543 544 ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr); 545 ierr = MatDiagonalScale(A,appctx->SEMop.mass,0);CHKERRQ(ierr); 546 ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr); 547 ierr = PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr); 548 PetscFunctionReturn(0); 549 } 550 /* ------------------------------------------------------------------ */ 551 /* 552 FormFunctionGradient - Evaluates the function and corresponding gradient. 553 554 Input Parameters: 555 tao - the Tao context 556 IC - the input vector 557 ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradientRoutine() 558 559 Output Parameters: 560 f - the newly evaluated function 561 G - the newly evaluated gradient 562 563 Notes: 564 565 The forward equation is 566 M u_t = F(U) 567 which is converted to 568 u_t = M^{-1} F(u) 569 in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is 570 M^{-1} J 571 where J is the Jacobian of F. Now the adjoint equation is 572 M v_t = J^T v 573 but TSAdjoint does not solve this since it can only solve the transposed system for the 574 Jacobian the user provided. Hence TSAdjoint solves 575 w_t = J^T M^{-1} w (where w = M v) 576 since there is no way to indicate the mass matrix as a separate entitity to TS. Thus one 577 must be careful in initializing the "adjoint equation" and using the result. This is 578 why 579 G = -2 M(u(T) - u_d) 580 below (instead of -2(u(T) - u_d) and why the result is 581 G = G/appctx->SEMop.mass (that is G = M^{-1}w) 582 below (instead of just the result of the "adjoint solve"). 
*/
PetscErrorCode FormFunctionGradient(Tao tao,Vec IC,PetscReal *f,Vec G,void *ctx)
{
  AppCtx             *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscErrorCode     ierr;
  Vec                temp;
  PetscInt           its;
  PetscReal          ff, gnorm, cnorm, xdiff,errex;
  TaoConvergedReason reason;

  PetscFunctionBegin;
  /* Reset the TS so every objective evaluation re-runs the forward solve from t = 0
     with the timestep saved in main() (it may have been altered by a previous solve). */
  ierr = TSSetTime(appctx->ts,0.0);CHKERRQ(ierr);
  ierr = TSSetStepNumber(appctx->ts,0);CHKERRQ(ierr);
  ierr = TSSetTimeStep(appctx->ts,appctx->initial_dt);CHKERRQ(ierr);
  ierr = VecCopy(IC,appctx->dat.curr_sol);CHKERRQ(ierr);

  /* forward solve: curr_sol holds u(T) on return */
  ierr = TSSolve(appctx->ts,appctx->dat.curr_sol);CHKERRQ(ierr);

  /* G = u_d - u(T), the misfit at the final time */
  ierr = VecWAXPY(G,-1.0,appctx->dat.curr_sol,appctx->dat.obj);CHKERRQ(ierr);

  /*
     Compute the L2-norm of the objective function, cost function is f
  */
  ierr = VecDuplicate(G,&temp);CHKERRQ(ierr);
  ierr = VecPointwiseMult(temp,G,G);CHKERRQ(ierr);
  ierr = VecDot(temp,appctx->SEMop.mass,f);CHKERRQ(ierr);     /* f = (u(T)-u_d)^T M (u(T)-u_d) */

  /* local error evaluation */
  ierr = VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution);CHKERRQ(ierr);
  ierr = VecPointwiseMult(temp,temp,temp);CHKERRQ(ierr);
  /* for error evaluation */
  ierr = VecDot(temp,appctx->SEMop.mass,&errex);CHKERRQ(ierr);
  ierr = VecDestroy(&temp);CHKERRQ(ierr);
  errex = PetscSqrtReal(errex);  /* NOTE(review): errex is computed but never used below; presumably left in for debugging */

  /*
     Compute initial conditions for the adjoint integration. See Notes above
  */
  ierr = VecScale(G, -2.0);CHKERRQ(ierr);
  ierr = VecPointwiseMult(G,G,appctx->SEMop.mass);CHKERRQ(ierr);   /* G = -2 M (u(T)-u_d) = w(T); see Notes */
  ierr = TSSetCostGradients(appctx->ts,1,&G,NULL);CHKERRQ(ierr);
  ierr = TSAdjointSolve(appctx->ts);CHKERRQ(ierr);                 /* integrates w backwards to t = 0 */
  ierr = VecPointwiseDivide(G,G,appctx->SEMop.mass);CHKERRQ(ierr); /* G = M^{-1} w(0), the gradient; see Notes */

  /* solver status is queried but the values are currently unused */
  ierr = TaoGetSolutionStatus(tao, &its, &ff, &gnorm, &cnorm, &xdiff, &reason);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MonitorError - Tao monitor; prints the mass-weighted L2 distance between the
   current initial conditions (dat.ic, updated in place by Tao) and the known
   true initial conditions.
*/
PetscErrorCode MonitorError(Tao tao,void *ctx)
{
  AppCtx         *appctx = (AppCtx*)ctx;
  Vec            temp;
  PetscReal      nrm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* nrm = sqrt( (ic - true)^T M (ic - true) ) */
  ierr = VecDuplicate(appctx->dat.ic,&temp);CHKERRQ(ierr);
  ierr = VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution);CHKERRQ(ierr);
  ierr = VecPointwiseMult(temp,temp,temp);CHKERRQ(ierr);
  ierr = VecDot(temp,appctx->SEMop.mass,&nrm);CHKERRQ(ierr);
  ierr = VecDestroy(&temp);CHKERRQ(ierr);
  nrm = PetscSqrtReal(nrm);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Error for initial conditions %g\n",(double)nrm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*TEST

   build:
     requires: !complex

   test:
     args: -tao_max_it 5 -tao_gatol 1.e-4
     requires: !single

   test:
     suffix: 2
     nsize: 2
     args: -tao_max_it 5 -tao_gatol 1.e-4
     requires: !single

TEST*/