static char help[] ="Solves a simple data assimilation problem with one dimensional Burger's equation using TSAdjoint\n\n";

/*
   Not yet tested in parallel
*/

/* ------------------------------------------------------------------------

    This program uses the one-dimensional Burger's equation
        u_t = mu*u_xx - u u_x,
    with periodic boundary conditions
    (NOTE(review): this header says the domain is 0 <= x <= 1, but main() below
     sets appctx.param.L = 4.0, i.e. the grid actually spans 0 <= x <= 4 --
     confirm which is intended)

    to demonstrate solving a data assimilation problem of finding the initial conditions
    to produce a given solution at a fixed time.

    The operators are discretized with the spectral element method

    See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
    by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
    used

  ------------------------------------------------------------------------- */

#include <petsctao.h>
#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
36 */ 37 38 typedef struct { 39 PetscInt n; /* number of nodes */ 40 PetscReal *nodes; /* GLL nodes */ 41 PetscReal *weights; /* GLL weights */ 42 } PetscGLL; 43 44 typedef struct { 45 PetscInt N; /* grid points per elements*/ 46 PetscInt E; /* number of elements */ 47 PetscReal tol_L2,tol_max; /* error norms */ 48 PetscInt steps; /* number of timesteps */ 49 PetscReal Tend; /* endtime */ 50 PetscReal mu; /* viscosity */ 51 PetscReal L; /* total length of domain */ 52 PetscReal Le; 53 PetscReal Tadj; 54 } PetscParam; 55 56 typedef struct { 57 Vec obj; /* desired end state */ 58 Vec grid; /* total grid */ 59 Vec grad; 60 Vec ic; 61 Vec curr_sol; 62 Vec true_solution; /* actual initial conditions for the final solution */ 63 } PetscData; 64 65 typedef struct { 66 Vec grid; /* total grid */ 67 Vec mass; /* mass matrix for total integration */ 68 Mat stiff; /* stifness matrix */ 69 Mat keptstiff; 70 Mat grad; 71 PetscGLL gll; 72 } PetscSEMOperators; 73 74 typedef struct { 75 DM da; /* distributed array data structure */ 76 PetscSEMOperators SEMop; 77 PetscParam param; 78 PetscData dat; 79 TS ts; 80 PetscReal initial_dt; 81 } AppCtx; 82 83 /* 84 User-defined routines 85 */ 86 extern PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*); 87 extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*); 88 extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*); 89 extern PetscErrorCode InitialConditions(Vec,AppCtx*); 90 extern PetscErrorCode TrueSolution(Vec,AppCtx*); 91 extern PetscErrorCode ComputeObjective(PetscReal,Vec,AppCtx*); 92 extern PetscErrorCode MonitorError(Tao,void*); 93 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*); 94 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*); 95 96 int main(int argc,char **argv) 97 { 98 AppCtx appctx; /* user-defined application context */ 99 Tao tao; 100 Vec u; /* approximate solution vector */ 101 PetscInt i, xs, xm, ind, j, lenglob; 102 PetscReal 
x, *wrk_ptr1, *wrk_ptr2; 103 MatNullSpace nsp; 104 PetscMPIInt size; 105 106 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 107 Initialize program and set problem parameters 108 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 109 PetscFunctionBegin; 110 111 PetscFunctionBeginUser; 112 PetscCall(PetscInitialize(&argc,&argv,(char*)0,help)); 113 114 /*initialize parameters */ 115 appctx.param.N = 10; /* order of the spectral element */ 116 appctx.param.E = 10; /* number of elements */ 117 appctx.param.L = 4.0; /* length of the domain */ 118 appctx.param.mu = 0.01; /* diffusion coefficient */ 119 appctx.initial_dt = 5e-3; 120 appctx.param.steps = PETSC_MAX_INT; 121 appctx.param.Tend = 4; 122 123 PetscCall(PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL)); 124 PetscCall(PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL)); 125 PetscCall(PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL)); 126 PetscCall(PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL)); 127 appctx.param.Le = appctx.param.L/appctx.param.E; 128 129 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size)); 130 PetscCheck((appctx.param.E % size) == 0,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes"); 131 132 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 133 Create GLL data structures 134 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 135 PetscCall(PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights)); 136 PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights)); 137 appctx.SEMop.gll.n = appctx.param.N; 138 lenglob = appctx.param.E*(appctx.param.N-1); 139 140 /* 141 Create distributed array (DMDA) to manage parallel grid and vectors 142 and to set up the ghost point communication pattern. 
There are E*(Nl-1)+1 143 total grid values spread equally among all the processors, except first and last 144 */ 145 146 PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da)); 147 PetscCall(DMSetFromOptions(appctx.da)); 148 PetscCall(DMSetUp(appctx.da)); 149 150 /* 151 Extract global and local vectors from DMDA; we use these to store the 152 approximate solution. Then duplicate these for remaining vectors that 153 have the same types. 154 */ 155 156 PetscCall(DMCreateGlobalVector(appctx.da,&u)); 157 PetscCall(VecDuplicate(u,&appctx.dat.ic)); 158 PetscCall(VecDuplicate(u,&appctx.dat.true_solution)); 159 PetscCall(VecDuplicate(u,&appctx.dat.obj)); 160 PetscCall(VecDuplicate(u,&appctx.SEMop.grid)); 161 PetscCall(VecDuplicate(u,&appctx.SEMop.mass)); 162 PetscCall(VecDuplicate(u,&appctx.dat.curr_sol)); 163 164 PetscCall(DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL)); 165 PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1)); 166 PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2)); 167 168 /* Compute function over the locally owned part of the grid */ 169 170 xs=xs/(appctx.param.N-1); 171 xm=xm/(appctx.param.N-1); 172 173 /* 174 Build total grid and mass over entire mesh (multi-elemental) 175 */ 176 177 for (i=xs; i<xs+xm; i++) { 178 for (j=0; j<appctx.param.N-1; j++) { 179 x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i; 180 ind=i*(appctx.param.N-1)+j; 181 wrk_ptr1[ind]=x; 182 wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j]; 183 if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j]; 184 } 185 } 186 PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1)); 187 PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2)); 188 189 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 190 Create matrix data structure; set matrix evaluation routine. 
191 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 192 PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE)); 193 PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.stiff)); 194 PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.grad)); 195 /* 196 For linear problems with a time-dependent f(u,t) in the equation 197 u_t = f(u,t), the user provides the discretized right-hand-side 198 as a time-dependent matrix. 199 */ 200 PetscCall(RHSMatrixLaplaciangllDM(appctx.ts,0.0,u,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx)); 201 PetscCall(RHSMatrixAdvectiongllDM(appctx.ts,0.0,u,appctx.SEMop.grad,appctx.SEMop.grad,&appctx)); 202 /* 203 For linear problems with a time-dependent f(u,t) in the equation 204 u_t = f(u,t), the user provides the discretized right-hand-side 205 as a time-dependent matrix. 206 */ 207 208 PetscCall(MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff)); 209 210 /* attach the null space to the matrix, this probably is not needed but does no harm */ 211 PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp)); 212 PetscCall(MatSetNullSpace(appctx.SEMop.stiff,nsp)); 213 PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff,nsp)); 214 PetscCall(MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL)); 215 PetscCall(MatNullSpaceDestroy(&nsp)); 216 /* attach the null space to the matrix, this probably is not needed but does no harm */ 217 PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp)); 218 PetscCall(MatSetNullSpace(appctx.SEMop.grad,nsp)); 219 PetscCall(MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL)); 220 PetscCall(MatNullSpaceDestroy(&nsp)); 221 222 /* Create the TS solver that solves the ODE and its adjoint; set its options */ 223 PetscCall(TSCreate(PETSC_COMM_WORLD,&appctx.ts)); 224 PetscCall(TSSetProblemType(appctx.ts,TS_NONLINEAR)); 225 PetscCall(TSSetType(appctx.ts,TSRK)); 226 PetscCall(TSSetDM(appctx.ts,appctx.da)); 227 PetscCall(TSSetTime(appctx.ts,0.0)); 228 
PetscCall(TSSetTimeStep(appctx.ts,appctx.initial_dt)); 229 PetscCall(TSSetMaxSteps(appctx.ts,appctx.param.steps)); 230 PetscCall(TSSetMaxTime(appctx.ts,appctx.param.Tend)); 231 PetscCall(TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP)); 232 PetscCall(TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL)); 233 PetscCall(TSSetFromOptions(appctx.ts)); 234 /* Need to save initial timestep user may have set with -ts_dt so it can be reset for each new TSSolve() */ 235 PetscCall(TSGetTimeStep(appctx.ts,&appctx.initial_dt)); 236 PetscCall(TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx)); 237 PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx)); 238 239 /* Set Objective and Initial conditions for the problem and compute Objective function (evolution of true_solution to final time */ 240 PetscCall(InitialConditions(appctx.dat.ic,&appctx)); 241 PetscCall(TrueSolution(appctx.dat.true_solution,&appctx)); 242 PetscCall(ComputeObjective(appctx.param.Tend,appctx.dat.obj,&appctx)); 243 244 PetscCall(TSSetSaveTrajectory(appctx.ts)); 245 PetscCall(TSSetFromOptions(appctx.ts)); 246 247 /* Create TAO solver and set desired solution method */ 248 PetscCall(TaoCreate(PETSC_COMM_WORLD,&tao)); 249 PetscCall(TaoSetMonitor(tao,MonitorError,&appctx,NULL)); 250 PetscCall(TaoSetType(tao,TAOBQNLS)); 251 PetscCall(TaoSetSolution(tao,appctx.dat.ic)); 252 /* Set routine for function and gradient evaluation */ 253 PetscCall(TaoSetObjectiveAndGradient(tao,NULL,FormFunctionGradient,(void *)&appctx)); 254 /* Check for any TAO command line options */ 255 PetscCall(TaoSetTolerances(tao,1e-8,PETSC_DEFAULT,PETSC_DEFAULT)); 256 PetscCall(TaoSetFromOptions(tao)); 257 PetscCall(TaoSolve(tao)); 258 259 PetscCall(TaoDestroy(&tao)); 260 PetscCall(MatDestroy(&appctx.SEMop.stiff)); 261 PetscCall(MatDestroy(&appctx.SEMop.keptstiff)); 262 PetscCall(MatDestroy(&appctx.SEMop.grad)); 263 PetscCall(VecDestroy(&u)); 264 PetscCall(VecDestroy(&appctx.dat.ic)); 265 
PetscCall(VecDestroy(&appctx.dat.true_solution)); 266 PetscCall(VecDestroy(&appctx.dat.obj)); 267 PetscCall(VecDestroy(&appctx.SEMop.grid)); 268 PetscCall(VecDestroy(&appctx.SEMop.mass)); 269 PetscCall(VecDestroy(&appctx.dat.curr_sol)); 270 PetscCall(PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights)); 271 PetscCall(DMDestroy(&appctx.da)); 272 PetscCall(TSDestroy(&appctx.ts)); 273 274 /* 275 Always call PetscFinalize() before exiting a program. This routine 276 - finalizes the PETSc libraries as well as MPI 277 - provides summary and diagnostic information if certain runtime 278 options are chosen (e.g., -log_summary). 279 */ 280 PetscCall(PetscFinalize()); 281 return 0; 282 } 283 284 /* --------------------------------------------------------------------- */ 285 /* 286 InitialConditions - Computes the initial conditions for the Tao optimization solve (these are also initial conditions for the first TSSolve() 287 288 The routine TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function 289 290 Input Parameter: 291 u - uninitialized solution vector (global) 292 appctx - user-defined application context 293 294 Output Parameter: 295 u - vector with solution at initial time (global) 296 */ 297 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx) 298 { 299 PetscScalar *s; 300 const PetscScalar *xg; 301 PetscInt i,xs,xn; 302 303 PetscFunctionBegin; 304 PetscCall(DMDAVecGetArray(appctx->da,u,&s)); 305 PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg)); 306 PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL)); 307 for (i=xs; i<xs+xn; i++) { 308 s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])/(2.0+PetscCosScalar(PETSC_PI*xg[i]))+0.25*PetscExpReal(-4.0*PetscPowReal(xg[i]-2.0,2.0)); 309 } 310 PetscCall(DMDAVecRestoreArray(appctx->da,u,&s)); 311 PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg)); 312 
PetscFunctionReturn(0); 313 } 314 315 /* 316 TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function. 317 318 InitialConditions() computes the initial conditions for the beginning of the Tao iterations 319 320 Input Parameter: 321 u - uninitialized solution vector (global) 322 appctx - user-defined application context 323 324 Output Parameter: 325 u - vector with solution at initial time (global) 326 */ 327 PetscErrorCode TrueSolution(Vec u,AppCtx *appctx) 328 { 329 PetscScalar *s; 330 const PetscScalar *xg; 331 PetscInt i,xs,xn; 332 333 PetscFunctionBegin; 334 PetscCall(DMDAVecGetArray(appctx->da,u,&s)); 335 PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg)); 336 PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL)); 337 for (i=xs; i<xs+xn; i++) { 338 s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])/(2.0+PetscCosScalar(PETSC_PI*xg[i])); 339 } 340 PetscCall(DMDAVecRestoreArray(appctx->da,u,&s)); 341 PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg)); 342 PetscFunctionReturn(0); 343 } 344 /* --------------------------------------------------------------------- */ 345 /* 346 Sets the desired profile for the final end time 347 348 Input Parameters: 349 t - final time 350 obj - vector storing the desired profile 351 appctx - user-defined application context 352 353 */ 354 PetscErrorCode ComputeObjective(PetscReal t,Vec obj,AppCtx *appctx) 355 { 356 PetscScalar *s; 357 const PetscScalar *xg; 358 PetscInt i, xs,xn; 359 360 PetscFunctionBegin; 361 PetscCall(DMDAVecGetArray(appctx->da,obj,&s)); 362 PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg)); 363 PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL)); 364 for (i=xs; i<xs+xn; i++) { 365 s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpScalar(-PETSC_PI*PETSC_PI*t*appctx->param.mu)\ 366 
/(2.0+PetscExpScalar(-PETSC_PI*PETSC_PI*t*appctx->param.mu)*PetscCosScalar(PETSC_PI*xg[i])); 367 } 368 PetscCall(DMDAVecRestoreArray(appctx->da,obj,&s)); 369 PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg)); 370 PetscFunctionReturn(0); 371 } 372 373 PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx) 374 { 375 AppCtx *appctx = (AppCtx*)ctx; 376 377 PetscFunctionBegin; 378 PetscCall(MatMult(appctx->SEMop.grad,globalin,globalout)); /* grad u */ 379 PetscCall(VecPointwiseMult(globalout,globalin,globalout)); /* u grad u */ 380 PetscCall(VecScale(globalout, -1.0)); 381 PetscCall(MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout)); 382 PetscFunctionReturn(0); 383 } 384 385 /* 386 387 K is the discretiziation of the Laplacian 388 G is the discretization of the gradient 389 390 Computes Jacobian of K u + diag(u) G u which is given by 391 K + diag(u)G + diag(Gu) 392 */ 393 PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx) 394 { 395 AppCtx *appctx = (AppCtx*)ctx; 396 Vec Gglobalin; 397 398 PetscFunctionBegin; 399 /* A = diag(u) G */ 400 401 PetscCall(MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN)); 402 PetscCall(MatDiagonalScale(A,globalin,NULL)); 403 404 /* A = A + diag(Gu) */ 405 PetscCall(VecDuplicate(globalin,&Gglobalin)); 406 PetscCall(MatMult(appctx->SEMop.grad,globalin,Gglobalin)); 407 PetscCall(MatDiagonalSet(A,Gglobalin,ADD_VALUES)); 408 PetscCall(VecDestroy(&Gglobalin)); 409 410 /* A = K - A */ 411 PetscCall(MatScale(A,-1.0)); 412 PetscCall(MatAXPY(A,1.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN)); 413 PetscFunctionReturn(0); 414 } 415 416 /* --------------------------------------------------------------------- */ 417 418 /* 419 RHSMatrixLaplacian - User-provided routine to compute the right-hand-side 420 matrix for the heat equation. 
421 422 Input Parameters: 423 ts - the TS context 424 t - current time (ignored) 425 X - current solution (ignored) 426 dummy - optional user-defined context, as set by TSetRHSJacobian() 427 428 Output Parameters: 429 AA - Jacobian matrix 430 BB - optionally different matrix from which the preconditioner is built 431 str - flag indicating matrix structure 432 433 */ 434 PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx) 435 { 436 PetscReal **temp; 437 PetscReal vv; 438 AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */ 439 PetscInt i,xs,xn,l,j; 440 PetscInt *rowsDM; 441 442 PetscFunctionBegin; 443 /* 444 Creates the element stiffness matrix for the given gll 445 */ 446 PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp)); 447 /* workaround for clang analyzer warning: Division by zero */ 448 PetscCheck(appctx->param.N > 1,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1"); 449 450 /* scale by the size of the element */ 451 for (i=0; i<appctx->param.N; i++) { 452 vv=-appctx->param.mu*2.0/appctx->param.Le; 453 for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv; 454 } 455 456 PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE)); 457 PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL)); 458 459 xs = xs/(appctx->param.N-1); 460 xn = xn/(appctx->param.N-1); 461 462 PetscCall(PetscMalloc1(appctx->param.N,&rowsDM)); 463 /* 464 loop over local elements 465 */ 466 for (j=xs; j<xs+xn; j++) { 467 for (l=0; l<appctx->param.N; l++) { 468 rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l; 469 } 470 PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES)); 471 } 472 PetscCall(PetscFree(rowsDM)); 473 PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY)); 474 PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY)); 475 
PetscCall(VecReciprocal(appctx->SEMop.mass)); 476 PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,0)); 477 PetscCall(VecReciprocal(appctx->SEMop.mass)); 478 479 PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp)); 480 PetscFunctionReturn(0); 481 } 482 483 /* 484 RHSMatrixAdvection - User-provided routine to compute the right-hand-side 485 matrix for the Advection equation. 486 487 Input Parameters: 488 ts - the TS context 489 t - current time 490 global_in - global input vector 491 dummy - optional user-defined context, as set by TSetRHSJacobian() 492 493 Output Parameters: 494 AA - Jacobian matrix 495 BB - optionally different preconditioning matrix 496 str - flag indicating matrix structure 497 498 */ 499 PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx) 500 { 501 PetscReal **temp; 502 AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */ 503 PetscInt xs,xn,l,j; 504 PetscInt *rowsDM; 505 506 PetscFunctionBegin; 507 /* 508 Creates the advection matrix for the given gll 509 */ 510 PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp)); 511 PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE)); 512 513 PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL)); 514 515 xs = xs/(appctx->param.N-1); 516 xn = xn/(appctx->param.N-1); 517 518 PetscCall(PetscMalloc1(appctx->param.N,&rowsDM)); 519 for (j=xs; j<xs+xn; j++) { 520 for (l=0; l<appctx->param.N; l++) { 521 rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l; 522 } 523 PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES)); 524 } 525 PetscCall(PetscFree(rowsDM)); 526 PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY)); 527 PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY)); 528 529 PetscCall(VecReciprocal(appctx->SEMop.mass)); 530 
PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,0)); 531 PetscCall(VecReciprocal(appctx->SEMop.mass)); 532 PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp)); 533 PetscFunctionReturn(0); 534 } 535 /* ------------------------------------------------------------------ */ 536 /* 537 FormFunctionGradient - Evaluates the function and corresponding gradient. 538 539 Input Parameters: 540 tao - the Tao context 541 IC - the input vector 542 ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient() 543 544 Output Parameters: 545 f - the newly evaluated function 546 G - the newly evaluated gradient 547 548 Notes: 549 550 The forward equation is 551 M u_t = F(U) 552 which is converted to 553 u_t = M^{-1} F(u) 554 in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is 555 M^{-1} J 556 where J is the Jacobian of F. Now the adjoint equation is 557 M v_t = J^T v 558 but TSAdjoint does not solve this since it can only solve the transposed system for the 559 Jacobian the user provided. Hence TSAdjoint solves 560 w_t = J^T M^{-1} w (where w = M v) 561 since there is no way to indicate the mass matrix as a separate entity to TS. Thus one 562 must be careful in initializing the "adjoint equation" and using the result. This is 563 why 564 G = -2 M(u(T) - u_d) 565 below (instead of -2(u(T) - u_d) and why the result is 566 G = G/appctx->SEMop.mass (that is G = M^{-1}w) 567 below (instead of just the result of the "adjoint solve"). 
*/
PetscErrorCode FormFunctionGradient(Tao tao,Vec IC,PetscReal *f,Vec G,void *ctx)
{
  AppCtx             *appctx = (AppCtx*)ctx;  /* user-defined application context */
  Vec                temp;
  PetscInt           its;
  PetscReal          ff, gnorm, cnorm, xdiff, errex;
  TaoConvergedReason reason;

  PetscFunctionBegin;
  /* reset the TS so every objective evaluation re-integrates from t=0 with the
     user-selected initial timestep (saved in main()) */
  PetscCall(TSSetTime(appctx->ts,0.0));
  PetscCall(TSSetStepNumber(appctx->ts,0));
  PetscCall(TSSetTimeStep(appctx->ts,appctx->initial_dt));
  PetscCall(VecCopy(IC,appctx->dat.curr_sol));

  PetscCall(TSSolve(appctx->ts,appctx->dat.curr_sol));

  /* G = u_d - u(T): the misfit between the desired profile and the computed final state */
  PetscCall(VecWAXPY(G,-1.0,appctx->dat.curr_sol,appctx->dat.obj));

  /*
     Compute the L2-norm of the objective function, cost function is f:
     f = (u(T) - u_d)^T M (u(T) - u_d), using the diagonal mass vector as M
  */
  PetscCall(VecDuplicate(G,&temp));
  PetscCall(VecPointwiseMult(temp,G,G));
  PetscCall(VecDot(temp,appctx->SEMop.mass,f));

  /* local error evaluation: mass-weighted L2 distance of the current iterate
     from the true initial conditions (computed but only errex's sqrt is kept;
     NOTE(review): errex is not used after this point) */
  PetscCall(VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution));
  PetscCall(VecPointwiseMult(temp,temp,temp));
  /* for error evaluation */
  PetscCall(VecDot(temp,appctx->SEMop.mass,&errex));
  PetscCall(VecDestroy(&temp));
  errex = PetscSqrtReal(errex);

  /*
     Compute initial conditions for the adjoint integration. See Notes above:
     the adjoint variable must be seeded with w = -2 M (u(T) - u_d), and the
     result of the adjoint solve must be unscaled by M^{-1} afterwards.
  */
  PetscCall(VecScale(G, -2.0));
  PetscCall(VecPointwiseMult(G,G,appctx->SEMop.mass));
  PetscCall(TSSetCostGradients(appctx->ts,1,&G,NULL));
  PetscCall(TSAdjointSolve(appctx->ts));
  PetscCall(VecPointwiseDivide(G,G,appctx->SEMop.mass));  /* G = M^{-1} w */

  /* query the Tao iteration status (values are retrieved but not used here) */
  PetscCall(TaoGetSolutionStatus(tao, &its, &ff, &gnorm, &cnorm, &xdiff, &reason));
  PetscFunctionReturn(0);
}

/*
   MonitorError - Tao monitor; prints the mass-weighted L2 error between the
   current initial-condition iterate and the true initial conditions.
*/
PetscErrorCode MonitorError(Tao tao,void *ctx)
{
  AppCtx    *appctx = (AppCtx*)ctx;
  Vec       temp;
  PetscReal nrm;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(appctx->dat.ic,&temp));
  /* temp = true_solution - ic */
  PetscCall(VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution));
  PetscCall(VecPointwiseMult(temp,temp,temp));
  /* nrm = sqrt( temp^T M 1 ) -- mass-weighted L2 norm of the difference */
  PetscCall(VecDot(temp,appctx->SEMop.mass,&nrm));
  PetscCall(VecDestroy(&temp));
  nrm = PetscSqrtReal(nrm);
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Error for initial conditions %g\n",(double)nrm));
  PetscFunctionReturn(0);
}

/*TEST

    build:
      requires: !complex

    test:
      args: -tao_max_it 5 -tao_gatol 1.e-4
      requires: !single

    test:
      suffix: 2
      nsize: 2
      args: -tao_max_it 5 -tao_gatol 1.e-4
      requires: !single

TEST*/