static char help[] ="Solves a simple data assimilation problem with one dimensional Burger's equation using TSAdjoint\n\n";

/*

    Not yet tested in parallel

*/

/* ------------------------------------------------------------------------

   This program uses the one-dimensional Burger's equation
       u_t = mu*u_xx - u u_x,
   on the domain 0 <= x <= 1, with periodic boundary conditions

   to demonstrate solving a data assimilation problem of finding the initial conditions
   to produce a given solution at a fixed time.

   The operators are discretized with the spectral element method

   See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO
   by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution
   used

  ------------------------------------------------------------------------- */

#include <petsctao.h>
#include <petscts.h>
#include <petscdt.h>
#include <petscdraw.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/

/* Gauss-Lobatto-Legendre (GLL) quadrature rule that defines the spectral element basis */
typedef struct {
  PetscInt  n;        /* number of nodes */
  PetscReal *nodes;   /* GLL nodes */
  PetscReal *weights; /* GLL weights */
} PetscGLL;

/* Discretization and physical parameters */
typedef struct {
  PetscInt  N;              /* grid points per elements */
  PetscInt  E;              /* number of elements */
  PetscReal tol_L2,tol_max; /* error norms */
  PetscInt  steps;          /* number of timesteps */
  PetscReal Tend;           /* endtime */
  PetscReal mu;             /* viscosity */
  PetscReal L;              /* total length of domain */
  PetscReal Le;             /* length of one element: L/E */
  PetscReal Tadj;           /* NOTE(review): never referenced in this file */
} PetscParam;

/* Vectors defining the data assimilation (optimization) problem */
typedef struct {
  Vec obj;           /* desired end state */
  Vec grid;          /* total grid */
  Vec grad;          /* NOTE(review): never created or referenced in this file */
  Vec ic;            /* optimization variable: the initial conditions */
  Vec curr_sol;      /* forward solution at the current Tao iterate */
  Vec true_solution; /* actual initial conditions for the final solution */
} PetscData;

/* Spectral element operators assembled over the whole mesh */
typedef struct {
  Vec      grid;      /* total grid */
  Vec      mass;      /* mass matrix for total integration (diagonal, stored as a vector) */
  Mat      stiff;     /* stiffness matrix */
  Mat      keptstiff; /* unmodified copy of stiff; stiff itself is overwritten by RHSJacobian() */
  Mat      grad;      /* discrete gradient (advection) operator */
  PetscGLL gll;       /* quadrature rule shared by all elements */
} PetscSEMOperators;

/* Everything the call-back routines need */
typedef struct {
  DM                da;         /* distributed array data structure */
  PetscSEMOperators SEMop;
  PetscParam        param;
  PetscData         dat;
  TS                ts;         /* forward/adjoint time integrator, reused across Tao iterations */
  PetscReal         initial_dt; /* saved so every TSSolve() restarts with the same first step */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*);
extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*);
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode TrueSolution(Vec,AppCtx*);
extern PetscErrorCode ComputeObjective(PetscReal,Vec,AppCtx*);
extern PetscErrorCode MonitorError(Tao,void*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  AppCtx       appctx; /* user-defined application context */
  Tao          tao;
  Vec          u;      /* approximate solution vector */
  PetscInt     i, xs, xm, ind, j, lenglob;
  PetscReal
x, *wrk_ptr1, *wrk_ptr2; 103 MatNullSpace nsp; 104 PetscMPIInt size; 105 106 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 107 Initialize program and set problem parameters 108 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 109 PetscFunctionBegin; 110 111 PetscCall(PetscInitialize(&argc,&argv,(char*)0,help)); 112 113 /*initialize parameters */ 114 appctx.param.N = 10; /* order of the spectral element */ 115 appctx.param.E = 10; /* number of elements */ 116 appctx.param.L = 4.0; /* length of the domain */ 117 appctx.param.mu = 0.01; /* diffusion coefficient */ 118 appctx.initial_dt = 5e-3; 119 appctx.param.steps = PETSC_MAX_INT; 120 appctx.param.Tend = 4; 121 122 PetscCall(PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL)); 123 PetscCall(PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL)); 124 PetscCall(PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL)); 125 PetscCall(PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL)); 126 appctx.param.Le = appctx.param.L/appctx.param.E; 127 128 PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size)); 129 PetscCheck((appctx.param.E % size) == 0,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes"); 130 131 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 132 Create GLL data structures 133 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 134 PetscCall(PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights)); 135 PetscCall(PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights)); 136 appctx.SEMop.gll.n = appctx.param.N; 137 lenglob = appctx.param.E*(appctx.param.N-1); 138 139 /* 140 Create distributed array (DMDA) to manage parallel grid and vectors 141 and to set up the ghost point communication pattern. 
There are E*(Nl-1)+1 142 total grid values spread equally among all the processors, except first and last 143 */ 144 145 PetscCall(DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da)); 146 PetscCall(DMSetFromOptions(appctx.da)); 147 PetscCall(DMSetUp(appctx.da)); 148 149 /* 150 Extract global and local vectors from DMDA; we use these to store the 151 approximate solution. Then duplicate these for remaining vectors that 152 have the same types. 153 */ 154 155 PetscCall(DMCreateGlobalVector(appctx.da,&u)); 156 PetscCall(VecDuplicate(u,&appctx.dat.ic)); 157 PetscCall(VecDuplicate(u,&appctx.dat.true_solution)); 158 PetscCall(VecDuplicate(u,&appctx.dat.obj)); 159 PetscCall(VecDuplicate(u,&appctx.SEMop.grid)); 160 PetscCall(VecDuplicate(u,&appctx.SEMop.mass)); 161 PetscCall(VecDuplicate(u,&appctx.dat.curr_sol)); 162 163 PetscCall(DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL)); 164 PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1)); 165 PetscCall(DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2)); 166 167 /* Compute function over the locally owned part of the grid */ 168 169 xs=xs/(appctx.param.N-1); 170 xm=xm/(appctx.param.N-1); 171 172 /* 173 Build total grid and mass over entire mesh (multi-elemental) 174 */ 175 176 for (i=xs; i<xs+xm; i++) { 177 for (j=0; j<appctx.param.N-1; j++) { 178 x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i; 179 ind=i*(appctx.param.N-1)+j; 180 wrk_ptr1[ind]=x; 181 wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j]; 182 if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j]; 183 } 184 } 185 PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1)); 186 PetscCall(DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2)); 187 188 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 189 Create matrix data structure; set matrix evaluation routine. 
190 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 191 PetscCall(DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE)); 192 PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.stiff)); 193 PetscCall(DMCreateMatrix(appctx.da,&appctx.SEMop.grad)); 194 /* 195 For linear problems with a time-dependent f(u,t) in the equation 196 u_t = f(u,t), the user provides the discretized right-hand-side 197 as a time-dependent matrix. 198 */ 199 PetscCall(RHSMatrixLaplaciangllDM(appctx.ts,0.0,u,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx)); 200 PetscCall(RHSMatrixAdvectiongllDM(appctx.ts,0.0,u,appctx.SEMop.grad,appctx.SEMop.grad,&appctx)); 201 /* 202 For linear problems with a time-dependent f(u,t) in the equation 203 u_t = f(u,t), the user provides the discretized right-hand-side 204 as a time-dependent matrix. 205 */ 206 207 PetscCall(MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff)); 208 209 /* attach the null space to the matrix, this probably is not needed but does no harm */ 210 PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp)); 211 PetscCall(MatSetNullSpace(appctx.SEMop.stiff,nsp)); 212 PetscCall(MatSetNullSpace(appctx.SEMop.keptstiff,nsp)); 213 PetscCall(MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL)); 214 PetscCall(MatNullSpaceDestroy(&nsp)); 215 /* attach the null space to the matrix, this probably is not needed but does no harm */ 216 PetscCall(MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp)); 217 PetscCall(MatSetNullSpace(appctx.SEMop.grad,nsp)); 218 PetscCall(MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL)); 219 PetscCall(MatNullSpaceDestroy(&nsp)); 220 221 /* Create the TS solver that solves the ODE and its adjoint; set its options */ 222 PetscCall(TSCreate(PETSC_COMM_WORLD,&appctx.ts)); 223 PetscCall(TSSetProblemType(appctx.ts,TS_NONLINEAR)); 224 PetscCall(TSSetType(appctx.ts,TSRK)); 225 PetscCall(TSSetDM(appctx.ts,appctx.da)); 226 PetscCall(TSSetTime(appctx.ts,0.0)); 227 
PetscCall(TSSetTimeStep(appctx.ts,appctx.initial_dt)); 228 PetscCall(TSSetMaxSteps(appctx.ts,appctx.param.steps)); 229 PetscCall(TSSetMaxTime(appctx.ts,appctx.param.Tend)); 230 PetscCall(TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP)); 231 PetscCall(TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL)); 232 PetscCall(TSSetFromOptions(appctx.ts)); 233 /* Need to save initial timestep user may have set with -ts_dt so it can be reset for each new TSSolve() */ 234 PetscCall(TSGetTimeStep(appctx.ts,&appctx.initial_dt)); 235 PetscCall(TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx)); 236 PetscCall(TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx)); 237 238 /* Set Objective and Initial conditions for the problem and compute Objective function (evolution of true_solution to final time */ 239 PetscCall(InitialConditions(appctx.dat.ic,&appctx)); 240 PetscCall(TrueSolution(appctx.dat.true_solution,&appctx)); 241 PetscCall(ComputeObjective(appctx.param.Tend,appctx.dat.obj,&appctx)); 242 243 PetscCall(TSSetSaveTrajectory(appctx.ts)); 244 PetscCall(TSSetFromOptions(appctx.ts)); 245 246 /* Create TAO solver and set desired solution method */ 247 PetscCall(TaoCreate(PETSC_COMM_WORLD,&tao)); 248 PetscCall(TaoSetMonitor(tao,MonitorError,&appctx,NULL)); 249 PetscCall(TaoSetType(tao,TAOBQNLS)); 250 PetscCall(TaoSetSolution(tao,appctx.dat.ic)); 251 /* Set routine for function and gradient evaluation */ 252 PetscCall(TaoSetObjectiveAndGradient(tao,NULL,FormFunctionGradient,(void *)&appctx)); 253 /* Check for any TAO command line options */ 254 PetscCall(TaoSetTolerances(tao,1e-8,PETSC_DEFAULT,PETSC_DEFAULT)); 255 PetscCall(TaoSetFromOptions(tao)); 256 PetscCall(TaoSolve(tao)); 257 258 PetscCall(TaoDestroy(&tao)); 259 PetscCall(MatDestroy(&appctx.SEMop.stiff)); 260 PetscCall(MatDestroy(&appctx.SEMop.keptstiff)); 261 PetscCall(MatDestroy(&appctx.SEMop.grad)); 262 PetscCall(VecDestroy(&u)); 263 PetscCall(VecDestroy(&appctx.dat.ic)); 264 
PetscCall(VecDestroy(&appctx.dat.true_solution)); 265 PetscCall(VecDestroy(&appctx.dat.obj)); 266 PetscCall(VecDestroy(&appctx.SEMop.grid)); 267 PetscCall(VecDestroy(&appctx.SEMop.mass)); 268 PetscCall(VecDestroy(&appctx.dat.curr_sol)); 269 PetscCall(PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights)); 270 PetscCall(DMDestroy(&appctx.da)); 271 PetscCall(TSDestroy(&appctx.ts)); 272 273 /* 274 Always call PetscFinalize() before exiting a program. This routine 275 - finalizes the PETSc libraries as well as MPI 276 - provides summary and diagnostic information if certain runtime 277 options are chosen (e.g., -log_summary). 278 */ 279 PetscCall(PetscFinalize()); 280 return 0; 281 } 282 283 /* --------------------------------------------------------------------- */ 284 /* 285 InitialConditions - Computes the initial conditions for the Tao optimization solve (these are also initial conditions for the first TSSolve() 286 287 The routine TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function 288 289 Input Parameter: 290 u - uninitialized solution vector (global) 291 appctx - user-defined application context 292 293 Output Parameter: 294 u - vector with solution at initial time (global) 295 */ 296 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx) 297 { 298 PetscScalar *s; 299 const PetscScalar *xg; 300 PetscInt i,xs,xn; 301 302 PetscFunctionBegin; 303 PetscCall(DMDAVecGetArray(appctx->da,u,&s)); 304 PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg)); 305 PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL)); 306 for (i=xs; i<xs+xn; i++) { 307 s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])/(2.0+PetscCosScalar(PETSC_PI*xg[i]))+0.25*PetscExpReal(-4.0*PetscPowReal(xg[i]-2.0,2.0)); 308 } 309 PetscCall(DMDAVecRestoreArray(appctx->da,u,&s)); 310 PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg)); 311 
PetscFunctionReturn(0);
}

/*
   TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function.

   InitialConditions() computes the initial conditions for the beginning of the Tao iterations

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode TrueSolution(Vec u,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt          i,xs,xn;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da,u,&s));
  PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
  /* u(x,0) = 2 mu pi sin(pi x) / (2 + cos(pi x)); same formula as InitialConditions() without the added bump */
  for (i=xs; i<xs+xn; i++) {
    s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])/(2.0+PetscCosScalar(PETSC_PI*xg[i]));
  }
  PetscCall(DMDAVecRestoreArray(appctx->da,u,&s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  PetscFunctionReturn(0);
}
/* --------------------------------------------------------------------- */
/*
   Sets the desired profile for the final end time

   Input Parameters:
   t - final time
   obj - vector storing the desired profile
   appctx - user-defined application context

*/
PetscErrorCode ComputeObjective(PetscReal t,Vec obj,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscInt          i, xs,xn;

  PetscFunctionBegin;
  PetscCall(DMDAVecGetArray(appctx->da,obj,&s));
  PetscCall(DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));
  /* exact solution of the PDE at time t, evolved from the TrueSolution() profile */
  for (i=xs; i<xs+xn; i++) {
    s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpScalar(-PETSC_PI*PETSC_PI*t*appctx->param.mu)\
         /(2.0+PetscExpScalar(-PETSC_PI*PETSC_PI*t*appctx->param.mu)*PetscCosScalar(PETSC_PI*xg[i]));
  }
  PetscCall(DMDAVecRestoreArray(appctx->da,obj,&s));
  PetscCall(DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg));
  PetscFunctionReturn(0);
}

/*
   RHSFunction - Evaluates the right-hand side of the (mass-scaled) Burgers equation:
   globalout = keptstiff*globalin - globalin .* (grad*globalin), i.e. diffusion minus advection.
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;

  PetscFunctionBegin;
  PetscCall(MatMult(appctx->SEMop.grad,globalin,globalout));   /* grad u */
  PetscCall(VecPointwiseMult(globalout,globalin,globalout));   /* u grad u */
  PetscCall(VecScale(globalout, -1.0));                        /* -u grad u */
  PetscCall(MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout)); /* + K u (diffusion) */
  PetscFunctionReturn(0);
}

/*

   K is the discretization of the Laplacian
   G is the discretization of the gradient

   Computes Jacobian of K u + diag(u) G u  which is given by
          K + diag(u)G + diag(Gu)
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
{
  AppCtx *appctx = (AppCtx*)ctx;
  Vec    Gglobalin; /* work vector holding G*u */

  PetscFunctionBegin;
  /* A = diag(u) G */
  PetscCall(MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN));
  PetscCall(MatDiagonalScale(A,globalin,NULL));

  /* A = A + diag(Gu) */
  PetscCall(VecDuplicate(globalin,&Gglobalin));
  PetscCall(MatMult(appctx->SEMop.grad,globalin,Gglobalin));
  PetscCall(MatDiagonalSet(A,Gglobalin,ADD_VALUES));
  PetscCall(VecDestroy(&Gglobalin));

  /* A = K - A; keptstiff is the pristine stiffness matrix (A may alias SEMop.stiff, which main() overwrites) */
  PetscCall(MatScale(A,-1.0));
  PetscCall(MatAXPY(A,1.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN));
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */

/*
   RHSMatrixLaplacian - User-provided routine to compute the right-hand-side
   matrix for the heat equation.
   Input Parameters:
   ts - the TS context (unused; may be NULL)
   t - current time  (ignored)
   X - current solution  (ignored)
   dummy - optional user-defined context, as set by TSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
   str - flag indicating matrix structure

*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;   /* N x N element stiffness matrix from the GLL basis */
  PetscReal vv;
  AppCtx    *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscInt  i,xs,xn,l,j;
  PetscInt  *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the element stiffness matrix for the given gll
  */
  PetscCall(PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  /* workaround for clang analyzer warning: Division by zero */
  PetscCheck(appctx->param.N > 1,PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1");

  /* scale by the size of the element (and by mu; sign flipped so the operator is the RHS of u_t = mu u_xx) */
  for (i=0; i<appctx->param.N; i++) {
    vv=-appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
  }

  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));
  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));

  /* convert grid-point counts into local element counts */
  xs   = xs/(appctx->param.N-1);
  xn   = xn/(appctx->param.N-1);

  PetscCall(PetscMalloc1(appctx->param.N,&rowsDM));
  /*
     loop over local elements; rows are local indices (MatSetValuesLocal), shifted by 1 into the ghosted numbering
  */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

  /* scale rows by the inverse of the diagonal mass matrix, then restore mass to its original values */
  PetscCall(VecReciprocal(appctx->SEMop.mass));
  PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,0));
  PetscCall(VecReciprocal(appctx->SEMop.mass));

  PetscCall(PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscFunctionReturn(0);
}

/*
   RHSMatrixAdvection - User-provided routine to compute the right-hand-side
   matrix for the Advection equation.

   Input Parameters:
   ts - the TS context (unused; may be NULL)
   t - current time
   global_in - global input vector
   dummy - optional user-defined context, as set by TSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure

*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal **temp;   /* N x N element advection matrix from the GLL basis */
  AppCtx    *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscInt  xs,xn,l,j;
  PetscInt  *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the advection matrix for the given gll
  */
  PetscCall(PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscCall(MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE));

  PetscCall(DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL));

  /* convert grid-point counts into local element counts */
  xs = xs/(appctx->param.N-1);
  xn = xn/(appctx->param.N-1);

  PetscCall(PetscMalloc1(appctx->param.N,&rowsDM));
  /* loop over local elements, assembling the shared element matrix into the global operator */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    PetscCall(MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES));
  }
  PetscCall(PetscFree(rowsDM));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));

  /* scale rows by the inverse of the diagonal mass matrix (mass is restored right after) */
  PetscCall(VecReciprocal(appctx->SEMop.mass));
PetscCall(MatDiagonalScale(A,appctx->SEMop.mass,0));
  PetscCall(VecReciprocal(appctx->SEMop.mass)); /* restore mass to its original values */
  PetscCall(PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp));
  PetscFunctionReturn(0);
}
/* ------------------------------------------------------------------ */
/*
   FormFunctionGradient - Evaluates the function and corresponding gradient.

   Input Parameters:
   tao - the Tao context
   IC  - the input vector
   ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradient()

   Output Parameters:
   f   - the newly evaluated function
   G   - the newly evaluated gradient

   Notes:

          The forward equation is
              M u_t = F(U)
          which is converted to
                u_t = M^{-1} F(u)
          in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
                M^{-1} J
          where J is the Jacobian of F. Now the adjoint equation is
              M v_t = J^T v
          but TSAdjoint does not solve this since it can only solve the transposed system for the
          Jacobian the user provided. Hence TSAdjoint solves
                w_t = J^T M^{-1} w  (where w = M v)
          since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
          must be careful in initializing the "adjoint equation" and using the result. This is
          why
              G = -2 M(u(T) - u_d)
          below (instead of -2(u(T) - u_d) and why the result is
              G = G/appctx->SEMop.mass (that is G = M^{-1}w)
          below (instead of just the result of the "adjoint solve").

*/
PetscErrorCode FormFunctionGradient(Tao tao,Vec IC,PetscReal *f,Vec G,void *ctx)
{
  AppCtx             *appctx = (AppCtx*)ctx;     /* user-defined application context */
  Vec                temp;
  PetscInt           its;
  PetscReal          ff, gnorm, cnorm, xdiff,errex;
  TaoConvergedReason reason;

  PetscFunctionBegin;
  /* reset the (reused) TS so each forward solve starts from t = 0 with the saved initial dt */
  PetscCall(TSSetTime(appctx->ts,0.0));
  PetscCall(TSSetStepNumber(appctx->ts,0));
  PetscCall(TSSetTimeStep(appctx->ts,appctx->initial_dt));
  PetscCall(VecCopy(IC,appctx->dat.curr_sol));

  PetscCall(TSSolve(appctx->ts,appctx->dat.curr_sol));

  /* G = u_d - u(T), the misfit at the final time */
  PetscCall(VecWAXPY(G,-1.0,appctx->dat.curr_sol,appctx->dat.obj));

  /*
     Compute the L2-norm of the objective function, cost function is f
  */
  PetscCall(VecDuplicate(G,&temp));
  PetscCall(VecPointwiseMult(temp,G,G));
  PetscCall(VecDot(temp,appctx->SEMop.mass,f)); /* f = (u(T)-u_d)^T M (u(T)-u_d) */

  /* local error evaluation */
  PetscCall(VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution));
  PetscCall(VecPointwiseMult(temp,temp,temp));
  /* for error evaluation */
  PetscCall(VecDot(temp,appctx->SEMop.mass,&errex));
  PetscCall(VecDestroy(&temp));
  errex = PetscSqrtReal(errex); /* NOTE(review): errex is not used after this point; MonitorError() prints the same quantity */

  /*
     Compute initial conditions for the adjoint integration. See Notes above
  */
  PetscCall(VecScale(G, -2.0));
  PetscCall(VecPointwiseMult(G,G,appctx->SEMop.mass)); /* G = -2 M (u(T) - u_d), the adjoint terminal condition w(T) */
  PetscCall(TSSetCostGradients(appctx->ts,1,&G,NULL));
  PetscCall(TSAdjointSolve(appctx->ts));
  PetscCall(VecPointwiseDivide(G,G,appctx->SEMop.mass)); /* G = M^{-1} w, the actual gradient (see Notes) */

  /* NOTE(review): the status values fetched here are not used afterwards */
  PetscCall(TaoGetSolutionStatus(tao, &its, &ff, &gnorm, &cnorm, &xdiff, &reason));
  PetscFunctionReturn(0);
}

/*
   MonitorError - Tao monitor; prints the mass-weighted L2 error between the
   current initial-condition iterate and the known true initial conditions.
*/
PetscErrorCode MonitorError(Tao tao,void *ctx)
{
  AppCtx    *appctx = (AppCtx*)ctx;
  Vec       temp;
  PetscReal nrm;

  PetscFunctionBegin;
  PetscCall(VecDuplicate(appctx->dat.ic,&temp));
  PetscCall(VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution));
  PetscCall(VecPointwiseMult(temp,temp,temp));
  PetscCall(VecDot(temp,appctx->SEMop.mass,&nrm)); /* nrm = (ic - true)^T M (ic - true) */
  PetscCall(VecDestroy(&temp));
  nrm  = PetscSqrtReal(nrm);
  PetscCall(PetscPrintf(PETSC_COMM_WORLD,"Error for initial conditions %g\n",(double)nrm));
  PetscFunctionReturn(0);
}

/*TEST

    build:
      requires: !complex

    test:
      args: -tao_max_it 5 -tao_gatol 1.e-4
      requires: !single

    test:
      suffix: 2
      nsize: 2
      args: -tao_max_it 5 -tao_gatol 1.e-4
      requires: !single

TEST*/