1 2 static char help[] ="Solves a simple data assimilation problem with one dimensional Burger's equation using TSAdjoint\n\n"; 3 4 /* 5 6 Not yet tested in parallel 7 8 */ 9 /* 10 Concepts: TS^time-dependent nonlinear problems 11 Concepts: TS^Burger's equation 12 Concepts: adjoints 13 Processors: n 14 */ 15 16 /* ------------------------------------------------------------------------ 17 18 This program uses the one-dimensional Burger's equation 19 u_t = mu*u_xx - u u_x, 20 on the domain 0 <= x <= 1, with periodic boundary conditions 21 22 to demonstrate solving a data assimilation problem of finding the initial conditions 23 to produce a given solution at a fixed time. 24 25 The operators are discretized with the spectral element method 26 27 See the paper PDE-CONSTRAINED OPTIMIZATION WITH SPECTRAL ELEMENTS USING PETSC AND TAO 28 by OANA MARIN, EMIL CONSTANTINESCU, AND BARRY SMITH for details on the exact solution 29 used 30 31 ------------------------------------------------------------------------- */ 32 33 #include <petsctao.h> 34 #include <petscts.h> 35 #include <petscdt.h> 36 #include <petscdraw.h> 37 #include <petscdmda.h> 38 39 /* 40 User-defined application context - contains data needed by the 41 application-provided call-back routines. 
42 */ 43 44 typedef struct { 45 PetscInt n; /* number of nodes */ 46 PetscReal *nodes; /* GLL nodes */ 47 PetscReal *weights; /* GLL weights */ 48 } PetscGLL; 49 50 typedef struct { 51 PetscInt N; /* grid points per elements*/ 52 PetscInt E; /* number of elements */ 53 PetscReal tol_L2,tol_max; /* error norms */ 54 PetscInt steps; /* number of timesteps */ 55 PetscReal Tend; /* endtime */ 56 PetscReal mu; /* viscosity */ 57 PetscReal L; /* total length of domain */ 58 PetscReal Le; 59 PetscReal Tadj; 60 } PetscParam; 61 62 typedef struct { 63 Vec obj; /* desired end state */ 64 Vec grid; /* total grid */ 65 Vec grad; 66 Vec ic; 67 Vec curr_sol; 68 Vec true_solution; /* actual initial conditions for the final solution */ 69 } PetscData; 70 71 typedef struct { 72 Vec grid; /* total grid */ 73 Vec mass; /* mass matrix for total integration */ 74 Mat stiff; /* stifness matrix */ 75 Mat keptstiff; 76 Mat grad; 77 PetscGLL gll; 78 } PetscSEMOperators; 79 80 typedef struct { 81 DM da; /* distributed array data structure */ 82 PetscSEMOperators SEMop; 83 PetscParam param; 84 PetscData dat; 85 TS ts; 86 PetscReal initial_dt; 87 } AppCtx; 88 89 /* 90 User-defined routines 91 */ 92 extern PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*); 93 extern PetscErrorCode RHSMatrixLaplaciangllDM(TS,PetscReal,Vec,Mat,Mat,void*); 94 extern PetscErrorCode RHSMatrixAdvectiongllDM(TS,PetscReal,Vec,Mat,Mat,void*); 95 extern PetscErrorCode InitialConditions(Vec,AppCtx*); 96 extern PetscErrorCode TrueSolution(Vec,AppCtx*); 97 extern PetscErrorCode ComputeObjective(PetscReal,Vec,AppCtx*); 98 extern PetscErrorCode MonitorError(Tao,void*); 99 extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*); 100 extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat,Mat,void*); 101 102 int main(int argc,char **argv) 103 { 104 AppCtx appctx; /* user-defined application context */ 105 Tao tao; 106 Vec u; /* approximate solution vector */ 107 PetscErrorCode ierr; 108 PetscInt i, xs, xm, 
ind, j, lenglob; 109 PetscReal x, *wrk_ptr1, *wrk_ptr2; 110 MatNullSpace nsp; 111 PetscMPIInt size; 112 113 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 114 Initialize program and set problem parameters 115 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 116 PetscFunctionBegin; 117 118 ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr; 119 120 /*initialize parameters */ 121 appctx.param.N = 10; /* order of the spectral element */ 122 appctx.param.E = 10; /* number of elements */ 123 appctx.param.L = 4.0; /* length of the domain */ 124 appctx.param.mu = 0.01; /* diffusion coefficient */ 125 appctx.initial_dt = 5e-3; 126 appctx.param.steps = PETSC_MAX_INT; 127 appctx.param.Tend = 4; 128 129 ierr = PetscOptionsGetInt(NULL,NULL,"-N",&appctx.param.N,NULL);CHKERRQ(ierr); 130 ierr = PetscOptionsGetInt(NULL,NULL,"-E",&appctx.param.E,NULL);CHKERRQ(ierr); 131 ierr = PetscOptionsGetReal(NULL,NULL,"-Tend",&appctx.param.Tend,NULL);CHKERRQ(ierr); 132 ierr = PetscOptionsGetReal(NULL,NULL,"-mu",&appctx.param.mu,NULL);CHKERRQ(ierr); 133 appctx.param.Le = appctx.param.L/appctx.param.E; 134 135 ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr); 136 if (appctx.param.E % size) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Number of elements must be divisible by number of processes"); 137 138 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 139 Create GLL data structures 140 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 141 ierr = PetscMalloc2(appctx.param.N,&appctx.SEMop.gll.nodes,appctx.param.N,&appctx.SEMop.gll.weights);CHKERRQ(ierr); 142 ierr = PetscDTGaussLobattoLegendreQuadrature(appctx.param.N,PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA,appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);CHKERRQ(ierr); 143 appctx.SEMop.gll.n = appctx.param.N; 144 lenglob = appctx.param.E*(appctx.param.N-1); 145 146 /* 147 Create distributed array (DMDA) to manage parallel 
grid and vectors 148 and to set up the ghost point communication pattern. There are E*(Nl-1)+1 149 total grid values spread equally among all the processors, except first and last 150 */ 151 152 ierr = DMDACreate1d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,lenglob,1,1,NULL,&appctx.da);CHKERRQ(ierr); 153 ierr = DMSetFromOptions(appctx.da);CHKERRQ(ierr); 154 ierr = DMSetUp(appctx.da);CHKERRQ(ierr); 155 156 /* 157 Extract global and local vectors from DMDA; we use these to store the 158 approximate solution. Then duplicate these for remaining vectors that 159 have the same types. 160 */ 161 162 ierr = DMCreateGlobalVector(appctx.da,&u);CHKERRQ(ierr); 163 ierr = VecDuplicate(u,&appctx.dat.ic);CHKERRQ(ierr); 164 ierr = VecDuplicate(u,&appctx.dat.true_solution);CHKERRQ(ierr); 165 ierr = VecDuplicate(u,&appctx.dat.obj);CHKERRQ(ierr); 166 ierr = VecDuplicate(u,&appctx.SEMop.grid);CHKERRQ(ierr); 167 ierr = VecDuplicate(u,&appctx.SEMop.mass);CHKERRQ(ierr); 168 ierr = VecDuplicate(u,&appctx.dat.curr_sol);CHKERRQ(ierr); 169 170 ierr = DMDAGetCorners(appctx.da,&xs,NULL,NULL,&xm,NULL,NULL);CHKERRQ(ierr); 171 ierr = DMDAVecGetArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);CHKERRQ(ierr); 172 ierr = DMDAVecGetArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);CHKERRQ(ierr); 173 174 /* Compute function over the locally owned part of the grid */ 175 176 xs=xs/(appctx.param.N-1); 177 xm=xm/(appctx.param.N-1); 178 179 /* 180 Build total grid and mass over entire mesh (multi-elemental) 181 */ 182 183 for (i=xs; i<xs+xm; i++) { 184 for (j=0; j<appctx.param.N-1; j++) { 185 x = (appctx.param.Le/2.0)*(appctx.SEMop.gll.nodes[j]+1.0)+appctx.param.Le*i; 186 ind=i*(appctx.param.N-1)+j; 187 wrk_ptr1[ind]=x; 188 wrk_ptr2[ind]=.5*appctx.param.Le*appctx.SEMop.gll.weights[j]; 189 if (j==0) wrk_ptr2[ind]+=.5*appctx.param.Le*appctx.SEMop.gll.weights[j]; 190 } 191 } 192 ierr = DMDAVecRestoreArray(appctx.da,appctx.SEMop.grid,&wrk_ptr1);CHKERRQ(ierr); 193 ierr = 
DMDAVecRestoreArray(appctx.da,appctx.SEMop.mass,&wrk_ptr2);CHKERRQ(ierr); 194 195 /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 196 Create matrix data structure; set matrix evaluation routine. 197 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ 198 ierr = DMSetMatrixPreallocateOnly(appctx.da, PETSC_TRUE);CHKERRQ(ierr); 199 ierr = DMCreateMatrix(appctx.da,&appctx.SEMop.stiff);CHKERRQ(ierr); 200 ierr = DMCreateMatrix(appctx.da,&appctx.SEMop.grad);CHKERRQ(ierr); 201 /* 202 For linear problems with a time-dependent f(u,t) in the equation 203 u_t = f(u,t), the user provides the discretized right-hand-side 204 as a time-dependent matrix. 205 */ 206 ierr = RHSMatrixLaplaciangllDM(appctx.ts,0.0,u,appctx.SEMop.stiff,appctx.SEMop.stiff,&appctx);CHKERRQ(ierr); 207 ierr = RHSMatrixAdvectiongllDM(appctx.ts,0.0,u,appctx.SEMop.grad,appctx.SEMop.grad,&appctx);CHKERRQ(ierr); 208 /* 209 For linear problems with a time-dependent f(u,t) in the equation 210 u_t = f(u,t), the user provides the discretized right-hand-side 211 as a time-dependent matrix. 
212 */ 213 214 ierr = MatDuplicate(appctx.SEMop.stiff,MAT_COPY_VALUES,&appctx.SEMop.keptstiff);CHKERRQ(ierr); 215 216 /* attach the null space to the matrix, this probably is not needed but does no harm */ 217 ierr = MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);CHKERRQ(ierr); 218 ierr = MatSetNullSpace(appctx.SEMop.stiff,nsp);CHKERRQ(ierr); 219 ierr = MatSetNullSpace(appctx.SEMop.keptstiff,nsp);CHKERRQ(ierr); 220 ierr = MatNullSpaceTest(nsp,appctx.SEMop.stiff,NULL);CHKERRQ(ierr); 221 ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr); 222 /* attach the null space to the matrix, this probably is not needed but does no harm */ 223 ierr = MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,NULL,&nsp);CHKERRQ(ierr); 224 ierr = MatSetNullSpace(appctx.SEMop.grad,nsp);CHKERRQ(ierr); 225 ierr = MatNullSpaceTest(nsp,appctx.SEMop.grad,NULL);CHKERRQ(ierr); 226 ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr); 227 228 /* Create the TS solver that solves the ODE and its adjoint; set its options */ 229 ierr = TSCreate(PETSC_COMM_WORLD,&appctx.ts);CHKERRQ(ierr); 230 ierr = TSSetProblemType(appctx.ts,TS_NONLINEAR);CHKERRQ(ierr); 231 ierr = TSSetType(appctx.ts,TSRK);CHKERRQ(ierr); 232 ierr = TSSetDM(appctx.ts,appctx.da);CHKERRQ(ierr); 233 ierr = TSSetTime(appctx.ts,0.0);CHKERRQ(ierr); 234 ierr = TSSetTimeStep(appctx.ts,appctx.initial_dt);CHKERRQ(ierr); 235 ierr = TSSetMaxSteps(appctx.ts,appctx.param.steps);CHKERRQ(ierr); 236 ierr = TSSetMaxTime(appctx.ts,appctx.param.Tend);CHKERRQ(ierr); 237 ierr = TSSetExactFinalTime(appctx.ts,TS_EXACTFINALTIME_MATCHSTEP);CHKERRQ(ierr); 238 ierr = TSSetTolerances(appctx.ts,1e-7,NULL,1e-7,NULL);CHKERRQ(ierr); 239 ierr = TSSetFromOptions(appctx.ts);CHKERRQ(ierr); 240 /* Need to save initial timestep user may have set with -ts_dt so it can be reset for each new TSSolve() */ 241 ierr = TSGetTimeStep(appctx.ts,&appctx.initial_dt);CHKERRQ(ierr); 242 ierr = TSSetRHSFunction(appctx.ts,NULL,RHSFunction,&appctx);CHKERRQ(ierr); 243 ierr = 
TSSetRHSJacobian(appctx.ts,appctx.SEMop.stiff,appctx.SEMop.stiff,RHSJacobian,&appctx);CHKERRQ(ierr); 244 245 /* Set Objective and Initial conditions for the problem and compute Objective function (evolution of true_solution to final time */ 246 ierr = InitialConditions(appctx.dat.ic,&appctx);CHKERRQ(ierr); 247 ierr = TrueSolution(appctx.dat.true_solution,&appctx);CHKERRQ(ierr); 248 ierr = ComputeObjective(appctx.param.Tend,appctx.dat.obj,&appctx);CHKERRQ(ierr); 249 250 ierr = TSSetSaveTrajectory(appctx.ts);CHKERRQ(ierr); 251 ierr = TSSetFromOptions(appctx.ts);CHKERRQ(ierr); 252 253 /* Create TAO solver and set desired solution method */ 254 ierr = TaoCreate(PETSC_COMM_WORLD,&tao);CHKERRQ(ierr); 255 ierr = TaoSetMonitor(tao,MonitorError,&appctx,NULL);CHKERRQ(ierr); 256 ierr = TaoSetType(tao,TAOBQNLS);CHKERRQ(ierr); 257 ierr = TaoSetInitialVector(tao,appctx.dat.ic);CHKERRQ(ierr); 258 /* Set routine for function and gradient evaluation */ 259 ierr = TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,(void *)&appctx);CHKERRQ(ierr); 260 /* Check for any TAO command line options */ 261 ierr = TaoSetTolerances(tao,1e-8,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr); 262 ierr = TaoSetFromOptions(tao);CHKERRQ(ierr); 263 ierr = TaoSolve(tao);CHKERRQ(ierr); 264 265 ierr = TaoDestroy(&tao);CHKERRQ(ierr); 266 ierr = MatDestroy(&appctx.SEMop.stiff);CHKERRQ(ierr); 267 ierr = MatDestroy(&appctx.SEMop.keptstiff);CHKERRQ(ierr); 268 ierr = MatDestroy(&appctx.SEMop.grad);CHKERRQ(ierr); 269 ierr = VecDestroy(&u);CHKERRQ(ierr); 270 ierr = VecDestroy(&appctx.dat.ic);CHKERRQ(ierr); 271 ierr = VecDestroy(&appctx.dat.true_solution);CHKERRQ(ierr); 272 ierr = VecDestroy(&appctx.dat.obj);CHKERRQ(ierr); 273 ierr = VecDestroy(&appctx.SEMop.grid);CHKERRQ(ierr); 274 ierr = VecDestroy(&appctx.SEMop.mass);CHKERRQ(ierr); 275 ierr = VecDestroy(&appctx.dat.curr_sol);CHKERRQ(ierr); 276 ierr = PetscFree2(appctx.SEMop.gll.nodes,appctx.SEMop.gll.weights);CHKERRQ(ierr); 277 ierr = 
DMDestroy(&appctx.da);CHKERRQ(ierr); 278 ierr = TSDestroy(&appctx.ts);CHKERRQ(ierr); 279 280 /* 281 Always call PetscFinalize() before exiting a program. This routine 282 - finalizes the PETSc libraries as well as MPI 283 - provides summary and diagnostic information if certain runtime 284 options are chosen (e.g., -log_summary). 285 */ 286 ierr = PetscFinalize(); 287 return ierr; 288 } 289 290 /* --------------------------------------------------------------------- */ 291 /* 292 InitialConditions - Computes the initial conditions for the Tao optimization solve (these are also initial conditions for the first TSSolve() 293 294 The routine TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function 295 296 Input Parameter: 297 u - uninitialized solution vector (global) 298 appctx - user-defined application context 299 300 Output Parameter: 301 u - vector with solution at initial time (global) 302 */ 303 PetscErrorCode InitialConditions(Vec u,AppCtx *appctx) 304 { 305 PetscScalar *s; 306 const PetscScalar *xg; 307 PetscErrorCode ierr; 308 PetscInt i,xs,xn; 309 310 PetscFunctionBegin; 311 ierr = DMDAVecGetArray(appctx->da,u,&s);CHKERRQ(ierr); 312 ierr = DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr); 313 ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr); 314 for (i=xs; i<xs+xn; i++) { 315 s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])/(2.0+PetscCosScalar(PETSC_PI*xg[i]))+0.25*PetscExpReal(-4.0*PetscPowReal(xg[i]-2.0,2.0)); 316 } 317 ierr = DMDAVecRestoreArray(appctx->da,u,&s);CHKERRQ(ierr); 318 ierr = DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr); 319 PetscFunctionReturn(0); 320 } 321 322 /* 323 TrueSolution() computes the true solution for the Tao optimization solve which means they are the initial conditions for the objective function. 
InitialConditions() computes the initial conditions for the beginning of the Tao iterations

   Input Parameter:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode TrueSolution(Vec u,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscErrorCode    ierr;
  PetscInt          i,xs,xn;

  PetscFunctionBegin;
  ierr = DMDAVecGetArray(appctx->da,u,&s);CHKERRQ(ierr);
  ierr = DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr);
  ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
  /* evaluate u(x,0) = 2 mu pi sin(pi x)/(2 + cos(pi x)) pointwise on the
     locally owned nodes of the precomputed grid vector */
  for (i=xs; i<xs+xn; i++) {
    s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])/(2.0+PetscCosScalar(PETSC_PI*xg[i]));
  }
  ierr = DMDAVecRestoreArray(appctx->da,u,&s);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* --------------------------------------------------------------------- */
/*
   ComputeObjective - Sets the desired profile for the final end time
   (the same analytic solution family as TrueSolution, advanced to time t)

   Input Parameters:
   t - final time
   obj - vector storing the desired profile
   appctx - user-defined application context

*/
PetscErrorCode ComputeObjective(PetscReal t,Vec obj,AppCtx *appctx)
{
  PetscScalar       *s;
  const PetscScalar *xg;
  PetscErrorCode    ierr;
  PetscInt          i, xs,xn;

  PetscFunctionBegin;
  ierr = DMDAVecGetArray(appctx->da,obj,&s);CHKERRQ(ierr);
  ierr = DMDAVecGetArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr);
  ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);
  /* u(x,t) = 2 mu pi sin(pi x) e^{-pi^2 mu t} / (2 + e^{-pi^2 mu t} cos(pi x)) */
  for (i=xs; i<xs+xn; i++) {
    s[i]=2.0*appctx->param.mu*PETSC_PI*PetscSinScalar(PETSC_PI*xg[i])*PetscExpScalar(-PETSC_PI*PETSC_PI*t*appctx->param.mu)/(2.0+PetscExpScalar(-PETSC_PI*PETSC_PI*t*appctx->param.mu)*PetscCosScalar(PETSC_PI*xg[i]));
  }
  ierr = DMDAVecRestoreArray(appctx->da,obj,&s);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArrayRead(appctx->da,appctx->SEMop.grid,(void*)&xg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   RHSFunction - evaluates F(u) = K u - u .* (G u), the discretized right-hand
   side of u_t = mu u_xx - u u_x, where K = keptstiff (scaled Laplacian) and
   G = grad (scaled gradient operator); both already include the M^{-1} scaling
   applied at assembly time.
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  PetscErrorCode ierr;
  AppCtx         *appctx = (AppCtx*)ctx;

  PetscFunctionBegin;
  ierr = MatMult(appctx->SEMop.grad,globalin,globalout);CHKERRQ(ierr);          /* grad u */
  ierr = VecPointwiseMult(globalout,globalin,globalout);CHKERRQ(ierr);          /* u grad u */
  ierr = VecScale(globalout, -1.0);CHKERRQ(ierr);                               /* -u grad u */
  ierr = MatMultAdd(appctx->SEMop.keptstiff,globalin,globalout,globalout);CHKERRQ(ierr); /* K u - u grad u */
  PetscFunctionReturn(0);
}

/*

    K is the discretiziation of the Laplacian
    G is the discretization of the gradient

    RHSFunction computes F(u) = K u - u .* (G u); its Jacobian, which this
    routine assembles, is
        J = K - diag(u) G - diag(G u)
    (the code builds diag(u) G + diag(G u), negates it, then adds K).
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec globalin,Mat A, Mat B,void *ctx)
{
  PetscErrorCode ierr;
  AppCtx         *appctx = (AppCtx*)ctx;
  Vec            Gglobalin;

  PetscFunctionBegin;
  /* A = diag(u) G */
  ierr = MatCopy(appctx->SEMop.grad,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
  ierr = MatDiagonalScale(A,globalin,NULL);CHKERRQ(ierr);

  /* A = A + diag(Gu) */
  ierr = VecDuplicate(globalin,&Gglobalin);CHKERRQ(ierr);
  ierr = MatMult(appctx->SEMop.grad,globalin,Gglobalin);CHKERRQ(ierr);
  ierr = MatDiagonalSet(A,Gglobalin,ADD_VALUES);CHKERRQ(ierr);
  ierr = VecDestroy(&Gglobalin);CHKERRQ(ierr);

  /* A = K - A */
  ierr = MatScale(A,-1.0);CHKERRQ(ierr);
  ierr = MatAXPY(A,1.0,appctx->SEMop.keptstiff,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------- */

/*
   RHSMatrixLaplacian - User-provided
routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts - the TS context (unused here; may be NULL)
   t - current time  (ignored)
   X - current solution (ignored)
   dummy - optional user-defined context, as set by TSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different matrix from which the preconditioner is built
   str - flag indicating matrix structure

*/
PetscErrorCode RHSMatrixLaplaciangllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal      **temp;
  PetscReal      vv;
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscErrorCode ierr;
  PetscInt       i,xs,xn,l,j;
  PetscInt       *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the element stiffness matrix for the given gll
  */
  ierr = PetscGaussLobattoLegendreElementLaplacianCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
  /* workaround for clang analyzer warning: Division by zero */
  if (appctx->param.N <= 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Spectral element order should be > 1");

  /* scale by the size of the element (factor -mu*2/Le folds in the viscosity
     and the reference-to-physical element mapping) */
  for (i=0; i<appctx->param.N; i++) {
    vv=-appctx->param.mu*2.0/appctx->param.Le;
    for (j=0; j<appctx->param.N; j++) temp[i][j]=temp[i][j]*vv;
  }

  ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
  ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);

  /* convert node range to element range; each element contributes N-1 nodes */
  xs   = xs/(appctx->param.N-1);
  xn   = xn/(appctx->param.N-1);

  ierr = PetscMalloc1(appctx->param.N,&rowsDM);CHKERRQ(ierr);
  /*
     loop over local elements, scattering each dense N x N element matrix into
     the global matrix with ADD_VALUES so shared element-boundary nodes sum
  */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    ierr = MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree(rowsDM);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /* apply M^{-1} row scaling: temporarily invert the (diagonal) mass vector,
     scale the rows of A by it, then invert back to restore the mass vector */
  ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr);
  ierr = MatDiagonalScale(A,appctx->SEMop.mass,0);CHKERRQ(ierr);
  ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr);

  ierr = PetscGaussLobattoLegendreElementLaplacianDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   RHSMatrixAdvection - User-provided routine to compute the right-hand-side
   matrix for the Advection equation.

   Input Parameters:
   ts - the TS context (unused here; may be NULL)
   t - current time
   global_in - global input vector
   dummy - optional user-defined context, as set by TSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure

*/
PetscErrorCode RHSMatrixAdvectiongllDM(TS ts,PetscReal t,Vec X,Mat A,Mat BB,void *ctx)
{
  PetscReal      **temp;
  AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscErrorCode ierr;
  PetscInt       xs,xn,l,j;
  PetscInt       *rowsDM;

  PetscFunctionBegin;
  /*
     Creates the advection matrix for the given gll
  */
  ierr = PetscGaussLobattoLegendreElementAdvectionCreate(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
  ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);

  ierr = DMDAGetCorners(appctx->da,&xs,NULL,NULL,&xn,NULL,NULL);CHKERRQ(ierr);

  /* convert node range to element range, as in RHSMatrixLaplaciangllDM */
  xs   = xs/(appctx->param.N-1);
  xn   = xn/(appctx->param.N-1);

  ierr = PetscMalloc1(appctx->param.N,&rowsDM);CHKERRQ(ierr);
  /* scatter each element advection matrix into the global operator */
  for (j=xs; j<xs+xn; j++) {
    for (l=0; l<appctx->param.N; l++) {
      rowsDM[l] = 1+(j-xs)*(appctx->param.N-1)+l;
    }
    ierr = MatSetValuesLocal(A,appctx->param.N,rowsDM,appctx->param.N,rowsDM,&temp[0][0],ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree(rowsDM);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* apply M^{-1} row scaling (invert mass vector, scale, restore) */
  ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr);
  ierr = MatDiagonalScale(A,appctx->SEMop.mass,0);CHKERRQ(ierr);
  ierr = VecReciprocal(appctx->SEMop.mass);CHKERRQ(ierr);
  ierr = PetscGaussLobattoLegendreElementAdvectionDestroy(appctx->SEMop.gll.n,appctx->SEMop.gll.nodes,appctx->SEMop.gll.weights,&temp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* ------------------------------------------------------------------ */
/*
   FormFunctionGradient - Evaluates the function and corresponding gradient.

   Input Parameters:
   tao - the Tao context
   IC   - the input vector
   ctx - optional user-defined context, as set when calling TaoSetObjectiveAndGradientRoutine()

   Output Parameters:
   f   - the newly evaluated function
   G   - the newly evaluated gradient

   Notes:

          The forward equation is
              M u_t = F(U)
          which is converted to
                u_t = M^{-1} F(u)
          in the user code since TS has no direct way of providing a mass matrix. The Jacobian of this is
                 M^{-1} J
          where J is the Jacobian of F. Now the adjoint equation is
                M v_t = J^T v
          but TSAdjoint does not solve this since it can only solve the transposed system for the
          Jacobian the user provided. Hence TSAdjoint solves
                 w_t = J^T M^{-1} w  (where w = M v)
          since there is no way to indicate the mass matrix as a separate entity to TS. Thus one
          must be careful in initializing the "adjoint equation" and using the result. This is
          why
              G = -2 M(u(T) - u_d)
          below (instead of -2(u(T) - u_d) and why the result is
              G = G/appctx->SEMop.mass (that is G = M^{-1}w)
          below (instead of just the result of the "adjoint solve").
*/
PetscErrorCode FormFunctionGradient(Tao tao,Vec IC,PetscReal *f,Vec G,void *ctx)
{
  AppCtx             *appctx = (AppCtx*)ctx;     /* user-defined application context */
  PetscErrorCode     ierr;
  Vec                temp;
  PetscInt           its;
  PetscReal          ff, gnorm, cnorm, xdiff,errex;
  TaoConvergedReason reason;

  PetscFunctionBegin;
  /* reset the TS so each Tao function evaluation re-runs the forward solve
     from t=0 with the saved initial timestep */
  ierr = TSSetTime(appctx->ts,0.0);CHKERRQ(ierr);
  ierr = TSSetStepNumber(appctx->ts,0);CHKERRQ(ierr);
  ierr = TSSetTimeStep(appctx->ts,appctx->initial_dt);CHKERRQ(ierr);
  ierr = VecCopy(IC,appctx->dat.curr_sol);CHKERRQ(ierr);

  ierr = TSSolve(appctx->ts,appctx->dat.curr_sol);CHKERRQ(ierr);

  /* G = u_d - u(T), the (signed) misfit at the final time */
  ierr = VecWAXPY(G,-1.0,appctx->dat.curr_sol,appctx->dat.obj);CHKERRQ(ierr);

  /*
     Compute the L2-norm of the objective function, cost function is f:
     f = sum_i mass_i * G_i^2  (mass-weighted squared misfit).
     NOTE(review): f is PetscReal* passed to VecDot (PetscScalar*); this is
     valid only because the build requires !complex (see TEST block below).
  */
  ierr = VecDuplicate(G,&temp);CHKERRQ(ierr);
  ierr = VecPointwiseMult(temp,G,G);CHKERRQ(ierr);
  ierr = VecDot(temp,appctx->SEMop.mass,f);CHKERRQ(ierr);

  /* local error evaluation: mass-weighted L2 distance between the current
     initial condition and the known true initial condition */
  ierr = VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution);CHKERRQ(ierr);
  ierr = VecPointwiseMult(temp,temp,temp);CHKERRQ(ierr);
  /* for error evaluation */
  ierr = VecDot(temp,appctx->SEMop.mass,&errex);CHKERRQ(ierr);
  ierr = VecDestroy(&temp);CHKERRQ(ierr);
  errex  = PetscSqrtReal(errex);
  /* NOTE(review): errex is computed but not used or printed here; presumably
     left from debugging — confirm before removing */

  /*
     Compute initial conditions for the adjoint integration. See Notes above:
     the adjoint is initialized with w = -2 M (u(T) - u_d) and the final
     result is mapped back with G = M^{-1} w.
  */
  ierr = VecScale(G, -2.0);CHKERRQ(ierr);
  ierr = VecPointwiseMult(G,G,appctx->SEMop.mass);CHKERRQ(ierr);
  ierr = TSSetCostGradients(appctx->ts,1,&G,NULL);CHKERRQ(ierr);
  ierr = TSAdjointSolve(appctx->ts);CHKERRQ(ierr);
  ierr = VecPointwiseDivide(G,G,appctx->SEMop.mass);CHKERRQ(ierr);

  /* NOTE(review): the retrieved solution status is not used afterwards */
  ierr = TaoGetSolutionStatus(tao, &its, &ff, &gnorm, &cnorm, &xdiff, &reason);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
   MonitorError - Tao monitor that prints the mass-weighted L2 error between
   the current initial-condition iterate and the known true initial condition.

   Input Parameters:
   tao - the Tao context
   ctx - the AppCtx supplied to TaoSetMonitor()
*/
PetscErrorCode MonitorError(Tao tao,void *ctx)
{
  AppCtx         *appctx = (AppCtx*)ctx;
  Vec            temp;
  PetscReal      nrm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDuplicate(appctx->dat.ic,&temp);CHKERRQ(ierr);
  ierr = VecWAXPY(temp,-1.0,appctx->dat.ic,appctx->dat.true_solution);CHKERRQ(ierr);
  ierr = VecPointwiseMult(temp,temp,temp);CHKERRQ(ierr);
  /* NOTE(review): nrm is PetscReal passed to VecDot (PetscScalar*); valid
     only because the build requires !complex */
  ierr = VecDot(temp,appctx->SEMop.mass,&nrm);CHKERRQ(ierr);
  ierr = VecDestroy(&temp);CHKERRQ(ierr);
  nrm  = PetscSqrtReal(nrm);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Error for initial conditions %g\n",(double)nrm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*TEST

    build:
      requires: !complex

    test:
      args: -tao_max_it 5 -tao_gatol 1.e-4
      requires: !single

    test:
      suffix: 2
      nsize: 2
      args: -tao_max_it 5 -tao_gatol 1.e-4
      requires: !single

TEST*/