xref: /petsc/src/ts/tutorials/ex2.c (revision 9d5502f909a5471573a8c674cbf31c27599ddcb9)
/* Usage text printed by -help; kept in sync with the options queried in main()
   (-M, -debug, -mymonitor) plus the standard -nox graphics switch. */
static char help[] = "Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\
  -mymonitor : Activate the user-defined monitoring routine\n\n";
6c4762a1bSJed Brown 
7c4762a1bSJed Brown /* ------------------------------------------------------------------------
8c4762a1bSJed Brown 
9c4762a1bSJed Brown    This program solves the PDE
10c4762a1bSJed Brown 
11c4762a1bSJed Brown                u * u_xx
12c4762a1bSJed Brown          u_t = ---------
13c4762a1bSJed Brown                2*(t+1)^2
14c4762a1bSJed Brown 
15c4762a1bSJed Brown     on the domain 0 <= x <= 1, with boundary conditions
16c4762a1bSJed Brown          u(t,0) = t + 1,  u(t,1) = 2*t + 2,
17c4762a1bSJed Brown     and initial condition
18c4762a1bSJed Brown          u(0,x) = 1 + x*x.
19c4762a1bSJed Brown 
20c4762a1bSJed Brown     The exact solution is:
21c4762a1bSJed Brown          u(t,x) = (1 + x*x) * (1 + t)
22c4762a1bSJed Brown 
23c4762a1bSJed Brown     Note that since the solution is linear in time and quadratic in x,
24c4762a1bSJed Brown     the finite difference scheme actually computes the "exact" solution.
25c4762a1bSJed Brown 
26c4762a1bSJed Brown     We use by default the backward Euler method.
27c4762a1bSJed Brown 
28c4762a1bSJed Brown   ------------------------------------------------------------------------- */
29c4762a1bSJed Brown 
30c4762a1bSJed Brown /*
31c4762a1bSJed Brown    Include "petscts.h" to use the PETSc timestepping routines. Note that
32c4762a1bSJed Brown    this file automatically includes "petscsys.h" and other lower-level
33c4762a1bSJed Brown    PETSc include files.
34c4762a1bSJed Brown 
35c4762a1bSJed Brown    Include the "petscdmda.h" to allow us to use the distributed array data
36c4762a1bSJed Brown    structures to manage the parallel grid.
37c4762a1bSJed Brown */
38c4762a1bSJed Brown #include <petscts.h>
39c4762a1bSJed Brown #include <petscdm.h>
40c4762a1bSJed Brown #include <petscdmda.h>
41c4762a1bSJed Brown #include <petscdraw.h>
42c4762a1bSJed Brown 
43c4762a1bSJed Brown /*
44c4762a1bSJed Brown    User-defined application context - contains data needed by the
45c4762a1bSJed Brown    application-provided callback routines.
46c4762a1bSJed Brown */
typedef struct {
  MPI_Comm  comm;      /* communicator shared by all solver objects */
  DM        da;        /* distributed array managing the 1d parallel grid */
  Vec       localwork; /* local ghosted work vector (output scratch for RHSFunction) */
  Vec       u_local;   /* local ghosted approximate solution vector */
  Vec       solution;  /* global exact solution vector; Monitor() overwrites it with the error */
  PetscInt  m;         /* total number of grid points (set by the -M runtime option) */
  PetscReal h;         /* mesh width: h = 1/(m-1) */
  PetscBool debug;     /* PETSC_TRUE activates debugging printouts (-debug option) */
} AppCtx;
57c4762a1bSJed Brown 
58c4762a1bSJed Brown /*
59c4762a1bSJed Brown    User-defined routines, provided below.
60c4762a1bSJed Brown */
61c4762a1bSJed Brown extern PetscErrorCode InitialConditions(Vec, AppCtx *);
62c4762a1bSJed Brown extern PetscErrorCode RHSFunction(TS, PetscReal, Vec, Vec, void *);
63c4762a1bSJed Brown extern PetscErrorCode RHSJacobian(TS, PetscReal, Vec, Mat, Mat, void *);
64c4762a1bSJed Brown extern PetscErrorCode Monitor(TS, PetscInt, PetscReal, Vec, void *);
65c4762a1bSJed Brown extern PetscErrorCode ExactSolution(PetscReal, Vec, AppCtx *);
66c4762a1bSJed Brown 
67d71ae5a4SJacob Faibussowitsch int main(int argc, char **argv)
68d71ae5a4SJacob Faibussowitsch {
69c4762a1bSJed Brown   AppCtx    appctx;               /* user-defined application context */
70c4762a1bSJed Brown   TS        ts;                   /* timestepping context */
71c4762a1bSJed Brown   Mat       A;                    /* Jacobian matrix data structure */
72c4762a1bSJed Brown   Vec       u;                    /* approximate solution vector */
73c4762a1bSJed Brown   PetscInt  time_steps_max = 100; /* default max timesteps */
74c4762a1bSJed Brown   PetscReal dt;
75c4762a1bSJed Brown   PetscReal time_total_max = 100.0; /* default max total time */
76c4762a1bSJed Brown   PetscBool mymonitor      = PETSC_FALSE;
77c4762a1bSJed Brown   PetscReal bounds[]       = {1.0, 3.3};
78c4762a1bSJed Brown 
79c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
80c4762a1bSJed Brown      Initialize program and set problem parameters
81c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
82c4762a1bSJed Brown 
83327415f7SBarry Smith   PetscFunctionBeginUser;
849566063dSJacob Faibussowitsch   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
859566063dSJacob Faibussowitsch   PetscCall(PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD), 1, bounds));
86c4762a1bSJed Brown 
87c4762a1bSJed Brown   appctx.comm = PETSC_COMM_WORLD;
88c4762a1bSJed Brown   appctx.m    = 60;
89c4762a1bSJed Brown 
909566063dSJacob Faibussowitsch   PetscCall(PetscOptionsGetInt(NULL, NULL, "-M", &appctx.m, NULL));
919566063dSJacob Faibussowitsch   PetscCall(PetscOptionsHasName(NULL, NULL, "-debug", &appctx.debug));
929566063dSJacob Faibussowitsch   PetscCall(PetscOptionsHasName(NULL, NULL, "-mymonitor", &mymonitor));
93c4762a1bSJed Brown 
94c4762a1bSJed Brown   appctx.h = 1.0 / (appctx.m - 1.0);
95c4762a1bSJed Brown 
96c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
97c4762a1bSJed Brown      Create vector data structures
98c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
99c4762a1bSJed Brown 
100c4762a1bSJed Brown   /*
101c4762a1bSJed Brown      Create distributed array (DMDA) to manage parallel grid and vectors
102c4762a1bSJed Brown      and to set up the ghost point communication pattern.  There are M
103c4762a1bSJed Brown      total grid values spread equally among all the processors.
104c4762a1bSJed Brown   */
1059566063dSJacob Faibussowitsch   PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, appctx.m, 1, 1, NULL, &appctx.da));
1069566063dSJacob Faibussowitsch   PetscCall(DMSetFromOptions(appctx.da));
1079566063dSJacob Faibussowitsch   PetscCall(DMSetUp(appctx.da));
108c4762a1bSJed Brown 
109c4762a1bSJed Brown   /*
110c4762a1bSJed Brown      Extract global and local vectors from DMDA; we use these to store the
111c4762a1bSJed Brown      approximate solution.  Then duplicate these for remaining vectors that
112c4762a1bSJed Brown      have the same types.
113c4762a1bSJed Brown   */
1149566063dSJacob Faibussowitsch   PetscCall(DMCreateGlobalVector(appctx.da, &u));
1159566063dSJacob Faibussowitsch   PetscCall(DMCreateLocalVector(appctx.da, &appctx.u_local));
116c4762a1bSJed Brown 
117c4762a1bSJed Brown   /*
118c4762a1bSJed Brown      Create local work vector for use in evaluating right-hand-side function;
119c4762a1bSJed Brown      create global work vector for storing exact solution.
120c4762a1bSJed Brown   */
1219566063dSJacob Faibussowitsch   PetscCall(VecDuplicate(appctx.u_local, &appctx.localwork));
1229566063dSJacob Faibussowitsch   PetscCall(VecDuplicate(u, &appctx.solution));
123c4762a1bSJed Brown 
124c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
125c4762a1bSJed Brown      Create timestepping solver context; set callback routine for
126c4762a1bSJed Brown      right-hand-side function evaluation.
127c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
128c4762a1bSJed Brown 
1299566063dSJacob Faibussowitsch   PetscCall(TSCreate(PETSC_COMM_WORLD, &ts));
1309566063dSJacob Faibussowitsch   PetscCall(TSSetProblemType(ts, TS_NONLINEAR));
1319566063dSJacob Faibussowitsch   PetscCall(TSSetRHSFunction(ts, NULL, RHSFunction, &appctx));
132c4762a1bSJed Brown 
133c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
134c4762a1bSJed Brown      Set optional user-defined monitoring routine
135c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
136c4762a1bSJed Brown 
13748a46eb9SPierre Jolivet   if (mymonitor) PetscCall(TSMonitorSet(ts, Monitor, &appctx, NULL));
138c4762a1bSJed Brown 
139c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
140c4762a1bSJed Brown      For nonlinear problems, the user can provide a Jacobian evaluation
141c4762a1bSJed Brown      routine (or use a finite differencing approximation).
142c4762a1bSJed Brown 
143c4762a1bSJed Brown      Create matrix data structure; set Jacobian evaluation routine.
144c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
145c4762a1bSJed Brown 
1469566063dSJacob Faibussowitsch   PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
1479566063dSJacob Faibussowitsch   PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, appctx.m, appctx.m));
1489566063dSJacob Faibussowitsch   PetscCall(MatSetFromOptions(A));
1499566063dSJacob Faibussowitsch   PetscCall(MatSetUp(A));
1509566063dSJacob Faibussowitsch   PetscCall(TSSetRHSJacobian(ts, A, A, RHSJacobian, &appctx));
151c4762a1bSJed Brown 
152c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
153c4762a1bSJed Brown      Set solution vector and initial timestep
154c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
155c4762a1bSJed Brown 
156c4762a1bSJed Brown   dt = appctx.h / 2.0;
1579566063dSJacob Faibussowitsch   PetscCall(TSSetTimeStep(ts, dt));
158c4762a1bSJed Brown 
159c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
160c4762a1bSJed Brown      Customize timestepping solver:
161c4762a1bSJed Brown        - Set the solution method to be the Backward Euler method.
162c4762a1bSJed Brown        - Set timestepping duration info
163c4762a1bSJed Brown      Then set runtime options, which can override these defaults.
164c4762a1bSJed Brown      For example,
165c4762a1bSJed Brown           -ts_max_steps <maxsteps> -ts_max_time <maxtime>
166c4762a1bSJed Brown      to override the defaults set by TSSetMaxSteps()/TSSetMaxTime().
167c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
168c4762a1bSJed Brown 
1699566063dSJacob Faibussowitsch   PetscCall(TSSetType(ts, TSBEULER));
1709566063dSJacob Faibussowitsch   PetscCall(TSSetMaxSteps(ts, time_steps_max));
1719566063dSJacob Faibussowitsch   PetscCall(TSSetMaxTime(ts, time_total_max));
1729566063dSJacob Faibussowitsch   PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
1739566063dSJacob Faibussowitsch   PetscCall(TSSetFromOptions(ts));
174c4762a1bSJed Brown 
175c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
176c4762a1bSJed Brown      Solve the problem
177c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
178c4762a1bSJed Brown 
179c4762a1bSJed Brown   /*
180c4762a1bSJed Brown      Evaluate initial conditions
181c4762a1bSJed Brown   */
1829566063dSJacob Faibussowitsch   PetscCall(InitialConditions(u, &appctx));
183c4762a1bSJed Brown 
184c4762a1bSJed Brown   /*
185c4762a1bSJed Brown      Run the timestepping solver
186c4762a1bSJed Brown   */
1879566063dSJacob Faibussowitsch   PetscCall(TSSolve(ts, u));
188c4762a1bSJed Brown 
189c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
190c4762a1bSJed Brown      Free work space.  All PETSc objects should be destroyed when they
191c4762a1bSJed Brown      are no longer needed.
192c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
193c4762a1bSJed Brown 
1949566063dSJacob Faibussowitsch   PetscCall(TSDestroy(&ts));
1959566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&u));
1969566063dSJacob Faibussowitsch   PetscCall(MatDestroy(&A));
1979566063dSJacob Faibussowitsch   PetscCall(DMDestroy(&appctx.da));
1989566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.localwork));
1999566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.solution));
2009566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&appctx.u_local));
201c4762a1bSJed Brown 
202c4762a1bSJed Brown   /*
203c4762a1bSJed Brown      Always call PetscFinalize() before exiting a program.  This routine
204c4762a1bSJed Brown        - finalizes the PETSc libraries as well as MPI
205c4762a1bSJed Brown        - provides summary and diagnostic information if certain runtime
206c4762a1bSJed Brown          options are chosen (e.g., -log_view).
207c4762a1bSJed Brown   */
2089566063dSJacob Faibussowitsch   PetscCall(PetscFinalize());
209b122ec5aSJacob Faibussowitsch   return 0;
210c4762a1bSJed Brown }
211c4762a1bSJed Brown /* --------------------------------------------------------------------- */
212c4762a1bSJed Brown /*
213c4762a1bSJed Brown    InitialConditions - Computes the solution at the initial time.
214c4762a1bSJed Brown 
215c4762a1bSJed Brown    Input Parameters:
216c4762a1bSJed Brown    u - uninitialized solution vector (global)
217c4762a1bSJed Brown    appctx - user-defined application context
218c4762a1bSJed Brown 
219c4762a1bSJed Brown    Output Parameter:
220c4762a1bSJed Brown    u - vector with solution at initial time (global)
221c4762a1bSJed Brown */
222d71ae5a4SJacob Faibussowitsch PetscErrorCode InitialConditions(Vec u, AppCtx *appctx)
223d71ae5a4SJacob Faibussowitsch {
224c4762a1bSJed Brown   PetscScalar *u_localptr, h = appctx->h, x;
225c4762a1bSJed Brown   PetscInt     i, mybase, myend;
226c4762a1bSJed Brown 
2273ba16761SJacob Faibussowitsch   PetscFunctionBeginUser;
228c4762a1bSJed Brown   /*
229c4762a1bSJed Brown      Determine starting point of each processor's range of
230c4762a1bSJed Brown      grid values.
231c4762a1bSJed Brown   */
2329566063dSJacob Faibussowitsch   PetscCall(VecGetOwnershipRange(u, &mybase, &myend));
233c4762a1bSJed Brown 
234c4762a1bSJed Brown   /*
235c4762a1bSJed Brown     Get a pointer to vector data.
236c4762a1bSJed Brown     - For default PETSc vectors, VecGetArray() returns a pointer to
237c4762a1bSJed Brown       the data array.  Otherwise, the routine is implementation dependent.
238c4762a1bSJed Brown     - You MUST call VecRestoreArray() when you no longer need access to
239c4762a1bSJed Brown       the array.
240c4762a1bSJed Brown     - Note that the Fortran interface to VecGetArray() differs from the
241c4762a1bSJed Brown       C version.  See the users manual for details.
242c4762a1bSJed Brown   */
2439566063dSJacob Faibussowitsch   PetscCall(VecGetArray(u, &u_localptr));
244c4762a1bSJed Brown 
245c4762a1bSJed Brown   /*
246c4762a1bSJed Brown      We initialize the solution array by simply writing the solution
247c4762a1bSJed Brown      directly into the array locations.  Alternatively, we could use
248c4762a1bSJed Brown      VecSetValues() or VecSetValuesLocal().
249c4762a1bSJed Brown   */
250c4762a1bSJed Brown   for (i = mybase; i < myend; i++) {
251c4762a1bSJed Brown     x                      = h * (PetscReal)i; /* current location in global grid */
252c4762a1bSJed Brown     u_localptr[i - mybase] = 1.0 + x * x;
253c4762a1bSJed Brown   }
254c4762a1bSJed Brown 
255c4762a1bSJed Brown   /*
256c4762a1bSJed Brown      Restore vector
257c4762a1bSJed Brown   */
2589566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(u, &u_localptr));
259c4762a1bSJed Brown 
260c4762a1bSJed Brown   /*
261c4762a1bSJed Brown      Print debugging information if desired
262c4762a1bSJed Brown   */
263c4762a1bSJed Brown   if (appctx->debug) {
2649566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm, "initial guess vector\n"));
2659566063dSJacob Faibussowitsch     PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
266c4762a1bSJed Brown   }
267c4762a1bSJed Brown 
2683ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
269c4762a1bSJed Brown }
270c4762a1bSJed Brown /* --------------------------------------------------------------------- */
271c4762a1bSJed Brown /*
272c4762a1bSJed Brown    ExactSolution - Computes the exact solution at a given time.
273c4762a1bSJed Brown 
274c4762a1bSJed Brown    Input Parameters:
275c4762a1bSJed Brown    t - current time
276c4762a1bSJed Brown    solution - vector in which exact solution will be computed
277c4762a1bSJed Brown    appctx - user-defined application context
278c4762a1bSJed Brown 
279c4762a1bSJed Brown    Output Parameter:
280c4762a1bSJed Brown    solution - vector with the newly computed exact solution
281c4762a1bSJed Brown */
282d71ae5a4SJacob Faibussowitsch PetscErrorCode ExactSolution(PetscReal t, Vec solution, AppCtx *appctx)
283d71ae5a4SJacob Faibussowitsch {
284c4762a1bSJed Brown   PetscScalar *s_localptr, h = appctx->h, x;
285c4762a1bSJed Brown   PetscInt     i, mybase, myend;
286c4762a1bSJed Brown 
2873ba16761SJacob Faibussowitsch   PetscFunctionBeginUser;
288c4762a1bSJed Brown   /*
289c4762a1bSJed Brown      Determine starting and ending points of each processor's
290c4762a1bSJed Brown      range of grid values
291c4762a1bSJed Brown   */
2929566063dSJacob Faibussowitsch   PetscCall(VecGetOwnershipRange(solution, &mybase, &myend));
293c4762a1bSJed Brown 
294c4762a1bSJed Brown   /*
295c4762a1bSJed Brown      Get a pointer to vector data.
296c4762a1bSJed Brown   */
2979566063dSJacob Faibussowitsch   PetscCall(VecGetArray(solution, &s_localptr));
298c4762a1bSJed Brown 
299c4762a1bSJed Brown   /*
300c4762a1bSJed Brown      Simply write the solution directly into the array locations.
301c4762a1bSJed Brown      Alternatively, we could use VecSetValues() or VecSetValuesLocal().
302c4762a1bSJed Brown   */
303c4762a1bSJed Brown   for (i = mybase; i < myend; i++) {
304c4762a1bSJed Brown     x                      = h * (PetscReal)i;
305c4762a1bSJed Brown     s_localptr[i - mybase] = (t + 1.0) * (1.0 + x * x);
306c4762a1bSJed Brown   }
307c4762a1bSJed Brown 
308c4762a1bSJed Brown   /*
309c4762a1bSJed Brown      Restore vector
310c4762a1bSJed Brown   */
3119566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(solution, &s_localptr));
3123ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
313c4762a1bSJed Brown }
314c4762a1bSJed Brown /* --------------------------------------------------------------------- */
315c4762a1bSJed Brown /*
316c4762a1bSJed Brown    Monitor - User-provided routine to monitor the solution computed at
317c4762a1bSJed Brown    each timestep.  This example plots the solution and computes the
318c4762a1bSJed Brown    error in two different norms.
319c4762a1bSJed Brown 
320c4762a1bSJed Brown    Input Parameters:
321c4762a1bSJed Brown    ts     - the timestep context
322c4762a1bSJed Brown    step   - the count of the current step (with 0 meaning the
323c4762a1bSJed Brown             initial condition)
324c4762a1bSJed Brown    time   - the current time
325c4762a1bSJed Brown    u      - the solution at this timestep
326c4762a1bSJed Brown    ctx    - the user-provided context for this monitoring routine.
327c4762a1bSJed Brown             In this case we use the application context which contains
328c4762a1bSJed Brown             information about the problem size, workspace and the exact
329c4762a1bSJed Brown             solution.
330c4762a1bSJed Brown */
PetscErrorCode Monitor(TS ts, PetscInt step, PetscReal time, Vec u, void *ctx)
{
  AppCtx   *appctx = (AppCtx *)ctx; /* user-defined application context */
  PetscReal en2, en2s, enmax;       /* 2-norm, scaled 2-norm, and max-norm of the error */
  PetscDraw draw;

  PetscFunctionBeginUser;
  /*
     We use the default X Windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscCall(PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm), 0, &draw));
  PetscCall(PetscDrawSetDoubleBuffer(draw));
  PetscCall(VecView(u, PETSC_VIEWER_DRAW_(appctx->comm)));

  /*
     Compute the exact solution at this timestep (stored in appctx->solution)
  */
  PetscCall(ExactSolution(time, appctx->solution, appctx));

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Computed solution vector\n"));
    PetscCall(VecView(u, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(PetscPrintf(appctx->comm, "Exact solution vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }

  /*
     Compute the 2-norm and max-norm of the error.  Note that VecAXPY()
     overwrites appctx->solution in place with the error (exact - computed).
  */
  PetscCall(VecAXPY(appctx->solution, -1.0, u));
  PetscCall(VecNorm(appctx->solution, NORM_2, &en2));
  en2s = PetscSqrtReal(appctx->h) * en2; /* scale the 2-norm by the grid spacing */
  PetscCall(VecNorm(appctx->solution, NORM_MAX, &enmax));

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscCall(PetscPrintf(appctx->comm, "Timestep %" PetscInt_FMT ": time = %g 2-norm error = %g  max norm error = %g\n", step, (double)time, (double)en2s, (double)enmax));

  /*
     Print debugging information if desired (appctx->solution now holds the error)
  */
  if (appctx->debug) {
    PetscCall(PetscPrintf(appctx->comm, "Error vector\n"));
    PetscCall(VecView(appctx->solution, PETSC_VIEWER_STDOUT_WORLD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
391c4762a1bSJed Brown /* --------------------------------------------------------------------- */
392c4762a1bSJed Brown /*
393c4762a1bSJed Brown    RHSFunction - User-provided routine that evalues the right-hand-side
394c4762a1bSJed Brown    function of the ODE.  This routine is set in the main program by
395c4762a1bSJed Brown    calling TSSetRHSFunction().  We compute:
396c4762a1bSJed Brown           global_out = F(global_in)
397c4762a1bSJed Brown 
398c4762a1bSJed Brown    Input Parameters:
399c4762a1bSJed Brown    ts         - timesteping context
400c4762a1bSJed Brown    t          - current time
401c4762a1bSJed Brown    global_in  - vector containing the current iterate
402c4762a1bSJed Brown    ctx        - (optional) user-provided context for function evaluation.
403c4762a1bSJed Brown                 In this case we use the appctx defined above.
404c4762a1bSJed Brown 
405c4762a1bSJed Brown    Output Parameter:
406c4762a1bSJed Brown    global_out - vector containing the newly evaluated function
407c4762a1bSJed Brown */
408d71ae5a4SJacob Faibussowitsch PetscErrorCode RHSFunction(TS ts, PetscReal t, Vec global_in, Vec global_out, void *ctx)
409d71ae5a4SJacob Faibussowitsch {
410c4762a1bSJed Brown   AppCtx            *appctx    = (AppCtx *)ctx;     /* user-defined application context */
411c4762a1bSJed Brown   DM                 da        = appctx->da;        /* distributed array */
412c4762a1bSJed Brown   Vec                local_in  = appctx->u_local;   /* local ghosted input vector */
413c4762a1bSJed Brown   Vec                localwork = appctx->localwork; /* local ghosted work vector */
414c4762a1bSJed Brown   PetscInt           i, localsize;
415c4762a1bSJed Brown   PetscMPIInt        rank, size;
416c4762a1bSJed Brown   PetscScalar       *copyptr, sc;
417c4762a1bSJed Brown   const PetscScalar *localptr;
418c4762a1bSJed Brown 
4193ba16761SJacob Faibussowitsch   PetscFunctionBeginUser;
420c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
421c4762a1bSJed Brown      Get ready for local function computations
422c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
423c4762a1bSJed Brown   /*
424c4762a1bSJed Brown      Scatter ghost points to local vector, using the 2-step process
425c4762a1bSJed Brown         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
426c4762a1bSJed Brown      By placing code between these two statements, computations can be
427c4762a1bSJed Brown      done while messages are in transition.
428c4762a1bSJed Brown   */
4299566063dSJacob Faibussowitsch   PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
4309566063dSJacob Faibussowitsch   PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));
431c4762a1bSJed Brown 
432c4762a1bSJed Brown   /*
433c4762a1bSJed Brown       Access directly the values in our local INPUT work array
434c4762a1bSJed Brown   */
4359566063dSJacob Faibussowitsch   PetscCall(VecGetArrayRead(local_in, &localptr));
436c4762a1bSJed Brown 
437c4762a1bSJed Brown   /*
438c4762a1bSJed Brown       Access directly the values in our local OUTPUT work array
439c4762a1bSJed Brown   */
4409566063dSJacob Faibussowitsch   PetscCall(VecGetArray(localwork, &copyptr));
441c4762a1bSJed Brown 
442c4762a1bSJed Brown   sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
443c4762a1bSJed Brown 
444c4762a1bSJed Brown   /*
445c4762a1bSJed Brown       Evaluate our function on the nodes owned by this processor
446c4762a1bSJed Brown   */
4479566063dSJacob Faibussowitsch   PetscCall(VecGetLocalSize(local_in, &localsize));
448c4762a1bSJed Brown 
449c4762a1bSJed Brown   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
450c4762a1bSJed Brown      Compute entries for the locally owned part
451c4762a1bSJed Brown      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
452c4762a1bSJed Brown 
453c4762a1bSJed Brown   /*
454c4762a1bSJed Brown      Handle boundary conditions: This is done by using the boundary condition
455c4762a1bSJed Brown         u(t,boundary) = g(t,boundary)
456c4762a1bSJed Brown      for some function g. Now take the derivative with respect to t to obtain
457c4762a1bSJed Brown         u_{t}(t,boundary) = g_{t}(t,boundary)
458c4762a1bSJed Brown 
459c4762a1bSJed Brown      In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
460c4762a1bSJed Brown              and  u(t,1) = 2t+ 2, so that u_{t}(t,1) = 2
461c4762a1bSJed Brown   */
4629566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_rank(appctx->comm, &rank));
4639566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(appctx->comm, &size));
464dd400576SPatrick Sanan   if (rank == 0) copyptr[0] = 1.0;
465c4762a1bSJed Brown   if (rank == size - 1) copyptr[localsize - 1] = 2.0;
466c4762a1bSJed Brown 
467c4762a1bSJed Brown   /*
468c4762a1bSJed Brown      Handle the interior nodes where the PDE is replace by finite
469c4762a1bSJed Brown      difference operators.
470c4762a1bSJed Brown   */
471c4762a1bSJed Brown   for (i = 1; i < localsize - 1; i++) copyptr[i] = localptr[i] * sc * (localptr[i + 1] + localptr[i - 1] - 2.0 * localptr[i]);
472c4762a1bSJed Brown 
473c4762a1bSJed Brown   /*
474c4762a1bSJed Brown      Restore vectors
475c4762a1bSJed Brown   */
4769566063dSJacob Faibussowitsch   PetscCall(VecRestoreArrayRead(local_in, &localptr));
4779566063dSJacob Faibussowitsch   PetscCall(VecRestoreArray(localwork, &copyptr));
478c4762a1bSJed Brown 
479c4762a1bSJed Brown   /*
480c4762a1bSJed Brown      Insert values from the local OUTPUT vector into the global
481c4762a1bSJed Brown      output vector
482c4762a1bSJed Brown   */
4839566063dSJacob Faibussowitsch   PetscCall(DMLocalToGlobalBegin(da, localwork, INSERT_VALUES, global_out));
4849566063dSJacob Faibussowitsch   PetscCall(DMLocalToGlobalEnd(da, localwork, INSERT_VALUES, global_out));
485c4762a1bSJed Brown 
486c4762a1bSJed Brown   /* Print debugging information if desired */
487c4762a1bSJed Brown   if (appctx->debug) {
4889566063dSJacob Faibussowitsch     PetscCall(PetscPrintf(appctx->comm, "RHS function vector\n"));
4899566063dSJacob Faibussowitsch     PetscCall(VecView(global_out, PETSC_VIEWER_STDOUT_WORLD));
490c4762a1bSJed Brown   }
491c4762a1bSJed Brown 
4923ba16761SJacob Faibussowitsch   PetscFunctionReturn(PETSC_SUCCESS);
493c4762a1bSJed Brown }
494c4762a1bSJed Brown /* --------------------------------------------------------------------- */
495c4762a1bSJed Brown /*
496c4762a1bSJed Brown    RHSJacobian - User-provided routine to compute the Jacobian of
497c4762a1bSJed Brown    the nonlinear right-hand-side function of the ODE.
498c4762a1bSJed Brown 
499c4762a1bSJed Brown    Input Parameters:
500c4762a1bSJed Brown    ts - the TS context
501c4762a1bSJed Brown    t - current time
502c4762a1bSJed Brown    global_in - global input vector
503c4762a1bSJed Brown    dummy - optional user-defined context, as set by TSetRHSJacobian()
504c4762a1bSJed Brown 
505c4762a1bSJed Brown    Output Parameters:
506c4762a1bSJed Brown    AA - Jacobian matrix
507c4762a1bSJed Brown    BB - optionally different preconditioning matrix
508c4762a1bSJed Brown    str - flag indicating matrix structure
509c4762a1bSJed Brown 
510c4762a1bSJed Brown   Notes:
511c4762a1bSJed Brown   RHSJacobian computes entries for the locally owned part of the Jacobian.
512c4762a1bSJed Brown    - Currently, all PETSc parallel matrix formats are partitioned by
513c4762a1bSJed Brown      contiguous chunks of rows across the processors.
514c4762a1bSJed Brown    - Each processor needs to insert only elements that it owns
515c4762a1bSJed Brown      locally (but any non-local elements will be sent to the
516c4762a1bSJed Brown      appropriate processor during matrix assembly).
517c4762a1bSJed Brown    - Always specify global row and columns of matrix entries when
518c4762a1bSJed Brown      using MatSetValues().
519c4762a1bSJed Brown    - Here, we set all entries for a particular row at once.
520c4762a1bSJed Brown    - Note that MatSetValues() uses 0-based row and column numbers
521c4762a1bSJed Brown      in Fortran as well as in C.
522c4762a1bSJed Brown */
PetscErrorCode RHSJacobian(TS ts, PetscReal t, Vec global_in, Mat AA, Mat BB, void *ctx)
{
  AppCtx            *appctx   = (AppCtx *)ctx;   /* user-defined application context */
  Vec                local_in = appctx->u_local; /* local ghosted input vector */
  DM                 da       = appctx->da;      /* distributed array */
  PetscScalar        v[3], sc;                   /* 3-point stencil values; time/mesh scale factor */
  const PetscScalar *localptr;                   /* read-only array view of local_in */
  PetscInt           i, mstart, mend, mstarts, mends, idx[3], is;

  PetscFunctionBeginUser;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  PetscCall(DMGlobalToLocalBegin(da, global_in, INSERT_VALUES, local_in));
  PetscCall(DMGlobalToLocalEnd(da, global_in, INSERT_VALUES, local_in));

  /*
     Get pointer to vector data
  */
  PetscCall(VecGetArrayRead(local_in, &localptr));

  /*
     Get starting and ending locally owned rows of the matrix
  */
  PetscCall(MatGetOwnershipRange(BB, &mstarts, &mends));
  mstart = mstarts;
  mend   = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries either using either
        MatSetValuesLocal() or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data.  The Dirichlet
     boundary values u(t,0) and u(t,1) do not depend on u, so those
     Jacobian rows contain only a zero diagonal entry.  mstart/mend are
     adjusted so the interior loop below skips these rows.
  */
  if (mstart == 0) {
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mstart, 1, &mstart, v, INSERT_VALUES));
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    PetscCall(MatSetValues(BB, 1, &mend, 1, &mend, v, INSERT_VALUES));
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.  sc = 1/(2 h^2 (1+t)^2) is the combined
     mesh and time scaling from the PDE u_t = u*u_xx / (2*(t+1)^2).
  */
  sc = 1.0 / (appctx->h * appctx->h * 2.0 * (1.0 + t) * (1.0 + t));
  for (i = mstart; i < mend; i++) {
    idx[0] = i - 1;
    idx[1] = i;
    idx[2] = i + 1;
    /*
       is = index of global row i in the ghosted local array: on rank 0
       mstart was advanced past the boundary row (local index equals the
       global index there), while on other ranks the local array starts
       one ghost point before the first owned row.
    */
    is     = i - mstart + 1;
    v[0]   = sc * localptr[is];
    /* diagonal: d/du_i of u_i*(u_{i-1} - 2u_i + u_{i+1}) = u_{i-1} + u_{i+1} - 4u_i */
    v[1]   = sc * (localptr[is + 1] + localptr[is - 1] - 4.0 * localptr[is]);
    v[2]   = sc * localptr[is];
    PetscCall(MatSetValues(BB, 1, &i, 3, idx, v, INSERT_VALUES));
  }

  /*
     Restore vector
  */
  PetscCall(VecRestoreArrayRead(local_in, &localptr));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(BB, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(BB, MAT_FINAL_ASSEMBLY));
  if (BB != AA) {
    PetscCall(MatAssemblyBegin(AA, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(AA, MAT_FINAL_ASSEMBLY));
  }

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  PetscCall(MatSetOption(BB, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));

  PetscFunctionReturn(PETSC_SUCCESS);
}
628c4762a1bSJed Brown 
629c4762a1bSJed Brown /*TEST
630c4762a1bSJed Brown 
631c4762a1bSJed Brown     test:
632c4762a1bSJed Brown       args: -nox -ts_dt 10 -mymonitor
633c4762a1bSJed Brown       nsize: 2
634c4762a1bSJed Brown       requires: !single
635c4762a1bSJed Brown 
636c4762a1bSJed Brown     test:
637c4762a1bSJed Brown       suffix: tut_1
638c4762a1bSJed Brown       nsize: 1
639c4762a1bSJed Brown       args: -ts_max_steps 10 -ts_monitor
640c4762a1bSJed Brown 
641c4762a1bSJed Brown     test:
642c4762a1bSJed Brown       suffix: tut_2
643c4762a1bSJed Brown       nsize: 4
644c4762a1bSJed Brown       args: -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
      # GEMV is sensitive to single precision
646*9d5502f9SJunchao Zhang       args:  -vec_mdot_use_gemv 0 -vec_maxpy_use_gemv 0
647c4762a1bSJed Brown 
648c4762a1bSJed Brown     test:
649c4762a1bSJed Brown       suffix: tut_3
650c4762a1bSJed Brown       nsize: 4
6512e16c0ceSBarry Smith       args: -ts_max_steps 10 -ts_monitor -M 128
652c4762a1bSJed Brown 
653c4762a1bSJed Brown TEST*/
654