xref: /petsc/src/ts/utils/dmplexlandau/tutorials/ex1.c (revision 6b664d0030d045511a188253fb89991de72e26b7)
/* Program banner printed by -help; fixed typo: "amnisotropic" -> "anisotropic". */
static char help[] = "Landau collision operator with anisotropic thermalization verification test as per Hager et al.\n 'A fully non-linear multi-species Fokker-Planck-Landau collision operator for simulation of fusion plasma', and "
                     "published as 'A performance portable, fully implicit Landau collision operator with batched linear solvers' https://arxiv.org/abs/2209.03228\n\n";
3 
4 #include <petscts.h>
5 #include <petsclandau.h>
6 #include <petscdmcomposite.h>
7 #include <petscds.h>
8 
/*
 Callback for DMPlexLandauAccess(), used here to test the field-access API.

 Input Parameters:
+ dm          - a DM for this field
. local_field - the local index in the grid for this field
. grid        - the grid index
. b_id        - the batch index
- vctx        - a user context

 Input/Output Parameter:
. x - Vector to add data to

 */
23 PetscErrorCode landau_field_print_access_callback(DM dm, Vec x, PetscInt local_field, PetscInt grid, PetscInt b_id, void *vctx)
24 {
25   LandauCtx  *ctx;
26   PetscScalar val;
27   PetscInt    species;
28 
29   PetscFunctionBegin;
30   PetscCall(DMGetApplicationContext(dm, &ctx));
31   species = ctx->species_offset[grid] + local_field;
32   val     = (PetscScalar)(LAND_PACK_IDX(b_id, grid) + (species + 1) * 10);
33   PetscCall(VecSet(x, val));
34   PetscCall(PetscInfo(dm, "DMPlexLandauAccess user 'add' method to grid %" PetscInt_FMT ", batch %" PetscInt_FMT " and local field %" PetscInt_FMT " with %" PetscInt_FMT " grids\n", grid, b_id, local_field, ctx->num_grids));
35 
36   PetscFunctionReturn(PETSC_SUCCESS);
37 }
38 
/* Anisotropy factor for the initial bimaxwellians: maxwellian() scales the perpendicular
   velocity term by alphai, so the initial T_perp is 1.3x T_par. */
static const PetscReal alphai   = 1 / 1.3;
static const PetscReal kev_joul = 6.241506479963235e+15; /* 1/1000e: converts Joules to keV (multiply by 1000 for eV) */
41 
// constants[0] holds the z (x[1] in 2D, x[2] in 3D) shift of the (anisotropic) source
43 /* < v, n_s v_|| > */
44 static void f0_vz(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar *f0)
45 {
46   if (dim == 2) f0[0] = u[0] * (2. * PETSC_PI * x[0]) * x[1]; /* n r v_|| */
47   else f0[0] = u[0] * x[2];
48 }
49 /* < v, n (v-shift)^2 > */
50 static void f0_v2_par_shift(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar *f0)
51 {
52   PetscReal vz = PetscRealPart(constants[0]);
53   if (dim == 2) *f0 = u[0] * (2. * PETSC_PI * x[0]) * (x[1] - vz) * (x[1] - vz); /* n r v^2_par|perp */
54   else *f0 = u[0] * (x[2] - vz) * (x[2] - vz);
55 }
56 static void f0_v2_perp(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar *f0)
57 {
58   if (dim == 2) *f0 = u[0] * (2. * PETSC_PI * x[0]) * x[0] * x[0]; /* n r v^2_perp */
59   else *f0 = u[0] * (x[0] * x[0] + x[1] * x[1]);
60 }
61 /* < v, n_e > */
62 static void f0_n(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar *f0)
63 {
64   if (dim == 2) f0[0] = 2. * PETSC_PI * x[0] * u[0];
65   else f0[0] = u[0];
66 }
67 static void f0_v2_shift(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar *f0)
68 {
69   PetscReal vz = PetscRealPart(constants[0]);
70   if (dim == 2) f0[0] = u[0] * (2. * PETSC_PI * x[0]) * (x[0] * x[0] + (x[1] - vz) * (x[1] - vz));
71   else f0[0] = u[0] * (x[0] * x[0] + x[1] * x[1] + (x[2] - vz) * (x[2] - vz));
72 }
/* Define a Maxwellian function for testing out the operator. */
typedef struct {
  PetscReal v_0;     /* velocity normalization (shared by all grids, from LandauCtx) */
  PetscReal kT_m;    /* kT/m: thermal velocity squared */
  PetscReal n;       /* number density */
  PetscReal shift;   /* parallel (z) drift, in v_0 units */
  PetscInt  species; /* global species index (set by SetMaxwellians; not read by maxwellian()) */
} MaxwellianCtx;
81 
82 static PetscErrorCode maxwellian(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf_dummy, PetscScalar *u, void *actx)
83 {
84   MaxwellianCtx *mctx  = (MaxwellianCtx *)actx;
85   PetscReal      theta = 2 * mctx->kT_m / (mctx->v_0 * mctx->v_0); /* theta = 2kT/mc^2 */
86   PetscFunctionBegin;
87   /* evaluate the shifted Maxwellian */
88   if (dim == 2) u[0] += alphai * mctx->n * PetscPowReal(PETSC_PI * theta, -1.5) * PetscExpReal(-(alphai * x[0] * x[0] + (x[1] - mctx->shift) * (x[1] - mctx->shift)) / theta);
89   else u[0] += alphai * mctx->n * PetscPowReal(PETSC_PI * theta, -1.5) * PetscExpReal(-(alphai * (x[0] * x[0] + x[1] * x[1]) + (x[2] - mctx->shift) * (x[2] - mctx->shift)) / theta);
90   PetscFunctionReturn(PETSC_SUCCESS);
91 }
92 
/*
  SetMaxwellians - project shifted bimaxwellians for each species of one grid into X.

  Input Parameters:
+ dm     - the DM of this grid
. time   - time passed to DMProjectFunction()
. temps  - thermal temperatures, indexed by GLOBAL species index
. ns     - densities, indexed by GLOBAL species index
. grid   - the grid index
. shifts - scratch array; NOTE(review): only written in the disabled branch below
- ctx    - the LandauCtx; looked up from dm's application context if NULL

  Output Parameter:
. X - the grid's (local) solution vector
*/
static PetscErrorCode SetMaxwellians(DM dm, Vec X, PetscReal time, PetscReal temps[], PetscReal ns[], PetscInt grid, PetscReal shifts[], LandauCtx *ctx)
{
  PetscErrorCode (*initu[LANDAU_MAX_SPECIES])(PetscInt, PetscReal, const PetscReal[], PetscInt, PetscScalar[], void *);
  MaxwellianCtx *mctxs[LANDAU_MAX_SPECIES], data[LANDAU_MAX_SPECIES];
  PetscFunctionBegin;
  if (!ctx) PetscCall(DMGetApplicationContext(dm, &ctx));
  /* one MaxwellianCtx per species on this grid; i0 is the grid-local field index */
  for (PetscInt ii = ctx->species_offset[grid], i0 = 0; ii < ctx->species_offset[grid + 1]; ii++, i0++) {
    mctxs[i0]        = &data[i0];
    data[i0].v_0     = ctx->v_0;                             // v_0 same for all grids
    data[i0].kT_m    = ctx->k * temps[ii] / ctx->masses[ii]; /* kT/m = v_th ^ 2*/
    data[i0].n       = ns[ii];
    initu[i0]        = maxwellian;
    data[i0].shift   = 0;
    data[i0].species = ii;
  }
  /* NOTE(review): 'if (1)' permanently selects the charge-sign based shift; the else branch
     (the only writer of 'shifts') is dead code apparently kept for experimentation. */
  if (1) {
    /* shift the grid's first species opposite its charge sign, scaled by m_0/m_s */
    data[0].shift = -((PetscReal)PetscSign(ctx->charges[ctx->species_offset[grid]])) * ctx->electronShift * ctx->m_0 / ctx->masses[ctx->species_offset[grid]];
  } else {
    shifts[0]     = 0.5 * PetscSqrtReal(ctx->masses[0] / ctx->masses[1]);
    shifts[1]     = 50 * (ctx->masses[0] / ctx->masses[1]);
    data[0].shift = ctx->electronShift * shifts[grid] * PetscSqrtReal(data[0].kT_m) / ctx->v_0; // shifts to not matter!!!!
  }
  PetscCall(DMProjectFunction(dm, time, initu, (void **)mctxs, INSERT_ALL_VALUES, X));
  PetscFunctionReturn(PETSC_SUCCESS);
}
118 
/* Indices into the 4-component NRL temperature state vector (see FormFunction, T0) */
typedef enum {
  E_PAR_IDX,  /* electron parallel temperature */
  E_PERP_IDX, /* electron perpendicular temperature */
  I_PAR_IDX,  /* ion parallel temperature */
  I_PERP_IDX, /* ion perpendicular temperature */
  NUM_TEMPS
} TemperatureIDX;
126 
/* --------------------  Evaluate Function F(x) --------------------- */
/* densities in cm^-3 per grid; 0 until first set by Monitor() from the FEM integrals */
static PetscReal n_cm3[2] = {0, 0};
/*
  FormFunction - RHS of the NRL-formulary temperature relaxation ODEs (CGS units):
  per-species parallel/perpendicular isotropization plus electron-ion equilibration.
  X and F are NUM_TEMPS long, laid out per TemperatureIDX.
*/
PetscErrorCode   FormFunction(TS ts, PetscReal tdummy, Vec X, Vec F, void *ptr)
{
  LandauCtx         *ctx = (LandauCtx *)ptr; /* user-defined application context */
  PetscScalar       *f;
  const PetscScalar *x;
  const PetscReal    k_B = 1.6e-12, e_cgs = 4.8e-10, m_cgs[2] = {9.1094e-28, 9.1094e-28 * ctx->masses[1] / ctx->masses[0]}; // erg/eV, e, m as per NRL;
  PetscReal          AA, sqrtA, v_abT, vTe, t1, TeDiff, Te, Ti, Tdiff;

  PetscFunctionBeginUser;
  PetscCall(VecGetArrayRead(X, &x));
  /* isotropic-equivalent temperatures: T = (2 T_perp + T_par) / 3 */
  Te = PetscRealPart(2 * x[E_PERP_IDX] + x[E_PAR_IDX]) / 3, Ti = PetscRealPart(2 * x[I_PERP_IDX] + x[I_PAR_IDX]) / 3;
  /* electron-ion equilibration rate (NRL formulary form) */
  v_abT = 1.8e-19 * PetscSqrtReal(m_cgs[0] * m_cgs[1]) * n_cm3[0] * ctx->lnLam * PetscPowReal(m_cgs[0] * Ti + m_cgs[1] * Te, -1.5);
  PetscCall(VecGetArray(F, &f));
  for (PetscInt ii = 0; ii < 2; ii++) { /* ii = 0: electrons, ii = 1: ions */
    PetscReal tPerp = PetscRealPart(x[2 * ii + E_PERP_IDX]), tPar = PetscRealPart(x[2 * ii + E_PAR_IDX]);
    TeDiff = tPerp - tPar;
    AA     = tPerp / tPar - 1; /* anisotropy parameter A */
    /* NOTE(review): this guard also skips AA < 0 (T_par > T_perp), where sqrt(AA) would be NaN
       — confirm oblate anisotropy is intentionally not relaxed */
    if (AA < 1e-6) t1 = 0;
    else {
      sqrtA = PetscSqrtReal(AA);
      t1    = (-3 + (AA + 3) * PetscAtanReal(sqrtA) / sqrtA) / PetscSqr(AA);
      //PetscReal vTeB = 8.2e-7 * n_cm3[0] * ctx->lnLam * PetscPowReal(Te, -1.5);
      /* NOTE(review): uses x[E_PAR_IDX] (electron parallel T) for BOTH species while the mass
         m_cgs[ii] varies — confirm whether x[2 * ii + E_PAR_IDX] was intended */
      vTe = PetscRealPart(2 * PetscSqrtReal(PETSC_PI / m_cgs[ii]) * PetscSqr(PetscSqr(e_cgs)) * n_cm3[0] * ctx->lnLam * PetscPowReal(k_B * x[E_PAR_IDX], -1.5)) * t1;
      t1  = vTe * TeDiff * PetscSqrtReal(PETSC_PI); // scaling form NRL that makes it work ???
    }
    f[2 * ii + E_PAR_IDX]  = 2 * t1; // par
    f[2 * ii + E_PERP_IDX] = -t1;    // perp
    /* add electron-ion temperature equilibration toward the other species */
    Tdiff                  = (ii == 0) ? (Ti - Te) : (Te - Ti);
    f[2 * ii + E_PAR_IDX] += v_abT * Tdiff;
    f[2 * ii + E_PERP_IDX] += v_abT * Tdiff;
  }
  PetscCall(VecRestoreArrayRead(X, &x));
  PetscCall(VecRestoreArray(F, &f));
  PetscFunctionReturn(PETSC_SUCCESS);
}
164 
/* --------------------  Form initial approximation ----------------- */
/* initial temperatures (eV), ordered per TemperatureIDX: e_par, e_perp, i_par, i_perp */
static PetscReal T0[4] = {300, 390, 200, 260};
167 PetscErrorCode   createVec_NRL(LandauCtx *ctx, Vec *vec)
168 {
169   PetscScalar *x;
170   Vec          Temps;
171 
172   PetscFunctionBeginUser;
173   PetscCall(VecCreateSeq(PETSC_COMM_SELF, NUM_TEMPS, &Temps));
174   PetscCall(VecGetArray(Temps, &x));
175   for (PetscInt i = 0; i < NUM_TEMPS; i++) x[i] = T0[i];
176   PetscCall(VecRestoreArray(Temps, &x));
177   *vec = Temps;
178   PetscFunctionReturn(PETSC_SUCCESS);
179 }
180 
/*
  createTS_NRL - create and configure the explicit RK solver for the NRL relaxation ODEs
  (FormFunction) and stash it in ctx->data so Monitor() can advance it in lock-step with
  the Landau solve. Configured to take one step per TSSolve() call, with adaptivity off.
  The TS owns no reference cleanup here; the caller destroys it (see main()).
*/
PetscErrorCode createTS_NRL(LandauCtx *ctx, Vec Temps)
{
  TSAdapt adapt;
  TS      ts;

  PetscFunctionBeginUser;
  PetscCall(TSCreate(PETSC_COMM_SELF, &ts));
  ctx->data = (void *)ts; // 'data' is for applications (eg, monitors)
  PetscCall(TSSetApplicationContext(ts, ctx));
  PetscCall(TSSetType(ts, TSRK)); /* type must be set before TSRKSetType below */
  PetscCall(TSSetRHSFunction(ts, NULL, FormFunction, ctx));
  PetscCall(TSSetSolution(ts, Temps));
  PetscCall(TSRKSetType(ts, TSRK2A));
  PetscCall(TSSetOptionsPrefix(ts, "nrl_")); /* tunable with -nrl_ts_* options */
  PetscCall(TSSetFromOptions(ts));
  PetscCall(TSGetAdapt(ts, &adapt));
  PetscCall(TSAdaptSetType(adapt, TSADAPTNONE)); /* fixed step: Monitor() supplies dt each step */
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP));
  PetscCall(TSSetStepNumber(ts, 0));
  PetscCall(TSSetMaxSteps(ts, 1)); /* advanced one step at a time from Monitor() */
  PetscCall(TSSetTime(ts, 0));

  PetscFunctionReturn(PETSC_SUCCESS);
}
205 
206 PetscErrorCode Monitor_nrl(TS ts, PetscInt stepi, PetscReal time, Vec X, void *actx)
207 {
208   const PetscScalar *x;
209   LandauCtx         *ctx = (LandauCtx *)actx; /* user-defined application context */
210   PetscInt           period, logT;
211   PetscReal          dt;
212 
213   PetscFunctionBeginUser;
214   PetscCall(TSGetTimeStep(ts, &dt));
215   logT = (PetscInt)PetscLog2Real(time / dt);
216   if (logT < 0) logT = 0;
217   period = PetscPowInt(2, logT) / 2;
218   if (period == 0) period = 1;
219   if (stepi % period == 0) {
220     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "nrl-step %d time= %g ", (int)stepi, (double)(time / ctx->t_0)));
221     PetscCall(VecGetArrayRead(X, &x));
222     for (PetscInt i = 0; i < NUM_TEMPS; i++) { PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%g ", (double)PetscRealPart(x[i]))); }
223     PetscCall(VecRestoreArrayRead(X, &x));
224     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n"));
225   }
226   PetscFunctionReturn(PETSC_SUCCESS);
227 }
228 
/*
  Monitor - TS monitor for the Landau solve: at (logarithmically thinned) steps, computes
  and prints per-grid density-weighted temperatures (total, parallel, perpendicular) via
  FEM integrals, optionally views the per-grid solutions, then advances the companion NRL
  relaxation TS (ctx->data) by one step using this solver's next dt so the two stay in
  lock-step. Also captures n_cm3[] on first print for use by FormFunction().
*/
PetscErrorCode Monitor(TS ts, PetscInt stepi, PetscReal time, Vec X, void *actx)
{
  LandauCtx *ctx      = (LandauCtx *)actx; /* user-defined application context */
  TS         ts_nrl   = (TS)ctx->data;     /* companion NRL ODE solver; may be NULL */
  PetscInt   printing = 0, logT;

  PetscFunctionBeginUser;
  if (ctx->verbose > 0) { // hacks to generate sparse data (eg, use '-dm_landau_verbose 1' and '-dm_landau_verbose -1' to get all steps printed)
    PetscReal dt;
    PetscCall(TSGetTimeStep(ts, &dt));
    logT = (PetscInt)PetscLog2Real(time / dt);
    if (logT < 0) logT = 0;
    /* print period doubles as time grows; ctx->verbose is reused as the period */
    ctx->verbose = PetscPowInt(2, logT) / 2;
    if (ctx->verbose == 0) ctx->verbose = 1;
  }
  if (ctx->verbose) {
    TSConvergedReason reason;
    PetscCall(TSGetConvergedReason(ts, &reason));
    if (stepi % ctx->verbose == 0 || reason || stepi == 1 || ctx->verbose < 0) {
      PetscInt nDMs, id;
      DM       pack;
      Vec     *XsubArray = NULL;
      printing           = 1;
      PetscCall(TSGetDM(ts, &pack));
      PetscCall(DMCompositeGetNumberDM(pack, &nDMs));
      /* bump the output sequence number on both grids so plots line up with 'time' */
      PetscCall(DMGetOutputSequenceNumber(ctx->plex[0], &id, NULL));
      PetscCall(DMSetOutputSequenceNumber(ctx->plex[0], id + 1, time));
      PetscCall(DMSetOutputSequenceNumber(ctx->plex[1], id + 1, time));
      PetscCall(PetscInfo(pack, "ex1 plot step %" PetscInt_FMT ", time = %g\n", id, (double)time));
      PetscCall(PetscMalloc(sizeof(*XsubArray) * nDMs, &XsubArray));
      PetscCall(DMCompositeGetAccessArray(pack, X, nDMs, NULL, XsubArray)); // read only
      PetscCall(VecViewFromOptions(XsubArray[LAND_PACK_IDX(ctx->batch_view_idx, 0)], NULL, "-ex1_vec_view_e"));
      PetscCall(VecViewFromOptions(XsubArray[LAND_PACK_IDX(ctx->batch_view_idx, 1)], NULL, "-ex1_vec_view_i"));
      // temps
      for (PetscInt grid = 0; grid < ctx->num_grids; grid++) {
        PetscDS     prob;
        DM          dm      = ctx->plex[grid];
        PetscScalar user[2] = {0, 0}, tt[1];
        PetscReal   vz_0 = 0, n, energy, e_perp, e_par, m_s = ctx->masses[ctx->species_offset[grid]];
        Vec         Xloc = XsubArray[LAND_PACK_IDX(ctx->batch_view_idx, grid)];
        PetscCall(DMGetDS(dm, &prob));
        /* get n */
        PetscCall(PetscDSSetObjective(prob, 0, &f0_n));
        PetscCall(DMPlexComputeIntegralFEM(dm, Xloc, tt, NULL));
        n = PetscRealPart(tt[0]);
        /* get vz */
        PetscCall(PetscDSSetObjective(prob, 0, &f0_vz));
        PetscCall(DMPlexComputeIntegralFEM(dm, Xloc, tt, NULL));
        /* drift velocity vz_0 = <v_||>/n; passed to the shifted objectives via constants[0] */
        user[0] = vz_0 = PetscRealPart(tt[0]) / n;
        /* energy temp */
        PetscCall(PetscDSSetConstants(prob, 2, user));
        PetscCall(PetscDSSetObjective(prob, 0, &f0_v2_shift));
        PetscCall(DMPlexComputeIntegralFEM(dm, Xloc, tt, ctx));
        energy = PetscRealPart(tt[0]) * ctx->v_0 * ctx->v_0 * m_s / n / 3; // scale?
        energy *= kev_joul * 1000;                                         // T eV
        /* energy temp - par */
        PetscCall(PetscDSSetConstants(prob, 2, user));
        PetscCall(PetscDSSetObjective(prob, 0, &f0_v2_par_shift));
        PetscCall(DMPlexComputeIntegralFEM(dm, Xloc, tt, ctx));
        e_par = PetscRealPart(tt[0]) * ctx->v_0 * ctx->v_0 * m_s / n;
        e_par *= kev_joul * 1000; // eV
        /* energy temp - perp */
        PetscCall(PetscDSSetConstants(prob, 2, user));
        PetscCall(PetscDSSetObjective(prob, 0, &f0_v2_perp));
        PetscCall(DMPlexComputeIntegralFEM(dm, Xloc, tt, ctx));
        e_perp = PetscRealPart(tt[0]) * ctx->v_0 * ctx->v_0 * m_s / n / 2;
        e_perp *= kev_joul * 1000; // eV
        if (grid == 0) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "step %4d) time= %e temperature (eV): ", (int)stepi, (double)time));
        PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%s T= %9.4e T_par= %9.4e T_perp= %9.4e ", (grid == 0) ? "electron:" : ";ion:", (double)energy, (double)e_par, (double)e_perp));
        if (n_cm3[grid] == 0) n_cm3[grid] = ctx->n_0 * n * 1e-6; // does not change m^3 --> cm^3
      }
      // cleanup
      PetscCall(DMCompositeRestoreAccessArray(pack, X, nDMs, NULL, XsubArray));
      PetscCall(PetscFree(XsubArray));
    }
  }
  /* evolve NRL data, end line */
  /* NOTE(review): n_cm3[] is only ever assigned ctx->n_0 * n * 1e-6 above, so this "< 0"
     shutdown path looks unreachable from this file — confirm intent */
  if (n_cm3[NUM_TEMPS / 2 - 1] < 0 && ts_nrl) {
    PetscCall(TSDestroy(&ts_nrl));
    ctx->data = NULL;
    if (printing) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\nSTOP printing NRL Ts\n"));
  } else if (ts_nrl) {
    const PetscScalar *x;
    PetscReal          dt_real, dt;
    Vec                U;
    PetscCall(TSGetTimeStep(ts, &dt)); // dt for NEXT time step
    dt_real = dt * ctx->t_0;           /* convert to physical (dimensional) time */
    PetscCall(TSGetSolution(ts_nrl, &U));
    if (printing) {
      PetscCall(VecGetArrayRead(U, &x));
      PetscCall(PetscPrintf(PETSC_COMM_WORLD, "NRL_i_par= %9.4e NRL_i_perp= %9.4e ", (double)PetscRealPart(x[I_PAR_IDX]), (double)PetscRealPart(x[I_PERP_IDX])));
      if (n_cm3[0] > 0) {
        PetscCall(PetscPrintf(PETSC_COMM_WORLD, "NRL_e_par= %9.4e NRL_e_perp= %9.4e\n", (double)PetscRealPart(x[E_PAR_IDX]), (double)PetscRealPart(x[E_PERP_IDX])));
      } else PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n"));
      PetscCall(VecRestoreArrayRead(U, &x));
    }
    // we have the next time step, so need to advance now
    PetscCall(TSSetTimeStep(ts_nrl, dt_real));
    PetscCall(TSSetMaxSteps(ts_nrl, stepi + 1)); // next step
    PetscCall(TSSolve(ts_nrl, NULL));
  } else if (printing) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "\n"));
  if (printing) { PetscCall(DMPlexLandauPrintNorms(X, stepi /*id + 1*/)); }

  PetscFunctionReturn(PETSC_SUCCESS);
}
334 
/*
  main - anisotropic thermalization test driver: builds a two-grid (electron + ion)
  Landau velocity space, initializes shifted bimaxwellians, evolves with the implicit
  Landau operator (monitored against the NRL relaxation ODEs), then optionally re-runs
  the NRL ODEs standalone over the same time window for comparison.
*/
int main(int argc, char **argv)
{
  DM          pack;
  Vec         X;
  PetscInt    dim = 2, nDMs;
  TS          ts, ts_nrl = NULL;
  Mat         J;
  Vec        *XsubArray = NULL;
  LandauCtx  *ctx;
  PetscMPIInt rank;
  PetscBool   use_nrl   = PETSC_FALSE;
  PetscBool   print_nrl = PETSC_TRUE;
  PetscReal   dt0;
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  if (rank) { /* turn off output stuff for duplicate runs */
    PetscCall(PetscOptionsClearValue(NULL, "-ex1_dm_view_e"));
    PetscCall(PetscOptionsClearValue(NULL, "-ex1_dm_view_i"));
    PetscCall(PetscOptionsClearValue(NULL, "-ex1_vec_view_e"));
    PetscCall(PetscOptionsClearValue(NULL, "-ex1_vec_view_i"));
    PetscCall(PetscOptionsClearValue(NULL, "-info"));
    PetscCall(PetscOptionsClearValue(NULL, "-snes_converged_reason"));
    PetscCall(PetscOptionsClearValue(NULL, "-pc_bjkokkos_ksp_converged_reason"));
    PetscCall(PetscOptionsClearValue(NULL, "-ksp_converged_reason"));
    PetscCall(PetscOptionsClearValue(NULL, "-ts_adapt_monitor"));
    PetscCall(PetscOptionsClearValue(NULL, "-ts_monitor"));
    PetscCall(PetscOptionsClearValue(NULL, "-snes_monitor"));
  }
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dim", &dim, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-use_nrl", &use_nrl, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-print_nrl", &print_nrl, NULL));
  /* Create a mesh */
  PetscCall(DMPlexLandauCreateVelocitySpace(PETSC_COMM_SELF, dim, "", &X, &J, &pack));
  PetscCall(DMSetUp(pack));
  PetscCall(DMGetApplicationContext(pack, &ctx));
  PetscCheck(ctx->num_grids == 2, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Must have two grids: use '-dm_landau_num_species_grid 1,1'");
  PetscCheck(ctx->num_species == 2, PETSC_COMM_WORLD, PETSC_ERR_ARG_WRONG, "Must have two species: use '-dm_landau_num_species_grid 1,1'");
  PetscCall(DMCompositeGetNumberDM(pack, &nDMs));
  /* output plot names */
  PetscCall(PetscMalloc(sizeof(*XsubArray) * nDMs, &XsubArray));
  PetscCall(DMCompositeGetAccessArray(pack, X, nDMs, NULL, XsubArray)); // read only
  PetscCall(PetscObjectSetName((PetscObject)XsubArray[LAND_PACK_IDX(ctx->batch_view_idx, 0)], 0 == 0 ? "ue" : "ui"));
  PetscCall(PetscObjectSetName((PetscObject)XsubArray[LAND_PACK_IDX(ctx->batch_view_idx, 1)], 1 == 0 ? "ue" : "ui"));
  /* add bimaxwellian anisotropic test */
  for (PetscInt b_id = 0; b_id < ctx->batch_sz; b_id++) {
    for (PetscInt grid = 0; grid < ctx->num_grids; grid++) {
      PetscReal shifts[2]; /* scratch; only written inside SetMaxwellians' disabled branch */
      PetscCall(SetMaxwellians(ctx->plex[grid], XsubArray[LAND_PACK_IDX(b_id, grid)], 0.0, ctx->thermal_temps, ctx->n, grid, shifts, ctx));
    }
  }
  PetscCall(DMCompositeRestoreAccessArray(pack, X, nDMs, NULL, XsubArray));
  PetscCall(PetscFree(XsubArray));
  /* plot */
  PetscCall(DMSetOutputSequenceNumber(ctx->plex[0], -1, 0.0));
  PetscCall(DMSetOutputSequenceNumber(ctx->plex[1], -1, 0.0));
  PetscCall(DMViewFromOptions(ctx->plex[0], NULL, "-ex1_dm_view_e"));
  PetscCall(DMViewFromOptions(ctx->plex[1], NULL, "-ex1_dm_view_i"));
  /* Create timestepping solver context */
  PetscCall(TSCreate(PETSC_COMM_SELF, &ts));
  PetscCall(TSSetDM(ts, pack));
  PetscCall(TSSetIFunction(ts, NULL, DMPlexLandauIFunction, NULL));
  PetscCall(TSSetIJacobian(ts, J, J, DMPlexLandauIJacobian, NULL));
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
  PetscCall(TSSetFromOptions(ts));
  PetscCall(TSSetSolution(ts, X));
  PetscCall(TSMonitorSet(ts, Monitor, ctx, NULL));
  /* Create NRL timestepping: solver stored in ctx->data, advanced from Monitor() */
  if (use_nrl || print_nrl) {
    Vec NRL_vec;
    PetscCall(createVec_NRL(ctx, &NRL_vec));
    PetscCall(createTS_NRL(ctx, NRL_vec));
    PetscCall(VecDestroy(&NRL_vec)); /* TS holds its own reference via TSSetSolution */
  } else ctx->data = NULL;
  /* solve */
  PetscCall(TSGetTimeStep(ts, &dt0));
  PetscCall(TSSetTime(ts, dt0 / 2)); /* start at half a step so t/dt thinning behaves */
  PetscCall(TSSolve(ts, X));
  /* test add field method & output */
  PetscCall(DMPlexLandauAccess(pack, X, landau_field_print_access_callback, NULL));
  //PetscCall(Monitor(ts, -1, 1.0, X, ctx));
  /* clean up */
  ts_nrl = (TS)ctx->data;
  if (print_nrl) {
    PetscReal    finalTime, dt_real, tstart = dt0 * ctx->t_0 / 2; // hack
    Vec          U;
    PetscScalar *x;
    PetscInt     nsteps;
    /* NOTE(review): if Monitor() destroyed the NRL TS (ctx->data == NULL), ts_nrl is NULL
       here and TSSetTimeStep below would error — confirm that path cannot trigger when
       print_nrl is true */
    dt_real = dt0 * ctx->t_0;
    PetscCall(TSSetTimeStep(ts_nrl, dt_real));
    PetscCall(TSGetTime(ts, &finalTime));
    finalTime *= ctx->t_0; /* convert to physical time */
    PetscCall(TSSetMaxTime(ts_nrl, finalTime));
    nsteps = (PetscInt)(finalTime / dt_real) + 1;
    PetscCall(TSSetMaxSteps(ts_nrl, nsteps));
    PetscCall(TSSetStepNumber(ts_nrl, 0));
    PetscCall(TSSetTime(ts_nrl, tstart));
    PetscCall(TSGetSolution(ts_nrl, &U));
    /* reset the NRL state back to the initial temperatures for the standalone run */
    PetscCall(VecGetArray(U, &x));
    for (PetscInt i = 0; i < NUM_TEMPS; i++) x[i] = T0[i];
    PetscCall(VecRestoreArray(U, &x));
    PetscCall(TSMonitorSet(ts_nrl, Monitor_nrl, ctx, NULL));
    PetscCall(TSSolve(ts_nrl, NULL));
  }
  PetscCall(TSDestroy(&ts));
  PetscCall(TSDestroy(&ts_nrl));
  PetscCall(VecDestroy(&X));
  PetscCall(DMPlexLandauDestroyVelocitySpace(&pack));
  PetscCall(PetscFinalize());
  return 0;
}
446 
447 /*TEST
448   testset:
449     requires: p4est !complex double defined(PETSC_USE_DMLANDAU_2D)
450     output_file: output/ex1_0.out
451     filter: grep -v "DM"
452     args: -dm_landau_amr_levels_max 0,2 -dm_landau_amr_post_refine 0 -dm_landau_amr_re_levels 2 -dm_landau_domain_radius 6,6 -dm_landau_electron_shift 1.5 -dm_landau_ion_charges 1 -dm_landau_ion_masses 2 -dm_landau_n 1,1 -dm_landau_n_0 1e20 -dm_landau_num_cells 2,4 -dm_landau_num_species_grid 1,1 -dm_landau_re_radius 2 -use_nrl true -print_nrl false -dm_landau_thermal_temps .3,.2 -dm_landau_type p4est -dm_landau_verbose -1 -dm_preallocate_only false -ex1_dm_view_e -ksp_type preonly -pc_type lu -petscspace_degree 3 -snes_converged_reason -snes_rtol 1.e-14 -snes_stol 1.e-14 -ts_adapt_clip .5,1.5 -ts_adapt_dt_max 5 -ts_adapt_monitor -ts_adapt_scale_solve_failed 0.5 -ts_arkimex_type 1bee -ts_dt .01 -ts_max_snes_failures -1 -ts_max_steps 1 -ts_max_time 8 -ts_monitor -ts_rtol 1e-2 -ts_type arkimex
453     test:
454       suffix: cpu
455       args: -dm_landau_device_type cpu -dm_landau_use_relativistic_corrections
456     test:
457       suffix: kokkos
458       requires: kokkos_kernels !defined(PETSC_HAVE_CUDA_CLANG)
459       args: -dm_landau_device_type kokkos -dm_mat_type aijkokkos -dm_vec_type kokkos
460     test:
461       suffix: cuda
462       requires: cuda !defined(PETSC_HAVE_CUDA_CLANG)
463       args: -dm_landau_device_type cuda -dm_mat_type aijcusparse -dm_vec_type cuda -mat_cusparse_use_cpu_solve
464 
465   testset:
466     requires: !complex defined(PETSC_USE_DMLANDAU_2D) !cuda p4est
467     args: -dm_landau_type p4est -dm_landau_num_cells 4,4 -dm_landau_amr_levels_max 3,3 -dm_landau_num_species_grid 1,1 -dm_landau_n 1,1 -dm_landau_thermal_temps 1,1 -dm_landau_ion_charges 1 -dm_landau_ion_masses 2 -petscspace_degree 2 -ts_type beuler -ts_dt .1 -ts_max_steps 0 -dm_landau_verbose 2 -ksp_type preonly -pc_type lu -dm_landau_device_type cpu -use_nrl false -print_nrl -snes_rtol 1.e-14 -snes_stol 1.e-14
468     nsize: 1
469     test:
470       suffix: sphere
471       args: -dm_landau_sphere -ts_max_steps 0 -dm_landau_amr_post_refine 1
472     test:
473       suffix: re
474       args: -dm_landau_amr_levels_max 0,2 -dm_landau_z_radius_pre 2.5 -dm_landau_z_radius_post 3.75 -dm_landau_amr_z_refine_pre 1 -dm_landau_amr_z_refine_post 1 -dm_landau_electron_shift 1.25 -ts_max_steps 1 -snes_converged_reason -info :vec
475 
476 TEST*/
477