xref: /petsc/src/dm/tests/ex2k.kokkos.cxx (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
1 static char help[] = "Benchmarking various accessing methods of DMDA vectors on host\n\n";
2 
3 /*
4   On a machine with AMD EPYC-7452 CPUs, we got this data using one MPI rank and a serial-only Kokkos:
5            Time (sec.), on Mar. 1, 2022
6   ------------------------------------------
7   n     PETSc          C          Kokkos
8   ------------------------------------------
9   32    4.6464E-05  4.7451E-05  1.6880E-04
10   64    2.5654E-04  2.5164E-04  5.6780E-04
11   128   1.9383E-03  1.8878E-03  4.7938E-03
12   256   1.4450E-02  1.3619E-02  3.7778E-02
13   512   1.1580E-01  1.1551E-01  2.8428E-01
14   1024  1.4179E+00  1.3772E+00  2.2873E+00
15 
16   Overall, C ranges from about 2% slower to 5% faster than PETSc, while Kokkos is 1.6x-3.6x slower than PETSc.
17 */
18 
19 #include <petscdmda_kokkos.hpp>
20 #include <petscdm.h>
21 #include <petscdmda.h>
22 
23 using namespace Kokkos;
24 using PetscScalarKokkosOffsetView3D      = Kokkos::Experimental::OffsetView<PetscScalar ***, Kokkos::LayoutRight, Kokkos::HostSpace>;
25 using ConstPetscScalarKokkosOffsetView3D = Kokkos::Experimental::OffsetView<const PetscScalar ***, Kokkos::LayoutRight, Kokkos::HostSpace>;
26 
27 /* PETSc multi-dimensional array access */
28 static PetscErrorCode Update1(DM da, const PetscScalar ***__restrict__ x1, PetscScalar ***__restrict__ y1, PetscInt nwarm, PetscInt nloop, PetscLogDouble *avgTime) {
29   PetscInt       it, i, j, k;
30   PetscLogDouble tstart = 0.0, tend;
31   PetscInt       xm, ym, zm, xs, ys, zs, gxm, gym, gzm, gxs, gys, gzs;
32 
33   PetscFunctionBegin;
34   PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
35   PetscCall(DMDAGetGhostCorners(da, &gxs, &gys, &gzs, &gxm, &gym, &gzm));
36   for (it = 0; it < nwarm + nloop; it++) {
37     if (it == nwarm) PetscCall(PetscTime(&tstart));
38     for (k = zs; k < zs + zm; k++) {
39       for (j = ys; j < ys + ym; j++) {
40         for (i = xs; i < xs + xm; i++) { y1[k][j][i] = 6 * x1[k][j][i] - x1[k - 1][j][i] - x1[k][j - 1][i] - x1[k][j][i - 1] - x1[k + 1][j][i] - x1[k][j + 1][i] - x1[k][j][i + 1]; }
41       }
42     }
43   }
44   PetscCall(PetscTime(&tend));
45   *avgTime = (tend - tstart) / nloop;
46   PetscFunctionReturn(0);
47 }
48 
49 /* C multi-dimensional array access */
50 static PetscErrorCode Update2(DM da, const PetscScalar *__restrict__ x2, PetscScalar *__restrict__ y2, PetscInt nwarm, PetscInt nloop, PetscLogDouble *avgTime) {
51   PetscInt       it, i, j, k;
52   PetscLogDouble tstart = 0.0, tend;
53   PetscInt       xm, ym, zm, xs, ys, zs, gxm, gym, gzm, gxs, gys, gzs;
54 
55   PetscFunctionBegin;
56   PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
57   PetscCall(DMDAGetGhostCorners(da, &gxs, &gys, &gzs, &gxm, &gym, &gzm));
58 #define X2(k, j, i) x2[(k - gzs) * gym * gxm + (j - gys) * gxm + (i - gxs)]
59 #define Y2(k, j, i) y2[(k - zs) * ym * xm + (j - ys) * xm + (i - xs)]
60   for (it = 0; it < nwarm + nloop; it++) {
61     if (it == nwarm) PetscCall(PetscTime(&tstart));
62     for (k = zs; k < zs + zm; k++) {
63       for (j = ys; j < ys + ym; j++) {
64         for (i = xs; i < xs + xm; i++) { Y2(k, j, i) = 6 * X2(k, j, i) - X2(k - 1, j, i) - X2(k, j - 1, i) - X2(k, j, i - 1) - X2(k + 1, j, i) - X2(k, j + 1, i) - X2(k, j, i + 1); }
65       }
66     }
67   }
68   PetscCall(PetscTime(&tend));
69   *avgTime = (tend - tstart) / nloop;
70 #undef X2
71 #undef Y2
72   PetscFunctionReturn(0);
73 }
74 
75 int main(int argc, char **argv) {
76   DM                                 da;
77   PetscInt                           xm, ym, zm, xs, ys, zs, gxm, gym, gzm, gxs, gys, gzs;
78   PetscInt                           dof = 1, sw = 1;
79   DMBoundaryType                     bx = DM_BOUNDARY_PERIODIC, by = DM_BOUNDARY_PERIODIC, bz = DM_BOUNDARY_PERIODIC;
80   DMDAStencilType                    st = DMDA_STENCIL_STAR;
81   Vec                                x, y; /* local/global vectors of the da */
82   PetscRandom                        rctx;
83   const PetscScalar               ***x1;
84   PetscScalar                     ***y1; /* arrays of g, ll */
85   const PetscScalar                 *x2;
86   PetscScalar                       *y2;
87   ConstPetscScalarKokkosOffsetView3D x3;
88   PetscScalarKokkosOffsetView3D      y3;
89   PetscLogDouble                     tstart = 0.0, tend = 0.0, avgTime = 0.0;
90   PetscInt                           nwarm = 2, nloop = 10;
91   PetscInt                           min = 32, max = 32 * 8; /* min and max sizes of the grids to sample */
92 
93   PetscFunctionBeginUser;
94   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
95   PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
96   PetscCall(PetscOptionsGetInt(NULL, NULL, "-min", &min, NULL));
97   PetscCall(PetscOptionsGetInt(NULL, NULL, "-max", &max, NULL));
98 
99   for (PetscInt len = min; len <= max; len = len * 2) {
100     PetscCall(DMDACreate3d(PETSC_COMM_WORLD, bx, by, bz, st, len, len, len, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, dof, sw, 0, 0, 0, &da));
101     PetscCall(DMSetFromOptions(da));
102     PetscCall(DMSetUp(da));
103 
104     PetscCall(DMDAGetCorners(da, &xs, &ys, &zs, &xm, &ym, &zm));
105     PetscCall(DMDAGetGhostCorners(da, &gxs, &gys, &gzs, &gxm, &gym, &gzm));
106     PetscCall(DMCreateLocalVector(da, &x)); /* Create local x and global y */
107     PetscCall(DMCreateGlobalVector(da, &y));
108 
109     /* Access with petsc multi-dimensional arrays */
110     PetscCall(VecSetRandom(x, rctx));
111     PetscCall(VecSet(y, 0.0));
112     PetscCall(DMDAVecGetArrayRead(da, x, &x1));
113     PetscCall(DMDAVecGetArray(da, y, &y1));
114     PetscCall(Update1(da, x1, y1, nwarm, nloop, &avgTime));
115     PetscCall(DMDAVecRestoreArrayRead(da, x, &x1));
116     PetscCall(DMDAVecRestoreArray(da, y, &y1));
117     PetscCall(PetscTime(&tend));
118     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%4d^3 -- PETSc average time  = %e\n", len, avgTime));
119 
120     /* Access with C multi-dimensional arrays */
121     PetscCall(VecSetRandom(x, rctx));
122     PetscCall(VecSet(y, 0.0));
123     PetscCall(VecGetArrayRead(x, &x2));
124     PetscCall(VecGetArray(y, &y2));
125     PetscCall(Update2(da, x2, y2, nwarm, nloop, &avgTime));
126     PetscCall(VecRestoreArrayRead(x, &x2));
127     PetscCall(VecRestoreArray(y, &y2));
128     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%4d^3 -- C average time      = %e\n", len, avgTime));
129 
130     /* Access with Kokkos multi-dimensional OffsetViews */
131     PetscCall(VecSet(y, 0.0));
132     PetscCall(VecSetRandom(x, rctx));
133     PetscCall(DMDAVecGetKokkosOffsetView(da, x, &x3));
134     PetscCall(DMDAVecGetKokkosOffsetView(da, y, &y3));
135 
136     for (PetscInt it = 0; it < nwarm + nloop; it++) {
137       if (it == nwarm) PetscCall(PetscTime(&tstart));
138       Kokkos::parallel_for(
139         "stencil", MDRangePolicy<Kokkos::DefaultHostExecutionSpace, Rank<3, Iterate::Right, Iterate::Right>>({zs, ys, xs}, {zs + zm, ys + ym, xs + xm}),
140         KOKKOS_LAMBDA(PetscInt k, PetscInt j, PetscInt i) { y3(k, j, i) = 6 * x3(k, j, i) - x3(k - 1, j, i) - x3(k, j - 1, i) - x3(k, j, i - 1) - x3(k + 1, j, i) - x3(k, j + 1, i) - x3(k, j, i + 1); });
141     }
142     PetscCall(PetscTime(&tend));
143     PetscCall(DMDAVecRestoreKokkosOffsetView(da, x, &x3));
144     PetscCall(DMDAVecRestoreKokkosOffsetView(da, y, &y3));
145     avgTime = (tend - tstart) / nloop;
146     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%4d^3 -- Kokkos average time = %e\n", len, avgTime));
147 
148     PetscCall(VecDestroy(&x));
149     PetscCall(VecDestroy(&y));
150     PetscCall(DMDestroy(&da));
151   }
152   PetscCall(PetscRandomDestroy(&rctx));
153   PetscCall(PetscFinalize());
154   return 0;
155 }
156 
157 /*TEST
158   build:
159     requires: kokkos_kernels
160 
161   test:
162     suffix: 1
163     requires: kokkos_kernels
164     args: -min 32 -max 32 -dm_vec_type kokkos
165     filter: grep -v "time"
166 
167 TEST*/
168