xref: /petsc/src/vec/is/sf/tests/ex8.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
static char help[]= "Test VecScatterCreateToZero, VecScatterCreateToAll\n\n";

#include <petscvec.h>

/*
  Builds an MPI vector x = [0, 1, ..., N-1], then exercises both
  VecScatterCreateToZero and VecScatterCreateToAll in the forward
  (broadcast) and reverse (reduce) directions, with INSERT_VALUES and
  ADD_VALUES. Results are viewed so the TEST harness below can diff
  them byte-exactly against reference output.
*/
int main(int argc,char **argv)
{
  PetscErrorCode     ierr;              /* only for Initialize/Finalize; all other calls go through CHKERRQ/CHKERRMPI */
  PetscInt           i,N=10,low,high;   /* [low,high) is this rank's ownership range of x */
  PetscMPIInt        size,rank;
  Vec                x,y;               /* x: distributed source; y: sequential target created by the scatter */
  VecScatter         vscat;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));

  /* NOTE(review): VecSetFromOptions() is called before VecSetSizes() here,
     the reverse of the usual example order -- confirm this is intentional
     (it lets -vec_type cuda/hip pick the type before the layout is fixed). */
  CHKERRQ(VecCreate(PETSC_COMM_WORLD,&x));
  CHKERRQ(VecSetFromOptions(x));
  CHKERRQ(VecSetSizes(x,PETSC_DECIDE,N));
  CHKERRQ(VecGetOwnershipRange(x,&low,&high));
  CHKERRQ(PetscObjectSetName((PetscObject)x,"x"));

  /*-------------------------------------*/
  /*       VecScatterCreateToZero        */
  /*-------------------------------------*/

  /* MPI vec x = [0, 1, 2, .., N-1]; each rank inserts only the entries it owns */
  for (i=low; i<high; i++) CHKERRQ(VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES));
  CHKERRQ(VecAssemblyBegin(x));
  CHKERRQ(VecAssemblyEnd(x));

  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nTesting VecScatterCreateToZero\n"));
  CHKERRQ(VecScatterCreateToZero(x,&vscat,&y));  /* y is a length-N vec on rank 0, length-0 elsewhere */
  CHKERRQ(PetscObjectSetName((PetscObject)y,"y"));

  /* Test PetscSFBcastAndOp with op = MPI_REPLACE, which does y = x on rank 0 */
  CHKERRQ(VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  CHKERRQ(VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  if (rank == 0) CHKERRQ(VecView(y,PETSC_VIEWER_STDOUT_SELF));  /* only rank 0 holds data in y */

  /* Test PetscSFBcastAndOp with op = MPI_SUM, which does y += x */
  CHKERRQ(VecScatterBegin(vscat,x,y,ADD_VALUES,SCATTER_FORWARD));
  CHKERRQ(VecScatterEnd(vscat,x,y,ADD_VALUES,SCATTER_FORWARD));
  if (rank == 0) CHKERRQ(VecView(y,PETSC_VIEWER_STDOUT_SELF));

  /* Test PetscSFReduce with op = MPI_REPLACE, which does x = y */
  CHKERRQ(VecScatterBegin(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE));
  CHKERRQ(VecScatterEnd(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE));
  CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));

  /* Test PetscSFReduce with op = MPI_SUM, which does x += y on x's local part on rank 0 */
  CHKERRQ(VecScatterBegin(vscat,y,x,ADD_VALUES,SCATTER_REVERSE_LOCAL));
  CHKERRQ(VecScatterEnd(vscat,y,x,ADD_VALUES,SCATTER_REVERSE_LOCAL));
  CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));

  CHKERRQ(VecDestroy(&y));
  CHKERRQ(VecScatterDestroy(&vscat));

  /*-------------------------------------*/
  /*       VecScatterCreateToAll         */
  /*-------------------------------------*/
  /* Refill x = [0, 1, .., N-1]; the reverse scatters above overwrote it */
  for (i=low; i<high; i++) CHKERRQ(VecSetValue(x,i,(PetscScalar)i,INSERT_VALUES));
  CHKERRQ(VecAssemblyBegin(x));
  CHKERRQ(VecAssemblyEnd(x));

  CHKERRQ(PetscPrintf(PETSC_COMM_WORLD,"\nTesting VecScatterCreateToAll\n"));

  CHKERRQ(VecScatterCreateToAll(x,&vscat,&y));  /* y is a length-N vec on every rank */
  CHKERRQ(PetscObjectSetName((PetscObject)y,"y"));

  /* Test PetscSFBcastAndOp with op = MPI_REPLACE, which does y = x on all ranks */
  CHKERRQ(VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  CHKERRQ(VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  if (rank == 0) CHKERRQ(VecView(y,PETSC_VIEWER_STDOUT_SELF));  /* all copies are identical; view one */

  /* Test PetscSFBcastAndOp with op = MPI_SUM, which does y += x */
  CHKERRQ(VecScatterBegin(vscat,x,y,ADD_VALUES,SCATTER_FORWARD));
  CHKERRQ(VecScatterEnd(vscat,x,y,ADD_VALUES,SCATTER_FORWARD));
  if (rank == 0) CHKERRQ(VecView(y,PETSC_VIEWER_STDOUT_SELF));

  /* Test PetscSFReduce with op = MPI_REPLACE, which does x = y */
  CHKERRQ(VecScatterBegin(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE));
  CHKERRQ(VecScatterEnd(vscat,y,x,INSERT_VALUES,SCATTER_REVERSE));
  CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));

  /* Test PetscSFReduce with op = MPI_SUM, which does x += size*y
     (every rank holds a full copy of y, and ADD_VALUES sums all of them into x) */
  CHKERRQ(VecScatterBegin(vscat,y,x,ADD_VALUES,SCATTER_REVERSE));
  CHKERRQ(VecScatterEnd(vscat,y,x,ADD_VALUES,SCATTER_REVERSE));
  CHKERRQ(VecView(x,PETSC_VIEWER_STDOUT_WORLD));
  CHKERRQ(VecDestroy(&x));
  CHKERRQ(VecDestroy(&y));
  CHKERRQ(VecScatterDestroy(&vscat));

  ierr = PetscFinalize();
  return ierr;
}
96 
97 /*TEST
98 
99    testset:
100       # N=10 is divisible by nsize, to trigger Allgather/Gather in SF
101       nsize: 2
102       # Exact numbers really matter here
103       diff_args: -j
104       filter: grep -v "type"
105       output_file: output/ex8_1.out
106 
107       test:
108         suffix: 1_standard
109 
110       test:
111         suffix: 1_cuda
112         # sf_backend cuda is not needed if compiling only with cuda
113         args: -vec_type cuda -sf_backend cuda
114         requires: cuda
115 
116       test:
117         suffix: 1_hip
118         args: -vec_type hip -sf_backend hip
119         requires: hip
120 
121       test:
122         suffix: 1_cuda_aware_mpi
123         # sf_backend cuda is not needed if compiling only with cuda
124         args: -vec_type cuda -sf_backend cuda
125         requires: cuda defined(PETSC_HAVE_MPI_GPU_AWARE)
126 
127    testset:
128       # N=10 is not divisible by nsize, to trigger Allgatherv/Gatherv in SF
129       nsize: 3
130       # Exact numbers really matter here
131       diff_args: -j
132       filter: grep -v "type"
133       output_file: output/ex8_2.out
134 
135       test:
136         suffix: 2_standard
137 
138       test:
139         suffix: 2_cuda
140         # sf_backend cuda is not needed if compiling only with cuda
141         args: -vec_type cuda -sf_backend cuda
142         requires: cuda
143 
144       test:
145         suffix: 2_hip
146         # sf_backend hip is not needed if compiling only with hip
147         args: -vec_type hip -sf_backend hip
148         requires: hip
149 
150       test:
151         suffix: 2_cuda_aware_mpi
152         args: -vec_type cuda
153         requires: cuda defined(PETSC_HAVE_MPI_GPU_AWARE)
154 
155 TEST*/
156