static const char help[] = "Test PetscSF with MPI large count (more than 2 billion elements in messages)\n\n";

#include <petscsys.h>
#include <petscsf.h>
#include <petscvec.h>
int main(int argc,char **argv)
{
  PetscErrorCode    ierr;
  PetscSF           sf;
  PetscInt          i,nroots,nleaves;
  PetscInt          n = (1ULL<<31) + 1024; /* just over 2^31 elements, beyond the reach of a 32-bit int count */
  PetscSFNode       *iremote = NULL;
  PetscMPIInt       rank,size;
  char              *rootdata=NULL,*leafdata=NULL;
  Vec               x,y;
  VecScatter        vscat;
  PetscInt          rstart,rend;
  IS                ix;
  const PetscScalar *xv;

  ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
  CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCheckFalse(size != 2,PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"The test can only run with two MPI ranks");

  /* Test PetscSF */
  CHKERRQ(PetscSFCreate(PETSC_COMM_WORLD,&sf));
  CHKERRQ(PetscSFSetFromOptions(sf));

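  /* Build the star forest: rank 0 owns all n roots and has no leaves, while
     rank 1 has n leaves, the i-th pointing at root i on rank 0. Each Bcast or
     Reduce below is therefore a single rank 0 <-> rank 1 message of just over
     2^31 elements, which is what exercises the MPI large-count support. */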
  if (!rank) {
    nroots  = n;
    nleaves = 0;
  } else {
    nroots  = 0;
    nleaves = n;
    CHKERRQ(PetscMalloc1(nleaves,&iremote));
    for (i=0; i<nleaves; i++) {
      iremote[i].rank  = 0;
      iremote[i].index = i;
    }
  }
  CHKERRQ(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES));
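  /* With PETSC_COPY_VALUES the SF keeps its own copy of iremote, so the
     caller's array can be freed independently of the SF (done further below). */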
  CHKERRQ(PetscMalloc2(nroots,&rootdata,nleaves,&leafdata));
  if (!rank) {
    memset(rootdata,11,nroots);
    rootdata[nroots-1] = 12; /* Use a different value at the end */
  }

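  /* A one-byte MPI datatype keeps each buffer near 2 GiB; what matters here
     is the element count, which already exceeds the 32-bit int counts of the
     classic MPI interface. */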
  CHKERRQ(PetscSFBcastBegin(sf,MPI_SIGNED_CHAR,rootdata,leafdata,MPI_REPLACE)); /* rank 0->1, bcast rootdata to leafdata */
  CHKERRQ(PetscSFBcastEnd(sf,MPI_SIGNED_CHAR,rootdata,leafdata,MPI_REPLACE));
  CHKERRQ(PetscSFReduceBegin(sf,MPI_SIGNED_CHAR,leafdata,rootdata,MPI_SUM)); /* rank 1->0, add leafdata to rootdata */
  CHKERRQ(PetscSFReduceEnd(sf,MPI_SIGNED_CHAR,leafdata,rootdata,MPI_SUM));
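  /* Each root value made a round trip: the bcast copied it to the leaf and the
     reduce added the leaf copy back, doubling it (11 -> 22, and 12 -> 24 at the end). */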
  if (!rank) {
    PetscCheckFalse(rootdata[0] != 22 || rootdata[nroots-1] != 24,PETSC_COMM_SELF,PETSC_ERR_PLIB,"SF: wrong results");
  }

  CHKERRQ(PetscFree2(rootdata,leafdata));
  CHKERRQ(PetscFree(iremote));
  CHKERRQ(PetscSFDestroy(&sf));

  /* Test VecScatter */
  CHKERRQ(VecCreate(PETSC_COMM_WORLD,&x));
  CHKERRQ(VecCreate(PETSC_COMM_WORLD,&y));
  CHKERRQ(VecSetSizes(x,rank==0? n : 64,PETSC_DECIDE));
  CHKERRQ(VecSetSizes(y,rank==0? 64 : n,PETSC_DECIDE));
  CHKERRQ(VecSetFromOptions(x));
  CHKERRQ(VecSetFromOptions(y));

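  /* x and y have the same global size n+64 but swapped local sizes. The
     identity scatter built below (the same stride IS indexes both vectors)
     therefore sends almost all of rank 0's n entries of x to rank 1's part
     of y, again moving more than 2^31 scalars in one message. */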
  CHKERRQ(VecGetOwnershipRange(x,&rstart,&rend));
  CHKERRQ(ISCreateStride(PETSC_COMM_SELF,rend-rstart,rstart,1,&ix));
  CHKERRQ(VecScatterCreate(x,ix,y,ix,&vscat));

  CHKERRQ(VecSet(x,3.0));
  CHKERRQ(VecScatterBegin(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));
  CHKERRQ(VecScatterEnd(vscat,x,y,INSERT_VALUES,SCATTER_FORWARD));

  CHKERRQ(VecScatterBegin(vscat,y,x,ADD_VALUES,SCATTER_REVERSE));
  CHKERRQ(VecScatterEnd(vscat,y,x,ADD_VALUES,SCATTER_REVERSE));

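  /* The forward INSERT filled y with 3.0 and the reverse ADD accumulated it
     back into x, so every entry of x must now be 3.0 + 3.0 = 6.0. */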
  CHKERRQ(VecGetArrayRead(x,&xv));
  PetscCheckFalse(xv[0] != 6.0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"VecScatter: wrong results");
  CHKERRQ(VecRestoreArrayRead(x,&xv));
  CHKERRQ(VecDestroy(&x));
  CHKERRQ(VecDestroy(&y));
  CHKERRQ(VecScatterDestroy(&vscat));
  CHKERRQ(ISDestroy(&ix));

  ierr = PetscFinalize();
  return ierr;
}

/*TEST
   test:
     requires: defined(PETSC_HAVE_MPI_LARGE_COUNT) defined(PETSC_USE_64BIT_INDICES)
     TODO: need a machine with big memory (~150GB) to run the test
     nsize: 2
     args: -sf_type {{basic neighbor}}

TEST*/