xref: /petsc/src/vec/vec/tutorials/ex9.c (revision 2dfe29262b4468d5fbb057f165946753e529c684)
1 
/* Usage/help text printed when the example is run with -help. */
static char help[] = "Demonstrates use of VecCreateGhost().\n\n";
3 
/*T
   Concepts: vectors^assembling vectors;
   Concepts: vectors^ghost padding;
   Processors: n

   Description: Ghost padding is one way to handle local calculations that
      involve values from other processors. VecCreateGhost() provides
      a way to create vectors with extra room at the end of the vector
      array to contain the needed ghost values from other processors;
      vector computations are otherwise unaffected.
T*/
15 
/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petscsys.h       - base PETSc routines   petscis.h     - index sets
     petscviewer.h - viewers
*/
22 #include <petscvec.h>
23 
24 int main(int argc,char **argv)
25 {
26   PetscMPIInt    rank,size;
27   PetscInt       nlocal = 6,nghost = 2,ifrom[2],i,rstart,rend;
28   PetscErrorCode ierr;
29   PetscBool      flg,flg2,flg3;
30   PetscScalar    value,*array,*tarray=0;
31   Vec            lx,gx,gxs;
32 
33   ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
34   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRMPI(ierr);
35   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRMPI(ierr);
36   if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run example with two processors\n");
37 
38   /*
39      Construct a two dimensional graph connecting nlocal degrees of
40      freedom per processor. From this we will generate the global
41      indices of needed ghost values
42 
43      For simplicity we generate the entire graph on each processor:
44      in real application the graph would stored in parallel, but this
45      example is only to demonstrate the management of ghost padding
46      with VecCreateGhost().
47 
48      In this example we consider the vector as representing
49      degrees of freedom in a one dimensional grid with periodic
50      boundary conditions.
51 
52         ----Processor  1---------  ----Processor 2 --------
53          0    1   2   3   4    5    6    7   8   9   10   11
54                                |----|
55          |-------------------------------------------------|
56 
57   */
58 
59   if (rank == 0) {
60     ifrom[0] = 11; ifrom[1] = 6;
61   } else {
62     ifrom[0] = 0;  ifrom[1] = 5;
63   }
64 
65   /*
66      Create the vector with two slots for ghost points. Note that both
67      the local vector (lx) and the global vector (gx) share the same
68      array for storing vector values.
69   */
70   ierr = PetscOptionsHasName(NULL,NULL,"-allocate",&flg);CHKERRQ(ierr);
71   ierr = PetscOptionsHasName(NULL,NULL,"-vecmpisetghost",&flg2);CHKERRQ(ierr);
72   ierr = PetscOptionsHasName(NULL,NULL,"-minvalues",&flg3);CHKERRQ(ierr);
73   if (flg) {
74     ierr = PetscMalloc1(nlocal+nghost,&tarray);CHKERRQ(ierr);
75     ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,nghost,ifrom,tarray,&gxs);CHKERRQ(ierr);
76   } else if (flg2) {
77     ierr = VecCreate(PETSC_COMM_WORLD,&gxs);CHKERRQ(ierr);
78     ierr = VecSetType(gxs,VECMPI);CHKERRQ(ierr);
79     ierr = VecSetSizes(gxs,nlocal,PETSC_DECIDE);CHKERRQ(ierr);
80     ierr = VecMPISetGhost(gxs,nghost,ifrom);CHKERRQ(ierr);
81   } else {
82     ierr = VecCreateGhost(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,nghost,ifrom,&gxs);CHKERRQ(ierr);
83   }
84 
85   /*
86       Test VecDuplicate()
87   */
88   ierr = VecDuplicate(gxs,&gx);CHKERRQ(ierr);
89   ierr = VecDestroy(&gxs);CHKERRQ(ierr);
90 
91   /*
92      Access the local representation
93   */
94   ierr = VecGhostGetLocalForm(gx,&lx);CHKERRQ(ierr);
95 
96   /*
97      Set the values from 0 to 12 into the "global" vector
98   */
99   ierr = VecGetOwnershipRange(gx,&rstart,&rend);CHKERRQ(ierr);
100   for (i=rstart; i<rend; i++) {
101     value = (PetscScalar) i;
102     ierr  = VecSetValues(gx,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
103   }
104   ierr = VecAssemblyBegin(gx);CHKERRQ(ierr);
105   ierr = VecAssemblyEnd(gx);CHKERRQ(ierr);
106 
107   ierr = VecGhostUpdateBegin(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
108   ierr = VecGhostUpdateEnd(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
109 
110   /*
111      Print out each vector, including the ghost padding region.
112   */
113   ierr = VecGetArray(lx,&array);CHKERRQ(ierr);
114   for (i=0; i<nlocal+nghost; i++) {
115     ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%D %g\n",i,(double)PetscRealPart(array[i]));CHKERRQ(ierr);
116   }
117   ierr = VecRestoreArray(lx,&array);CHKERRQ(ierr);
118   ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
119   ierr = VecGhostRestoreLocalForm(gx,&lx);CHKERRQ(ierr);
120 
121   /* Another test that sets ghost values and then accumulates onto the owning processors using MIN_VALUES */
122   if (flg3) {
123     if (rank == 0){ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nTesting VecGhostUpdate with MIN_VALUES\n");CHKERRQ(ierr);}
124     ierr = VecGhostGetLocalForm(gx,&lx);CHKERRQ(ierr);
125     ierr = VecGetArray(lx,&array);CHKERRQ(ierr);
126     for (i=0; i<nghost; i++) array[nlocal+i] = rank ? (PetscScalar)4 : (PetscScalar)8;
127     ierr = VecRestoreArray(lx,&array);CHKERRQ(ierr);
128     ierr = VecGhostRestoreLocalForm(gx,&lx);CHKERRQ(ierr);
129 
130     ierr = VecGhostUpdateBegin(gx,MIN_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
131     ierr = VecGhostUpdateEnd(gx,MIN_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
132 
133     ierr = VecGhostGetLocalForm(gx,&lx);CHKERRQ(ierr);
134     ierr = VecGetArray(lx,&array);CHKERRQ(ierr);
135 
136     for (i=0; i<nlocal+nghost; i++) {
137       ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%D %g\n",i,(double)PetscRealPart(array[i]));CHKERRQ(ierr);
138     }
139     ierr = VecRestoreArray(lx,&array);CHKERRQ(ierr);
140     ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
141     ierr = VecGhostRestoreLocalForm(gx,&lx);CHKERRQ(ierr);
142   }
143 
144   ierr = VecDestroy(&gx);CHKERRQ(ierr);
145 
146   if (flg) {ierr = PetscFree(tarray);CHKERRQ(ierr);}
147   ierr = PetscFinalize();
148   return ierr;
149 }
150 
151 /*TEST
152 
153      test:
154        nsize: 2
155 
156      test:
157        suffix: 2
158        nsize: 2
159        args: -allocate
160        output_file: output/ex9_1.out
161 
162      test:
163        suffix: 3
164        nsize: 2
165        args: -vecmpisetghost
166        output_file: output/ex9_1.out
167 
168      test:
169        suffix: 4
170        nsize: 2
171        args: -minvalues
172        output_file: output/ex9_2.out
173        requires: !complex
174 
175 TEST*/
176 
177