xref: /petsc/src/binding/petsc4py/demo/legacy/wrap-cython/Bratu3Dimpl.c (revision bcee047adeeb73090d7e36cc71e39fc287cdbb97)
/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by the
    partial differential equation

            -Laplacian(u) - lambda * exp(u) = 0,  0 < x,y,z < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1, z = 0, z = 1

    A finite difference approximation with the usual 7-point stencil
    is used to discretize the boundary value problem to obtain a
    nonlinear system of equations. The problem is solved in a 3D
    rectangular domain, using distributed arrays (DAs) to partition
    the parallel grid.

  ------------------------------------------------------------------------- */

#include "Bratu3Dimpl.h"

22 PetscErrorCode FormInitGuess(DM da, Vec X, Params *p)
23 {
24   PetscInt       i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
25   PetscReal      lambda,temp1,hx,hy,hz,tempk,tempj;
26   PetscScalar    ***x;
27 
28   PetscFunctionBegin;
29   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
30 
31   lambda = p->lambda_;
32   hx     = 1.0/(PetscReal)(Mx-1);
33   hy     = 1.0/(PetscReal)(My-1);
34   hz     = 1.0/(PetscReal)(Mz-1);
35   temp1  = lambda/(lambda + 1.0);
36 
37   /*
38     Get a pointer to vector data.
39 
40     - For default PETSc vectors, VecGetArray() returns a pointer to
41       the data array.  Otherwise, the routine is implementation
42       dependent.
43 
44     - You MUST call VecRestoreArray() when you no longer need access
45       to the array.
46   */
47   PetscCall(DMDAVecGetArray(da,X,&x));
48 
49   /*
50     Get local grid boundaries (for 3-dimensional DMDA):
51 
52     - xs, ys, zs: starting grid indices (no ghost points)
53 
54     - xm, ym, zm: widths of local grid (no ghost points)
55   */
56   PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));
57 
58   /*
59     Compute initial guess over the locally owned part of the grid
60   */
61   for (k=zs; k<zs+zm; k++) {
62     tempk = (PetscReal)(PetscMin(k,Mz-k-1))*hz;
63     for (j=ys; j<ys+ym; j++) {
64       tempj = PetscMin((PetscReal)(PetscMin(j,My-j-1))*hy,tempk);
65       for (i=xs; i<xs+xm; i++) {
66         if (i == 0 || j == 0 || k == 0 || i == Mx-1 || j == My-1 || k == Mz-1) {
67           /* boundary conditions are all zero Dirichlet */
68           x[k][j][i] = 0.0;
69         } else {
70           x[k][j][i] = temp1*sqrt(PetscMin((PetscReal)(PetscMin(i,Mx-i-1))*hx,tempj));
71         }
72       }
73     }
74   }
75 
76   /*
77     Restore vector
78   */
79   PetscCall(DMDAVecRestoreArray(da,X,&x));
80 
81   PetscFunctionReturn(PETSC_SUCCESS);
82 }
83 
84 PetscErrorCode FormFunction(DM da, Vec X, Vec F, Params *p)
85 {
86   PetscInt       i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
87   PetscReal      two = 2.0,lambda,hx,hy,hz,hxhzdhy,hyhzdhx,hxhydhz,sc;
88   PetscScalar    u_north,u_south,u_east,u_west,u_up,u_down,u,u_xx,u_yy,u_zz,***x,***f;
89   Vec            localX;
90 
91   PetscFunctionBegin;
92   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
93   lambda = p->lambda_;
94   hx     = 1.0/(PetscReal)(Mx-1);
95   hy     = 1.0/(PetscReal)(My-1);
96   hz     = 1.0/(PetscReal)(Mz-1);
97   sc     = hx*hy*hz*lambda;
98   hxhzdhy = hx*hz/hy;
99   hyhzdhx = hy*hz/hx;
100   hxhydhz = hx*hy/hz;
101 
102   /*
103 
104    */
105   PetscCall(DMGetLocalVector(da,&localX));
106 
107   /*
108     Scatter ghost points to local vector,using the 2-step process
109     DMGlobalToLocalBegin(),DMGlobalToLocalEnd().  By placing code
110     between these two statements, computations can be done while
111     messages are in transition.
112   */
113   PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
114   PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
115 
116   /*
117     Get pointers to vector data.
118   */
119   PetscCall(DMDAVecGetArray(da,localX,&x));
120   PetscCall(DMDAVecGetArray(da,F,&f));
121 
122   /*
123     Get local grid boundaries.
124   */
125   PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));
126 
127   /*
128     Compute function over the locally owned part of the grid.
129   */
130   for (k=zs; k<zs+zm; k++) {
131     for (j=ys; j<ys+ym; j++) {
132       for (i=xs; i<xs+xm; i++) {
133         if (i == 0 || j == 0 || k == 0 || i == Mx-1 || j == My-1 || k == Mz-1) {
134           /* boundary points */
135           f[k][j][i] = x[k][j][i] - 0.0;
136         } else {
137           /* interior grid points */
138           u           = x[k][j][i];
139           u_east      = x[k][j][i+1];
140           u_west      = x[k][j][i-1];
141           u_north     = x[k][j+1][i];
142           u_south     = x[k][j-1][i];
143           u_up        = x[k+1][j][i];
144           u_down      = x[k-1][j][i];
145           u_xx        = (-u_east + two*u - u_west)*hyhzdhx;
146           u_yy        = (-u_north + two*u - u_south)*hxhzdhy;
147           u_zz        = (-u_up + two*u - u_down)*hxhydhz;
148           f[k][j][i]  = u_xx + u_yy + u_zz - sc*PetscExpScalar(u);
149         }
150       }
151     }
152   }
153 
154   /*
155     Restore vectors.
156   */
157   PetscCall(DMDAVecRestoreArray(da,F,&f));
158   PetscCall(DMDAVecRestoreArray(da,localX,&x));
159   PetscCall(DMRestoreLocalVector(da,&localX));
160   PetscCall(PetscLogFlops(11.0*ym*xm));
161 
162   PetscFunctionReturn(PETSC_SUCCESS);
163 }
164 
165 PetscErrorCode FormJacobian(DM da, Vec X, Mat J, Params *p)
166 {
167   PetscInt       i,j,k,Mx,My,Mz,xs,ys,zs,xm,ym,zm;
168   PetscReal      lambda,hx,hy,hz,hxhzdhy,hyhzdhx,hxhydhz,sc;
169   PetscScalar    v[7],***x;
170   MatStencil     col[7],row;
171   Vec            localX;
172 
173   PetscFunctionBegin;
174   PetscCall(DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,&Mz,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE));
175   lambda = p->lambda_;
176   hx     = 1.0/(PetscReal)(Mx-1);
177   hy     = 1.0/(PetscReal)(My-1);
178   hz     = 1.0/(PetscReal)(Mz-1);
179   sc     = hx*hy*hz*lambda;
180   hxhzdhy = hx*hz/hy;
181   hyhzdhx = hy*hz/hx;
182   hxhydhz = hx*hy/hz;
183 
184   /*
185 
186    */
187   PetscCall(DMGetLocalVector(da,&localX));
188 
189   /*
190     Scatter ghost points to local vector, using the 2-step process
191     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().  By placing code
192     between these two statements, computations can be done while
193     messages are in transition.
194   */
195   PetscCall(DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX));
196   PetscCall(DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX));
197 
198   /*
199     Get pointer to vector data.
200   */
201   PetscCall(DMDAVecGetArray(da,localX,&x));
202 
203   /*
204     Get local grid boundaries.
205   */
206   PetscCall(DMDAGetCorners(da,&xs,&ys,&zs,&xm,&ym,&zm));
207 
208   /*
209     Compute entries for the locally owned part of the Jacobian.
210 
211     - Currently, all PETSc parallel matrix formats are partitioned by
212       contiguous chunks of rows across the processors.
213 
214     - Each processor needs to insert only elements that it owns
215       locally (but any non-local elements will be sent to the
216       appropriate processor during matrix assembly).
217 
218     - Here, we set all entries for a particular row at once.
219 
220     - We can set matrix entries either using either
221       MatSetValuesLocal() or MatSetValues(), as discussed above.
222   */
223   for (k=zs; k<zs+zm; k++) {
224     for (j=ys; j<ys+ym; j++) {
225       for (i=xs; i<xs+xm; i++) {
226         row.k = k; row.j = j; row.i = i;
227         /* boundary points */
228         if (i == 0 || j == 0 || k == 0|| i == Mx-1 || j == My-1 || k == Mz-1) {
229           v[0] = 1.0;
230           PetscCall(MatSetValuesStencil(J,1,&row,1,&row,v,INSERT_VALUES));
231         } else {
232         /* interior grid points */
233           v[0] = -hxhydhz; col[0].k=k-1;col[0].j=j;  col[0].i = i;
234           v[1] = -hxhzdhy; col[1].k=k;  col[1].j=j-1;col[1].i = i;
235           v[2] = -hyhzdhx; col[2].k=k;  col[2].j=j;  col[2].i = i-1;
236           v[3] = 2.0*(hyhzdhx+hxhzdhy+hxhydhz)-sc*PetscExpScalar(x[k][j][i]);col[3].k=row.k;col[3].j=row.j;col[3].i = row.i;
237           v[4] = -hyhzdhx; col[4].k=k;  col[4].j=j;  col[4].i = i+1;
238           v[5] = -hxhzdhy; col[5].k=k;  col[5].j=j+1;col[5].i = i;
239           v[6] = -hxhydhz; col[6].k=k+1;col[6].j=j;  col[6].i = i;
240           PetscCall(MatSetValuesStencil(J,1,&row,7,col,v,INSERT_VALUES));
241         }
242       }
243     }
244   }
245   PetscCall(DMDAVecRestoreArray(da,localX,&x));
246   PetscCall(DMRestoreLocalVector(da,&localX));
247 
248   /*
249     Assemble matrix, using the 2-step process: MatAssemblyBegin(),
250     MatAssemblyEnd().
251   */
252   PetscCall(MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY));
253   PetscCall(MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY));
254 
255   /*
256     Tell the matrix we will never add a new nonzero location to the
257     matrix. If we do, it will generate an error.
258   */
259   PetscCall(MatSetOption(J,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE));
260 
261   PetscFunctionReturn(PETSC_SUCCESS);
262 }
263