xref: /petsc/src/dm/tutorials/ex13f90.F90 (revision c4762a1b19cd2af06abeed90e8f9d34fb975dd94)
program main
!
! This example intends to show how DMDA is used to solve a PDE on a decomposed
! domain. The equation we are solving is not a PDE, but a toy example: van der
! Pol's 2-variable ODE duplicated onto a 3D grid:
! dx/dt = y
! dy/dt = mu(1-x**2)y - x
!
! So we are solving the same equation on all grid points, with no spatial
! dependencies. Still we tell PETSc to communicate (stencil width >0) so we
! have communication between different parts of the domain.
!
! The example is structured so that one can replace the RHS function and
! the forw_euler routine with a suitable RHS and a suitable time-integration
! scheme and make little or no modifications to the DMDA parts. In particular,
! the "inner" parts of the RHS and time-integration do not "know about" the
! decomposed domain.
!
!     See:     http://dx.doi.org/10.6084/m9.figshare.1368581
!
!     Contributed by Aasmund Ervik (asmunder at pvv.org)
!

  use ex13f90aux

#include <petsc/finclude/petscdmda.h>
  use petscdmda

  implicit none

  PetscErrorCode   ierr
  PetscMPIInt      rank,nprocs
  MPI_Comm         comm
  Vec              Lvec,coords
  DM               SolScal,CoordDM
  DMBoundaryType   b_x,b_y,b_z
  ! array: pointer borrowed from PETSc (DMDAVecGetArrayF90 / petsc_to_local);
  ! first index is the degree of freedom, the rest are i,j,k.
  PetscReal, pointer :: array(:,:,:,:)
  PetscReal :: t,tend,dt,xmin,xmax,ymin,ymax,zmin,zmax,xgmin,xgmax,ygmin,ygmax,zgmin,zgmax
  ! f:    local (ghosted) copy of the solution, f(dof, ib1:ibn, jb1:jbn, kb1:kbn)
  ! grid: local (ghosted) copy of the node coordinates, grid(ndim, ...)
  PetscReal, allocatable :: f(:,:,:,:), grid(:,:,:,:)
  PetscInt :: i,j,k,igmax,jgmax,kgmax,ib1,ibn,jb1,jbn,kb1,kbn,imax,jmax,kmax,itime,maxstep,nscreen,dof,stw,ndim

  ! Fire up PETSc:
  call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
  if (ierr .ne. 0) then
    print*,'Unable to initialize PETSc'
    stop
  endif
  comm = PETSC_COMM_WORLD
  call MPI_Comm_rank(comm,rank,ierr);CHKERRA(ierr)
  call MPI_Comm_size(comm,nprocs,ierr);CHKERRA(ierr)
  if (rank == 0) then
    write(*,*) 'Hi! We are solving van der Pol using ',nprocs,' processes.'
    write(*,*) ' '
    write(*,*) '  t     x1         x2'
  endif

  ! Set up the global grid
  igmax = 50
  jgmax = 50
  kgmax = 50
  xgmin = 0.0
  ygmin = 0.0
  zgmin = 0.0
  xgmax = 1.0
  ygmax = 1.0
  zgmax = 1.0
  stw = 1  ! stencil width
  dof = 2  ! number of variables in this DA
  ndim = 3 ! 3D code

  ! Get the BCs and create the DMDA.
  ! Note: get_boundary_cond (see ex13f90aux.F90) does not return an error code,
  ! so there is nothing to CHKERRA here.
  call get_boundary_cond(b_x,b_y,b_z)
  call DMDACreate3d(comm,b_x,b_y,b_z,DMDA_STENCIL_STAR,igmax,jgmax,kgmax,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,stw,  &
                    PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,SolScal,ierr);CHKERRA(ierr)
  call DMSetFromOptions(SolScal,ierr);CHKERRA(ierr)
  call DMSetUp(SolScal,ierr);CHKERRA(ierr)

  ! Set global coordinates, get a local work vector
  call DMDASetUniformCoordinates(SolScal,xgmin,xgmax,ygmin,ygmax,zgmin,zgmax,ierr);CHKERRA(ierr)
  call DMCreateLocalVector(SolScal,Lvec,ierr);CHKERRA(ierr)

  ! Get ib1,imax,ibn etc. of the local grid.
  ! Our convention is:
  ! the first local ghost cell is ib1
  ! the first local       cell is 1
  ! the last  local       cell is imax
  ! the last  local ghost cell is ibn.
  !
  ! i,j,k must be in this call, but are not used
  call DMDAGetCorners(SolScal,i,j,k,imax,jmax,kmax,ierr);CHKERRA(ierr)
  ib1=1-stw
  jb1=1-stw
  kb1=1-stw
  ibn=imax+stw
  jbn=jmax+stw
  kbn=kmax+stw
  allocate(f(dof,ib1:ibn,jb1:jbn,kb1:kbn))
  allocate(grid(ndim,ib1:ibn,jb1:jbn,kb1:kbn))

  ! Get xmin,xmax etc. for the local grid
  ! The "coords" local vector here is borrowed, so we shall not destroy it.
  call DMGetCoordinatesLocal(SolScal,coords,ierr);CHKERRA(ierr)
  ! We need a new DM for coordinate stuff since PETSc supports unstructured grids
  call DMGetCoordinateDM(SolScal,CoordDM,ierr);CHKERRA(ierr)
  ! petsc_to_local and local_to_petsc are convenience functions, see
  ! ex13f90aux.F90. They do not return an error code.
  call petsc_to_local(CoordDM,coords,array,grid,ndim,stw)
  xmin=grid(1,1,1,1)
  ymin=grid(2,1,1,1)
  zmin=grid(3,1,1,1)
  xmax=grid(1,imax,jmax,kmax)
  ymax=grid(2,imax,jmax,kmax)
  zmax=grid(3,imax,jmax,kmax)
  call local_to_petsc(CoordDM,coords,array,grid,ndim,stw)

  ! Note that we never use xmin,xmax in this example, but the preceding way of
  ! getting the local xmin,xmax etc. from PETSc for a structured uniform grid
  ! is not documented in any other examples I could find.

  ! Set up the time-stepping
  t = 0.0
  tend = 100.0
  dt = 1e-3
  maxstep=ceiling((tend-t)/dt)
  ! Write output every second (of simulation-time)
  nscreen = int(1.0/dt)+1

  ! Set initial condition.
  ! DMDAVecGetArrayF90 hands back a 0-based dof index, so the two van der Pol
  ! variables live in array(0,...) and array(1,...).
  call DMDAVecGetArrayF90(SolScal,Lvec,array,ierr);CHKERRA(ierr)
  array(0,:,:,:) = 0.5
  array(1,:,:,:) = 0.5
  call DMDAVecRestoreArrayF90(SolScal,Lvec,array,ierr);CHKERRA(ierr)

  ! Initial set-up finished.
  ! Time loop
  maxstep = 5 ! shortened run so the test finishes quickly
  do itime=1,maxstep

    ! Communicate such that everyone has the correct values in ghost cells
    call DMLocalToLocalBegin(SolScal,Lvec,INSERT_VALUES,Lvec,ierr);CHKERRA(ierr)
    call DMLocalToLocalEnd(SolScal,Lvec,INSERT_VALUES,Lvec,ierr);CHKERRA(ierr)

    ! Get the old solution from the PETSc data structures
    call petsc_to_local(SolScal,Lvec,array,f,dof,stw)

    ! Do the time step
    call forw_euler(t,dt,ib1,ibn,jb1,jbn,kb1,kbn,imax,jmax,kmax,dof,f,dfdt_vdp)
    t=t+dt

    ! Write result to screen (if main process and it's time to)
    if (rank == 0 .and. mod(itime,nscreen) == 0) then
      write(*,'(F5.1,2F11.6)') t,f(1,1,1,1),f(2,1,1,1)
    endif

    ! Put our new solution in the PETSc data structures
    call local_to_petsc(SolScal,Lvec,array,f,dof,stw)
  end do

  ! Deallocate and finalize.
  ! Lvec came from DMCreateLocalVector, so it is destroyed with VecDestroy;
  ! DMRestoreLocalVector is only for vectors obtained via DMGetLocalVector.
  call VecDestroy(Lvec,ierr);CHKERRA(ierr)
  call DMDestroy(SolScal,ierr);CHKERRA(ierr)
  deallocate(f)
  deallocate(grid)
  call PetscFinalize(ierr)

end program main
169*c4762a1bSJed Brown
170*c4762a1bSJed Brown!/*TEST
171*c4762a1bSJed Brown!
172*c4762a1bSJed Brown!   build:
173*c4762a1bSJed Brown!     requires: !complex
174*c4762a1bSJed Brown!     depends:  ex13f90aux.F90
175*c4762a1bSJed Brown!
176*c4762a1bSJed Brown!   test:
177*c4762a1bSJed Brown!     nsize: 4
178*c4762a1bSJed Brown!
179*c4762a1bSJed Brown!TEST*/
180*c4762a1bSJed Brown
181