!
!        Demonstrates having each OpenMP thread manage its own PETSc objects and linear solves
!           - each thread is ONLY allowed to access objects that IT created,
!             that is, threads cannot share objects
!
!        Run with "OMP_NUM_THREADS=16 ./ex61f"
!           to use 16 independent threads
!
!        Requires PETSc configured with ./configure --with-threadsafety --with-log=0 --with-openmp
!
         module omp_module
         implicit none
         contains
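!        Split the range 1..total into num_pieces contiguous chunks: piece i
!        owns indices ibeg(i)..iend(i), the first mod(total,num_pieces) pieces
!        get one extra index, and an empty piece is marked by iend(i) < ibeg(i).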
         subroutine split_indices(total,num_pieces,ibeg,iend)
           implicit none

           integer, intent(in) :: total
           integer, intent(in) :: num_pieces
           integer, intent(out) :: ibeg(num_pieces), iend(num_pieces)
           integer :: itmp1, itmp2, ioffset, i

           itmp1 = total/num_pieces
           itmp2 = mod(total,num_pieces)
           ioffset = 0
           do i=1,itmp2
              ibeg(i) = ioffset + 1
              iend(i) = ioffset + (itmp1+1)
              ioffset = iend(i)
           enddo
           do i=itmp2+1,num_pieces
              ibeg(i) = ioffset + 1
              if (ibeg(i) > total) then
                 iend(i) = ibeg(i) - 1
              else
                 iend(i) = ioffset + itmp1
                 ioffset = iend(i)
              endif
           enddo

         end subroutine split_indices
         end module omp_module

      module assert_mod
      implicit none
      contains
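!     Print msg and icase, then halt, when the condition lcond is false.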
      subroutine assert(lcond,msg,icase)
      logical, intent(in) :: lcond
      character(len=*), intent(in) :: msg
      integer, intent(in) :: icase

      if (.not.lcond) then
         write(*,*) msg, icase
         stop 'assertion error '
      endif
      return
      end subroutine assert
      end module assert_mod

      program tpetsc

#include <petsc/finclude/petsc.h>
      use assert_mod
      use omp_module
      use petsc
      implicit none
!     ----------------------------
!     test concurrent petsc solver
!     ----------------------------

      integer, parameter :: NCASES=100
      integer, parameter :: MAXTHREADS=64
      real(8), parameter :: tol = 1.0d-6

      integer, dimension(MAXTHREADS) :: ibeg,iend

!$   integer, external :: omp_get_num_threads

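!     one set of PETSc objects per thread; each thread touches only its own entry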
      Mat, target :: Mcol_f_mat(MAXTHREADS)
      Vec, target :: Mcol_f_vecb(MAXTHREADS)
      Vec, target :: Mcol_f_vecx(MAXTHREADS)
      KSP, target :: Mcol_f_ksp(MAXTHREADS)
      PC,  target :: MPC(MAXTHREADS)

      Mat, pointer :: col_f_mat
      Vec, pointer :: col_f_vecb
      Vec, pointer :: col_f_vecx
      KSP, pointer :: col_f_ksp
      PC, pointer :: pc

      PetscInt :: ith, nthreads
      PetscErrorCode ierr

      integer, parameter :: nz_per_row = 9
      integer, parameter :: n = 100
      integer :: i,j,ij,ij2,ii,jj,nz,ip, dx,dy,icase
      integer, allocatable :: ilist(:),jlist(:)
      PetscScalar :: aij
      PetscScalar, allocatable :: alist(:)
      logical :: isvalid_ii, isvalid_jj, is_diag

      PetscInt nrow
      PetscInt ncol
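!     solution and right-hand-side work arrays; the OpenMP private() clauses
!     below give each thread its own copy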
      PetscScalar, allocatable :: x(:), b(:)
      real(8) :: err(NCASES)

      integer :: t1,t2,count_rate
      real :: ttime

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif

      allocate(ilist(n*n*nz_per_row),jlist(n*n*nz_per_row),alist(n*n*nz_per_row))
      allocate(x(0:(n*n-1)),b(0:(n*n-1)))
      nrow = n*n
      ncol = nrow

      nthreads = 1
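!     query the number of OpenMP threads (stays 1 when OpenMP is disabled)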
!$omp parallel
!$omp master
!$      nthreads = omp_get_num_threads()
!$omp end master
!$omp end parallel
      print*,'nthreads = ', nthreads,' NCASES = ', NCASES

      call split_indices(NCASES,nthreads,ibeg,iend)

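!      -----------------------------------------------------------
!      each thread creates its own matrix, vectors, and KSP solver
!      -----------------------------------------------------------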
!$omp parallel do                                                        &
!$omp private(ith,ierr)                                                  &
!$omp private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp)
      do ith=1,nthreads
         col_f_mat => Mcol_f_mat(ith)
         col_f_vecb => Mcol_f_vecb(ith)
         col_f_vecx => Mcol_f_vecx(ith)
         col_f_ksp => Mcol_f_ksp(ith)

         call MatCreateSeqAIJ(PETSC_COMM_SELF, nrow, ncol, nz_per_row, PETSC_NULL_INTEGER, col_f_mat, ierr)
         call assert(ierr.eq.0,'matcreateseqaij return ',ierr)

         call VecCreateSeqWithArray(PETSC_COMM_SELF,1,nrow,PETSC_NULL_SCALAR, col_f_vecb, ierr)
         call assert(ierr.eq.0,'veccreateseqwitharray return ierr',ierr)

         call VecDuplicate(col_f_vecb, col_f_vecx,ierr)
         call assert(ierr.eq.0,'vecduplicate return ierr',ierr)

         call KSPCreate(PETSC_COMM_SELF, col_f_ksp,ierr)
         call assert(ierr.eq.0,'kspcreate return ierr',ierr)

      enddo

!      -----------------------
!      setup sparsity pattern
!      -----------------------
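!      9-point stencil on an n x n grid: 4 on the diagonal, -1 for each
!      neighbor (dx,dy) that falls inside the grid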
       nz = 0
       do j=1,n
       do i=1,n
         ij = i + (j-1)*n
         do dx=-1,1
         do dy=-1,1
           ii = i + dx
           jj = j + dy

           ij2 = ii + (jj-1)*n
           isvalid_ii = (1 <= ii).and.(ii <= n)
           isvalid_jj = (1 <= jj).and.(jj <= n)
           if (isvalid_ii.and.isvalid_jj) then
             is_diag = (ij .eq. ij2)
             nz = nz + 1
             ilist(nz) = ij
             jlist(nz) = ij2
             if (is_diag) then
               aij = 4.0
             else
               aij = -1.0
             endif
             alist(nz) = aij
           endif
         enddo
         enddo
       enddo
       enddo

       print*,'nz = ', nz

!      ----------------------------------
!      convert from Fortran to C indexing
!      ----------------------------------
       ilist(1:nz) = ilist(1:nz) - 1
       jlist(1:nz) = jlist(1:nz) - 1

!      -------------------------------------------
!      assemble the matrices and solve the systems
!      -------------------------------------------
       call system_clock(t1,count_rate)

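!      Each thread loops over its share of the NCASES cases: fill and assemble
!      its matrix, build b = A*x for the known solution x = 1, then solve with
!      a direct LU factorization (PCLU) and record the maximum error.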
!$omp  parallel do                                                      &
!$omp& private(ith,icase,ip,i,j,ii,jj,aij,ierr,x,b)                      &
!$omp& private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp,pc)
       do ith=1,nthreads
         col_f_mat => Mcol_f_mat(ith)
         col_f_vecb => Mcol_f_vecb(ith)
         col_f_vecx => Mcol_f_vecx(ith)
         col_f_ksp => Mcol_f_ksp(ith)
         pc => MPC(ith)

        do icase=ibeg(ith),iend(ith)

         do ip=1,nz
           ii = ilist(ip)
           jj = jlist(ip)
           aij = alist(ip)
           call MatSetValue(col_f_mat,ii,jj,aij,INSERT_VALUES,ierr)
           call assert(ierr.eq.0,'matsetvalue return ierr',ierr)
         enddo
         call MatAssemblyBegin(col_f_mat,MAT_FINAL_ASSEMBLY,ierr)
         call assert(ierr.eq.0,'matassemblybegin return ierr',ierr)

         call MatAssemblyEnd(col_f_mat,MAT_FINAL_ASSEMBLY,ierr)
         call assert(ierr.eq.0,'matassemblyend return ierr',ierr)

         call KSPSetOperators(col_f_ksp,col_f_mat,col_f_mat,ierr)
         call assert(ierr.eq.0,'KSPSetOperators return ierr',ierr)

         ! set the linear solver to a direct LU factorization
         call KSPGetPC(col_f_ksp,pc,ierr)
         call assert(ierr.eq.0,'KSPGetPC return ierr ', ierr)

         call PCSetType(pc,PCLU,ierr)
         call assert(ierr.eq.0,'PCSetType return ierr ',ierr)

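         ! the critical sections below serialize VecPlaceArray/VecResetArray,
         ! which swap the data pointer held inside a PETSc vector object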
         ! associate the petsc vectors with the thread-private arrays
         x(:) = 1
!$omp    critical
         call VecPlaceArray(col_f_vecx,x,ierr)
!$omp    end critical
         call assert(ierr.eq.0,'VecPlaceArray col_f_vecx return ',ierr)

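         ! build b = A*x for the exact solution x = 1, then zero x so the
         ! solve must recover it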
         b(:) = 0
         do ip=1,nz
           i = ilist(ip)
           j = jlist(ip)
           aij = alist(ip)
           b(i) = b(i) + aij*x(j)
         enddo
         x = 0
!$omp    critical
         call VecPlaceArray(col_f_vecb,b,ierr)
!$omp    end critical
         call assert(ierr.eq.0,'VecPlaceArray col_f_vecb return ',ierr)

!  ---------------------------------------------------------------
!  key test: multiple linear systems must be solvable concurrently
!  ---------------------------------------------------------------
         call KSPSetFromOptions(col_f_ksp,ierr)
         call assert(ierr.eq.0,'KSPSetFromOptions return ierr ',ierr)

         call KSPSetUp(col_f_ksp,ierr)
         call assert(ierr.eq.0,'KSPSetUp return ierr ',ierr)

         call KSPSolve(col_f_ksp,col_f_vecb,col_f_vecx,ierr)
         call assert(ierr.eq.0,'KSPSolve return ierr ',ierr)

!        ------------
!        check answer
!        ------------
         err(icase) = maxval(abs(x(:)-1))

!$omp    critical
         call VecResetArray(col_f_vecx,ierr)
!$omp    end critical
         call assert(ierr.eq.0,'VecResetArray col_f_vecx return ierr ',ierr)

!$omp    critical
         call VecResetArray(col_f_vecb,ierr)
!$omp    end critical
         call assert(ierr.eq.0,'VecResetArray col_f_vecb return ierr ',ierr)

       enddo
       enddo

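!      --------------------------------------------
!      each thread destroys the objects it created
!      --------------------------------------------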
!$omp parallel do                                                        &
!$omp private(ith,ierr)                                                  &
!$omp private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp)
      do ith=1,nthreads
         col_f_mat => Mcol_f_mat(ith)
         col_f_vecb => Mcol_f_vecb(ith)
         col_f_vecx => Mcol_f_vecx(ith)
         col_f_ksp => Mcol_f_ksp(ith)

         call MatDestroy(col_f_mat, ierr)
         call assert(ierr.eq.0,'matdestroy return ',ierr)

         call VecDestroy(col_f_vecb, ierr)
         call assert(ierr.eq.0,'vecdestroy return ierr',ierr)

         call VecDestroy(col_f_vecx,ierr)
         call assert(ierr.eq.0,'vecdestroy return ierr',ierr)

         call KSPDestroy(col_f_ksp,ierr)
         call assert(ierr.eq.0,'kspdestroy return ierr',ierr)

      enddo

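!      ----------------------------
!      report timing and max error
!      ----------------------------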
       call system_clock(t2,count_rate)
       ttime = real(t2-t1)/real(count_rate)
       write(*,*) 'total time is ',ttime

       write(*,*) 'maxval(err) ', maxval(err)
       do icase=1,NCASES
         if (err(icase) > tol) then
           write(*,*) 'icase,err(icase) ',icase,err(icase)
         endif
       enddo

       deallocate(ilist,jlist,alist)
       deallocate(x,b)
       call PetscFinalize(ierr)
       call assert(ierr.eq.0,'petscfinalize return ierr',ierr)

       end program tpetsc

!/*TEST
!
!   build:
!      requires: double !complex !define(PETSC_USE_64BIT_INDICES)
!
!   test:
!      output_file: output/ex61f_1.out
!      TODO: Need to determine how to test OpenMP code
!
!TEST*/