!
!   Tests PCMGSetResidual
!
! -----------------------------------------------------------------------
#include <petsc/finclude/petscksp.h>
program main
  use petscksp
  implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand side, exact solution vectors
!     A        - matrix that defines linear system
!

  Mat A
  Vec x, b, u
  PC pc
  KSP ksp
  PetscInt n, dim, istart, iend
  PetscInt i, j, jj, ii, one, zero
  PetscErrorCode ierr
  PetscScalar v, pfive
  external MyResidual

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  PetscCallA(PetscInitialize(ierr))
  pfive = 0.5
  n = 6
  dim = n*n
  one = 1
  zero = 0
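!  one and zero are declared PetscInt (rather than being passed as
!  integer literals) so that arguments match the PetscInt kind PETSc
!  expects, which may be 64-bit depending on configuration.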

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

  PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
  PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, dim, dim, ierr))
  PetscCallA(MatSetFromOptions(A, ierr))
  PetscCallA(MatSetUp(A, ierr))

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

  PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))
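!  Note that the ownership range is half-open: Iend is one past the last
!  locally owned row, which is why the loop below runs to Iend - 1.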

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
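!   - Row II corresponds to grid point (i,j) = (II/n, II - i*n) of an
!     n x n mesh; each row receives the standard 5-point Laplacian
!     stencil: -1 for each of the (up to) four neighbors and 4 on the
!     diagonal.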

  do II = Istart, Iend - 1
    v = -1.0
    i = II/n
    j = II - i*n
    if (i > 0) then
      JJ = II - n
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (i < n - 1) then
      JJ = II + n
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j > 0) then
      JJ = II - 1
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    if (j < n - 1) then
      JJ = II + 1
      PetscCallA(MatSetValues(A, one, [II], one, [JJ], [v], ADD_VALUES, ierr))
    end if
    v = 4.0
    PetscCallA(MatSetValues(A, one, [II], one, [II], [v], ADD_VALUES, ierr))
  end do

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

  PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
  PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

  PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
  PetscCallA(VecSetSizes(u, PETSC_DECIDE, dim, ierr))
  PetscCallA(VecSetFromOptions(u, ierr))
  PetscCallA(VecDuplicate(u, b, ierr))
  PetscCallA(VecDuplicate(b, x, ierr))

!  Set exact solution; then compute right-hand-side vector.

  PetscCallA(VecSet(u, pfive, ierr))
  PetscCallA(MatMult(A, u, b, ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

  PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))
  PetscCallA(KSPGetPC(ksp, pc, ierr))
  PetscCallA(PCSetType(pc, PCMG, ierr))
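
!  Configure PCMG with a single level and register MyResidual() as the
!  routine (with matrix A) that computes the residual on level zero;
!  this registration is what the test exercises.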
  PetscCallA(PCMGSetLevels(pc, one, PETSC_NULL_MPI_COMM, ierr))
  PetscCallA(PCMGSetResidual(pc, zero, MyResidual, A, ierr))

!  Set operators. Here the matrix that defines the linear system
!  also serves as the matrix used to construct the preconditioner.

  PetscCallA(KSPSetOperators(ksp, A, A, ierr))

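!  Note: KSPSolve() is intentionally not called; the test only verifies
!  that the residual callback can be registered, so the expected output
!  is empty (see the TEST block below).
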
  PetscCallA(KSPDestroy(ksp, ierr))
  PetscCallA(VecDestroy(u, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(VecDestroy(b, ierr))
  PetscCallA(MatDestroy(A, ierr))

  PetscCallA(PetscFinalize(ierr))
end

subroutine MyResidual(A, b, x, r, ierr)
  use petscksp
  implicit none
  Mat A
  Vec b, x, r
  PetscErrorCode ierr
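  PetscScalar mone

!  A PCMG residual routine computes r = b - A*x; the body below is a
!  minimal sketch of that computation. This particular test never
!  invokes the callback, since KSPSolve() is not called.
  mone = -1.0
  PetscCall(MatMult(A, x, r, ierr))    ! r = A*x
  PetscCall(VecAYPX(r, mone, b, ierr)) ! r = b - r = b - A*x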
end

!/*TEST
!
!   test:
!      nsize: 1
!      output_file: output/empty.out
!TEST*/