Actual source code: ex11f.F90

  1: !
  2: !  Description: Solves a complex linear system in parallel with KSP (Fortran code).
  3: !

  5: !
  6: !  The model problem:
  7: !     Solve Helmholtz equation on the unit square: (0,1) x (0,1)
  8: !          -delta u - sigma1*u + i*sigma2*u = f,
  9: !           where delta = Laplace operator
 10: !     Dirichlet b.c.'s on all sides
 11: !     Use the 2-D, five-point finite difference stencil.
 12: !
 13: !     Compiling the code:
 14: !      This code uses the complex numbers version of PETSc, so configure
 15: !      must be run to enable this
 16: !
 17: !
 18: ! -----------------------------------------------------------------------
 19: #include <petsc/finclude/petscksp.h>
 20: program main
 21:   use petscksp
 22:   implicit none

 24: !
 25: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 26: !                   Variable declarations
 27: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 28: !
 29: !  Variables:
 30: !     ksp     - linear solver context
 31: !     x, b, u  - approx solution, right-hand-side, exact solution vectors
 32: !     A        - matrix that defines linear system
 33: !     its      - iterations for convergence
 34: !     norm     - norm of error in solution
 35: !     rctx     - random number context

 36: !

 38:   KSP ksp
 39:   Mat A
 40:   Vec x, b, u
 41:   PetscRandom rctx
 42:   PetscReal norm, h2, sigma1
 43:   PetscScalar, parameter :: czero = 0.0, none = -1.0, pfive = .5
 44:   PetscScalar sigma2, v
 45:   PetscInt dim, its, n, Istart, Iend
 46:   PetscInt i, j, II, JJ
 47:   PetscErrorCode ierr
 48:   PetscMPIInt rank
 49:   PetscBool flg
 50:   logical use_random

 52: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 53: !                 Beginning of program
 54: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

 56:   PetscCallA(PetscInitialize(ierr))
 57:   PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
 58:   sigma1 = 100.0
 59:   PetscCallA(PetscOptionsGetReal(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-sigma1', sigma1, flg, ierr))
!  n is the number of interior grid points per dimension; the global
!  system dimension is n*n (one unknown per interior grid point).
 60:   n = 6
 61:   PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-n', n, flg, ierr))
 62:   dim = n*n

 64: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 65: !      Compute the matrix and right-hand-side vector that define
 66: !      the linear system, Ax = b.
 67: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

 69: !  Create parallel matrix, specifying only its global dimensions.
 70: !  When using MatCreate(), the matrix format can be specified at
 71: !  runtime. Also, the parallel partitioning of the matrix is
 72: !  determined by PETSc at runtime.

 74:   PetscCallA(MatCreate(PETSC_COMM_WORLD, A, ierr))
 75:   PetscCallA(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, dim, dim, ierr))
 76:   PetscCallA(MatSetFromOptions(A, ierr))
 77:   PetscCallA(MatSetUp(A, ierr))

 79: !  Currently, all PETSc parallel matrix formats are partitioned by
 80: !  contiguous chunks of rows across the processors.  Determine which
 81: !  rows of the matrix are locally owned.

 83:   PetscCallA(MatGetOwnershipRange(A, Istart, Iend, ierr))

 85: !  Set matrix elements in parallel.
 86: !   - Each processor needs to insert only elements that it owns
 87: !     locally (but any non-local elements will be sent to the
 88: !     appropriate processor during matrix assembly).
 89: !   - Always specify global rows and columns of matrix entries.

!  -norandom selects a fixed imaginary shift sigma2 = 10i; otherwise a
!  random sigma2 in [0, i] is drawn per matrix row (see loop below).
 91:   PetscCallA(PetscOptionsHasName(PETSC_NULL_OPTIONS, PETSC_NULL_CHARACTER, '-norandom', flg, ierr))
 92:   if (flg) then
 93:     use_random = .false.
 94:     sigma2 = 10.0*PETSC_i
 95:   else
 96:     use_random = .true.
 97:     PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
 98:     PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
 99:     PetscCallA(PetscRandomSetInterval(rctx, czero, PETSC_i, ierr))
100:   end if
!  h2 = h^2 where h = 1/(n+1) is the mesh spacing of the (0,1)x(0,1) grid.
101:   h2 = 1.0/real((n + 1)*(n + 1))

!  Assemble the 5-point stencil row by row over the locally owned range.
!  II is the global row index; it maps to grid coordinates (i, j) via
!  i = II/n, j = II - i*n.  The four neighbor couplings get value -1;
!  boundary neighbors (Dirichlet b.c.) are simply omitted.
103:   do II = Istart, Iend - 1
104:     v = -1.0
105:     i = II/n
106:     j = II - i*n
107:     if (i > 0) then
108:       JJ = II - n
109:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
110:     end if
111:     if (i < n - 1) then
112:       JJ = II + n
113:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
114:     end if
115:     if (j > 0) then
116:       JJ = II - 1
117:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
118:     end if
119:     if (j < n - 1) then
120:       JJ = II + 1
121:       PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [JJ], [v], ADD_VALUES, ierr))
122:     end if
!    Diagonal entry: 4 - sigma1*h2 + sigma2*h2, i.e. the scaled stencil
!    center minus the real shift plus the imaginary shift (sigma2 is
!    redrawn for every row when use_random is true).
123:     if (use_random) PetscCallA(PetscRandomGetValue(rctx, sigma2, ierr))
124:     v = 4.0 - sigma1*h2 + sigma2*h2
125:     PetscCallA(MatSetValues(A, 1_PETSC_INT_KIND, [II], 1_PETSC_INT_KIND, [II], [v], ADD_VALUES, ierr))
126:   end do
!  This random context is finished; a fresh one is created later for the
!  random exact-solution vector.
127:   if (use_random) PetscCallA(PetscRandomDestroy(rctx, ierr))

129: !  Assemble matrix, using the 2-step process:
130: !       MatAssemblyBegin(), MatAssemblyEnd()
131: !  Computations can be done while messages are in transition
132: !  by placing code between these two statements.

134:   PetscCallA(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY, ierr))
135:   PetscCallA(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY, ierr))

137: !  Create parallel vectors.
138: !   - Here, the parallel partitioning of the vector is determined by
139: !     PETSc at runtime.  We could also specify the local dimensions
140: !     if desired.
141: !   - Note: We form 1 vector from scratch and then duplicate as needed.

143:   PetscCallA(VecCreate(PETSC_COMM_WORLD, u, ierr))
144:   PetscCallA(VecSetSizes(u, PETSC_DECIDE, dim, ierr))
145:   PetscCallA(VecSetFromOptions(u, ierr))
146:   PetscCallA(VecDuplicate(u, b, ierr))
147:   PetscCallA(VecDuplicate(b, x, ierr))

149: !  Set exact solution; then compute right-hand-side vector.

151:   if (use_random) then
152:     PetscCallA(PetscRandomCreate(PETSC_COMM_WORLD, rctx, ierr))
153:     PetscCallA(PetscRandomSetFromOptions(rctx, ierr))
154:     PetscCallA(VecSetRandom(u, rctx, ierr))
155:   else
156:     PetscCallA(VecSet(u, pfive, ierr))
157:   end if
!  b = A*u, so u is the exact solution of the system by construction.
158:   PetscCallA(MatMult(A, u, b, ierr))

160: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
161: !         Create the linear solver and set various options
162: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

164: !  Create linear solver context

166:   PetscCallA(KSPCreate(PETSC_COMM_WORLD, ksp, ierr))

168: !  Set operators. Here the matrix that defines the linear system
169: !  also serves as the matrix used to construct the preconditioner.

171:   PetscCallA(KSPSetOperators(ksp, A, A, ierr))

173: !  Set runtime options, e.g.,
174: !      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>

176:   PetscCallA(KSPSetFromOptions(ksp, ierr))

178: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179: !                      Solve the linear system
180: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

182:   PetscCallA(KSPSolve(ksp, b, x, ierr))

184: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
185: !                     Check solution and clean up
186: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

188: !  Check the error

!  x <- x - u, then report ||x - u||_2; only rank 0 prints.
190:   PetscCallA(VecAXPY(x, none, u, ierr))
191:   PetscCallA(VecNorm(x, NORM_2, norm, ierr))
192:   PetscCallA(KSPGetIterationNumber(ksp, its, ierr))
193:   if (rank == 0) then
194:     if (norm > 1.e-12) then
195:       write (6, 100) norm, its
196:     else
197:       write (6, 110) its
198:     end if
199:   end if
200: 100 format('Norm of error ', e11.4, ',iterations ', i5)
201: 110 format('Norm of error < 1.e-12,iterations ', i5)

203: !  Free work space.  All PETSc objects should be destroyed when they
204: !  are no longer needed.

206:   if (use_random) PetscCallA(PetscRandomDestroy(rctx, ierr))
207:   PetscCallA(KSPDestroy(ksp, ierr))
208:   PetscCallA(VecDestroy(u, ierr))
209:   PetscCallA(VecDestroy(x, ierr))
210:   PetscCallA(VecDestroy(b, ierr))
211:   PetscCallA(MatDestroy(A, ierr))

213:   PetscCallA(PetscFinalize(ierr))
214: end

216: !
217: !/*TEST
218: !
219: !   build:
220: !      requires: complex
221: !
222: !   test:
223: !      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
224: !      output_file: output/ex11f_1.out
225: !
226: !TEST*/