Source listing: ex6f.F (PETSc example)

Taken from petsc-3.7.6, dated 2017-04-24.
```fortran
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!/*T
!  Concepts: KSP^repeatedly solving linear systems;
!  Concepts: KSP^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
!
!  The following include statements are required for KSP Fortran programs:
!     petscsys.h    - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Other include statements may be needed if using additional PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
24:       program main
25: #include <petsc/finclude/petscsys.h>
26: #include <petsc/finclude/petscvec.h>
27: #include <petsc/finclude/petscmat.h>
28: #include <petsc/finclude/petscpc.h>
29: #include <petsc/finclude/petscksp.h>

31: !  Variables:
32: !
33: !  A       - matrix that defines linear system
34: !  ksp    - KSP context
35: !  ksp     - KSP context
36: !  x, b, u - approx solution, RHS, exact solution vectors
37: !
38:       Vec     x,u,b
39:       Mat     A
40:       KSP    ksp
41:       PetscInt i,j,II,JJ,m,n
42:       PetscInt Istart,Iend
43:       PetscInt nsteps,one
44:       PetscErrorCode ierr
45:       PetscBool  flg
46:       PetscScalar  v

49:       call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
50:       m      = 3
51:       n      = 3
52:       nsteps = 2
53:       one    = 1
54:       call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,    &
55:      &                        '-m',m,flg,ierr)
56:       call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,    &
57:      &                        '-n',n,flg,ierr)
58:       call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,    &
59:      &                        '-nsteps',nsteps,flg,ierr)

61: !  Create parallel matrix, specifying only its global dimensions.
62: !  When using MatCreate(), the matrix format can be specified at
63: !  runtime. Also, the parallel partitioning of the matrix is
64: !  determined by PETSc at runtime.

66:       call MatCreate(PETSC_COMM_WORLD,A,ierr)
67:       call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
68:       call MatSetFromOptions(A,ierr)
69:       call MatSetUp(A,ierr)

71: !  The matrix is partitioned by contiguous chunks of rows across the
72: !  processors.  Determine which rows of the matrix are locally owned.

74:       call MatGetOwnershipRange(A,Istart,Iend,ierr)

76: !  Set matrix elements.
77: !   - Each processor needs to insert only elements that it owns
78: !     locally (but any non-local elements will be sent to the
79: !     appropriate processor during matrix assembly).
80: !   - Always specify global rows and columns of matrix entries.

82:       do 10, II=Istart,Iend-1
83:         v = -1.0
84:         i = II/n
85:         j = II - i*n
86:         if (i.gt.0) then
87:           JJ = II - n
89:         endif
90:         if (i.lt.m-1) then
91:           JJ = II + n
93:         endif
94:         if (j.gt.0) then
95:           JJ = II - 1
97:         endif
98:         if (j.lt.n-1) then
99:           JJ = II + 1
101:         endif
102:         v = 4.0
104:  10   continue

106: !  Assemble matrix, using the 2-step process:
107: !       MatAssemblyBegin(), MatAssemblyEnd()
108: !  Computations can be done while messages are in transition
109: !  by placing code between these two statements.

111:       call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
112:       call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

114: !  Create parallel vectors.
115: !   - When using VecCreate(), the parallel partitioning of the vector
116: !     is determined by PETSc at runtime.
117: !   - Note: We form 1 vector from scratch and then duplicate as needed.

119:       call VecCreate(PETSC_COMM_WORLD,u,ierr)
120:       call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
121:       call VecSetFromOptions(u,ierr)
122:       call VecDuplicate(u,b,ierr)
123:       call VecDuplicate(b,x,ierr)

125: !  Create linear solver context

127:       call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

129: !  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

131:       call KSPSetFromOptions(ksp,ierr)

133: !  Solve several linear systems in succession

135:       do 100 i=1,nsteps
136:          call solve1(ksp,A,x,b,u,i,nsteps,ierr)
137:  100  continue

139: !  Free work space.  All PETSc objects should be destroyed when they
140: !  are no longer needed.

142:       call VecDestroy(u,ierr)
143:       call VecDestroy(x,ierr)
144:       call VecDestroy(b,ierr)
145:       call MatDestroy(A,ierr)
146:       call KSPDestroy(ksp,ierr)

148:       call PetscFinalize(ierr)
149:       end

! -----------------------------------------------------------------------
!
153:       subroutine solve1(ksp,A,x,b,u,count,nsteps,ierr)

155: #include <petsc/finclude/petscsys.h>
156: #include <petsc/finclude/petscvec.h>
157: #include <petsc/finclude/petscmat.h>
158: #include <petsc/finclude/petscpc.h>
159: #include <petsc/finclude/petscksp.h>

161: !
162: !   solve1 - This routine is used for repeated linear system solves.
163: !   We update the linear system matrix each time, but retain the same
164: !   preconditioning matrix for all linear solves.
165: !
166: !      A - linear system matrix
167: !      A2 - preconditioning matrix
168: !
169:       PetscScalar  v,val
170:       PetscInt II,Istart,Iend
171:       PetscInt count,nsteps,one
172:       PetscErrorCode ierr
173:       Mat     A
174:       KSP     ksp
175:       Vec     x,b,u

177: ! Use common block to retain matrix between successive subroutine calls
178:       Mat              A2
179:       PetscMPIInt      rank
180:       PetscBool        pflag
181:       common /my_data/ A2,pflag,rank

183:       one = 1
184: ! First time thorough: Create new matrix to define the linear system
185:       if (count .eq. 1) then
186:         call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
187:         pflag = .false.
188:         call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,     &
189:      &                           '-mat_view',pflag,ierr)
190:         if (pflag) then
191:           if (rank .eq. 0) write(6,100)
192:           call flush(6)
193:         endif
194:         call MatConvert(A,MATSAME,MAT_INITIAL_MATRIX,A2,ierr)
195: ! All other times: Set previous solution as initial guess for next solve.
196:       else
197:         call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
198:       endif

200: ! Alter the matrix A a bit
201:       call MatGetOwnershipRange(A,Istart,Iend,ierr)
202:       do 20, II=Istart,Iend-1
203:         v = 2.0
205:  20   continue
206:       call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
207:       if (pflag) then
208:         if (rank .eq. 0) write(6,110)
209:         call flush(6)
210:       endif
211:       call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

213: ! Set the exact solution; compute the right-hand-side vector
214:       val = 1.0*real(count)
215:       call VecSet(u,val,ierr)
216:       call MatMult(A,u,b,ierr)

218: ! Set operators, keeping the identical preconditioner matrix for
219: ! all linear solves.  This approach is often effective when the
220: ! linear systems do not change very much between successive steps.
221:       call KSPSetReusePreconditioner(ksp,PETSC_TRUE,ierr)
222:       call KSPSetOperators(ksp,A,A2,ierr)

224: ! Solve linear system
225:       call KSPSolve(ksp,b,x,ierr)

227: ! Destroy the preconditioner matrix on the last time through
228:       if (count .eq. nsteps) call MatDestroy(A2,ierr)

230:  100  format('previous matrix: preconditioning')
231:  110  format('next matrix: defines linear system')

233:       end

```