Actual source code: ex7f.F90

petsc-3.11.2 2019-05-18

! Block Jacobi preconditioner for solving a linear system in parallel with KSP.
! The code shows how to set particular block sizes and how to use different
! linear solvers on the individual blocks.

! This example focuses on ways to customize the block Jacobi preconditioner.
! See ex1.c and ex2.c for more detailed comments on the basic usage of KSP
! (including working with matrices and vectors).

! Recall: The block Jacobi method is equivalent to the ASM preconditioner with zero overlap.

!/*T
!   Concepts: KSP^customizing the block Jacobi preconditioner
!   Processors: n
!T*/
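
! A sample invocation (illustrative only; the executable name and process count
! are placeholders, -m is read by this program, -pc_bjacobi_blocks is the runtime
! alternative discussed below, and -ksp_monitor is the standard PETSc option for
! monitoring the residual norm):
!
!   mpiexec -n 2 ./ex7f -m 4 -pc_bjacobi_blocks 4 -ksp_monitor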


program main
#include <petsc/finclude/petscksp.h>
      use petscksp

      implicit none
      Vec             :: x,b,u        ! approx solution, RHS, exact solution
      Mat             :: A            ! linear system matrix
      KSP             :: ksp          ! KSP context
      PC              :: myPc         ! PC context
      PC              :: subpc        ! PC context for subdomain
      PetscReal       :: norm         ! norm of solution error
      PetscReal,parameter :: tol = 1.e-6
      PetscErrorCode  :: ierr
      PetscInt        :: i,j,Ii,JJ,n
      PetscInt        :: m = 4        ! default grid parameter; may be overridden by -m
      PetscMPIInt     :: myRank,mySize
      PetscInt        :: its,nlocal,first,Istart,Iend
      PetscScalar     :: v
      PetscScalar, parameter :: &
        myNone = -1.0, &
        one    = 1.0
      PetscBool       :: isbjacobi,flg
      KSP,allocatable,dimension(:) :: subksp     ! array of local KSP contexts on this processor
      PetscInt,allocatable,dimension(:) :: blks
      character(len=80)    :: outputString

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr /= 0) then
        write(6,*)'Unable to initialize PETSc'
        stop
      endif

      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      CHKERRA(ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,myRank,ierr)
      CHKERRA(ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,mySize,ierr)
      CHKERRA(ierr)
      n=m+2

      !-------------------------------------------------------------------
      ! Compute the matrix and right-hand-side vector that define
      ! the linear system, Ax = b.
      !-------------------------------------------------------------------

      ! Create and assemble parallel matrix

      call  MatCreate(PETSC_COMM_WORLD,A,ierr)
      call  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call  MatSetFromOptions(A,ierr)
      call  MatMPIAIJSetPreallocation(A,5,PETSC_NULL_INTEGER,5,PETSC_NULL_INTEGER,ierr)
      call  MatSeqAIJSetPreallocation(A,5,PETSC_NULL_INTEGER,ierr)
      call  MatGetOwnershipRange(A,Istart,Iend,ierr)

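      ! Each locally owned row corresponds to one node of an m x n grid: every
      ! neighbour that exists in the i- and j-directions contributes -1 and the
      ! diagonal contributes 4, i.e. the familiar 5-point stencil. Each entry is
      ! set exactly once, so ADD_VALUES simply inserts the values.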
      do Ii=Istart,Iend-1
          v =-1.0; i = Ii/n; j = Ii - i*n
          if (i>0) then
            JJ = Ii - n
            call MatSetValues(A,1,Ii,1,JJ,v,ADD_VALUES,ierr);CHKERRA(ierr)
          endif

          if (i<m-1) then
            JJ = Ii + n
            call MatSetValues(A,1,Ii,1,JJ,v,ADD_VALUES,ierr);CHKERRA(ierr)
          endif

          if (j>0) then
            JJ = Ii - 1
            call MatSetValues(A,1,Ii,1,JJ,v,ADD_VALUES,ierr);CHKERRA(ierr)
          endif

          if (j<n-1) then
            JJ = Ii + 1
            call MatSetValues(A,1,Ii,1,JJ,v,ADD_VALUES,ierr);CHKERRA(ierr)
          endif

          v=4.0
          call MatSetValues(A,1,Ii,1,Ii,v,ADD_VALUES,ierr);CHKERRA(ierr)

      enddo

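      ! Complete the assembly; MatAssemblyBegin/End must follow all MatSetValues
      ! calls (and would communicate any entries destined for rows owned by
      ! other processes) before the matrix can be used.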
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr);CHKERRA(ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr);CHKERRA(ierr)

      ! Create parallel vectors

      call  VecCreate(PETSC_COMM_WORLD,u,ierr)
      CHKERRA(ierr)
      call  VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      CHKERRA(ierr)
      call  VecSetFromOptions(u,ierr)
      CHKERRA(ierr)
      call  VecDuplicate(u,b,ierr)
      call  VecDuplicate(b,x,ierr)

      ! Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      CHKERRA(ierr)
      call MatMult(A,u,b,ierr)
      CHKERRA(ierr)

      ! Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      CHKERRA(ierr)

      ! Set operators. Here the matrix that defines the linear system
      ! also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
      CHKERRA(ierr)

      ! Set default preconditioner for this program to be block Jacobi.
      ! This choice can be overridden at runtime with the option
      ! -pc_type <type>

      call KSPGetPC(ksp,myPc,ierr)
      CHKERRA(ierr)
      call PCSetType(myPc,PCBJACOBI,ierr)
      CHKERRA(ierr)

      !-------------------------------------------------------------------
      !            Define the problem decomposition
      !-------------------------------------------------------------------

      ! Call PCBJacobiSetTotalBlocks() to set individually the size of
      ! each block in the preconditioner.  This could also be done with
      ! the runtime option -pc_bjacobi_blocks <blocks>
      ! Also, see the command PCBJacobiSetLocalBlocks() to set the
      ! local blocks.

      ! Note: The default decomposition is 1 block per processor.

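      ! Here blks holds m block sizes, each equal to n, so the m blocks together
      ! account for all m*n rows of the matrix.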
      allocate(blks(m),source = n)

      call PCBJacobiSetTotalBlocks(myPc,m,blks,ierr)
      CHKERRA(ierr)
      deallocate(blks)

      !-------------------------------------------------------------------
      !       Set the linear solvers for the subblocks
      !-------------------------------------------------------------------

      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! Basic method, should be sufficient for the needs of most users.
      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! By default, the block Jacobi method uses the same solver on each
      ! block of the problem.  To set the same solver options on all blocks,
      ! use the prefix -sub before the usual PC and KSP options, e.g.,
      ! -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

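      ! For instance (an illustrative command line; the executable name and
      ! process count are placeholders):
      !   mpiexec -n 2 ./ex7f -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
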
      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! Advanced method, setting different solvers for various blocks.
      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      ! Note that each block's KSP context is completely independent of
      ! the others, and the full range of uniprocessor KSP options is
      ! available for each block. The following section of code is intended
      ! to be a simple illustration of setting different linear solvers for
      ! the individual blocks.  These choices are obviously not recommended
      ! for solving this particular problem.

      call PetscObjectTypeCompare(myPc,PCBJACOBI,isbjacobi,ierr)

      if (isbjacobi) then

        ! Call KSPSetUp() to set the block Jacobi data structures (including
        ! creation of an internal KSP context for each block).
        ! Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().

        call KSPSetUp(ksp,ierr)

        ! Extract the array of KSP contexts for the local blocks
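        ! First query only the number of local blocks (passing PETSC_NULL_KSP
        ! for the array argument), then allocate subksp to that length and
        ! call PCBJacobiGetSubKSP() again to retrieve the contexts themselves.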

        call PCBJacobiGetSubKSP(myPc,nlocal,first,PETSC_NULL_KSP,ierr)
        allocate(subksp(nlocal))
        call PCBJacobiGetSubKSP(myPc,nlocal,first,subksp,ierr)


        ! Loop over the local blocks, setting various KSP options for each block

        do i=0,nlocal-1

          call KSPGetPC(subksp(i+1),subpc,ierr)

          if (myRank>0) then

            if (mod(i,2)==1) then
              call PCSetType(subpc,PCILU,ierr); CHKERRA(ierr)
            else
              call PCSetType(subpc,PCNONE,ierr); CHKERRA(ierr)
              call KSPSetType(subksp(i+1),KSPBCGS,ierr); CHKERRA(ierr)
              call KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
              CHKERRA(ierr)
            endif

          else
            call PCSetType(subpc,PCJACOBI,ierr); CHKERRA(ierr)
            call KSPSetType(subksp(i+1),KSPGMRES,ierr); CHKERRA(ierr)
            call KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
            CHKERRA(ierr)
          endif

        end do

      endif

      !-------------------------------------------------------------------
      !                Solve the linear system
      !-------------------------------------------------------------------

      ! Set runtime options

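      ! Command-line options for the outer solver (e.g. -ksp_type, -pc_type,
      ! -ksp_monitor, -ksp_rtol) are processed here and override the settings
      ! made earlier in the code.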
      call KSPSetFromOptions(ksp,ierr); CHKERRA(ierr)

      ! Solve the linear system

      call KSPSolve(ksp,b,x,ierr); CHKERRA(ierr)

      !-------------------------------------------------------------------
      !               Check solution and clean up
      !-------------------------------------------------------------------

      !-------------------------------------------------------------------
      ! Check the error
      !-------------------------------------------------------------------

      !call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
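
      ! VecAXPY below overwrites x with x - u (myNone = -1.0), i.e. the solution error.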

      call VecAXPY(x,myNone,u,ierr)

      !call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)

      call VecNorm(x,NORM_2,norm,ierr); CHKERRA(ierr)
      call KSPGetIterationNumber(ksp,its,ierr); CHKERRA(ierr)
      write(outputString,*)'Norm of error',norm,'Iterations',its,'\n'
      call PetscPrintf(PETSC_COMM_WORLD,outputString,ierr); CHKERRA(ierr)

      ! Free work space.  All PETSc objects should be destroyed when they
      ! are no longer needed.
      if (allocated(subksp)) deallocate(subksp)   ! allocated only when block Jacobi was used
      call KSPDestroy(ksp,ierr); CHKERRA(ierr)
      call VecDestroy(u,ierr); CHKERRA(ierr)
      call VecDestroy(b,ierr); CHKERRA(ierr)
      call MatDestroy(A,ierr); CHKERRA(ierr)
      call VecDestroy(x,ierr); CHKERRA(ierr)
      call PetscFinalize(ierr); CHKERRA(ierr)

end program main