Actual source code: ex2f.F

petsc-master 2016-09-27
  1: !
  2: !  Description: Solves a linear system in parallel with KSP (Fortran code).
  3: !               Also shows how to set a user-defined monitoring routine.
  4: !
  5: !
  6: !/*T
  7: !  Concepts: KSP^basic parallel example
  8: !  Concepts: KSP^setting a user-defined monitoring routine
  9: !  Processors: n
 10: !T*/
 11: !
 12: ! -----------------------------------------------------------------------

 14:       program main
 15:       implicit none

 17: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 18: !                    Include files
 19: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 20: !
 21: !  This program uses CPP for preprocessing, as indicated by the use of
 22: !  PETSc include files in the directory petsc/include/petsc/finclude.  This
 23: !  convention enables use of the CPP preprocessor and of the #include
 24: !  statements that define PETSc objects and variables.
 25: !
 26: !  Use of the conventional Fortran include statements is also supported.
 27: !  In this case, the PETSc include files are located in the directory
 28: !  petsc/include/foldinclude.
 29: !
 30: !  Since one must be very careful to include each file no more than once
 31: !  in a Fortran routine, application programmers must explicitly list
 32: !  each file needed for the various PETSc components within their
 33: !  program (unlike the C/C++ interface).
 34: !
 35: !  See the Fortran section of the PETSc users manual for details.
 36: !
 37: !  The following include statements are required for KSP Fortran programs:
 38: !     petscsys.h    - base PETSc routines
 39: !     petscvec.h    - vectors
 40: !     petscmat.h    - matrices
 41: !     petscpc.h     - preconditioners
 42: !     petscksp.h    - Krylov subspace methods
 43: !  Additional include statements may be needed if using additional
 44: !  PETSc routines in a Fortran program, e.g.,
 45: !     petscviewer.h - viewers
 46: !     petscis.h     - index sets
 47: !
 48:  #include <petsc/finclude/petscsys.h>
 49:  #include <petsc/finclude/petscvec.h>
 50:  #include <petsc/finclude/petscmat.h>
 51:  #include <petsc/finclude/petscpc.h>
 52:  #include <petsc/finclude/petscksp.h>
 53:  #include <petsc/finclude/petscviewer.h>
 54: !
 55: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 56: !                   Variable declarations
 57: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 58: !
 59: !  Variables:
 60: !     ksp      - linear solver (Krylov subspace method) context
 62: !     pc       - preconditioner context
 63: !     x, b, u  - approx solution, right-hand-side, exact solution vectors
 64: !     A        - matrix that defines linear system
 65: !     its      - iterations for convergence
 66: !     norm     - norm of error in solution
 67: !     rctx     - random number generator context
 68: !
 69: !  Note that vectors are declared as PETSc "Vec" objects.  These vectors
 70: !  are mathematical objects that contain more than just an array of
 71: !  double precision numbers. I.e., vectors in PETSc are not just
 72: !        double precision x(*).
 73: !  However, local vector data can be easily accessed via VecGetArray().
 74: !  See the Fortran section of the PETSc users manual for details.
 75: !
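!  For illustration only (editor sketch, not used in this example): the
!  classic Fortran access pattern for VecGetArray() mentioned above.  The
!  names x_array, i_x, nlocal and k below are illustrative and are not
!  declared in this program.
!
!      PetscScalar x_array(1)
!      PetscOffset i_x
!      call VecGetArray(x,x_array,i_x,ierr)
!      do k=1,nlocal
!         write(6,*) x_array(i_x+k)
!      enddo
!      call VecRestoreArray(x,x_array,i_x,ierr)
!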
 76:       PetscReal  norm
 77:       PetscInt  i,j,II,JJ,m,n,its
 78:       PetscInt  Istart,Iend,ione
 79:       PetscErrorCode ierr
 80:       PetscMPIInt     rank,size
 81:       PetscBool   flg
 82:       PetscScalar v,one,neg_one
 83:       Vec         x,b,u
 84:       Mat         A
 85:       KSP         ksp
 86:       PetscRandom rctx
 87:       PetscViewerAndFormat vf

 89: !  These variables are not currently used.
 90: !      PC          pc
 91: !      PCType      ptype
 92: !      PetscReal tol


 95: !  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
 96: !  MUST be declared as external.

 98:       external MyKSPMonitor,MyKSPConverged

100: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
101: !                 Beginning of program
102: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

104:       call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
105:       if (ierr .ne. 0) then
106:         print*,'Unable to initialize PETSc'
107:         stop
108:       endif
109:       m = 3
110:       n = 3
111:       one  = 1.0
112:       neg_one = -1.0
113:       ione    = 1
114:       call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,    &
115:      &                        '-m',m,flg,ierr)
116:       call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,    &
117:      &                        '-n',n,flg,ierr)
118:       call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
119:       call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)

121: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
122: !      Compute the matrix and right-hand-side vector that define
123: !      the linear system, Ax = b.
124: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

126: !  Create parallel matrix, specifying only its global dimensions.
127: !  When using MatCreate(), the matrix format can be specified at
128: !  runtime. Also, the parallel partitioning of the matrix is
129: !  determined by PETSc at runtime.

131:       call MatCreate(PETSC_COMM_WORLD,A,ierr)
132:       call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
133:       call MatSetFromOptions(A,ierr)
134:       call MatSetUp(A,ierr)
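
!  Editor note: for larger problems one would normally also preallocate
!  the matrix storage; roughly 5 nonzeros per row suffice for this
!  5-point stencil.  A commented-out sketch, assuming a PetscInt
!  variable ifive set to 5 (not declared in this example):
!
!      call MatSeqAIJSetPreallocation(A,ifive,PETSC_NULL_INTEGER,ierr)
!      call MatMPIAIJSetPreallocation(A,ifive,PETSC_NULL_INTEGER,ifive,
!     &                               PETSC_NULL_INTEGER,ierr)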

136: !  Currently, all PETSc parallel matrix formats are partitioned by
137: !  contiguous chunks of rows across the processors.  Determine which
138: !  rows of the matrix are locally owned.

140:       call MatGetOwnershipRange(A,Istart,Iend,ierr)

142: !  Set matrix elements for the 2-D, five-point stencil in parallel.
143: !   - Each processor needs to insert only elements that it owns
144: !     locally (but any non-local elements will be sent to the
145: !     appropriate processor during matrix assembly).
146: !   - Always specify global rows and columns of matrix entries.
147: !   - Note that MatSetValues() uses 0-based row and column numbers
148: !     in Fortran as well as in C.

150: !     Note: this uses the less common natural ordering that orders first
151: !     all the unknowns for x = h, then for x = 2h, etc.; hence you see JJ = II +- n
152: !     instead of JJ = II +- m as you might expect. The more standard ordering
153: !     would first do all variables for y = h, then y = 2h etc.
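!     Worked example (editor note): with m = n = 3, global row II = 4 has
!     i = II/n = 1 and j = II - i*n = 1 (the interior grid point), so the
!     loop below inserts -1.0 in columns 1, 7, 3 and 5 (JJ = II +- n and
!     JJ = II +- 1) and 4.0 on the diagonal (column 4).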

155:       do 10, II=Istart,Iend-1
156:         v = -1.0
157:         i = II/n
158:         j = II - i*n
159:         if (i.gt.0) then
160:           JJ = II - n
161:           call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
162:         endif
163:         if (i.lt.m-1) then
164:           JJ = II + n
165:           call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
166:         endif
167:         if (j.gt.0) then
168:           JJ = II - 1
169:           call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
170:         endif
171:         if (j.lt.n-1) then
172:           JJ = II + 1
173:           call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
174:         endif
175:         v = 4.0
176:         call  MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
177:  10   continue

179: !  Assemble matrix, using the 2-step process:
180: !       MatAssemblyBegin(), MatAssemblyEnd()
181: !  Computations can be done while messages are in transition,
182: !  by placing code between these two statements.

184:       call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
185:       call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
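
!  Editor note: MAT_FINAL_ASSEMBLY is appropriate here because all
!  insertions are complete.  If INSERT_VALUES and ADD_VALUES had to be
!  mixed, an intermediate assembly with MAT_FLUSH_ASSEMBLY would be
!  needed between the two phases, e.g. (sketch only):
!
!      call MatAssemblyBegin(A,MAT_FLUSH_ASSEMBLY,ierr)
!      call MatAssemblyEnd(A,MAT_FLUSH_ASSEMBLY,ierr)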

187: !  Create parallel vectors.
188: !   - Here, the parallel partitioning of the vector is determined by
189: !     PETSc at runtime.  We could also specify the local dimensions
190: !     if desired -- or use the more general routine VecCreate().
191: !   - When solving a linear system, the vectors and matrices MUST
192: !     be partitioned accordingly.  PETSc automatically generates
193: !     appropriately partitioned matrices and vectors when MatCreate()
194: !     and VecCreate() are used with the same communicator.
195: !   - Note: We form 1 vector from scratch and then duplicate as needed.

197:       call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
198:       call VecSetFromOptions(u,ierr)
199:       call VecDuplicate(u,b,ierr)
200:       call VecDuplicate(b,x,ierr)
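
!  Editor sketch (commented out) of the more general VecCreate() path
!  mentioned above; it is intended to be equivalent to the
!  VecCreateMPI() call used here:
!
!      call VecCreate(PETSC_COMM_WORLD,u,ierr)
!      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
!      call VecSetFromOptions(u,ierr)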

202: !  Set exact solution; then compute right-hand-side vector.
203: !  By default we use an exact solution of a vector with all
204: !  elements of 1.0;  Alternatively, using the runtime option
205: !  -random_exact_sol forms a solution vector with random components.

207:       call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,                    &
208:      &             '-random_exact_sol',flg,ierr)
209:       if (flg) then
210:          call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
211:          call PetscRandomSetFromOptions(rctx,ierr)
212:          call VecSetRandom(u,rctx,ierr)
213:          call PetscRandomDestroy(rctx,ierr)
214:       else
215:          call VecSet(u,one,ierr)
216:       endif
217:       call MatMult(A,u,b,ierr)

219: !  View the exact solution vector if desired

221:       call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,                    &
222:      &             '-view_exact_sol',flg,ierr)
223:       if (flg) then
224:          call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
225:       endif

227: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
228: !         Create the linear solver and set various options
229: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

231: !  Create linear solver context

233:       call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

235: !  Set operators. Here the matrix that defines the linear system
236: !  also serves as the preconditioning matrix.

238:       call KSPSetOperators(ksp,A,A,ierr)
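
!  Editor note: the last two arguments need not be the same matrix; a
!  separately assembled matrix P (hypothetical, not built in this
!  example) could be passed as the preconditioning matrix:
!
!      call KSPSetOperators(ksp,A,P,ierr)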

240: !  Set linear solver defaults for this problem (optional).
241: !   - By extracting the KSP and PC contexts from the KSP context,
242: !     we can then directly call any KSP and PC routines
243: !     to set various options.
244: !   - The following four statements are optional; all of these
245: !     parameters could alternatively be specified at runtime via
246: !     KSPSetFromOptions(). All of these defaults can be
247: !     overridden at runtime, as indicated below.

249: !     We comment out this section of code since the Jacobi
250: !     preconditioner is not a good general default.

252: !      call KSPGetPC(ksp,pc,ierr)
253: !      ptype = PCJACOBI
254: !      call PCSetType(pc,ptype,ierr)
255: !      tol = 1.e-7
256: !      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,
257: !     &     PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)

259: !  Set user-defined monitoring routine if desired

261:       call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,       &
262:      &                         '-my_ksp_monitor',flg,ierr)
263:       if (flg) then
264:         call KSPMonitorSet(ksp,MyKSPMonitor,PETSC_NULL_OBJECT,          &
265:      &        PETSC_NULL_FUNCTION,ierr)
266: !
267: !     Also use the default KSP monitor routine showing how it may be used from Fortran
268: !
269:         call PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,      &
270:      &          PETSC_VIEWER_DEFAULT,vf,ierr)
271:         call KSPMonitorSet(ksp,KSPMonitorDefault,vf,                    &
272:      &                     PetscViewerAndFormatDestroy,ierr)
273:       endif


276: !  Set runtime options, e.g.,
277: !      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
278: !  These options will override those specified above as long as
279: !  KSPSetFromOptions() is called _after_ any other customization
280: !  routines.

282:       call KSPSetFromOptions(ksp,ierr)
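
!  For example (editor note), assuming the executable is named ex2f,
!  one might run:
!      mpiexec -n 4 ./ex2f -ksp_type cg -pc_type jacobi -ksp_rtol 1.e-7 -ksp_monitor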

284: !  Set convergence test routine if desired

286:       call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,       &
287:      &                         '-my_ksp_convergence',flg,ierr)
288:       if (flg) then
289:         call KSPSetConvergenceTest(ksp,MyKSPConverged,                  &
290:      &          PETSC_NULL_OBJECT,PETSC_NULL_FUNCTION,ierr)
291:       endif
292: !
293: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
294: !                      Solve the linear system
295: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

297:       call KSPSolve(ksp,b,x,ierr)
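
!  Editor sketch (commented out): one can also query why the iteration
!  stopped; a negative KSPConvergedReason indicates divergence.  This
!  assumes an extra declaration "KSPConvergedReason reason".
!
!      call KSPGetConvergedReason(ksp,reason,ierr)
!      if (reason .lt. 0 .and. rank .eq. 0) then
!         print*,'KSPSolve did not converge'
!      endif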

299: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
300: !                     Check solution and clean up
301: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

303: !  Check the error
304:       call VecAXPY(x,neg_one,u,ierr)
305:       call VecNorm(x,NORM_2,norm,ierr)
306:       call KSPGetIterationNumber(ksp,its,ierr)
307:       if (rank .eq. 0) then
308:         if (norm .gt. 1.e-12) then
309:            write(6,100) norm,its
310:         else
311:            write(6,110) its
312:         endif
313:       endif
314:   100 format('Norm of error ',e11.4,' iterations ',i5)
315:   110 format('Norm of error < 1.e-12 iterations ',i5)

317: !  Free work space.  All PETSc objects should be destroyed when they
318: !  are no longer needed.

320:       call KSPDestroy(ksp,ierr)
321:       call VecDestroy(u,ierr)
322:       call VecDestroy(x,ierr)
323:       call VecDestroy(b,ierr)
324:       call MatDestroy(A,ierr)

326: !  Always call PetscFinalize() before exiting a program.  This routine
327: !    - finalizes the PETSc libraries as well as MPI
328: !    - provides summary and diagnostic information if certain runtime
329: !      options are chosen (e.g., -log_summary).  See PetscFinalize()
330: !      manpage for more information.

332:       call PetscFinalize(ierr)
333:       end

335: ! --------------------------------------------------------------
336: !
337: !  MyKSPMonitor - This is a user-defined routine for monitoring
338: !  the KSP iterative solvers.
339: !
340: !  Input Parameters:
341: !    ksp   - iterative context
342: !    n     - iteration number
343: !    rnorm - 2-norm (preconditioned) residual value (may be estimated)
344: !    dummy - optional user-defined monitor context (unused here)
345: !
346:       subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)

348:       implicit none

350:  #include <petsc/finclude/petscsys.h>
351:  #include <petsc/finclude/petscvec.h>
352:  #include <petsc/finclude/petscksp.h>

354:       KSP              ksp
355:       Vec              x
356:       PetscErrorCode ierr
357:       PetscInt n,dummy
358:       PetscMPIInt rank
359:       PetscReal rnorm

361: !  Build the solution vector

363:       call KSPBuildSolution(ksp,PETSC_NULL_OBJECT,x,ierr)

365: !  Write the solution vector and residual norm to stdout
366: !   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
367: !     handles data from multiple processors so that the
368: !     output is not jumbled.

370:       call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
371:       if (rank .eq. 0) write(6,100) n
372:       call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
373:       if (rank .eq. 0) write(6,200) n,rnorm

375:  100  format('iteration ',i5,' solution vector:')
376:  200  format('iteration ',i5,' residual norm ',e11.4)
377:       ierr = 0
378:       end

380: ! --------------------------------------------------------------
381: !
382: !  MyKSPConverged - This is a user-defined routine for testing
383: !  convergence of the KSP iterative solvers.
384: !
385: !  Input Parameters:
386: !    ksp   - iterative context
387: !    n     - iteration number
388: !    rnorm - 2-norm (preconditioned) residual value (may be estimated)
389: !    dummy - optional user-defined convergence context (unused here)
390: !
391:       subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)

393:       implicit none

395:  #include <petsc/finclude/petscsys.h>
396:  #include <petsc/finclude/petscvec.h>
397:  #include <petsc/finclude/petscksp.h>

399:       KSP              ksp
400:       PetscErrorCode ierr
401:       PetscInt n,dummy
402:       KSPConvergedReason flag
403:       PetscReal rnorm
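
!  Note (editor comment): a positive value of flag tells KSP that the
!  iteration has converged, zero means continue iterating, and a
!  negative value signals divergence.  Here a simple absolute tolerance
!  of 0.05 on the residual norm is used.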

405:       if (rnorm .le. .05) then
406:         flag = 1
407:       else
408:         flag = 0
409:       endif
410:       ierr = 0

412:       end