!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
!  Program usage: mpiexec -n <procs> ex2f [-help] [all PETSc options]
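!  For example (an illustrative invocation only; -m and -n set the grid
!  dimensions read below, and -ksp_monitor is a standard PETSc option):
!      mpiexec -n 4 ./ex2f -m 40 -n 40 -ksp_monitor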
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: KSP^setting a user-defined monitoring routine
!  Processors: n
!T*/
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of the conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.
!
!  The following include statements are required for KSP Fortran programs:
!     petscsys.h    - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Additional include statements may be needed if using additional
!  PETSc routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>
#include <finclude/petscksp.h>
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver (Krylov subspace method) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of error in solution
!     rctx    - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers; that is, vectors in PETSc are not simply
!  double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
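!  As a brief sketch of that idiom (not used in this example; xx_v and
!  xx_i are placeholder names chosen here for illustration):
!
!      PetscScalar xx_v(1)
!      PetscOffset xx_i
!      call VecGetArray(x,xx_v,xx_i,ierr)
!      ... access local entries as xx_v(xx_i+1), xx_v(xx_i+2), ...
!      call VecRestoreArray(x,xx_v,xx_i,ierr)
!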
      double precision norm
      PetscInt         i,j,II,JJ,m,n,its
      PetscInt         Istart,Iend,ione
      PetscErrorCode   ierr
      PetscMPIInt      rank,size
      PetscBool        flg
      PetscScalar      v,one,neg_one
      Vec              x,b,u
      Mat              A
      KSP              ksp
      PetscRandom      rctx

!  These variables are not currently used.
!     PC               pc
!     PCType           ptype
!     double precision tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m = 3
      n = 3
      one     = 1.0
      neg_one = -1.0
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
!
!  Note: this example uses the less common natural ordering that orders
!  first all the unknowns for x = h, then for x = 2h, etc.; hence you see
!  JJ = II +- n instead of JJ = II +- m as you might expect.  The more
!  standard ordering would first order all the variables for y = h, then
!  for y = 2h, etc.
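!
!  As a concrete illustration of the indexing (assuming the default
!  m = n = 3): global row II = 4 is the interior point with i = 1, j = 1,
!  so its off-diagonal couplings are the global columns JJ = 1 (II-n),
!  JJ = 7 (II+n), JJ = 3 (II-1), and JJ = 5 (II+1), with 4.0 on the
!  diagonal.
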
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.
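!  (For example, purely local work such as initializing application data
!  could, in principle, be placed between the two assembly calls below to
!  overlap computation with communication.)
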
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements equal to 1.0; alternatively, the runtime option
!  -random_exact_sol forms a solution vector with random components.
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                          &
     &     "-random_exact_sol",flg,ierr)
      if (flg) then
        call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
        call PetscRandomSetFromOptions(rctx,ierr)
        call VecSetRandom(u,rctx,ierr)
        call PetscRandomDestroy(rctx,ierr)
      else
        call VecSet(u,one,ierr)
      endif
      call MatMult(A,u,b,ierr)

!  View the exact solution vector if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                          &
     &     "-view_exact_sol",flg,ierr)
      if (flg) then
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following four statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions().  All of these defaults can be
!     overridden at runtime, as indicated below.

!  We comment out this section of code since the Jacobi
!  preconditioner is not a good general default.

!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION,
!     &     PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)

!  Set user-defined monitoring routine if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-my_ksp_monitor',       &
     &     flg,ierr)
      if (flg) then
        call KSPMonitorSet(ksp,MyKSPMonitor,PETSC_NULL_OBJECT,               &
     &                     PETSC_NULL_FUNCTION,ierr)
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.
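!
!  For example, one illustrative choice of runtime options (all standard
!  PETSc options) might be:
!      mpiexec -n 4 ./ex2f -ksp_type cg -pc_type bjacobi -ksp_rtol 1.e-7 -ksp_monitor
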
      call KSPSetFromOptions(ksp,ierr)

!  Set convergence test routine if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                         &
     &     '-my_ksp_convergence',flg,ierr)
      if (flg) then
        call KSPSetConvergenceTest(ksp,MyKSPConverged,                       &
     &       PETSC_NULL_OBJECT,PETSC_NULL_FUNCTION,ierr)
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',e11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12,iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).  See the PetscFinalize()
!      manpage for more information.

      call PetscFinalize(ierr)
      end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscksp.h>

      KSP              ksp
      Vec              x
      PetscErrorCode   ierr
      PetscInt         n,dummy
      PetscMPIInt      rank
      double precision rnorm

!  Build the solution vector

      call KSPBuildSolution(ksp,PETSC_NULL_OBJECT,x,ierr)

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      if (rank .eq. 0) write(6,100) n
      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e11.4)
      ierr = 0
      end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscksp.h>

      KSP                ksp
      PetscErrorCode     ierr
      PetscInt           n,dummy
      KSPConvergedReason flag
      double precision   rnorm
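
!  A positive value of flag tells KSP that the iteration has converged,
!  while zero tells it to keep iterating; the simple absolute test on the
!  residual norm below is for demonstration purposes only.
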
      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif
      ierr = 0
      end