Source: ex21f.F (petsc-3.3-p7, 2013-05-11)
!
!  Solves a linear system in parallel with KSP.  Also indicates
!  use of a user-provided preconditioner.  Input parameters include:
!     -m <mesh_m>, -n <mesh_n> : number of mesh points in each direction
!
!  Program usage: mpiexec ex21f [-help] [all PETSc options]
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: PC^setting a user-defined shell preconditioner
!  Processors: n
!T*/
!
! -------------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                          Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  petscsys.h - base PETSc routines
!  petscvec.h - vectors
!  petscmat.h - matrices
!  petscksp.h - Krylov subspace methods
!  petscpc.h  - preconditioners
!
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>
#include <finclude/petscksp.h>

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver (Krylov subspace method) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      double precision norm,tol
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,its,ione
      PetscMPIInt      rank
      PetscBool        flg
      PetscErrorCode   ierr

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp,SampleShellPCApply

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC             jacobi,sor
      Vec            work

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      one     = 1.0
      neg_one = -1.0
      m       = 8
      n       = 7
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!       Compute the matrix and right-hand-side vector that define
!       the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
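!   - The assembled operator is the standard 5-point Laplacian: each
!     row gets 4 on the diagonal and -1 for every existing mesh
!     neighbour (boundary points simply have fewer neighbours).
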
      do 10, II=Istart,Iend-1
        v = -1.0
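!       Convert the global row number II into grid indices (i,j)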
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.
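!  (u is set to all ones and b = A*u, so u is the known exact solution
!  against which the computed x is compared below.)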
      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the PC context from the KSP context, we can then
!     directly call any KSP and PC routines to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION,     &
     &     PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)

!
!  Set a user-defined shell preconditioner
!

!  (Required) Indicate to PETSc that we are using a shell preconditioner
      call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
      call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner.
!  Note: if you use PCShellSetSetUp(), this will be done for you
!  automatically when the preconditioner is set up.
      call SampleShellPCSetUp(pc,x,ierr)
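!  A rough sketch of the PCShellSetSetUp() alternative (the routine
!  mysetup is hypothetical, not part of this example): the registered
!  setup routine receives only the PC and the error code, e.g.
!        external mysetup
!        call PCShellSetSetUp(pc,mysetup,ierr)
!  This example instead calls SampleShellPCSetUp() directly, because
!  its setup also needs the vector x to size the work vector.
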
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
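!  (VecAXPY() overwrites x with x - u; its 2-norm is the error norm.)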
      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Free up PCShell data
      call PCDestroy(sor,ierr)
      call PCDestroy(jacobi,ierr)
      call VecDestroy(work,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/
!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - vector
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, the shell preconditioner is the sum of a Jacobi
!   preconditioner and an SOR preconditioner.  Here we create those
!   two sub-preconditioners and a work vector; all three are then
!   used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,x,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>

      PC             pc
      Vec            x
      Mat            pmat
      PetscErrorCode ierr

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC             jacobi,sor
      Vec            work

      call PCGetOperators(pc,PETSC_NULL_OBJECT,pmat,PETSC_NULL_INTEGER, &
     &     ierr)
      call PCCreate(PETSC_COMM_WORLD,jacobi,ierr)
      call PCSetType(jacobi,PCJACOBI,ierr)
      call PCSetOperators(jacobi,pmat,pmat,DIFFERENT_NONZERO_PATTERN,   &
     &     ierr)
      call PCSetUp(jacobi,ierr)

      call PCCreate(PETSC_COMM_WORLD,sor,ierr)
      call PCSetType(sor,PCSOR,ierr)
      call PCSetOperators(sor,pmat,pmat,DIFFERENT_NONZERO_PATTERN,      &
     &     ierr)
!      call PCSORSetSymmetric(sor,SOR_LOCAL_SYMMETRIC_SWEEP,ierr)
      call PCSetUp(sor,ierr)
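
!  Work vector used by SampleShellPCApply() to hold the SOR result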
      call VecDuplicate(x,work,ierr)

      end
! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - input vector
!
!   Output Parameters:
!   y    - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code applies the sum of the Jacobi and SOR preconditioners
!   that were created in SampleShellPCSetUp().
!
!   You can get the exact same effect with the PCCOMPOSITE
!   preconditioner using
!      mpiexec -n 1 ex21f -ksp_monitor -pc_type composite
!         -pc_composite_pcs jacobi,sor -pc_composite_type additive
!
      subroutine SampleShellPCApply(pc,x,y,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscpc.h>

      PC             pc
      Vec            x,y
      PetscErrorCode ierr
      PetscScalar    one

!  Common block to store data for user-provided preconditioner
      common /mypcs/ jacobi,sor,work
      PC             jacobi,sor
      Vec            work

      one = 1.0
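
!  Apply the two preconditioners additively:
!     y    <- B_jacobi * x
!     work <- B_sor * x
!     y    <- y + work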
      call PCApply(jacobi,x,y,ierr)
      call PCApply(sor,x,work,ierr)
      call VecAXPY(y,one,work,ierr)

      end