Actual source code: ex15f.F
petsc-3.5.4 2015-05-23
!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include:
!      -user_defined_pc : Activate a user-defined preconditioner
!
!
!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
!
! -------------------------------------------------------------------------
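!
!  A sketch of a typical invocation (the launcher and binary name are
!  build-dependent assumptions, not part of this source):
!
!     mpiexec -n 2 ./ex15f -m 16 -n 16 -user_defined_pc -ksp_monitor
!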
      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                          Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  petscsys.h - base PETSc routines       petscvec.h - vectors
!  petscmat.h - matrices
!  petscksp.h - Krylov subspace methods   petscpc.h  - preconditioners

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>
#include <finclude/petscksp.h>
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver (Krylov subspace method) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error
      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal        norm,tol
      PetscErrorCode   ierr
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,i1,its,five
      PetscMPIInt      rank
      PetscBool        user_defined_pc,flg

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp, SampleShellPCApply
      external SampleShellPCDestroy

!  Common block to store data for user-provided preconditioner
      common /myshellpc/ diag
      Vec              diag
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      one     = 1.0
      neg_one = -1.0
      i1      = 1
      m       = 8
      n       = 7
      five    = 5
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetType(A,MATAIJ,ierr)
      call MatSetFromOptions(A,ierr)
      call MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,    &
     &                               PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)
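
!  Note on preallocation: 'five' matches the 5-point stencil assembled
!  below, which produces at most 5 nonzeros per row (the diagonal plus
!  up to four neighbors).  Reserving that many slots per row, in both
!  the diagonal and off-diagonal blocks, avoids costly reallocation
!  during MatSetValues().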
!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)
 10   continue
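
!  Worked example (derived from the loop above): for an interior grid
!  point, row II of A couples (i,j) to its four neighbors, giving the
!  familiar 5-point Laplacian stencil
!
!     A(II,II-n) = A(II,II-1) = A(II,II+1) = A(II,II+n) = -1
!     A(II,II)   =  4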
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate()
!     (see the sketch after these calls).
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
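
!  As noted above, the same vector could be built with the more general
!  VecCreate() family; a minimal sketch of that alternative (not
!  executed here):
!
!     call VecCreate(PETSC_COMM_WORLD,u,ierr)
!     call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
!     call VecSetFromOptions(u,ierr)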
!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,                 &
     &                      PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
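
!  The same relative tolerance could instead be requested at runtime
!  with -ksp_rtol 1.e-7; KSPSetFromOptions() below lets such
!  command-line options override the defaults set here.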
!
!  Set a user-defined shell preconditioner if desired
!
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-user_defined_pc', &
     &                         user_defined_pc,ierr)

      if (user_defined_pc) then

!  (Required) Indicate to PETSc that we are using a shell preconditioner
         call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
         call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
         call PCShellSetSetUp(pc,SampleShellPCSetUp,ierr)

!  (Optional) Free any objects we created for the preconditioner
         call PCShellSetDestroy(pc,SampleShellPCDestroy,ierr)

      else
         call PCSetType(pc,PCJACOBI,ierr)
      endif
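
!  With -user_defined_pc, the three callbacks defined below implement
!  Jacobi "by hand", so both branches should behave the same.  Example
!  runs (mpiexec assumed as the launcher):
!
!     mpiexec -n 2 ./ex15f
!     mpiexec -n 2 ./ex15f -user_defined_pc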
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)
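
!  A natural follow-up (not part of the original example) would be to
!  query why the iteration stopped, e.g.
!
!     call KSPGetConvergedReason(ksp,reason,ierr)
!
!  where 'reason' is a KSPConvergedReason that would need declaring
!  above; positive values indicate convergence, negative divergence.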
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end
!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the
!   Jacobi method.  Thus, here we create a work vector for storing the
!   reciprocal of the diagonal of the preconditioner matrix; this
!   vector is then used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>

      PC              pc
      Mat             pmat
      PetscErrorCode  ierr

!  Common block to store data for user-provided preconditioner.
!  Normally we would recommend storing all the work data (like diag) in
!  the context set with PCShellSetContext().

      common /myshellpc/ diag
      Vec             diag

      call PCGetOperators(pc,PETSC_NULL_OBJECT,pmat,ierr)
      call MatGetVecs(pmat,diag,PETSC_NULL_OBJECT,ierr)
      call MatGetDiagonal(pmat,diag,ierr)
      call VecReciprocal(diag,ierr)
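
!  At this point diag(k) = 1/A(k,k): all the setup work is done once
!  here, so each later SampleShellPCApply() is just a pointwise
!  multiply, i.e. the classical Jacobi preconditioner y = D^{-1} x.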
      end
! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - input vector
!
!   Output Parameters:
!   y    - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
      subroutine SampleShellPCApply(pc,x,y,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>

      PC              pc
      Vec             x,y
      PetscErrorCode  ierr

!  Common block to store data for user-provided preconditioner
      common /myshellpc/ diag
      Vec             diag
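
!  Apply the Jacobi preconditioner built in SampleShellPCSetUp():
!  componentwise y(k) = diag(k)*x(k), i.e. y = D^{-1} x.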
      call VecPointwiseMult(y,x,diag,ierr)

      end
!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCDestroy - This routine destroys (frees the memory of)
!   any objects we made for the preconditioner.
!
!   Input Parameters:
!   pc - for this example we use the actual PC as our shell context
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
      subroutine SampleShellPCDestroy(pc,ierr)

      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>

      PC              pc
      PetscErrorCode  ierr

!  Common block to store data for user-provided preconditioner.
!  Normally we would recommend storing all the work data (like diag) in
!  the context set with PCShellSetContext().

      common /myshellpc/ diag
      Vec             diag

      call VecDestroy(diag,ierr)

      end