! Actual source code: ex13f90.F90
! Module holding the user-defined context that bundles together all of
! the PETSc objects and grid dimensions used during the linear solves.
module ex13f90module
#include <petsc/finclude/petscksp.h>
  use petscksp
  ! Context handed back and forth between the driver program and the
  ! user-level solver routines below.
  type User
    Vec x        ! solution vector
    Vec b        ! right-hand-side vector
    Mat A        ! sparse matrix for the discretized operator
    KSP ksp      ! linear solver context (reused across solves)
    PetscInt m   ! number of interior grid points in x
    PetscInt n   ! number of interior grid points in y
  end type User
end module ex13f90module
program main
  use ex13f90module
  implicit none

! User-defined context (type User) carries the PETSc objects and the
! grid dimensions used in the linear solution process:
!
!   Vec x,b   -- solution vector and right-hand-side vector
!   Mat A     -- sparse matrix
!   KSP ksp   -- linear solver context
!   int m,n   -- grid dimensions
!
! Since we cannot store Scalars and integers in the same context, the
! integers/pointers live in the user-defined context and the scalar
! values travel through the common block /param/:
!
!   hx2,hy2 = (m+1)*(m+1) and (n+1)*(n+1), i.e. 1/h^2 per direction.
! (In this simplistic example they could just be recomputed wherever
! they are needed.)

! Note: Any user-defined Fortran routines MUST be declared as external.
  external UserInitializeLinearSolver
  external UserFinalizeLinearSolver
  external UserDoLinearSolver

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscScalar hx,hy,x,y
  type(User) userctx
  PetscErrorCode ierr
  PetscInt m,n,t,tmax,i,j
  PetscBool flg
  PetscMPIInt size
  PetscReal enorm
  PetscScalar cnorm
  PetscScalar,ALLOCATABLE :: userx(:,:)
  PetscScalar,ALLOCATABLE :: userb(:,:)
  PetscScalar,ALLOCATABLE :: solution(:,:)
  PetscScalar,ALLOCATABLE :: rho(:,:)

! Grid-spacing factors shared with the solver routines.
  PetscReal hx2,hy2
  common /param/ hx2,hy2

  tmax = 2
  m = 6
  n = 7

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,size,ierr))
  PetscCheckA(size .eq. 1,PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,'This is a uniprocessor example only')

! The next two lines are for testing only; these allow the user to
! decide the grid size at runtime.
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr))
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr))

! Create the empty sparse matrix and linear solver data structures.
  PetscCallA(UserInitializeLinearSolver(m,n,userctx,ierr))

! Allocate arrays to hold the solution to the linear system. This
! approach is not normally done in PETSc programs, but here we mimic
! calling these routines from a non-PETSc application that already owns
! its own data structures.
  ALLOCATE (userx(m,n),userb(m,n),solution(m,n))

! Allocate an array to hold the coefficients in the elliptic operator.
  ALLOCATE (rho(m,n))

! Fill up the array rho[] with the function rho(x,y) = x; fill the
! right-hand side b[] and the solution with a known problem for testing.
! NOTE: grid spacings are computed with double-precision literals; the
! original '1.0/real(m+1)' evaluated in default (single) real before
! being stored into a PetscScalar, needlessly truncating precision.
  hx = 1.0d0/(m+1)
  hy = 1.0d0/(n+1)
  y = hy
  do j=1,n
    x = hx
    do i=1,m
      rho(i,j) = x
      solution(i,j) = sin(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y)
      userb(i,j) = -2.*PETSC_PI*cos(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y) + 8*PETSC_PI*PETSC_PI*x*sin(2.*PETSC_PI*x)*sin(2.*PETSC_PI*y)
      x = x + hx
    end do
    y = y + hy
  end do

! Loop over a bunch of timesteps, setting up and solving the linear
! system for each time-step. This loop is somewhat artificial: it is
! intended to demonstrate how one may reuse the linear solvers in each
! time-step.
  do t=1,tmax
    PetscCallA(UserDoLinearSolver(rho,userctx,userb,userx,ierr))

!   Compute error: this could (and usually should) be done with PETSc
!   vector operations; plain Fortran loops are used here to show how
!   standard programming practices mix with PETSc.
    cnorm = 0.0
    do j=1,n
      do i=1,m
        cnorm = cnorm + PetscConj(solution(i,j)-userx(i,j))*(solution(i,j)-userx(i,j))
      end do
    end do
    enorm = PetscRealPart(cnorm*hx*hy)
    write(6,"('m = ',I2,' n = ',I2,' error norm = ',1PE11.4)") m,n,enorm
  end do

! We are finished solving linear systems, so we clean up the data
! structures.
  DEALLOCATE (userx,userb,solution,rho)

  PetscCallA(UserFinalizeLinearSolver(userctx,ierr))
  PetscCallA(PetscFinalize(ierr))
end program main
! ----------------------------------------------------------------
! Create the (still empty) sparse matrix, the array-less work vectors
! and the KSP context that will be reused for every solve, and stash
! them all in userctx.
subroutine UserInitializeLinearSolver(m,n,userctx,ierr)
  use ex13f90module
  implicit none

  PetscInt m,n
  PetscErrorCode ierr
  type(User) userctx

! Grid-spacing factors shared with the solve routine.
  common /param/ hx2,hy2
  PetscReal hx2,hy2

! Local variable declarations
  Mat amat
  Vec bvec,xvec
  KSP solver
  PetscInt total,nzrow,ione

! We assume a grid of size m x n with all points in the interior of
! the domain [0,1]x[0,1]; points carrying the homogeneous Dirichlet
! boundary condition are not stored.
  hx2 = (m+1)*(m+1)
  hy2 = (n+1)*(n+1)
  total = m*n

  nzrow = 5
  ione = 1

! Create the sparse matrix, preallocating 5 nonzeros per row
! (the 5-point stencil).
  PetscCall(MatCreateSeqAIJ(PETSC_COMM_SELF,total,total,nzrow,PETSC_NULL_INTEGER,amat,ierr))

! Create vectors with no memory attached: the caller's own arrays get
! plugged in later via VecPlaceArray(), letting us reuse the data
! structures already in the program.
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,ione,total,PETSC_NULL_SCALAR,bvec,ierr))
  PetscCall(VecDuplicate(bvec,xvec,ierr))

! Create the linear solver context, used repeatedly for all the linear
! solves needed.
  PetscCall(KSPCreate(PETSC_COMM_SELF,solver,ierr))

  userctx%x = xvec
  userctx%b = bvec
  userctx%A = amat
  userctx%ksp = solver
  userctx%m = m
  userctx%n = n
end subroutine UserInitializeLinearSolver
! -----------------------------------------------------------------------
! Solves -div (rho grad psi) = F using finite differences.
! rho is a 2-dimensional array of size m by n, stored Fortran-style by
! columns (received here as a flat array); userb carries the right-hand
! side and userx receives the solution, both as flat 1-d arrays.
subroutine UserDoLinearSolver(rho,userctx,userb,userx,ierr)
  use ex13f90module
  implicit none

  PetscErrorCode ierr
  type(User) userctx
  PetscScalar rho(*),userb(*),userx(*)

! Grid-spacing factors set up by UserInitializeLinearSolver.
  common /param/ hx2,hy2
  PetscReal hx2,hy2

  PC pc
  KSP solver
  Vec bvec,xvec
  Mat amat
  PetscInt m,n,ione
  PetscInt i,j,row,col
  PetscScalar val

  ione = 1
  xvec = userctx%x
  bvec = userctx%b
  amat = userctx%A
  solver = userctx%ksp
  m = userctx%m
  n = userctx%n

! Compute the operator
!         -div rho grad
! on an m by n grid with zero Dirichlet boundary conditions. rho is
! assumed to be given on the same grid the stencil is applied to; a
! staggered grid would need slight changes. (Not the most efficient
! assembly -- the boundary if-tests run for every grid point instead of
! treating corners/edges/interior separately -- but it keeps the code
! simple.)
  row = 0
  do j=1,n
    do i=1,m
      if (j .gt. 1) then
        col = row - m
        val = -0.5*(rho(row+1) + rho(col+1))*hy2
        PetscCall(MatSetValues(amat,ione,row,ione,col,val,INSERT_VALUES,ierr))
      endif
      if (j .lt. n) then
        col = row + m
        val = -0.5*(rho(row+1) + rho(col+1))*hy2
        PetscCall(MatSetValues(amat,ione,row,ione,col,val,INSERT_VALUES,ierr))
      endif
      if (i .gt. 1) then
        col = row - 1
        val = -0.5*(rho(row+1) + rho(col+1))*hx2
        PetscCall(MatSetValues(amat,ione,row,ione,col,val,INSERT_VALUES,ierr))
      endif
      if (i .lt. m) then
        col = row + 1
        val = -0.5*(rho(row+1) + rho(col+1))*hx2
        PetscCall(MatSetValues(amat,ione,row,ione,col,val,INSERT_VALUES,ierr))
      endif
      val = 2*rho(row+1)*(hx2+hy2)
      PetscCall(MatSetValues(amat,ione,row,ione,row,val,INSERT_VALUES,ierr))
      row = row+1
    end do
  end do

! Assemble the matrix.
  PetscCall(MatAssemblyBegin(amat,MAT_FINAL_ASSEMBLY,ierr))
  PetscCall(MatAssemblyEnd(amat,MAT_FINAL_ASSEMBLY,ierr))

! Set operators: the system matrix also serves as the preconditioning
! matrix. Since the matrices keep the same nonzero pattern across
! solves, the solvers can take advantage of that.
  PetscCall(KSPSetOperators(solver,amat,amat,ierr))

! Solver defaults for this problem (optional): direct LU factorization.
  PetscCall(KSPGetPC(solver,pc,ierr))
  PetscCall(PCSetType(pc,PCLU,ierr))

! Runtime options such as
!     -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
! override the settings above because KSPSetFromOptions() is called
! after all other customization. Run with -help to list every option.
  PetscCall(KSPSetFromOptions(solver,ierr))

! Let the PETSc solvers compute the solution directly in the user's
! arrays rather than in the PETSc vectors. This is essentially a hack
! and not highly recommended unless you are quite comfortable with
! PETSc; in general, write the whole application with PETSc vectors.
  PetscCall(VecPlaceArray(xvec,userx,ierr))
  PetscCall(VecPlaceArray(bvec,userb,ierr))

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PetscCall(KSPSolve(solver,bvec,xvec,ierr))

! Detach the user arrays from the vectors again.
  PetscCall(VecResetArray(xvec,ierr))
  PetscCall(VecResetArray(bvec,ierr))
end subroutine UserDoLinearSolver
! ------------------------------------------------------------------------
! Free every PETSc object held in userctx. Called once no more linear
! systems need to be solved; all PETSc objects must be destroyed when
! they are no longer needed.
subroutine UserFinalizeLinearSolver(userctx,ierr)
  use ex13f90module
  implicit none

  PetscErrorCode ierr
  type(User) userctx

  PetscCall(KSPDestroy(userctx%ksp,ierr))
  PetscCall(MatDestroy(userctx%A,ierr))
  PetscCall(VecDestroy(userctx%b,ierr))
  PetscCall(VecDestroy(userctx%x,ierr))
end subroutine UserFinalizeLinearSolver
349: !
350: !/*TEST
351: !
352: ! test:
353: ! args: -m 19 -n 20 -ksp_gmres_cgs_refinement_type refine_always
354: ! output_file: output/ex13f90_1.out
355: !
356: !TEST*/