Actual source code: ex5f90.F
1: !
2: ! Description: Solves a nonlinear system in parallel with SNES.
3: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
4: ! domain, using distributed arrays (DAs) to partition the parallel grid.
5: ! The command line options include:
6: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
7: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
8: !
9: !/*T
10: ! Concepts: SNES^parallel Bratu example
11: ! Concepts: DA^using distributed arrays;
12: ! Processors: n
13: !T*/
14: !
15: ! --------------------------------------------------------------------------
16: !
17: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
18: ! the partial differential equation
19: !
20: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
21: !
22: ! with boundary conditions
23: !
24: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
25: !
26: ! A finite difference approximation with the usual 5-point stencil
27: ! is used to discretize the boundary value problem to obtain a nonlinear
28: ! system of equations.
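!
!    As a quick reference (a sketch consistent with FormFunctionLocal() below,
!    assuming uniform spacings hx = 1/(mx-1), hy = 1/(my-1) and the residual
!    scaled by hx*hy), the interior-point equations take the form
!
!       F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!              + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!              - hx*hy*lambda*exp(u(i,j)) = 0,
!
!    while boundary points simply satisfy F(i,j) = u(i,j) = 0.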
29: !
30: ! The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
31: !
32: ! --------------------------------------------------------------------------
33: ! The following define must be used before including any PETSc include files
34: ! inside a module or interface, because those contexts cannot accept the
35: ! variable declarations that the include files would otherwise introduce.
36: !
38: module f90module
39: type userctx
40: #define PETSC_AVOID_DECLARATIONS
41: #include "include/finclude/petsc.h"
42: #include "include/finclude/petscvec.h"
43: #include "include/finclude/petscda.h"
44: #undef PETSC_AVOID_DECLARATIONS
45: DA da
46: PetscInt xs,xe,xm,gxs,gxe,gxm
47: PetscInt ys,ye,ym,gys,gye,gym
48: PetscInt mx,my
49: PetscMPIInt rank
50: double precision lambda
51: end type userctx
52: contains
53: ! ---------------------------------------------------------------------
54: !
55: ! FormFunction - Evaluates nonlinear function, F(x).
56: !
57: ! Input Parameters:
58: ! snes - the SNES context
59: ! X - input vector
60: ! dummy - optional user-defined context, as set by SNESSetFunction()
61: ! (not used here)
62: !
63: ! Output Parameter:
64: ! F - function vector
65: !
66: ! Notes:
67: ! This routine serves as a wrapper for the lower-level routine
68: ! "FormFunctionLocal", where the actual computations are
69: ! done using the standard Fortran style of treating the local
70: ! vector data as a multidimensional array over the local mesh.
71: ! This routine merely handles ghost point scatters and accesses
72: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
73: !
74: subroutine FormFunction(snes,X,F,user,ierr)
75: implicit none
77: #include "include/finclude/petsc.h"
78: #include "include/finclude/petscvec.h"
79: #include "include/finclude/petscda.h"
80: #include "include/finclude/petscis.h"
81: #include "include/finclude/petscmat.h"
82: #include "include/finclude/petscksp.h"
83: #include "include/finclude/petscpc.h"
84: #include "include/finclude/petscsnes.h"
86: #include "include/finclude/petscvec.h90"
89: ! Input/output variables:
90: SNES snes
91: Vec X,F
92: PetscErrorCode ierr
93: type (userctx) user
95: ! Declarations for use with local arrays:
96: PetscScalar,pointer :: lx_v(:),lf_v(:)
97: Vec localX
99: ! Scatter ghost points to local vector, using the 2-step process
100: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
101: ! By placing code between these two statements, computations can
102: ! be done while messages are in transition.
104: call DAGetLocalVector(user%da,localX,ierr)
105: call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES, &
106: & localX,ierr)
107: call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)
109: ! Get a pointer to vector data.
110: ! - For default PETSc vectors, VecGetArrayF90() returns a pointer to
111: ! the data array. Otherwise, the routine is implementation dependent.
112: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
113: ! the array.
114: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
115: ! and is usable only from Fortran 90.
117: call VecGetArrayF90(localX,lx_v,ierr)
118: call VecGetArrayF90(F,lf_v,ierr)
120: ! Compute function over the locally owned part of the grid
122: call FormFunctionLocal(lx_v,lf_v,user,ierr)
124: ! Restore vectors
126: call VecRestoreArrayF90(localX,lx_v,ierr)
127: call VecRestoreArrayF90(F,lf_v,ierr)
129: ! Insert values into global vector
131: call DARestoreLocalVector(user%da,localX,ierr)
132: call PetscLogFlops(11*user%ym*user%xm,ierr)
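!  (The count above is a rough estimate of about 11 floating point operations
!   per locally owned grid point in FormFunctionLocal(), not counting exp().)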
134: ! call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
135: ! call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
137: return
138: end subroutine FormFunction
139: end module f90module
143: program main
144: use f90module
145: implicit none
146: !
147: !
148: #include "include/finclude/petsc.h"
149: #include "include/finclude/petscvec.h"
150: #include "include/finclude/petscda.h"
151: #include "include/finclude/petscis.h"
152: #include "include/finclude/petscmat.h"
153: #include "include/finclude/petscksp.h"
154: #include "include/finclude/petscpc.h"
155: #include "include/finclude/petscsnes.h"
156: #include "include/finclude/petscvec.h90"
157: #include "include/finclude/petscda.h90"
159: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
160: ! Variable declarations
161: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
162: !
163: ! Variables:
164: ! snes - nonlinear solver
165: ! x, r - solution, residual vectors
166: ! J - Jacobian matrix
167: ! its - iterations for convergence
168: ! Nx, Ny - number of processors in x- and y-directions
169: ! matrix_free - flag indicating whether the matrix-free version is used
170: !
171: !
172: SNES snes
173: Vec x,r
174: Mat J
175: PetscErrorCode ierr
176: PetscInt its
177: PetscTruth flg,matrix_free
178: PetscInt ione,nfour
179: double precision lambda_max,lambda_min
180: type (userctx) user
182: ! Note: Any user-defined Fortran routines (such as FormJacobian)
183: ! MUST be declared as external.
185: external FormInitialGuess,FormJacobian
187: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
188: ! Initialize program
189: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
191: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
192: call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)
194: ! Initialize problem parameters
196: lambda_max = 6.81
197: lambda_min = 0.0
198: user%lambda = 6.0
199: ione = 1
200: nfour = -4
201: call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par', &
202: & user%lambda,flg,ierr)
203: if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
204: & then
205: if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
206: SETERRQ(1,' ',ierr)
207: endif
210: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
211: ! Create nonlinear solver context
212: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
214: call SNESCreate(PETSC_COMM_WORLD,snes,ierr)
216: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
217: ! Create vector data structures; set function evaluation routine
218: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
220: ! Create distributed array (DA) to manage parallel grid and vectors
222: ! This really needs only the star-type stencil, but we use the box
223: ! stencil temporarily.
224: call DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_BOX, &
225: & nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione, &
226: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr)
227: call DAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my, &
228: & PETSC_NULL_INTEGER, &
229: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
230: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
231: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
232: & PETSC_NULL_INTEGER,ierr)
233:
234: !
235: ! Visualize the distribution of the array across the processors
236: !
237: ! call DAView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr)
239: ! Extract global and local vectors from DA; then duplicate for remaining
240: ! vectors that are of the same type
242: call DACreateGlobalVector(user%da,x,ierr)
243: call VecDuplicate(x,r,ierr)
245: ! Get local grid boundaries (for 2-dimensional DA)
247: call DAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER, &
248: & user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
249: call DAGetGhostCorners(user%da,user%gxs,user%gys, &
250: & PETSC_NULL_INTEGER,user%gxm,user%gym, &
251: & PETSC_NULL_INTEGER,ierr)
253: ! Here we shift the starting indices up by one so that we can easily
254: ! use the Fortran convention of 1-based indices (rather than 0-based indices).
256: user%xs = user%xs+1
257: user%ys = user%ys+1
258: user%gxs = user%gxs+1
259: user%gys = user%gys+1
261: user%ye = user%ys+user%ym-1
262: user%xe = user%xs+user%xm-1
263: user%gye = user%gys+user%gym-1
264: user%gxe = user%gxs+user%gxm-1
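!  After this shift the local index ranges read as follows (a sketch; only the
!  x direction is shown, the y direction is analogous):
!     locally owned points:   i = xs, ..., xe     with xe  = xs  + xm  - 1
!     owned + ghost points:   i = gxs, ..., gxe   with gxe = gxs + gxm - 1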
266: ! Set function evaluation routine and vector
268: call SNESSetFunction(snes,r,FormFunction,user,ierr)
270: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
271: ! Create matrix data structure; set Jacobian evaluation routine
272: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
274: ! Set Jacobian matrix data structure and default Jacobian evaluation
275: ! routine. User can override with:
276: ! -snes_fd : default finite differencing approximation of Jacobian
277: ! -snes_mf : matrix-free Newton-Krylov method with no preconditioning
278: ! (unless user explicitly sets preconditioner)
279: ! -snes_mf_operator : form preconditioning matrix as set by the user,
280: ! but use matrix-free approx for Jacobian-vector
281: ! products within Newton-Krylov method
282: !
283: ! Note: For the parallel case, vectors and matrices MUST be partitioned
284: ! accordingly. When using distributed arrays (DAs) to create vectors,
285: ! the DAs determine the problem partitioning. We must explicitly
286: ! specify the local matrix dimensions upon its creation for compatibility
287: ! with the vector distribution. Thus, the generic MatCreate() routine
288: ! is NOT sufficient when working with distributed arrays.
289: !
290: ! Note: Here we only approximately preallocate storage space for the
291: ! Jacobian. See the users manual for a discussion of better techniques
292: ! for preallocating matrix memory.
294: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf', &
295: & matrix_free,ierr)
296: if (matrix_free .eq. 0) then
297: call DAGetMatrix(user%da,MATAIJ,J,ierr)
298: call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
299: endif
301: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
302: ! Customize nonlinear solver; set runtime options
303: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
305: ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
307: call SNESSetFromOptions(snes,ierr)
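!  For example, a run on four processes might look like the following
!  (hypothetical command line; the executable name and MPI launcher depend
!  on the local installation):
!     mpiexec -n 4 ./ex5f90 -par 5.0 -snes_monitor -snes_rtol 1.e-6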
309: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
310: ! Evaluate initial guess; then solve nonlinear system.
311: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
313: ! Note: The user should initialize the vector, x, with the initial guess
314: ! for the nonlinear solver prior to calling SNESSolve(). In particular,
315: ! to employ an initial guess of zero, the user should explicitly set
316: ! this vector to zero by calling VecSet().
318: call FormInitialGuess(user,x,ierr)
319: call SNESSolve(snes,PETSC_NULL_OBJECT,x,ierr)
320: call SNESGetIterationNumber(snes,its,ierr)
321: if (user%rank .eq. 0) then
322: write(6,100) its
323: endif
324: 100 format('Number of Newton iterations = ',i5)
326: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
327: ! Free work space. All PETSc objects should be destroyed when they
328: ! are no longer needed.
329: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
331: if (matrix_free .eq. 0) call MatDestroy(J,ierr)
332: call VecDestroy(x,ierr)
333: call VecDestroy(r,ierr)
334: call SNESDestroy(snes,ierr)
335: call DADestroy(user%da,ierr)
336: call PetscFinalize(ierr)
337: end
339: ! ---------------------------------------------------------------------
340: !
341: ! FormInitialGuess - Forms initial approximation.
342: !
343: ! Input Parameters:
344: ! X - vector
345: !
346: ! Output Parameter:
347: ! X - vector
348: !
349: ! Notes:
350: ! This routine serves as a wrapper for the lower-level routine
351: ! "InitialGuessLocal", where the actual computations are
352: ! done using the standard Fortran style of treating the local
353: ! vector data as a multidimensional array over the local mesh.
354: ! This routine merely handles ghost point scatters and accesses
355: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
356: !
357: subroutine FormInitialGuess(user,X,ierr)
358: use f90module
359: implicit none
361: #include "include/finclude/petscvec.h90"
362: #include "include/finclude/petsc.h"
363: #include "include/finclude/petscvec.h"
364: #include "include/finclude/petscda.h"
365: #include "include/finclude/petscis.h"
366: #include "include/finclude/petscmat.h"
367: #include "include/finclude/petscksp.h"
368: #include "include/finclude/petscpc.h"
369: #include "include/finclude/petscsnes.h"
371: ! Input/output variables:
372: type (userctx) user
373: Vec X
374: PetscErrorCode ierr
375:
376: ! Declarations for use with local arrays:
377: PetscScalar,pointer :: lx_v(:)
378: Vec localX
380: ierr = 0
382: ! Get a pointer to vector data.
383: ! - For default PETSc vectors, VecGetArrayF90() returns a pointer to
384: ! the data array. Otherwise, the routine is implementation dependent.
385: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
386: ! the array.
387: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
388: ! and is usable only from Fortran 90.
390: call DAGetLocalVector(user%da,localX,ierr)
391: call VecGetArrayF90(localX,lx_v,ierr)
393: ! Compute initial guess over the locally owned part of the grid
395: call InitialGuessLocal(user,lx_v,ierr)
397: ! Restore vector
399: call VecRestoreArrayF90(localX,lx_v,ierr)
401: ! Insert values into global vector
403: call DALocalToGlobal(user%da,localX,INSERT_VALUES,X,ierr)
404: call DARestoreLocalVector(user%da,localX,ierr)
406: return
407: end
409: ! ---------------------------------------------------------------------
410: !
411: ! InitialGuessLocal - Computes initial approximation, called by
412: ! the higher level routine FormInitialGuess().
413: !
414: ! Input Parameter:
415: ! x - local vector data
416: !
417: ! Output Parameters:
418: ! x - local vector data
419: ! ierr - error code
420: !
421: ! Notes:
422: ! This routine uses standard Fortran-style computations over a 2-dim array.
423: !
424: subroutine InitialGuessLocal(user,x,ierr)
425: use f90module
426: implicit none
428: #include "include/finclude/petsc.h"
429: #include "include/finclude/petscvec.h"
430: #include "include/finclude/petscda.h"
431: #include "include/finclude/petscis.h"
432: #include "include/finclude/petscmat.h"
433: #include "include/finclude/petscksp.h"
434: #include "include/finclude/petscpc.h"
435: #include "include/finclude/petscsnes.h"
437: ! Input/output variables:
438: type (userctx) user
439: PetscScalar x(user%gxs:user%gxe, &
440: & user%gys:user%gye)
441: PetscErrorCode ierr
443: ! Local variables:
444: PetscInt i,j
445: PetscScalar temp1,temp,hx,hy
446: PetscScalar one
448: ! Set parameters
450: ierr = 0
451: one = 1.0
452: hx = one/(dble(user%mx-1))
453: hy = one/(dble(user%my-1))
454: temp1 = user%lambda/(user%lambda + one)
456: do 20 j=user%ys,user%ye
457: temp = dble(min(j-1,user%my-j))*hy
458: do 10 i=user%xs,user%xe
459: if (i .eq. 1 .or. j .eq. 1 &
460: & .or. i .eq. user%mx .or. j .eq. user%my) then
461: x(i,j) = 0.0
462: else
463: x(i,j) = temp1 * &
464: & sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
465: endif
466: 10 continue
467: 20 continue
469: return
470: end
472: ! ---------------------------------------------------------------------
473: !
474: ! FormFunctionLocal - Computes nonlinear function, called by
475: ! the higher level routine FormFunction().
476: !
477: ! Input Parameter:
478: ! x - local vector data
479: !
480: ! Output Parameters:
481: ! f - local vector data, f(x)
482: ! ierr - error code
483: !
484: ! Notes:
485: ! This routine uses standard Fortran-style computations over a 2-dim array.
486: !
487: subroutine FormFunctionLocal(x,f,user,ierr)
488: use f90module
490: implicit none
492: ! Input/output variables:
493: type (userctx) user
494: PetscScalar x(user%gxs:user%gxe, &
495: & user%gys:user%gye)
496: PetscScalar f(user%xs:user%xe, &
497: & user%ys:user%ye)
498: PetscErrorCode ierr
500: ! Local variables:
501: PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
502: PetscScalar u,uxx,uyy
503: PetscInt i,j
505: one = 1.0
506: two = 2.0
507: hx = one/dble(user%mx-1)
508: hy = one/dble(user%my-1)
509: sc = hx*hy*user%lambda
510: hxdhy = hx/hy
511: hydhx = hy/hx
513: ! Compute function over the locally owned part of the grid
515: do 20 j=user%ys,user%ye
516: do 10 i=user%xs,user%xe
517: if (i .eq. 1 .or. j .eq. 1 &
518: & .or. i .eq. user%mx .or. j .eq. user%my) then
519: f(i,j) = x(i,j)
520: else
521: u = x(i,j)
522: uxx = hydhx * (two*u &
523: & - x(i-1,j) - x(i+1,j))
524: uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
525: f(i,j) = uxx + uyy - sc*exp(u)
526: endif
527: 10 continue
528: 20 continue
530: return
531: end
533: ! ---------------------------------------------------------------------
534: !
535: ! FormJacobian - Evaluates Jacobian matrix.
536: !
537: ! Input Parameters:
538: ! snes - the SNES context
539: ! x - input vector
540: ! dummy - optional user-defined context, as set by SNESSetJacobian()
541: ! (not used here)
542: !
543: ! Output Parameters:
544: ! jac - Jacobian matrix
545: ! jac_prec - optionally different preconditioning matrix (not used here)
546: ! flag - flag indicating matrix structure
547: !
548: ! Notes:
549: ! This routine serves as a wrapper for the lower-level routine
550: ! "FormJacobianLocal", where the actual computations are
551: ! done using the standard Fortran style of treating the local
552: ! vector data as a multidimensional array over the local mesh.
553: ! This routine merely accesses the local vector data via
554: ! VecGetArrayF90() and VecRestoreArrayF90().
555: !
556: ! Notes:
557: ! Due to grid point reordering with DAs, we must always work
558: ! with the local grid points, and then transform them to the new
559: ! global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
560: ! We cannot work directly with the global numbers for the original
561: ! uniprocessor grid!
562: !
563: ! Two methods are available for imposing this transformation
564: ! when setting matrix entries:
565: ! (A) MatSetValuesLocal(), using the local ordering (including
566: ! ghost points!)
567: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
568: ! - Associate this map with the matrix by calling
569: ! MatSetLocalToGlobalMapping() once
570: ! - Set matrix entries using the local ordering
571: ! by calling MatSetValuesLocal()
572: ! (B) MatSetValues(), using the global ordering
573: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
574: ! - Then apply this map explicitly yourself
575: ! - Set matrix entries using the global ordering by calling
576: ! MatSetValues()
577: ! Option (A) seems cleaner/easier in many cases, and is the procedure
578: ! used in this example.
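!    (Note: in this example the local-to-global map is attached automatically;
!    the Jacobian created with DAGetMatrix() in the main program is assumed to
!    carry the DA's mapping already, which is why no explicit
!    MatSetLocalToGlobalMapping() call appears before MatSetValuesLocal().)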
579: !
580: subroutine FormJacobian(snes,X,jac,jac_prec,flag,user,ierr)
581: use f90module
582: implicit none
584: #include "include/finclude/petsc.h"
585: #include "include/finclude/petscvec.h"
586: #include "include/finclude/petscda.h"
587: #include "include/finclude/petscis.h"
588: #include "include/finclude/petscmat.h"
589: #include "include/finclude/petscksp.h"
590: #include "include/finclude/petscpc.h"
591: #include "include/finclude/petscsnes.h"
593: #include "include/finclude/petscvec.h90"
595: ! Input/output variables:
596: SNES snes
597: Vec X
598: Mat jac,jac_prec
599: MatStructure flag
600: type(userctx) user
601: PetscErrorCode ierr
603: ! Declarations for use with local arrays:
604: PetscScalar,pointer :: lx_v(:)
605: Vec localX
607: ! Scatter ghost points to local vector, using the 2-step process
608: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd()
609: ! Computations can be done while messages are in transition,
610: ! by placing code between these two statements.
612: call DAGetLocalVector(user%da,localX,ierr)
613: call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX, &
614: & ierr)
615: call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)
617: ! Get a pointer to vector data
619: call VecGetArrayF90(localX,lx_v,ierr)
621: ! Compute entries for the locally owned part of the Jacobian.
623: call FormJacobianLocal(lx_v,jac,jac_prec,user,ierr)
625: ! Assemble matrix, using the 2-step process:
626: ! MatAssemblyBegin(), MatAssemblyEnd()
627: ! Computations can be done while messages are in transition,
628: ! by placing code between these two statements.
630: call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
631: call VecRestoreArrayF90(localX,lx_v,ierr)
632: call DARestoreLocalVector(user%da,localX,ierr)
633: call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
635: ! Set flag to indicate that the Jacobian matrix retains an identical
636: ! nonzero structure throughout all nonlinear iterations (although the
637: ! values of the entries change). Thus, we can save some work in setting
638: ! up the preconditioner (e.g., no need to redo symbolic factorization for
639: ! ILU/ICC preconditioners).
640: ! - If the nonzero structure of the matrix is different during
641: ! successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
642: ! must be used instead. If you are unsure whether the matrix
643: ! structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
644: ! - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
645: ! believes your assertion and does not check the structure
646: ! of the matrix. If you erroneously claim that the structure
647: ! is the same when it actually is not, the new preconditioner
648: ! will not function correctly. Thus, use this optimization
649: ! feature with caution!
651: flag = SAME_NONZERO_PATTERN
653: ! Tell the matrix that we will never add a new nonzero location to it;
654: ! if we do, it will generate an error.
656: call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,ierr)
658: return
659: end
661: ! ---------------------------------------------------------------------
662: !
663: ! FormJacobianLocal - Computes Jacobian matrix, called by
664: ! the higher level routine FormJacobian().
665: !
666: ! Input Parameters:
667: ! x - local vector data
668: !
669: ! Output Parameters:
670: ! jac - Jacobian matrix
671: ! jac_prec - optionally different preconditioning matrix (not used here)
672: ! ierr - error code
673: !
674: ! Notes:
675: ! This routine uses standard Fortran-style computations over a 2-dim array.
676: !
677: ! Notes:
678: ! Due to grid point reordering with DAs, we must always work
679: ! with the local grid points, and then transform them to the new
680: ! global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
681: ! We cannot work directly with the global numbers for the original
682: ! uniprocessor grid!
683: !
684: ! Two methods are available for imposing this transformation
685: ! when setting matrix entries:
686: ! (A) MatSetValuesLocal(), using the local ordering (including
687: ! ghost points!)
688: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
689: ! - Associate this map with the matrix by calling
690: ! MatSetLocalToGlobalMapping() once
691: ! - Set matrix entries using the local ordering
692: ! by calling MatSetValuesLocal()
693: ! (B) MatSetValues(), using the global ordering
694: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
695: ! - Then apply this map explicitly yourself
696: ! - Set matrix entries using the global ordering by calling
697: ! MatSetValues()
698: ! Option (A) seems cleaner/easier in many cases, and is the procedure
699: ! used in this example.
700: !
701: subroutine FormJacobianLocal(x,jac,jac_prec,user,ierr)
702: use f90module
703: implicit none
705: #include "include/finclude/petsc.h"
706: #include "include/finclude/petscvec.h"
707: #include "include/finclude/petscda.h"
708: #include "include/finclude/petscis.h"
709: #include "include/finclude/petscmat.h"
710: #include "include/finclude/petscksp.h"
711: #include "include/finclude/petscpc.h"
712: #include "include/finclude/petscsnes.h"
714: ! Input/output variables:
715: type (userctx) user
716: PetscScalar x(user%gxs:user%gxe, &
717: & user%gys:user%gye)
718: Mat jac,jac_prec
719: PetscErrorCode ierr
721: ! Local variables:
722: PetscInt row,col(5),i,j
723: PetscInt ione,ifive
724: PetscScalar two,one,hx,hy,hxdhy
725: PetscScalar hydhx,sc,v(5)
727: ! Set parameters
728: ione = 1
729: ifive = 5
730: one = 1.0
731: two = 2.0
732: hx = one/dble(user%mx-1)
733: hy = one/dble(user%my-1)
734: sc = hx*hy
735: hxdhy = hx/hy
736: hydhx = hy/hx
738: ! Compute entries for the locally owned part of the Jacobian.
739: ! - Currently, all PETSc parallel matrix formats are partitioned by
740: ! contiguous chunks of rows across the processors.
741: ! - Each processor needs to insert only elements that it owns
742: ! locally (but any non-local elements will be sent to the
743: ! appropriate processor during matrix assembly).
744: ! - Here, we set all entries for a particular row at once.
745: ! - We can set matrix entries using either
746: ! MatSetValuesLocal() or MatSetValues(), as discussed above.
747: ! - Note that MatSetValues() uses 0-based row and column numbers
748: ! in Fortran as well as in C.
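!  For reference (a sketch matching the values v(1:5) set below), the
!  derivatives of the interior residual with respect to the five stencil
!  unknowns are
!     dF(i,j)/du(i,j)   = 2*(hy/hx + hx/hy) - hx*hy*lambda*exp(u(i,j))
!     dF(i,j)/du(i-1,j) = dF(i,j)/du(i+1,j) = -hy/hx
!     dF(i,j)/du(i,j-1) = dF(i,j)/du(i,j+1) = -hx/hy
!  In the local (ghosted) ordering these neighbors sit at offsets row-1,
!  row+1, row-gxm and row+gxm, as used for col(1:5) below.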
750: do 20 j=user%ys,user%ye
751: row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
752: do 10 i=user%xs,user%xe
753: row = row + 1
754: ! boundary points
755: if (i .eq. 1 .or. j .eq. 1 &
756: & .or. i .eq. user%mx .or. j .eq. user%my) then
757: col(1) = row
758: v(1) = one
759: call MatSetValuesLocal(jac,ione,row,ione,col,v, &
760: & INSERT_VALUES,ierr)
761: ! interior grid points
762: else
763: v(1) = -hxdhy
764: v(2) = -hydhx
765: v(3) = two*(hydhx + hxdhy) &
766: & - sc*user%lambda*exp(x(i,j))
767: v(4) = -hydhx
768: v(5) = -hxdhy
769: col(1) = row - user%gxm
770: col(2) = row - 1
771: col(3) = row
772: col(4) = row + 1
773: col(5) = row + user%gxm
774: call MatSetValuesLocal(jac,ione,row,ifive,col,v, &
775: & INSERT_VALUES,ierr)
776: endif
777: 10 continue
778: 20 continue
780: return
781: end