Actual source code: ex5f90.F

petsc-3.5.4 2015-05-23
Report Typos and Errors
```
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
!/*T
!  Concepts: SNES^parallel Bratu example
!  Concepts: DMDA^using distributed arrays;
!  Processors: n
!T*/
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
!  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The following define must be used before including any PETSc include files
!  into a module or interface. This is because they can't handle declarations
!  in them
!
38:       module f90module
39:       type userctx
40: #include <finclude/petscsysdef.h>
41: #include <finclude/petscvecdef.h>
42: #include <finclude/petscdmdef.h>
43:         PetscInt xs,xe,xm,gxs,gxe,gxm
44:         PetscInt ys,ye,ym,gys,gye,gym
45:         PetscInt mx,my
46:         PetscMPIInt rank
47:         PetscReal lambda
48:       end type userctx

50:       contains
51: ! ---------------------------------------------------------------------
52: !
53: !  FormFunction - Evaluates nonlinear function, F(x).
54: !
55: !  Input Parameters:
56: !  snes - the SNES context
57: !  X - input vector
58: !  dummy - optional user-defined context, as set by SNESSetFunction()
59: !          (not used here)
60: !
61: !  Output Parameter:
62: !  F - function vector
63: !
64: !  Notes:
65: !  This routine serves as a wrapper for the lower-level routine
66: !  "FormFunctionLocal", where the actual computations are
67: !  done using the standard Fortran style of treating the local
68: !  vector data as a multidimensional array over the local mesh.
69: !  This routine merely handles ghost point scatters and accesses
70: !  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
71: !
72:       subroutine FormFunction(snes,X,F,user,ierr)
73:       implicit none

75: #include <finclude/petscsys.h>
76: #include <finclude/petscvec.h>
77: #include <finclude/petscdm.h>
78: #include <finclude/petscdmda.h>
79: #include <finclude/petscis.h>
80: #include <finclude/petscmat.h>
81: #include <finclude/petscksp.h>
82: #include <finclude/petscpc.h>
83: #include <finclude/petscsnes.h>
84: #include <finclude/petscvec.h90>
85: #include <finclude/petscsnes.h90>

87: !  Input/output variables:
88:       SNES           snes
89:       Vec            X,F
90:       PetscErrorCode ierr
91:       type (userctx) user
92:       DM             da

94: !  Declarations for use with local arrays:
95:       PetscScalar,pointer :: lx_v(:),lf_v(:)
96:       Vec            localX

98: !  Scatter ghost points to local vector, using the 2-step process
99: !     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
100: !  By placing code between these two statements, computations can
101: !  be done while messages are in transition.
102:       call SNESGetDM(snes,da,ierr)
103:       call DMGetLocalVector(da,localX,ierr)
104:       call DMGlobalToLocalBegin(da,X,INSERT_VALUES,                     &
105:      &     localX,ierr)
106:       call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

108: !  Get a pointer to vector data.
109: !    - For default PETSc vectors, VecGetArray90() returns a pointer to
110: !      the data array. Otherwise, the routine is implementation dependent.
111: !    - You MUST call VecRestoreArrayF90() when you no longer need access to
112: !      the array.
113: !    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
114: !      and is useable from Fortran-90 Only.

116:       call VecGetArrayF90(localX,lx_v,ierr)
117:       call VecGetArrayF90(F,lf_v,ierr)

119: !  Compute function over the locally owned part of the grid
120:       call FormFunctionLocal(lx_v,lf_v,user,ierr)

122: !  Restore vectors
123:       call VecRestoreArrayF90(localX,lx_v,ierr)
124:       call VecRestoreArrayF90(F,lf_v,ierr)

126: !  Insert values into global vector

128:       call DMRestoreLocalVector(da,localX,ierr)
129:       call PetscLogFlops(11.0d0*user%ym*user%xm,ierr)

131: !      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
132: !      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
133:       return
134:       end subroutine formfunction
135:       end module f90module

137:       module f90moduleinterfaces
138:         use f90module

140:       Interface SNESSetApplicationContext
141:         Subroutine SNESSetApplicationContext(snes,ctx,ierr)
142:         use f90module
143:           SNES snes
144:           type(userctx) ctx
145:           PetscErrorCode ierr
146:         End Subroutine
147:       End Interface SNESSetApplicationContext

149:       Interface SNESGetApplicationContext
150:         Subroutine SNESGetApplicationContext(snes,ctx,ierr)
151:         use f90module
152:           SNES snes
153:           type(userctx), pointer :: ctx
154:           PetscErrorCode ierr
155:         End Subroutine
156:       End Interface SNESGetApplicationContext
157:       end module f90moduleinterfaces

159:       program main
160:       use f90module
161:       use f90moduleinterfaces
162:       implicit none
163: !
164: #include <finclude/petscsys.h>
165: #include <finclude/petscvec.h>
166: #include <finclude/petscdm.h>
167: #include <finclude/petscdmda.h>
168: #include <finclude/petscis.h>
169: #include <finclude/petscmat.h>
170: #include <finclude/petscksp.h>
171: #include <finclude/petscpc.h>
172: #include <finclude/petscsnes.h>
173: #include <finclude/petscvec.h90>
174: #include <finclude/petscdmda.h90>

176: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
177: !                   Variable declarations
178: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
179: !
180: !  Variables:
181: !     snes        - nonlinear solver
182: !     x, r        - solution, residual vectors
183: !     J           - Jacobian matrix
184: !     its         - iterations for convergence
185: !     Nx, Ny      - number of preocessors in x- and y- directions
186: !     matrix_free - flag - 1 indicates matrix-free version
187: !
188:       SNES             snes
189:       Vec              x,r
190:       Mat              J
191:       PetscErrorCode   ierr
192:       PetscInt         its
193:       PetscBool        flg,matrix_free
194:       PetscInt         ione,nfour
195:       PetscReal lambda_max,lambda_min
196:       type (userctx)   user
197:       DM               da

199: !  Note: Any user-defined Fortran routines (such as FormJacobian)
200: !  MUST be declared as external.
201:       external FormInitialGuess,FormJacobian

203: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
204: !  Initialize program
205: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
206:       call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
207:       call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

209: !  Initialize problem parameters
210:       lambda_max  = 6.81
211:       lambda_min  = 0.0
212:       user%lambda = 6.0
213:       ione = 1
214:       nfour = -4
215:       call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par',             &
216:      &     user%lambda,flg,ierr)
217:       if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
218:      &     then
219:          if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
220:          SETERRQ(PETSC_COMM_SELF,1,' ',ierr)
221:       endif

223: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
224: !  Create nonlinear solver context
225: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
226:       call SNESCreate(PETSC_COMM_WORLD,snes,ierr)

228: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
229: !  Create vector data structures; set function evaluation routine
230: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

232: !  Create distributed array (DMDA) to manage parallel grid and vectors

234: ! This really needs only the star-type stencil, but we use the box
235: ! stencil temporarily.
236:       call DMDACreate2d(PETSC_COMM_WORLD,                               &
237:      &     DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,                          &
238:      &     DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,          &
239:      &     ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)
240:       call DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my,           &
241:      &               PETSC_NULL_INTEGER,                                &
242:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
243:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
244:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
245:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
246:      &               PETSC_NULL_INTEGER,ierr)

248: !
249: !   Visualize the distribution of the array across the processors
250: !
251: !     call DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr)

253: !  Extract global and local vectors from DMDA; then duplicate for remaining
254: !  vectors that are the same types
255:       call DMCreateGlobalVector(da,x,ierr)
256:       call VecDuplicate(x,r,ierr)

258: !  Get local grid boundaries (for 2-dimensional DMDA)
259:       call DMDAGetCorners(da,user%xs,user%ys,PETSC_NULL_INTEGER,        &
260:      &     user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
261:       call DMDAGetGhostCorners(da,user%gxs,user%gys,                    &
262:      &     PETSC_NULL_INTEGER,user%gxm,user%gym,                        &
263:      &     PETSC_NULL_INTEGER,ierr)

265: !  Here we shift the starting indices up by one so that we can easily
266: !  use the Fortran convention of 1-based indices (rather 0-based indices).
267:       user%xs  = user%xs+1
268:       user%ys  = user%ys+1
269:       user%gxs = user%gxs+1
270:       user%gys = user%gys+1

272:       user%ye  = user%ys+user%ym-1
273:       user%xe  = user%xs+user%xm-1
274:       user%gye = user%gys+user%gym-1
275:       user%gxe = user%gxs+user%gxm-1

277:       call SNESSetApplicationContext(snes,user,ierr)

279: !  Set function evaluation routine and vector
280:       call SNESSetFunction(snes,r,FormFunction,user,ierr)

282: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
283: !  Create matrix data structure; set Jacobian evaluation routine
284: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

286: !  Set Jacobian matrix data structure and default Jacobian evaluation
287: !  routine. User can override with:
288: !     -snes_fd : default finite differencing approximation of Jacobian
289: !     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
290: !                (unless user explicitly sets preconditioner)
291: !     -snes_mf_operator : form preconditioning matrix as set by the user,
292: !                         but use matrix-free approx for Jacobian-vector
293: !                         products within Newton-Krylov method
294: !
295: !  Note:  For the parallel case, vectors and matrices MUST be partitioned
296: !     accordingly.  When using distributed arrays (DMDAs) to create vectors,
297: !     the DMDAs determine the problem partitioning.  We must explicitly
298: !     specify the local matrix dimensions upon its creation for compatibility
299: !     with the vector distribution.  Thus, the generic MatCreate() routine
300: !     is NOT sufficient when working with distributed arrays.
301: !
302: !     Note: Here we only approximately preallocate storage space for the
303: !     Jacobian.  See the users manual for a discussion of better techniques
304: !     for preallocating matrix memory.

306:       call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf',         &
307:      &     matrix_free,ierr)
308:       if (.not. matrix_free) then
309:         call DMSetMatType(da,MATAIJ,ierr)
310:         call DMCreateMatrix(da,J,ierr)
311:         call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
312:       endif

314: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
315: !  Customize nonlinear solver; set runtime options
316: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
317: !  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
318:       call SNESSetDM(snes,da,ierr)
319:       call SNESSetFromOptions(snes,ierr)

322: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
323: !  Evaluate initial guess; then solve nonlinear system.
324: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
325: !  Note: The user should initialize the vector, x, with the initial guess
326: !  for the nonlinear solver prior to calling SNESSolve().  In particular,
327: !  to employ an initial guess of zero, the user should explicitly set
328: !  this vector to zero by calling VecSet().

330:       call FormInitialGuess(snes,x,ierr)
331:       call SNESSolve(snes,PETSC_NULL_OBJECT,x,ierr)
332:       call SNESGetIterationNumber(snes,its,ierr);
333:       if (user%rank .eq. 0) then
334:          write(6,100) its
335:       endif
336:   100 format('Number of SNES iterations = ',i5)

338: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
339: !  Free work space.  All PETSc objects should be destroyed when they
340: !  are no longer needed.
341: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
342:       if (.not. matrix_free) call MatDestroy(J,ierr)
343:       call VecDestroy(x,ierr)
344:       call VecDestroy(r,ierr)
345:       call SNESDestroy(snes,ierr)
346:       call DMDestroy(da,ierr)

348:       call PetscFinalize(ierr)
349:       end

! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameters:
!  snes - the SNES context
!  X - vector
!
!  Output Parameters:
!  X - vector holding the initial guess
!  ierr - error code
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormInitialGuess(snes,X,ierr)
      use f90module
      use f90moduleinterfaces
      implicit none

#include <finclude/petscvec.h90>
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdm.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>

!  Input/output variables:
      SNES           snes
      type(userctx), pointer:: puser
      Vec            X
      PetscErrorCode ierr
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec               localX

      ierr = 0
      call SNESGetDM(snes,da,ierr)
      call SNESGetApplicationContext(snes,puser,ierr)
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array. Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran-90 Only.

      call DMGetLocalVector(da,localX,ierr)
      call VecGetArrayF90(localX,lx_v,ierr)

!  Compute initial guess over the locally owned part of the grid
      call InitialGuessLocal(puser,lx_v,ierr)

!  Restore vector
      call VecRestoreArrayF90(localX,lx_v,ierr)

!  Insert values into global vector
      call DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X,ierr)
      call DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X,ierr)
      call DMRestoreLocalVector(da,localX,ierr)

      return
      end subroutine FormInitialGuess

! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  x - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine InitialGuessLocal(user,x,ierr)
      use f90module
      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdm.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>

!  Input/output variables:
      type (userctx)         user
!  x is dimensioned over the ghosted local grid (1-based indices)
      PetscScalar  x(user%gxs:user%gxe,                                 &
     &              user%gys:user%gye)
      PetscErrorCode ierr

!  Local variables:
      PetscInt  i,j
      PetscScalar   temp1,temp,hx,hy
      PetscScalar   one

!  Set parameters

      ierr   = 0
      one    = 1.0
      hx     = one/(dble(user%mx-1))
      hy     = one/(dble(user%my-1))
      temp1  = user%lambda/(user%lambda + one)

      do 20 j=user%ys,user%ye
         temp = dble(min(j-1,user%my-j))*hy
         do 10 i=user%xs,user%xe
!           Zero on the Dirichlet boundary, a bump in the interior
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
              x(i,j) = 0.0
            else
              x(i,j) = temp1 *                                          &
     &          sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
            endif
 10      continue
 20   continue

      return
      end subroutine InitialGuessLocal

! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data (ghosted)
!
!  Output Parameters:
!  f - local vector data, f(x), over the owned grid points only
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine FormFunctionLocal(x,f,user,ierr)
      use f90module

      implicit none

!  Input/output variables:
      type (userctx) user
      PetscScalar  x(user%gxs:user%gxe,                                 &
     &              user%gys:user%gye)
      PetscScalar  f(user%xs:user%xe,                                   &
     &              user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      PetscInt  i,j

      ierr   = 0
      one    = 1.0
      two    = 2.0
      hx     = one/dble(user%mx-1)
      hy     = one/dble(user%my-1)
      sc     = hx*hy*user%lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute function over the locally owned part of the grid

      do 20 j=user%ys,user%ye
         do 10 i=user%xs,user%xe
!           Dirichlet boundary: residual is simply u
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
               f(i,j) = x(i,j)
            else
!           Interior: 5-point stencil for -Laplacian u - lambda*exp(u)
               u = x(i,j)
               uxx = hydhx * (two*u                                     &
     &                - x(i-1,j) - x(i+1,j))
               uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
               f(i,j) = uxx + uyy - sc*exp(u)
            endif
 10      continue
 20   continue

      return
      end subroutine FormFunctionLocal

! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes     - the SNES context
!  X        - input vector
!  user     - user-defined application context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!  ierr     - error code
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobian(snes,X,jac,jac_prec,user,ierr)
      use f90module
      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdm.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>

#include <finclude/petscvec.h90>

!  Input/output variables:
      SNES         snes
      Vec          X
      Mat          jac,jac_prec
      type(userctx)  user
      PetscErrorCode ierr
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec            localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call SNESGetDM(snes,da,ierr)
      call DMGetLocalVector(da,localX,ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,              &
     &     ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get a pointer to vector data
      call VecGetArrayF90(localX,lx_v,ierr)

!  Compute entries for the locally owned part of the Jacobian preconditioner.
      call FormJacobianLocal(lx_v,jac_prec,user,ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      if (jac .ne. jac_prec) then
         call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
      endif
      call VecRestoreArrayF90(localX,lx_v,ierr)
      call DMRestoreLocalVector(da,localX,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
      if (jac .ne. jac_prec) then
        call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
      endif

!  Tell the matrix we will never add a new nonzero location to the
!  matrix. If we do it will generate an error.

      call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,    &
     &                  ierr)

      return
      end subroutine FormJacobian

! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x        - local vector data (ghosted)
!
!  Output Parameters:
!  jac_prec - Jacobian preconditioner matrix
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobianLocal(x,jac_prec,user,ierr)
      use f90module
      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdm.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>

!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe,                               &
     &               user%gys:user%gye)
      Mat            jac_prec
      PetscErrorCode ierr

!  Local variables:
      PetscInt    row,col(5),i,j
      PetscInt    ione,ifive
      PetscScalar two,one,hx,hy,hxdhy
      PetscScalar hydhx,sc,v(5)

!  Set parameters
      ione   = 1
      ifive  = 5
      one    = 1.0
      two    = 2.0
      hx     = one/dble(user%mx-1)
      hy     = one/dble(user%my-1)
      sc     = hx*hy
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries either using either
!     MatSetValuesLocal() or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

      do 20 j=user%ys,user%ye
!        0-based local row index of the first owned point in this row
         row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
         do 10 i=user%xs,user%xe
            row = row + 1
!           boundary points: identity row
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
               col(1) = row
               v(1)   = one
               call MatSetValuesLocal(jac_prec,ione,row,ione,col,v,     &
     &                           INSERT_VALUES,ierr)
!           interior grid points: 5-point stencil
            else
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy)                               &
     &                  - sc*user%lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
               col(1) = row - user%gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + user%gxm
               call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,    &
     &                                INSERT_VALUES,ierr)
            endif
 10      continue
 20   continue

      return
      end subroutine FormJacobianLocal

```