Actual source code: ex14f.F

petsc-master 2016-09-27
!
!
!  Solves a nonlinear system in parallel with a user-defined
!  Newton method that uses KSP to solve the linearized Newton systems.  This solver
!  is a very simplistic inexact Newton method.  The intent of this code is to
!  demonstrate the repeated solution of linear systems with the same nonzero pattern.
!
!  This is NOT the recommended approach for solving nonlinear problems with PETSc!
!  We urge users to employ the SNES component for solving nonlinear problems whenever
!  possible, as it offers many advantages over coding nonlinear solvers independently.
!
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!
!  The command line options include (an example invocation follows the list):
!  -par <parameter>, where <parameter> indicates the problem's nonlinearity
!     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!  -mx <xg>, where <xg> = number of grid points in the x-direction
!  -my <yg>, where <yg> = number of grid points in the y-direction
!  -Nx <npx>, where <npx> = number of processors in the x-direction
!  -Ny <npy>, where <npy> = number of processors in the y-direction
!  -mf use the matrix-free approach for the matrix-vector product
!
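!  An example invocation (a sketch; the MPI launcher and the name of
!  the compiled executable are assumptions and may vary per install):
!
!      mpiexec -n 4 ./ex14f -mx 16 -my 16 -Nx 2 -Ny 2 -ksp_monitor
!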
!/*T
!   Concepts: KSP^writing a user-defined nonlinear solver
!   Concepts: DMDA^using distributed arrays
!   Processors: n
!T*/
!  ------------------------------------------------------------------------
!
!    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!    the partial differential equation
!
!            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!    with boundary conditions
!
!             u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!    A finite difference approximation with the usual 5-point stencil
!    is used to discretize the boundary value problem to obtain a nonlinear
!    system of equations.
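!
!    Concretely, at an interior node (i,j), with hx = 1/(mx-1) and
!    hy = 1/(my-1), the scaled residual assembled in ComputeFunction()
!    below is
!
!       F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!              + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!              - hx*hy*lambda*exp(u(i,j)),
!
!    while boundary nodes simply carry F(i,j) = u(i,j).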
!
!    The SNES version of this problem is:  snes/examples/tutorials/ex5f.F
!
!  -------------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     petscsys.h    - base PETSc routines      petscvec.h - vectors
!     petscmat.h    - matrices
!     petscis.h     - index sets               petscksp.h - Krylov subspace methods
!     petscviewer.h - viewers                  petscpc.h  - preconditioners

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscis.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>
#include <petsc/finclude/petscksp.h>
#include <petsc/finclude/petscdm.h>
#include <petsc/finclude/petscdmda.h>

      MPI_Comm comm
      Vec      X,Y,F,localX
      Mat      J,B
      DM       da
      KSP      ksp

      PetscInt  Nx,Ny,N,mx,my,ifive,ithree
      PetscBool flg,nooutput,usemf
      common   /mycommon/ mx,my,B,localX,da
!
!
!     This is the routine to use for the matrix-free approach
!
      external mymult

!     --------------- Data to define nonlinear solver --------------
      PetscReal      rtol,ttol
      PetscReal      fnorm,ynorm,xnorm
      PetscInt       max_nonlin_its,one
      PetscInt       lin_its
      PetscInt       i,m
      PetscScalar    mone
      PetscErrorCode ierr

      mone           = -1.0
      rtol           = 1.e-8
      max_nonlin_its = 10
      one            = 1
      ifive          = 5
      ithree         = 3

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      comm = PETSC_COMM_WORLD

!  Initialize problem parameters

      mx = 4
      my = 4
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,    &
     &                        '-mx',mx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,    &
     &                        '-my',my,flg,ierr)
      N = mx*my

      nooutput = .false.
      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,    &
     &                         '-no_output',nooutput,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create linear solver context
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPCreate(comm,ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create vector data structures
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!
!  Create distributed array (DMDA) to manage parallel grid and vectors
!
      Nx = PETSC_DECIDE
      Ny = PETSC_DECIDE
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,   &
     &                        '-Nx',Nx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,   &
     &                        '-Ny',Ny,flg,ierr)
      call DMDACreate2d(comm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,             &
     &     DMDA_STENCIL_STAR,mx,my,Nx,Ny,one,one,                           &
     &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)
      call DMSetFromOptions(da,ierr)
      call DMSetUp(da,ierr)
!
!  Extract global and local vectors from the DMDA, then duplicate for the
!  remaining vectors that are of the same type
!
      call DMCreateGlobalVector(da,X,ierr)
      call DMCreateLocalVector(da,localX,ierr)
      call VecDuplicate(X,F,ierr)
      call VecDuplicate(X,Y,ierr)


!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create matrix data structure for the Jacobian
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.
!
!     Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
!
      call VecGetLocalSize(X,m,ierr)
      call MatCreateAIJ(comm,m,m,N,N,ifive,PETSC_NULL_INTEGER,ithree,         &
     &     PETSC_NULL_INTEGER,B,ierr)
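!     (ifive = 5 is the per-row preallocation for the diagonal block,
!     the at most 5 nonzeros of the 5-point stencil; ithree = 3 is a
!     rough overestimate of how many of those may fall in the
!     off-process block)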

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     If usemf is on, then the matrix-vector product is done via the
!     matrix-free approach.  Note that this is just an example, and not
!     realistic, because we still use the actual formed matrix; in
!     reality one would provide one's own subroutine that computes the
!     matrix-vector product directly rather than calling MatMult() on a
!     stored matrix (see the finite-difference sketch in MyMultFD() at
!     the end of this file).
!     Note: we put B into a common block so it will be visible to the
!     mymult() routine
      usemf = .false.
      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,       &
     &                         '-mf',usemf,ierr)
      if (usemf) then
         call MatCreateShell(comm,m,m,N,N,PETSC_NULL_INTEGER,J,ierr)
         call MatShellSetOperation(J,MATOP_MULT,mymult,ierr)
      else
!        If not doing matrix-free, then the matrix operator, J, and the
!        matrix used to construct the preconditioner, B, are the same
        J = B
      endif

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Customize linear solver; set runtime options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     Set runtime options (e.g., -ksp_monitor -ksp_rtol <rtol> -ksp_type <type>)
!
      call KSPSetFromOptions(ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Evaluate initial guess
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call FormInitialGuess(X,ierr)
      call ComputeFunction(X,F,ierr)
      call VecNorm(F,NORM_2,fnorm,ierr)
      ttol = fnorm*rtol
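!     (convergence will be declared when the residual norm drops below
!      ttol = rtol*||F(X_0)||, i.e. a relative decrease of 1.e-8)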
      if (.not. nooutput) then
        print*, 'Initial function norm ',fnorm
      endif

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Solve nonlinear system with a user-defined method
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  This solver is a very simplistic inexact Newton method, with no
!  damping strategies or bells and whistles.  The intent of this code
!  is merely to demonstrate the repeated solution with KSP of linear
!  systems with the same nonzero structure.
!
!  This is NOT the recommended approach for solving nonlinear problems
!  with PETSc!  We urge users to employ the SNES component for solving
!  nonlinear problems whenever possible with application codes, as it
!  offers many advantages over coding nonlinear solvers independently.
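!
!  Each pass of the loop below applies one undamped Newton step:
!  solve  J(X_k) Y = F(X_k)  inexactly with KSP, then update
!  X_{k+1} = X_k - Y.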

      do 10 i=0,max_nonlin_its

!  Compute the Jacobian matrix

         call ComputeJacobian(X,B,ierr)

!  Solve J Y = F, where J is the Jacobian matrix.
!    - First, set the KSP linear operators.  Here the matrix that
!      defines the linear system also serves as the preconditioning
!      matrix.
!    - Then solve the Newton system.

         call KSPSetOperators(ksp,J,B,ierr)
         call KSPSolve(ksp,F,Y,ierr)

!  Compute updated iterate

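!  (VecAYPX below overwrites Y with X - Y, the new Newton iterate,
!   which is then copied back into X)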
         call VecNorm(Y,NORM_2,ynorm,ierr)
         call VecAYPX(Y,mone,X,ierr)
         call VecCopy(Y,X,ierr)
         call VecNorm(X,NORM_2,xnorm,ierr)
         call KSPGetIterationNumber(ksp,lin_its,ierr)
         if (.not. nooutput) then
           print*,'linear solve iterations = ',lin_its,' xnorm = ',     &
     &         xnorm,' ynorm = ',ynorm
         endif

!  Evaluate nonlinear function at new location

         call ComputeFunction(X,F,ierr)
         call VecNorm(F,NORM_2,fnorm,ierr)
         if (.not. nooutput) then
           print*, 'Iteration ',i+1,' function norm ',fnorm
         endif

!  Test for convergence

         if (fnorm .le. ttol) then
           if (.not. nooutput) then
             print*,'Converged: function norm ',fnorm,' tolerance ',ttol
           endif
           goto 20
         endif
 10   continue
 20   continue

      write(6,100) i+1
 100  format('Number of nonlinear iterations =',I2)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Free work space.  All PETSc objects should be destroyed when they
!     are no longer needed.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call MatDestroy(B,ierr)
      if (usemf) then
        call MatDestroy(J,ierr)
      endif
      call VecDestroy(localX,ierr)
      call VecDestroy(X,ierr)
      call VecDestroy(Y,ierr)
      call VecDestroy(F,ierr)
      call KSPDestroy(ksp,ierr)
      call DMDestroy(da,ierr)
      call PetscFinalize(ierr)
      end

! -------------------------------------------------------------------
!
!   FormInitialGuess - Forms the initial approximation.
!
!   Input Parameter:
!   X - vector
!
!   Output Parameter:
!   X - vector containing the initial guess
!
      subroutine FormInitialGuess(X,ierr)
      implicit none

!     petscsys.h    - base PETSc routines      petscvec.h - vectors
!     petscmat.h    - matrices
!     petscis.h     - index sets               petscksp.h - Krylov subspace methods
!     petscviewer.h - viewers                  petscpc.h  - preconditioners

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscis.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>
#include <petsc/finclude/petscksp.h>
#include <petsc/finclude/petscdm.h>
#include <petsc/finclude/petscdmda.h>
      PetscErrorCode   ierr
      PetscOffset      idx
      Vec              X,localX
      PetscInt         i,j,row,mx
      PetscInt         my,xs,ys,xm
      PetscInt         ym
      PetscReal        one,lambda,temp1,temp,hx,hy
      PetscScalar      xx(2)
      DM               da
      Mat              B
      common   /mycommon/ mx,my,B,localX,da

      one    = 1.0
      lambda = 6.0
      hx     = one/(mx-1)
      hy     = one/(my-1)
      temp1  = lambda/(lambda + one)

!  Get a pointer to vector data.
!    - VecGetArray() returns a pointer to the data array.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
      call VecGetArray(X,xx,idx,ierr)

!  Get local grid boundaries (for 2-dimensional DMDA):
!    xs, ys   - starting grid indices (no ghost points)
!    xm, ym   - widths of local grid (no ghost points)

      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,             &
     &      PETSC_NULL_INTEGER,ierr)

!  Compute initial guess over the locally owned part of the grid

      do 30 j=ys,ys+ym-1
        temp = (min(j,my-j-1))*hy
        do 40 i=xs,xs+xm-1
          row = i - xs + (j - ys)*xm + 1
          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or.              &
     &        j .eq. my-1) then
            xx(idx+row) = 0.0
!           (skip the interior formula on boundary points; a bare
!           "continue" here would be a no-op and let the boundary
!           value be overwritten below)
            goto 40
          endif
          xx(idx+row) = temp1*sqrt(min((min(i,mx-i-1))*hx,temp))
 40     continue
 30   continue

!     Restore vector

      call VecRestoreArray(X,xx,idx,ierr)
      return
      end

! -------------------------------------------------------------------
!
!   ComputeFunction - Evaluates the nonlinear function, F(x).
!
!   Input Parameter:
!.  X - input vector
!
!   Output Parameter:
!.  F - function vector
!
      subroutine ComputeFunction(X,F,ierr)
      implicit none

!     petscsys.h    - base PETSc routines      petscvec.h - vectors
!     petscmat.h    - matrices
!     petscis.h     - index sets               petscksp.h - Krylov subspace methods
!     petscviewer.h - viewers                  petscpc.h  - preconditioners

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscis.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>
#include <petsc/finclude/petscksp.h>
#include <petsc/finclude/petscdm.h>
#include <petsc/finclude/petscdmda.h>

      Vec              X,F,localX
      PetscInt         gys,gxm,gym
      PetscOffset      idx,idf
      PetscErrorCode   ierr
      PetscInt         i,j,row,mx,my,xs,ys,xm,ym,gxs
      PetscInt         rowf
      PetscReal        two,one,lambda,hx
      PetscReal        hy,hxdhy,hydhx,sc
      PetscScalar      u,uxx,uyy,xx(2),ff(2)
      DM               da
      Mat              B
      common   /mycommon/ mx,my,B,localX,da

      two    = 2.0
      one    = 1.0
      lambda = 6.0

      hx     = one/(mx-1)
      hy     = one/(my-1)
      sc     = hx*hy*lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.
!
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get pointers to vector data

      call VecGetArray(localX,xx,idx,ierr)
      call VecGetArray(F,ff,idf,ierr)

!  Get local grid boundaries

      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,              &
     &     PETSC_NULL_INTEGER,ierr)
      call DMDAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,     &
     &     PETSC_NULL_INTEGER,ierr)

!  Compute function over the locally owned part of the grid
      rowf = 0
      do 50 j=ys,ys+ym-1

        row  = (j - gys)*gxm + xs - gxs
        do 60 i=xs,xs+xm-1
          row  = row + 1
          rowf = rowf + 1

          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or.              &
     &        j .eq. my-1) then
            ff(idf+rowf) = xx(idx+row)
            goto 60
          endif
          u   = xx(idx+row)
          uxx = (two*u - xx(idx+row-1) - xx(idx+row+1))*hydhx
          uyy = (two*u - xx(idx+row-gxm) - xx(idx+row+gxm))*hxdhy
          ff(idf+rowf) = uxx + uyy - sc*exp(u)
 60     continue
 50   continue

!  Restore vectors

      call VecRestoreArray(localX,xx,idx,ierr)
      call VecRestoreArray(F,ff,idf,ierr)
      return
      end

! -------------------------------------------------------------------
!
!   ComputeJacobian - Evaluates the Jacobian matrix.
!
!   Input Parameter:
!   X - input vector
!
!   Output Parameter:
!   jac - Jacobian matrix
!
!   Notes:
!   Due to grid point reordering with DMDAs, we must always work
!   with the local grid points, and then transform them to the new
!   global numbering with the 'ltog' mapping.
!   We cannot work directly with the global numbers for the original
!   uniprocessor grid!
!
      subroutine ComputeJacobian(X,jac,ierr)
      implicit none

!     petscsys.h    - base PETSc routines      petscvec.h - vectors
!     petscmat.h    - matrices
!     petscis.h     - index sets               petscksp.h - Krylov subspace methods
!     petscviewer.h - viewers                  petscpc.h  - preconditioners

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscis.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>
#include <petsc/finclude/petscksp.h>
#include <petsc/finclude/petscdm.h>
#include <petsc/finclude/petscdmda.h>

      Vec              X
      Mat              jac
      Vec              localX
      DM               da
      PetscInt         ltog(2)
      PetscOffset      idltog,idx
      PetscErrorCode   ierr
      PetscInt         xs,ys,xm,ym
      PetscInt         gxs,gys,gxm,gym
      PetscInt         grow(1),i,j
      PetscInt         row,mx,my,ione
      PetscInt         col(5),ifive
      PetscScalar      two,one,lambda
      PetscScalar      v(5),hx,hy,hxdhy
      PetscScalar      hydhx,sc,xx(2)
      Mat              B
      ISLocalToGlobalMapping ltogm
      common   /mycommon/ mx,my,B,localX,da

      ione   = 1
      ifive  = 5
      one    = 1.0
      two    = 2.0
      hx     = one/(mx-1)
      hy     = one/(my-1)
      sc     = hx*hy
      hxdhy  = hx/hy
      hydhx  = hy/hx
      lambda = 6.0

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get pointer to vector data

      call VecGetArray(localX,xx,idx,ierr)

!  Get local grid boundaries

      call DMDAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,              &
     &     PETSC_NULL_INTEGER,ierr)
      call DMDAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,     &
     &                        PETSC_NULL_INTEGER,ierr)

!  Get the global node numbers for all local nodes, including ghost points

      call DMGetLocalToGlobalMapping(da,ltogm,ierr)
      call ISLocalToGlobalMappingGetIndices(ltogm,ltog,idltog,ierr)

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.  The 'grow'
!     parameter computed below specifies the global row number
!     corresponding to each local grid point.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Here, we set all entries for a particular row at once.
      do 10 j=ys,ys+ym-1
        row = (j - gys)*gxm + xs - gxs
        do 20 i=xs,xs+xm-1
          row = row + 1
          grow(1) = ltog(idltog+row)
          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. (mx-1) .or.            &
     &        j .eq. (my-1)) then
             call MatSetValues(jac,ione,grow,ione,grow,one,             &
     &                         INSERT_VALUES,ierr)
             goto 20
          endif
          v(1)   = -hxdhy
          col(1) = ltog(idltog+row - gxm)
          v(2)   = -hydhx
          col(2) = ltog(idltog+row - 1)
          v(3)   = two*(hydhx + hxdhy) - sc*lambda*exp(xx(idx+row))
          col(3) = grow(1)
          v(4)   = -hydhx
          col(4) = ltog(idltog+row + 1)
          v(5)   = -hxdhy
          col(5) = ltog(idltog+row + gxm)
          call MatSetValues(jac,ione,grow,ifive,col,v,INSERT_VALUES,       &
     &                      ierr)
 20     continue
 10   continue

      call ISLocalToGlobalMappingRestoreIndices(ltogm,ltog,idltog,ierr)

!  Assemble matrix, using the 2-step process:
!    MatAssemblyBegin(), MatAssemblyEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      call VecRestoreArray(localX,xx,idx,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
      return
      end


! -------------------------------------------------------------------
!
!   MyMult - user-provided matrix-vector multiply
!
!   Input Parameters:
!.  J - shell matrix
!.  X - input vector
!
!   Output Parameter:
!.  F - product vector, F = J*X
!
      subroutine MyMult(J,X,F,ierr)
      implicit none
      Mat            J,B
      Vec            X,F
      PetscErrorCode ierr
      PetscInt       mx,my
      DM             da
      Vec            localX

      common   /mycommon/ mx,my,B,localX,da
!
!     Here we use the actual formed matrix B; users would
!     instead write their own matrix-vector product routine
!
      call MatMult(B,X,F,ierr)
      return
      end
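
! -------------------------------------------------------------------
!
!   MyMultFD - a sketch of what a genuinely matrix-free product could
!   look like.  This subroutine is NOT part of the original example;
!   the name MyMultFD, the common block /myfdctx/, and the vector Xcur
!   (the current Newton iterate, which the main program would have to
!   store before each KSPSolve()) are all assumptions for illustration.
!   It approximates the Jacobian action by one-sided differences,
!
!        J(Xcur)*X ~= (F(Xcur + eps*X) - F(Xcur))/eps,
!
!   reusing ComputeFunction() above.  A production code would reuse the
!   residual F(Xcur) already available in the Newton loop and scale eps
!   by the vector norms (as SNES's matrix-free support does), rather
!   than recomputing with a fixed eps as done here.
!
      subroutine MyMultFD(J,X,F,ierr)
      implicit none
#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
      Mat            J
      Vec            X,F,W,Fbase,Xcur
      PetscErrorCode ierr
      PetscScalar    eps,mone
      common   /myfdctx/ Xcur

      eps  = 1.e-7
      mone = -1.0

!     W = Xcur + eps*X, the perturbed point
      call VecDuplicate(X,W,ierr)
      call VecWAXPY(W,eps,X,Xcur,ierr)

!     F = F(W) and Fbase = F(Xcur)
      call VecDuplicate(X,Fbase,ierr)
      call ComputeFunction(W,F,ierr)
      call ComputeFunction(Xcur,Fbase,ierr)

!     F = (F - Fbase)/eps, the difference quotient
      call VecAXPY(F,mone,Fbase,ierr)
      call VecScale(F,1.0/eps,ierr)

      call VecDestroy(W,ierr)
      call VecDestroy(Fbase,ierr)
      return
      end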