Actual source code: ex2.c

petsc-3.3-p7 2013-05-11
static char help[] ="Solves a time-dependent nonlinear PDE. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\n";

/*
   Concepts: TS^time-dependent nonlinear problems
   Processors: n
*/

/* ------------------------------------------------------------------------

   This program solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    Note that since the solution is linear in time and quadratic in x,
    the finite difference scheme actually computes the "exact" solution.

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */
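/*
   Added note: a quick check that the stated exact solution satisfies
   the PDE.  From u(t,x) = (1 + x*x)*(1 + t) we get
        u_t  = 1 + x*x
        u_xx = 2*(1 + t)
   so
        u * u_xx / (2*(t+1)^2) = (1+x*x)*(1+t) * 2*(1+t) / (2*(t+1)^2)
                               = 1 + x*x = u_t,
   and the boundary and initial conditions follow by substituting
   x = 0, x = 1, and t = 0 into u(t,x).
*/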

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" to allow us to use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm   comm;          /* communicator */
  DM         da;            /* distributed array data structure */
  Vec        localwork;     /* local ghosted work vector */
  Vec        u_local;       /* local ghosted approximate solution vector */
  Vec        solution;      /* global exact solution vector */
  PetscInt   m;             /* total number of grid points */
  PetscReal  h;             /* mesh width: h = 1/(m-1) */
  PetscBool  debug;         /* flag (1 indicates activation of debugging printouts) */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

/*
   Utility routine for finite difference Jacobian approximation
*/
extern PetscErrorCode RHSJacobianFD(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  PetscInt       time_steps_max = 100;   /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  PetscBool      flg,mymonitor = PETSC_FALSE;
  PetscReal      bounds[] = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);
  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscOptionsGetInt(PETSC_NULL,"-M",&appctx.m,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-debug",&appctx.debug);
  PetscOptionsHasName(PETSC_NULL,"-mymonitor",&mymonitor);
  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,appctx.m,1,1,PETSC_NULL,
               &appctx.da);
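#if 0
  /*
     Added illustration (not part of the original example; disabled so the
     program is unchanged): each process can query the portion of the 1d
     grid it owns with DMDAGetCorners().  A minimal sketch assuming the
     petsc-3.3 DMDA interface; xs and xm are hypothetical local variables.
  */
  {
    PetscInt xs,xm;
    DMDAGetCorners(appctx.da,&xs,PETSC_NULL,PETSC_NULL,&xm,PETSC_NULL,PETSC_NULL);
    PetscPrintf(PETSC_COMM_SELF,"this process owns %D grid points starting at global index %D\n",xm,xs);
  }
#endif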

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,PETSC_NULL,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    TSMonitorSet(ts,Monitor,&appctx,PETSC_NULL);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);
  PetscOptionsHasName(PETSC_NULL,"-fdjac",&flg);
  if (flg) {
    TSSetRHSJacobian(ts,A,A,RHSJacobianFD,&appctx);
  } else {
    TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);
  }
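  /*
     Added usage note: typical invocations of this example (assuming the
     executable is named ex2) might look like

         ./ex2 -M 80 -debug
         mpiexec -n 2 ./ex2 -mymonitor -ts_max_steps 10

     The -fdjac option above selects the finite difference Jacobian
     approximation instead of the hand-coded RHSJacobian().
  */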

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt   = appctx.h/2.0;
  TSSetInitialTimeStep(ts,0.0,dt);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_final_time <maxtime>
     to override the defaults set by TSSetDuration().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetDuration(ts,time_steps_max,time_total_max);
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u,PETSC_NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal(), as sketched (disabled) below.
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }
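#if 0
  /*
     Added sketch (not part of the original example; disabled so behavior
     is unchanged): the same initialization using VecSetValues() with
     global indices.  'val' is a hypothetical local variable; the
     assembly calls are required before the vector can be used.
  */
  {
    PetscScalar val;
    for (i=mybase; i<myend; i++) {
      x   = h*(PetscReal)i;
      val = 1.0 + x*x;
      VecSetValues(u,1,&i,&val,INSERT_VALUES);
    }
    VecAssemblyBegin(u);
    VecAssemblyEnd(u);
  }
#endif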

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
     PetscPrintf(appctx->comm,"initial guess vector\n");
     VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      en2,en2s,enmax;
  PetscDraw      draw;

  /*
     We use the default X windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));

  /*
     Compute the exact solution at this timestep
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
     PetscPrintf(appctx->comm,"Computed solution vector\n");
     VecView(u,PETSC_VIEWER_STDOUT_WORLD);
     PetscPrintf(appctx->comm,"Exact solution vector\n");
     VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&en2);
  en2s  = PetscSqrtReal(appctx->h)*en2; /* scale the 2-norm by the grid spacing */
  VecNorm(appctx->solution,NORM_MAX,&enmax);
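  /*
     Added note: the sqrt(h) factor makes the discrete 2-norm approximate
     the continuous L2 norm of the error, since
         ||e||_{L2}^2 = integral of e(x)^2 dx  ~=  h * sum_i e_i^2,
     so  ||e||_{L2}  ~=  sqrt(h) * ||e||_2.
  */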

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %G 2-norm error = %G  max norm error = %G\n",step,time,en2s,enmax);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
     PetscPrintf(appctx->comm,"Error vector\n");
     VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;       /* user-defined application context */
  DM             da = appctx->da;               /* distributed array */
  Vec            local_in = appctx->u_local;    /* local ghosted input vector */
  Vec            localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt       i,localsize;
  PetscMPIInt    rank,size;
  PetscScalar    *copyptr,*localptr,sc;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
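  /*
     (Added note: purely local work that does not touch the ghost values,
      such as computing the time-dependent scaling factor sc below, could
      be placed here to overlap with the ghost-point communication.)
  */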
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
      Access directly the values in our local INPUT work array
  */
  VecGetArray(local_in,&localptr);

  /*
      Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (!rank)          copyptr[0]           = 1.0;
  if (rank == size-1) copyptr[localsize-1] = 2.0;

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
  for (i=1; i<localsize-1; i++) {
    copyptr[i] =  localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);
  }
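  /*
     Added note: the standard second-order centered difference gives
         u_xx(x_i) ~= (u_{i+1} - 2*u_i + u_{i-1}) / h^2,
     so with sc = 1/(2*h^2*(1+t)^2) the loop above is exactly the
     discretization of u_t = u * u_xx / (2*(t+1)^2) at interior nodes.
  */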

  /*
     Restore vectors
  */
  VecRestoreArray(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  /* Print debugging information if desired */
  if (appctx->debug) {
     PetscPrintf(appctx->comm,"RHS function vector\n");
     VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   ctx - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  Mat            B = *BB;                      /* Jacobian matrix */
  AppCtx         *appctx = (AppCtx*)ctx;       /* user-defined application context */
  Vec            local_in = appctx->u_local;   /* local ghosted input vector */
  DM             da = appctx->da;              /* distributed array */
  PetscScalar    v[3],*localptr,sc;
  PetscInt       i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArray(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(B,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES);
  }
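  /*
     Added note: with F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i), the
     product rule gives exactly the Jacobian entries used above:
         dF_i/du_{i-1} = sc * u_i
         dF_i/du_i     = sc * (u_{i+1} + u_{i-1} - 2*u_i) - 2*sc*u_i
                       = sc * (u_{i+1} + u_{i-1} - 4*u_i)
         dF_i/du_{i+1} = sc * u_i
  */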

  /*
     Restore vector
  */
  VecRestoreArray(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);

  /*
     Set flag to indicate that the Jacobian matrix retains an identical
     nonzero structure throughout all timestepping iterations (although the
     values of the entries change). Thus, we can save some work in setting
     up the preconditioner (e.g., no need to redo symbolic factorization for
     ILU/ICC preconditioners).
      - If the nonzero structure of the matrix is different during
        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
        must be used instead.  If you are unsure whether the matrix
        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
        believes your assertion and does not check the structure
        of the matrix.  If you erroneously claim that the structure
        is the same when it actually is not, the new preconditioner
        will not function correctly.  Thus, use this optimization
        feature with caution!
  */
  *str = SAME_NONZERO_PATTERN;

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}