Actual source code: ex46.c

petsc-3.4.5 2014-06-29
static char help[] = "Surface processes in geophysics.\n\n";

/*T
   Concepts: SNES^parallel Surface process example
   Concepts: DMDA^using distributed arrays;
   Concepts: IS coloring types;
   Processors: n
T*/


#include <petscsnes.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct {
  PassiveReal D;  /* The diffusion coefficient */
  PassiveReal K;  /* The advection coefficient */
  PetscInt    m;  /* Exponent for A */
} AppCtx;
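/*
   For reference, these parameters enter the steady surface-process model that the
   residual in FormFunctionLocal() below discretizes; written out, it is roughly

       D*Laplacian(u) + K*A(u)*|grad u| + U(x,y) = 0   on the unit square,
       u = 0                                           on the boundary,

   with A(z) = z^m (funcA()) and forcing term U = funcU().  This summary is inferred
   from the code itself, not quoted from a separate problem statement.
*/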

/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionLocal(DMDALocalInfo*,PetscScalar**,PetscScalar**,AppCtx*);
extern PetscErrorCode FormJacobianLocal(DMDALocalInfo*,PetscScalar**,Mat,AppCtx*);

int main(int argc,char **argv)
{
  SNES           snes;                         /* nonlinear solver */
  AppCtx         user;                         /* user-defined work context */
  PetscInt       its;                          /* iterations for convergence */
  DM             da;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Surface Process Problem Options", "SNES");
  user.D = 1.0;
  PetscOptionsReal("-D", "The diffusion coefficient D", __FILE__, user.D, &user.D, NULL);
  user.K = 1.0;
  PetscOptionsReal("-K", "The advection coefficient K", __FILE__, user.K, &user.K, NULL);
  user.m = 1;
  PetscOptionsInt("-m", "The exponent for A", __FILE__, user.m, &user.m, NULL);
  PetscOptionsEnd();
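  /*
     The options registered above can be overridden on the command line along with
     the usual PETSc options, for example (hypothetical invocation; the executable
     name and grid sizes depend on how the example is built):

       ./ex46 -D 0.1 -K 2.0 -m 2 -da_grid_x 20 -da_grid_y 20 -snes_monitor
  */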

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
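  /*
     The global grid size is given as -4,-4 below; a negative dimension tells
     DMDACreate2d() to use its absolute value as the default while still allowing
     it to be overridden at run time (e.g. with -da_grid_x/-da_grid_y).
  */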
  DMDACreate2d(PETSC_COMM_WORLD, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,-4,-4,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
  DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
  DMSetApplicationContext(da,&user);
  SNESCreate(PETSC_COMM_WORLD, &snes);
  SNESSetDM(snes, da);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMDASNESSetFunctionLocal(da,INSERT_VALUES,(PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))FormFunctionLocal,&user);
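  /*
     Note that FormJacobianLocal() below is declared and defined but never registered
     with the solver here, so SNES will typically approximate the Jacobian by finite
     differences using a coloring of the DMDA (hence the "IS coloring types" concept
     above).  To use the hand-coded Jacobian instead, it would presumably be attached
     with DMDASNESSetJacobianLocal(); see that routine's manual page for the exact
     callback signature in this PETSc version.
  */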

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SNESSetFromOptions(snes);


  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
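  /*
     Both arguments to SNESSolve() below are 0 (NULL): no separate right-hand side is
     supplied and SNES creates the solution vector itself from the attached DM, so the
     solve effectively starts from a zero initial guess.
  */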
  SNESSolve(snes,0,0);
  SNESGetIterationNumber(snes,&its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Report the number of nonlinear iterations
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n",its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SNESDestroy(&snes);
  DMDestroy(&da);

  PetscFinalize();
  return(0);
}

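/*
   funcU - Forcing term evaluated at a grid point's coordinates (here simply x + y);
   it enters the residual in FormFunctionLocal() as the source term U(x,y).
*/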
PetscScalar funcU(DMDACoor2d *coords)
{
  return coords->x + coords->y;
}

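/*
   funcA - The nonlinear coefficient A(z) = z^m, with the exponent m taken from the
   user context.
*/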
PetscScalar funcA(PetscScalar z, AppCtx *user)
{
  PetscScalar v = 1.0;
  PetscInt    i;

  for (i = 0; i < user->m; ++i) v *= z;
  return v;
}

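/*
   funcADer - Derivative of funcA, dA/dz = m*z^(m-1), used by the hand-coded Jacobian.
*/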
PetscScalar funcADer(PetscScalar z, AppCtx *user)
{
  PetscScalar v = 1.0;
  PetscInt    i;

  for (i = 0; i < user->m-1; ++i) v *= z;
  return (PetscScalar)user->m*v;
}

/*
   FormFunctionLocal - Evaluates nonlinear function, F(x).
*/
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,AppCtx *user)
{
  DM             coordDA;
  Vec            coordinates;
  DMDACoor2d     **coords;
  PetscScalar    u, ux, uy, uxx, uyy;
  PetscReal      D, K, hx, hy, hxdhy, hydhx;
  PetscInt       i,j;

  D     = user->D;
  K     = user->K;
  hx    = 1.0/(PetscReal)(info->mx-1);
  hy    = 1.0/(PetscReal)(info->my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;
  /*
     Compute function over the locally owned part of the grid
  */
  DMGetCoordinateDM(info->da, &coordDA);
  DMGetCoordinates(info->da, &coordinates);
  DMDAVecGetArray(coordDA, coordinates, &coords);
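  /*
     At interior points the residual assembled below corresponds, up to the hx*hy
     scaling and the sign carried by the (2u - u_W - u_E) second-difference stencil,
     to  -( D*Laplacian(u) + K*A(u)*|grad u| + U(x,y) ),  with one-sided differences
     for ux and uy.  Boundary rows simply impose u = 0 by setting f = u there.
  */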
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) f[j][i] = x[j][i];
      else {
        u       = x[j][i];
        ux      = (x[j][i+1] - x[j][i])/hx;
        uy      = (x[j+1][i] - x[j][i])/hy;
        uxx     = (2.0*u - x[j][i-1] - x[j][i+1])*hydhx;
        uyy     = (2.0*u - x[j-1][i] - x[j+1][i])*hxdhy;
        f[j][i] = D*(uxx + uyy) - (K*funcA(x[j][i], user)*sqrt(ux*ux + uy*uy) + funcU(&coords[j][i]))*hx*hy;
        if (PetscIsInfOrNanScalar(f[j][i])) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FP, "Invalid residual: %g", PetscRealPart(f[j][i]));
      }
    }
  }
  DMDAVecRestoreArray(coordDA, coordinates, &coords);
  PetscLogFlops(11*info->ym*info->xm);
  return(0);
}

/*
   FormJacobianLocal - Evaluates Jacobian matrix.
*/
PetscErrorCode FormJacobianLocal(DMDALocalInfo *info,PetscScalar **x,Mat jac,AppCtx *user)
{
  MatStencil     col[5], row;
  PetscScalar    D, K, A, v[5], hx, hy, hxdhy, hydhx, ux, uy;
  PetscReal      normGradZ;
  PetscInt       i, j,k;

  D     = user->D;
  K     = user->K;
  hx    = 1.0/(PetscReal)(info->mx-1);
  hy    = 1.0/(PetscReal)(info->my-1);
  hxdhy = hx/hy;
  hydhx = hy/hx;

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We set matrix entries with MatSetValuesStencil(), which addresses
        them by grid indices (i,j); MatSetValuesLocal() or MatSetValues()
        could be used instead.
  */
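  /*
     The five stencil values set below approximate the derivatives of the interior
     residual in FormFunctionLocal() with respect to the unknowns in the star stencil;
     normGradZ is floored at 1.0e-8 so that the 1/|grad u| factors stay finite where
     the surface is (nearly) flat.
  */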
  for (j=info->ys; j<info->ys+info->ym; j++) {
    for (i=info->xs; i<info->xs+info->xm; i++) {
      row.j = j; row.i = i;
      if (i == 0 || j == 0 || i == info->mx-1 || j == info->my-1) {
        /* boundary points */
        v[0] = 1.0;
        MatSetValuesStencil(jac,1,&row,1,&row,v,INSERT_VALUES);
      } else {
        /* interior grid points */
        ux        = (x[j][i+1] - x[j][i])/hx;
        uy        = (x[j+1][i] - x[j][i])/hy;
        normGradZ = PetscRealPart(sqrt(ux*ux + uy*uy));
        /* PetscPrintf(PETSC_COMM_SELF, "i: %d j: %d normGradZ: %g\n", i, j, normGradZ); */
        if (normGradZ < 1.0e-8) normGradZ = 1.0e-8;
        A = funcA(x[j][i], user);

        v[0] = -D*hxdhy;                                                                          col[0].j = j - 1; col[0].i = i;
        v[1] = -D*hydhx;                                                                          col[1].j = j;     col[1].i = i-1;
        v[2] = D*2.0*(hydhx + hxdhy) + K*(funcADer(x[j][i], user)*normGradZ - A/normGradZ)*hx*hy; col[2].j = row.j; col[2].i = row.i;
        v[3] = -D*hydhx + K*A*hx*hy/(2.0*normGradZ);                                              col[3].j = j;     col[3].i = i+1;
        v[4] = -D*hxdhy + K*A*hx*hy/(2.0*normGradZ);                                              col[4].j = j + 1; col[4].i = i;
        for (k = 0; k < 5; ++k) {
          if (PetscIsInfOrNanScalar(v[k])) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FP, "Invalid Jacobian entry: %g", PetscRealPart(v[k]));
        }
        MatSetValuesStencil(jac,1,&row,5,col,v,INSERT_VALUES);
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  /*
     Tell the matrix we will never add a new nonzero location to the
     matrix. If we do, it will generate an error.
  */
  MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  return(0);
}