Actual source code: ex46.c

static char help[] = "Surface processes in geophysics.\n\n";

#include <petscsnes.h>
#include <petscdm.h>
#include <petscdmda.h>

/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, FormJacobianLocal() and
   FormFunctionLocal().
*/
typedef struct {
  PetscReal D; /* The diffusion coefficient */
  PetscReal K; /* The advection coefficient */
  PetscInt  m; /* Exponent for A */
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode FormFunctionLocal(DMDALocalInfo *, PetscScalar **, PetscScalar **, AppCtx *);
extern PetscErrorCode FormJacobianLocal(DMDALocalInfo *, PetscScalar **, Mat, AppCtx *);

int main(int argc, char **argv)
{
  SNES     snes; /* nonlinear solver */
  AppCtx   user; /* user-defined work context */
  PetscInt its;  /* iterations for convergence */
  DM       da;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize problem parameters
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscOptionsBegin(PETSC_COMM_WORLD, "", "Surface Process Problem Options", "SNES");
  user.D = 1.0;
  PetscCall(PetscOptionsReal("-D", "The diffusion coefficient D", __FILE__, user.D, &user.D, NULL));
  user.K = 1.0;
  PetscCall(PetscOptionsReal("-K", "The advection coefficient K", __FILE__, user.K, &user.K, NULL));
  user.m = 1;
  PetscCall(PetscOptionsInt("-m", "The exponent for A", __FILE__, user.m, &user.m, NULL));
  PetscOptionsEnd();

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 4, 4, PETSC_DECIDE, PETSC_DECIDE, 1, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
  PetscCall(DMSetApplicationContext(da, &user));
  PetscCall(SNESCreate(PETSC_COMM_WORLD, &snes));
  PetscCall(SNESSetDM(snes, da));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set local function evaluation routine
  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(DMDASNESSetFunctionLocal(da, INSERT_VALUES, (PetscErrorCode(*)(DMDALocalInfo *, void *, void *, void *))FormFunctionLocal, &user));
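  /* Note that no Jacobian routine is registered here. Since a DMDA is attached to the
     SNES, the Jacobian then defaults to finite differencing of the residual with coloring;
     FormJacobianLocal() below could instead be registered with DMDASNESSetJacobianLocal(). */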

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize solver; set runtime options
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSetFromOptions(snes));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(SNESSolve(snes, 0, 0));
  PetscCall(SNESGetIterationNumber(snes, &its));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Report the number of nonlinear iterations
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of SNES iterations = %" PetscInt_FMT "\n", its));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(SNESDestroy(&snes));
  PetscCall(DMDestroy(&da));

  PetscCall(PetscFinalize());
  return 0;
}

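/*
   funcU - Forcing term f(x, y) = x + y, evaluated at a grid point's coordinates.
*/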
PetscScalar funcU(DMDACoor2d *coords)
{
  return coords->x + coords->y;
}

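/*
   funcA - Coefficient A(z) = z^m multiplying the advection term.
*/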
PetscScalar funcA(PetscScalar z, AppCtx *user)
{
  PetscScalar v = 1.0;
  PetscInt    i;

  for (i = 0; i < user->m; ++i) v *= z;
  return v;
}

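/*
   funcADer - Derivative A'(z) = m z^(m-1), used when assembling the Jacobian.
*/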
PetscScalar funcADer(PetscScalar z, AppCtx *user)
{
  PetscScalar v = 1.0;
  PetscInt    i;

  for (i = 0; i < user->m - 1; ++i) v *= z;
  return (PetscScalar)user->m * v;
}

/*
   FormFunctionLocal - Evaluates nonlinear function, F(x).
*/
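/*
   Up to a sign and an hx*hy scaling, the interior stencil below discretizes

      D (u_xx + u_yy) + K A(u) |grad u| + f(x, y) = 0,   A(u) = u^m,   f(x, y) = x + y,

   with u = 0 imposed on the boundary of the unit square.
*/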
PetscErrorCode FormFunctionLocal(DMDALocalInfo *info, PetscScalar **x, PetscScalar **f, AppCtx *user)
{
  DM           coordDA;
  Vec          coordinates;
  DMDACoor2d **coords;
  PetscScalar  u, ux, uy, uxx, uyy;
  PetscReal    D, K, hx, hy, hxdhy, hydhx;
  PetscInt     i, j;

  PetscFunctionBeginUser;
  D     = user->D;
  K     = user->K;
  hx    = 1.0 / (PetscReal)(info->mx - 1);
  hy    = 1.0 / (PetscReal)(info->my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;
  /*
     Compute function over the locally owned part of the grid
  */
  PetscCall(DMGetCoordinateDM(info->da, &coordDA));
  PetscCall(DMGetCoordinates(info->da, &coordinates));
  PetscCall(DMDAVecGetArray(coordDA, coordinates, &coords));
  for (j = info->ys; j < info->ys + info->ym; j++) {
    for (i = info->xs; i < info->xs + info->xm; i++) {
      if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) f[j][i] = x[j][i];
      else {
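        /* interior point: ux, uy are one-sided slope estimates used for |grad u|;
           uxx, uyy are centered second differences (negated and scaled by hx*hy) */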
        u       = x[j][i];
        ux      = (x[j][i + 1] - x[j][i]) / hx;
        uy      = (x[j + 1][i] - x[j][i]) / hy;
        uxx     = (2.0 * u - x[j][i - 1] - x[j][i + 1]) * hydhx;
        uyy     = (2.0 * u - x[j - 1][i] - x[j + 1][i]) * hxdhy;
        f[j][i] = D * (uxx + uyy) - (K * funcA(x[j][i], user) * PetscSqrtScalar(ux * ux + uy * uy) + funcU(&coords[j][i])) * hx * hy;
        PetscCheck(!PetscIsInfOrNanScalar(f[j][i]), PETSC_COMM_SELF, PETSC_ERR_FP, "Invalid residual: %g", (double)PetscRealPart(f[j][i]));
      }
    }
  }
  PetscCall(DMDAVecRestoreArray(coordDA, coordinates, &coords));
  PetscCall(PetscLogFlops(11.0 * info->ym * info->xm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   FormJacobianLocal - Evaluates Jacobian matrix.
*/
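/*
   Boundary rows are set to the identity, matching the Dirichlet residual f = u, while
   each interior row carries the five-point star coupling of the residual to its neighbors.
*/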
PetscErrorCode FormJacobianLocal(DMDALocalInfo *info, PetscScalar **x, Mat jac, AppCtx *user)
{
  MatStencil  col[5], row;
  PetscScalar D, K, A, v[5], hx, hy, hxdhy, hydhx, ux, uy;
  PetscReal   normGradZ;
  PetscInt    i, j, k;

  PetscFunctionBeginUser;
  D     = user->D;
  K     = user->K;
  hx    = 1.0 / (PetscReal)(info->mx - 1);
  hy    = 1.0 / (PetscReal)(info->my - 1);
  hxdhy = hx / hy;
  hydhx = hy / hx;

  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesStencil()
        (as below) or MatSetValues().
  */
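  /*
     MatSetValuesStencil() accepts grid indices (i, j) packed in MatStencil structs and
     maps them to global matrix locations, so no index translation is needed here.
  */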
  for (j = info->ys; j < info->ys + info->ym; j++) {
    for (i = info->xs; i < info->xs + info->xm; i++) {
      row.j = j;
      row.i = i;
      if (i == 0 || j == 0 || i == info->mx - 1 || j == info->my - 1) {
        /* boundary points */
        v[0] = 1.0;
        PetscCall(MatSetValuesStencil(jac, 1, &row, 1, &row, v, INSERT_VALUES));
      } else {
        /* interior grid points */
        ux        = (x[j][i + 1] - x[j][i]) / hx;
        uy        = (x[j + 1][i] - x[j][i]) / hy;
        normGradZ = PetscRealPart(PetscSqrtScalar(ux * ux + uy * uy));
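        /* guard against division by zero in the A / normGradZ terms below */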
        if (normGradZ < 1.0e-8) normGradZ = 1.0e-8;
        A = funcA(x[j][i], user);

        v[0]     = -D * hxdhy;
        col[0].j = j - 1;
        col[0].i = i;
        v[1]     = -D * hydhx;
        col[1].j = j;
        col[1].i = i - 1;
        v[2]     = D * 2.0 * (hydhx + hxdhy) + K * (funcADer(x[j][i], user) * normGradZ - A / normGradZ) * hx * hy;
        col[2].j = row.j;
        col[2].i = row.i;
        v[3]     = -D * hydhx + K * A * hx * hy / (2.0 * normGradZ);
        col[3].j = j;
        col[3].i = i + 1;
        v[4]     = -D * hxdhy + K * A * hx * hy / (2.0 * normGradZ);
        col[4].j = j + 1;
        col[4].i = i;
        for (k = 0; k < 5; ++k) PetscCheck(!PetscIsInfOrNanScalar(v[k]), PETSC_COMM_SELF, PETSC_ERR_FP, "Invalid Jacobian entry: %g", (double)PetscRealPart(v[k]));
        PetscCall(MatSetValuesStencil(jac, 1, &row, 5, col, v, INSERT_VALUES));
      }
    }
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
  */
  PetscCall(MatAssemblyBegin(jac, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(jac, MAT_FINAL_ASSEMBLY));
  /*
     Tell the matrix we will never add a new nonzero location; if we do,
     it will generate an error.
  */
  PetscCall(MatSetOption(jac, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*TEST

   test:
      args: -snes_view -snes_monitor_short -da_refine 1 -pc_type mg -ksp_type fgmres -pc_mg_type full -mg_levels_ksp_chebyshev_esteig 0.5,1.1

   test:
      suffix: ew_1
      args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 1
      requires: !single

   test:
      suffix: ew_2
      args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 2

   test:
      suffix: ew_3
      args: -snes_monitor_short -ksp_converged_reason -da_grid_x 20 -da_grid_y 20 -snes_ksp_ew -snes_ksp_ew_version 3
      requires: !single

   test:
      suffix: fm_rise_2
      args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 1 -snes_ngmres_restart_fm_rise
      requires: !single

   test:
      suffix: fm_rise_4
      args: -K 3 -m 1 -D 0.2 -snes_monitor_short -snes_type ngmres -snes_npc_side right -npc_snes_type newtonls -npc_snes_linesearch_type basic -snes_ngmres_restart_it 2 -snes_ngmres_restart_fm_rise -snes_rtol 1.e-2 -snes_max_it 5

TEST*/