Actual source code: ex62.c

petsc-master 2017-06-21
Report Typos and Errors
/* Help text printed for -help; the three listing lines form one string via backslash continuation. */
  1: static char help[] = "Stokes Problem in 2d and 3d with simplicial finite elements.\n\
  2: We solve the Stokes problem in a rectangular\n\
  3: domain, using a parallel unstructured mesh (DMPLEX) to discretize it.\n\n\n";

  5: /*
  6: The isoviscous Stokes problem, which we discretize using the finite
  7: element method on an unstructured mesh. The weak form equations are

  9:   < \nabla v, \nabla u + {\nabla u}^T > - < \nabla\cdot v, p > + < v, f > = 0
 10:   < q, \nabla\cdot u >                                                    = 0

 12: We start with homogeneous Dirichlet conditions. We will expand this as the set
 13: of test problems is developed.

 15: Discretization:

 17: We use PetscFE to generate a tabulation of the finite element basis functions
 18: at quadrature points. We can currently generate an arbitrary order Lagrange
 19: element.

 21: Field Data:

 23:   DMPLEX data is organized by point, and the closure operation just stacks up the
 24: data from each sieve point in the closure. Thus, for a P_2-P_1 Stokes element, we
 25: have

 27:   cl{e} = {f e_0 e_1 e_2 v_0 v_1 v_2}
 28:   x     = [u_{e_0} v_{e_0} u_{e_1} v_{e_1} u_{e_2} v_{e_2} u_{v_0} v_{v_0} p_{v_0} u_{v_1} v_{v_1} p_{v_1} u_{v_2} v_{v_2} p_{v_2}]

 30: The problem here is that we would like to loop over each field separately for
 31: integration. Therefore, the closure visitor in DMPlexVecGetClosure() reorders
 32: the data so that each field is contiguous

 34:   x'    = [u_{e_0} v_{e_0} u_{e_1} v_{e_1} u_{e_2} v_{e_2} u_{v_0} v_{v_0} u_{v_1} v_{v_1} u_{v_2} v_{v_2} p_{v_0} p_{v_1} p_{v_2}]

 36: Likewise, DMPlexVecSetClosure() takes data partitioned by field, and correctly
 37: puts it into the Sieve ordering.

 39: Next Steps:

 41: - Refine and show convergence of correct order automatically (use femTest.py)
 42: - Fix InitialGuess for arbitrary disc (means making dual application work again)
 43: - Redo slides from GUCASTutorial for this new example

 45: For tensor product meshes, see SNES ex67, ex72
 46: */

 48: /*T
 49:   requires: !mpiuni
 50: T*/

 52:  #include <petscdmplex.h>
 53:  #include <petscsnes.h>
 54:  #include <petscds.h>

/* Global spatial dimension; set from -dim in ProcessOptions() and read by zero_vector(). */
 56: PetscInt spatialDim = 0;

/* Boundary-condition kind (-bc_type) and run mode (-run_type) selectors. */
 58: typedef enum {NEUMANN, DIRICHLET} BCType;
 59: typedef enum {RUN_FULL, RUN_TEST} RunType;

/* Application context: all command-line options plus the exact-solution callbacks
   (one per field: velocity, pressure) filled in by SetupProblem(). */
 61: typedef struct {
 62:   PetscInt      debug;             /* The debugging level */
 63:   RunType       runType;           /* Whether to run tests, or solve the full problem */
 64:   PetscLogEvent createMeshEvent;
 65:   PetscBool     showInitial, showSolution, showError;
 66:   /* Domain and mesh definition */
 67:   PetscInt      dim;               /* The topological mesh dimension */
 68:   PetscBool     interpolate;       /* Generate intermediate mesh elements */
 69:   PetscBool     simplex;           /* Use simplices or tensor product cells */
 70:   PetscReal     refinementLimit;   /* The largest allowable cell volume */
 71:   PetscBool     testPartition;     /* Use a fixed partitioning for testing */
 72:   /* Problem definition */
 73:   BCType        bcType;
 74:   PetscErrorCode (**exactFuncs)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);
 75: } AppCtx;

/* Projection callback: single scalar component set to zero (used as the pressure initial guess in main()). */
 77: PetscErrorCode zero_scalar(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
 78: {
 79:   u[0] = 0.0;
 80:   return 0;
 81: }
/* Projection callback: zeroes all spatialDim components (velocity initial guess, and the
   velocity part of the pressure null-space vector in CreatePressureNullSpace()). */
 82: PetscErrorCode zero_vector(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
 83: {
 84:   PetscInt d;
 85:   for (d = 0; d < spatialDim; ++d) u[d] = 0.0;
 86:   return 0;
 87: }

 89: /*
 90:   In 2D we use exact solution:

 92:     u = x^2 + y^2
 93:     v = 2 x^2 - 2xy
 94:     p = x + y - 1
 95:     f_x = f_y = 3

 97:   so that

 99:     -\Delta u + \nabla p + f = <-4, -4> + <1, 1> + <3, 3> = 0
100:     \nabla \cdot u           = 2x - 2x                    = 0
101: */
/* Exact 2D velocity (u, v) from the manufactured solution documented above. */
102: PetscErrorCode quadratic_u_2d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
103: {
104:   u[0] = x[0]*x[0] + x[1]*x[1];
105:   u[1] = 2.0*x[0]*x[0] - 2.0*x[0]*x[1];
106:   return 0;
107: }

/* Exact 2D pressure p = x + y - 1 from the manufactured solution documented above. */
109: PetscErrorCode linear_p_2d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *p, void *ctx)
110: {
111:   *p = x[0] + x[1] - 1.0;
112:   return 0;
113: }
/* Constant pressure p = 1; paired with zero_vector to build the pressure null-space vector. */
114: PetscErrorCode constant_p(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *p, void *ctx)
115: {
116:   *p = 1.0;
117:   return 0;
118: }

/* Pointwise momentum residual f0: constant body force f = 3 in every component,
   matching f_x = f_y (= f_z) = 3 in the manufactured solutions above. */
120: void f0_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
121:           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
122:           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
123:           PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
124: {
125:   PetscInt c;
126:   for (c = 0; c < dim; ++c) f0[c] = 3.0;
127: }

129: /* gradU[comp*dim+d] = {u_x, u_y, v_x, v_y} or {u_x, u_y, u_z, v_x, v_y, v_z, w_x, w_y, w_z}
130:    u[Ncomp]          = {p} */
/* Pointwise momentum flux f1 = grad u - p I.  The symmetric-gradient variant is left
   commented out below; u[Ncomp] is the pressure (fields are concatenated pointwise). */
131: void f1_u(PetscInt dim, PetscInt Nf, PetscInt NfAux,
132:           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
133:           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
134:           PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
135: {
136:   const PetscInt Ncomp = dim;
137:   PetscInt       comp, d;

139:   for (comp = 0; comp < Ncomp; ++comp) {
140:     for (d = 0; d < dim; ++d) {
141:       /* f1[comp*dim+d] = 0.5*(gradU[comp*dim+d] + gradU[d*dim+comp]); */
142:       f1[comp*dim+d] = u_x[comp*dim+d];
143:     }
144:     f1[comp*dim+comp] -= u[Ncomp];
145:   }
146: }

148: /* gradU[comp*dim+d] = {u_x, u_y, v_x, v_y} or {u_x, u_y, u_z, v_x, v_y, v_z, w_x, w_y, w_z} */
/* Pointwise continuity residual f0 = div u (trace of the velocity gradient). */
149: void f0_p(PetscInt dim, PetscInt Nf, PetscInt NfAux,
150:           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
151:           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
152:           PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
153: {
154:   PetscInt d;
155:   for (d = 0, f0[0] = 0.0; d < dim; ++d) f0[0] += u_x[d*dim+d];
156: }

/* Pointwise continuity flux f1 = 0: the pressure equation has no gradient term. */
158: void f1_p(PetscInt dim, PetscInt Nf, PetscInt NfAux,
159:           const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
160:           const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
161:           PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f1[])
162: {
163:   PetscInt d;
164:   for (d = 0; d < dim; ++d) f1[d] = 0.0;
165: }

167: /* < q, \nabla\cdot u >
168:    NcompI = 1, NcompJ = dim */
/* Jacobian block g1 for d(div u)/du: identity entries on the gradient diagonal. */
169: void g1_pu(PetscInt dim, PetscInt Nf, PetscInt NfAux,
170:            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
171:            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
172:            PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g1[])
173: {
174:   PetscInt d;
175:   for (d = 0; d < dim; ++d) g1[d*dim+d] = 1.0; /* \frac{\partial\phi^{u_d}}{\partial x_d} */
176: }

178: /* -< \nabla\cdot v, p >
179:     NcompI = dim, NcompJ = 1 */
/* Jacobian block g2 for the pressure coupling: minus identity on the gradient diagonal. */
180: void g2_up(PetscInt dim, PetscInt Nf, PetscInt NfAux,
181:            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
182:            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
183:            PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g2[])
184: {
185:   PetscInt d;
186:   for (d = 0; d < dim; ++d) g2[d*dim+d] = -1.0; /* \frac{\partial\psi^{u_d}}{\partial x_d} */
187: }

189: /* < \nabla v, \nabla u + {\nabla u}^T >
190:    This just assembles the \nabla u part; the {\nabla u}^T term would contribute the
     corresponding off-diagonal (transposed) entries, which are omitted here */
191: void g3_uu(PetscInt dim, PetscInt Nf, PetscInt NfAux,
192:            const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
193:            const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
194:            PetscReal t, PetscReal u_tShift, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar g3[])
195: {
196:   const PetscInt Ncomp = dim;
197:   PetscInt       compI, d;

199:   for (compI = 0; compI < Ncomp; ++compI) {
200:     for (d = 0; d < dim; ++d) {
201:       g3[((compI*Ncomp+compI)*dim+d)*dim+d] = 1.0;
202:     }
203:   }
204: }

206: /*
207:   In 3D we use exact solution:

209:     u = x^2 + y^2
210:     v = y^2 + z^2
211:     w = x^2 + y^2 - 2(x+y)z
212:     p = x + y + z - 3/2
213:     f_x = f_y = f_z = 3

215:   so that

217:     -\Delta u + \nabla p + f = <-4, -4, -4> + <1, 1, 1> + <3, 3, 3> = 0
218:     \nabla \cdot u           = 2x + 2y - 2(x + y)                   = 0
219: */
/* Exact 3D velocity (u, v, w) from the manufactured solution documented above. */
220: PetscErrorCode quadratic_u_3d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx)
221: {
222:   u[0] = x[0]*x[0] + x[1]*x[1];
223:   u[1] = x[1]*x[1] + x[2]*x[2];
224:   u[2] = x[0]*x[0] + x[1]*x[1] - 2.0*(x[0] + x[1])*x[2];
225:   return 0;
226: }

/* Exact 3D pressure p = x + y + z - 3/2 from the manufactured solution documented above. */
228: PetscErrorCode linear_p_3d(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *p, void *ctx)
229: {
230:   *p = x[0] + x[1] + x[2] - 1.5;
231:   return 0;
232: }

/* Parse command-line options into *options and register the mesh-creation log event.
   NOTE(review): this listing skips original lines 239-241 (the numbering jumps 238 -> 242)
   and shows bare calls / return(0) — the error-checking wrappers were presumably stripped
   by the extraction; verify against the real ex62.c before editing. */
234: PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
235: {
236:   const char    *bcTypes[2]  = {"neumann", "dirichlet"};
237:   const char    *runTypes[2] = {"full", "test"};
238:   PetscInt       bc, run;

242:   options->debug           = 0;
243:   options->runType         = RUN_FULL;
244:   options->dim             = 2;
245:   options->interpolate     = PETSC_FALSE;
246:   options->simplex         = PETSC_TRUE;
247:   options->refinementLimit = 0.0;
248:   options->testPartition   = PETSC_FALSE;
249:   options->bcType          = DIRICHLET;
250:   options->showInitial     = PETSC_FALSE;
251:   options->showSolution    = PETSC_TRUE;
252:   options->showError       = PETSC_FALSE;

254:   PetscOptionsBegin(comm, "", "Stokes Problem Options", "DMPLEX");
255:   PetscOptionsInt("-debug", "The debugging level", "ex62.c", options->debug, &options->debug, NULL);
256:   run  = options->runType;
257:   PetscOptionsEList("-run_type", "The run type", "ex62.c", runTypes, 2, runTypes[options->runType], &run, NULL);

259:   options->runType = (RunType) run;

261:   PetscOptionsInt("-dim", "The topological mesh dimension", "ex62.c", options->dim, &options->dim, NULL);
262:   spatialDim = options->dim;
263:   PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex62.c", options->interpolate, &options->interpolate, NULL);
264:   PetscOptionsBool("-simplex", "Use simplices or tensor product cells", "ex62.c", options->simplex, &options->simplex, NULL);
265:   PetscOptionsReal("-refinement_limit", "The largest allowable cell volume", "ex62.c", options->refinementLimit, &options->refinementLimit, NULL);
266:   PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex62.c", options->testPartition, &options->testPartition, NULL);
267:   bc   = options->bcType;
268:   PetscOptionsEList("-bc_type","Type of boundary condition","ex62.c",bcTypes,2,bcTypes[options->bcType],&bc,NULL);

270:   options->bcType = (BCType) bc;

272:   PetscOptionsBool("-show_initial", "Output the initial guess for verification", "ex62.c", options->showInitial, &options->showInitial, NULL);
273:   PetscOptionsBool("-show_solution", "Output the solution for verification", "ex62.c", options->showSolution, &options->showSolution, NULL);
274:   PetscOptionsBool("-show_error", "Output the error for verification", "ex62.c", options->showError, &options->showError, NULL);
275:   PetscOptionsEnd();

277:   PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);
278:   return(0);
279: }

/* Scatter the global vector v into a local (ghosted) vector and print it for debugging. */
281: PetscErrorCode DMVecViewLocal(DM dm, Vec v)
282: {
283:   Vec            lv;

287:   DMGetLocalVector(dm, &lv);
288:   DMGlobalToLocalBegin(dm, v, INSERT_VALUES, lv);
289:   DMGlobalToLocalEnd(dm, v, INSERT_VALUES, lv);
290:   DMPrintLocalVec(dm, "Local function", 0.0, lv);
291:   DMRestoreLocalVector(dm, &lv);
292:   return(0);
293: }

/* Build the mesh: box mesh (simplex or hex), optional volume-limited refinement
   (simplex only), an optional hard-coded test partition for specific (comm size,
   cell count) combinations, and finally distribution across processes. */
295: PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
296: {
297:   PetscInt       dim             = user->dim;
298:   PetscBool      interpolate     = user->interpolate;
299:   PetscReal      refinementLimit = user->refinementLimit;
300:   const PetscInt cells[3]        = {3, 3, 3};

304:   PetscLogEventBegin(user->createMeshEvent,0,0,0,0);
305:   if (user->simplex) {DMPlexCreateBoxMesh(comm, dim, dim == 2 ? 2 : 1, interpolate, dm);}
306:   else               {DMPlexCreateHexBoxMesh(comm, dim, cells, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, dm);}
307:   {
308:     DM refinedMesh     = NULL;
309:     DM distributedMesh = NULL;

311:     /* Refine mesh using a volume constraint */
312:     DMPlexSetRefinementLimit(*dm, refinementLimit);
313:     if (user->simplex) {DMRefine(*dm, comm, &refinedMesh);}
314:     if (refinedMesh) {
315:       DMDestroy(dm);
316:       *dm  = refinedMesh;
317:     }
318:     /* Setup test partitioning */
319:     if (user->testPartition) {
320:       PetscInt         triSizes_n2[2]       = {4, 4};
321:       PetscInt         triPoints_n2[8]      = {3, 5, 6, 7, 0, 1, 2, 4};
322:       PetscInt         triSizes_n3[3]       = {2, 3, 3};
323:       PetscInt         triPoints_n3[8]      = {3, 5, 1, 6, 7, 0, 2, 4};
324:       PetscInt         triSizes_n5[5]       = {1, 2, 2, 1, 2};
325:       PetscInt         triPoints_n5[8]      = {3, 5, 6, 4, 7, 0, 1, 2};
326:       PetscInt         triSizes_ref_n2[2]   = {8, 8};
327:       PetscInt         triPoints_ref_n2[16] = {1, 5, 6, 7, 10, 11, 14, 15, 0, 2, 3, 4, 8, 9, 12, 13};
328:       PetscInt         triSizes_ref_n3[3]   = {5, 6, 5};
329:       PetscInt         triPoints_ref_n3[16] = {1, 7, 10, 14, 15, 2, 6, 8, 11, 12, 13, 0, 3, 4, 5, 9};
330:       PetscInt         triSizes_ref_n5[5]   = {3, 4, 3, 3, 3};
331:       PetscInt         triPoints_ref_n5[16] = {1, 7, 10, 2, 11, 13, 14, 5, 6, 15, 0, 8, 9, 3, 4, 12};
332:       const PetscInt  *sizes = NULL;
333:       const PetscInt  *points = NULL;
334:       PetscPartitioner part;
335:       PetscInt         cEnd;
336:       PetscMPIInt      rank, size;

338:       MPI_Comm_rank(comm, &rank);
339:       MPI_Comm_size(comm, &size);
340:       DMPlexGetHeightStratum(*dm, 0, NULL, &cEnd);
341:       if (!rank) {
342:         if (dim == 2 && user->simplex && size == 2 && cEnd == 8) {
343:            sizes = triSizes_n2; points = triPoints_n2;
344:         } else if (dim == 2 && user->simplex && size == 3 && cEnd == 8) {
345:           sizes = triSizes_n3; points = triPoints_n3;
346:         } else if (dim == 2 && user->simplex && size == 5 && cEnd == 8) {
347:           sizes = triSizes_n5; points = triPoints_n5;
348:         } else if (dim == 2 && user->simplex && size == 2 && cEnd == 16) {
349:            sizes = triSizes_ref_n2; points = triPoints_ref_n2;
350:         } else if (dim == 2 && user->simplex && size == 3 && cEnd == 16) {
351:           sizes = triSizes_ref_n3; points = triPoints_ref_n3;
352:         } else if (dim == 2 && user->simplex && size == 5 && cEnd == 16) {
353:           sizes = triSizes_ref_n5; points = triPoints_ref_n5;
354:         } else SETERRQ(comm, PETSC_ERR_ARG_WRONG, "No stored partition matching run parameters");
355:       }
356:       DMPlexGetPartitioner(*dm, &part);
357:       PetscPartitionerSetType(part, PETSCPARTITIONERSHELL);
358:       PetscPartitionerShellSetPartition(part, size, sizes, points);
359:     } else {
360:       PetscPartitioner part;

362:       DMPlexGetPartitioner(*dm, &part);
363:       PetscPartitionerSetFromOptions(part);
364:     }
365:     /* Distribute mesh over processes */
366:     DMPlexDistribute(*dm, 0, NULL, &distributedMesh);
367:     if (distributedMesh) {
368:       DMDestroy(dm);
369:       *dm  = distributedMesh;
370:     }
371:   }
372:   DMSetFromOptions(*dm);
373:   DMViewFromOptions(*dm, NULL, "-dm_view");
374:   PetscLogEventEnd(user->createMeshEvent,0,0,0,0);
375:   return(0);
376: }

/* Register the pointwise residual/Jacobian kernels on the PetscDS, pick the exact
   solutions matching user->dim, and add the wall boundary condition (essential for
   Dirichlet, natural for Neumann; the BC label name differs between the two). */
378: PetscErrorCode SetupProblem(PetscDS prob, AppCtx *user)
379: {
380:   const PetscInt id = 1;

384:   PetscDSSetResidual(prob, 0, f0_u, f1_u);
385:   PetscDSSetResidual(prob, 1, f0_p, f1_p);
386:   PetscDSSetJacobian(prob, 0, 0, NULL, NULL,  NULL,  g3_uu);
387:   PetscDSSetJacobian(prob, 0, 1, NULL, NULL,  g2_up, NULL);
388:   PetscDSSetJacobian(prob, 1, 0, NULL, g1_pu, NULL,  NULL);
389:   switch (user->dim) {
390:   case 2:
391:     user->exactFuncs[0] = quadratic_u_2d;
392:     user->exactFuncs[1] = linear_p_2d;
393:     break;
394:   case 3:
395:     user->exactFuncs[0] = quadratic_u_3d;
396:     user->exactFuncs[1] = linear_p_3d;
397:     break;
398:   default:
399:     SETERRQ1(PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Invalid dimension %d", user->dim);
400:   }
401:   PetscDSAddBoundary(prob, user->bcType == DIRICHLET ? DM_BC_ESSENTIAL : DM_BC_NATURAL, "wall", user->bcType == NEUMANN ? "boundary" : "marker", 0, 0, NULL, (void (*)()) user->exactFuncs[0], 1, &id, user);
402:   return(0);
403: }

/* Create the velocity (dim components) and pressure (1 component) finite elements,
   share the velocity quadrature with the pressure element so both fields integrate
   with the same rule, set up the PetscDS, and push it to every coarse DM level. */
405: PetscErrorCode SetupDiscretization(DM dm, AppCtx *user)
406: {
407:   DM              cdm   = dm;
408:   const PetscInt  dim   = user->dim;
409:   PetscFE         fe[2];
410:   PetscQuadrature q;
411:   PetscDS         prob;
412:   PetscErrorCode  ierr;

415:   /* Create finite element */
416:   PetscFECreateDefault(dm, dim, dim, user->simplex, "vel_", PETSC_DEFAULT, &fe[0]);
417:   PetscObjectSetName((PetscObject) fe[0], "velocity");
418:   PetscFEGetQuadrature(fe[0], &q);
419:   PetscFECreateDefault(dm, dim, 1, user->simplex, "pres_", PETSC_DEFAULT, &fe[1]);
420:   PetscFESetQuadrature(fe[1], q);
421:   PetscObjectSetName((PetscObject) fe[1], "pressure");
422:   /* Set discretization and boundary conditions for each mesh */
423:   DMGetDS(dm, &prob);
424:   PetscDSSetDiscretization(prob, 0, (PetscObject) fe[0]);
425:   PetscDSSetDiscretization(prob, 1, (PetscObject) fe[1]);
426:   SetupProblem(prob, user);
427:   while (cdm) {
428:     DMSetDS(cdm, prob);
429:     DMGetCoarseDM(cdm, &cdm);
430:   }
431:   PetscFEDestroy(&fe[0]);
432:   PetscFEDestroy(&fe[1]);
433:   return(0);
434: }

/* Build the constant-pressure null space: project (u = 0, p = 1), normalize, and wrap
   it in a MatNullSpace.  Optionally return a copy of the null vector in *v.  Also
   compose a "nullspace" MatNullSpace on the pressure field object (new-style field
   null spaces, picked up by field-split solvers). */
436: PetscErrorCode CreatePressureNullSpace(DM dm, AppCtx *user, Vec *v, MatNullSpace *nullSpace)
437: {
438:   Vec              vec;
439:   PetscErrorCode (*funcs[2])(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void* ctx) = {zero_vector, constant_p};
440:   PetscErrorCode   ierr;

443:   DMGetGlobalVector(dm, &vec);
444:   DMProjectFunction(dm, 0.0, funcs, NULL, INSERT_ALL_VALUES, vec);
445:   VecNormalize(vec, NULL);
446:   if (user->debug) {
447:     PetscPrintf(PetscObjectComm((PetscObject)dm), "Pressure Null Space\n");
448:     VecView(vec, PETSC_VIEWER_STDOUT_WORLD);
449:   }
450:   MatNullSpaceCreate(PetscObjectComm((PetscObject)dm), PETSC_FALSE, 1, &vec, nullSpace);
451:   if (v) {
452:     DMCreateGlobalVector(dm, v);
453:     VecCopy(vec, *v);
454:   }
455:   DMRestoreGlobalVector(dm, &vec);
456:   /* New style for field null spaces */
457:   {
458:     PetscObject  pressure;
459:     MatNullSpace nullSpacePres;

461:     DMGetField(dm, 1, &pressure);
462:     MatNullSpaceCreate(PetscObjectComm(pressure), PETSC_TRUE, 0, NULL, &nullSpacePres);
463:     PetscObjectCompose(pressure, "nullspace", (PetscObject) nullSpacePres);
464:     MatNullSpaceDestroy(&nullSpacePres);
465:   }
466:   return(0);
467: }

/* Driver: set up mesh/discretization, attach the pressure null space, then either
   solve the full nonlinear problem (RUN_FULL) or run discretization/residual/Jacobian
   consistency checks (RUN_TEST).
   NOTE(review): `ierr` is used at original lines 482 and 583 but its declaration is not
   visible here — this listing skips original lines 480-481 (numbering jumps 479 -> 482),
   where it was presumably declared; confirm against the real ex62.c. */
469: int main(int argc, char **argv)
470: {
471:   SNES           snes;                 /* nonlinear solver */
472:   DM             dm;                   /* problem definition */
473:   Vec            u,r;                  /* solution, residual vectors */
474:   Mat            A,J;                  /* Jacobian matrix */
475:   MatNullSpace   nullSpace;            /* May be necessary for pressure */
476:   AppCtx         user;                 /* user-defined work context */
477:   PetscInt       its;                  /* iterations for convergence */
478:   PetscReal      error         = 0.0;  /* L_2 error in the solution */
479:   PetscReal      ferrors[2];

482:   PetscInitialize(&argc, &argv, NULL,help);if (ierr) return ierr;
483:   ProcessOptions(PETSC_COMM_WORLD, &user);
484:   SNESCreate(PETSC_COMM_WORLD, &snes);
485:   CreateMesh(PETSC_COMM_WORLD, &user, &dm);
486:   SNESSetDM(snes, dm);
487:   DMSetApplicationContext(dm, &user);

489:   PetscMalloc(2 * sizeof(void (*)(const PetscReal[], PetscScalar *, void *)), &user.exactFuncs);
490:   SetupDiscretization(dm, &user);
491:   DMPlexCreateClosureIndex(dm, NULL);

493:   DMCreateGlobalVector(dm, &u);
494:   VecDuplicate(u, &r);

496:   DMCreateMatrix(dm, &J);
497:   A = J;
498:   CreatePressureNullSpace(dm, &user, NULL, &nullSpace);
499:   MatSetNullSpace(A, nullSpace);

501:   DMPlexSetSNESLocalFEM(dm,&user,&user,&user);
502:   SNESSetJacobian(snes, A, J, NULL, NULL);

504:   SNESSetFromOptions(snes);

506:   DMProjectFunction(dm, 0.0, user.exactFuncs, NULL, INSERT_ALL_VALUES, u);
507:   if (user.showInitial) {DMVecViewLocal(dm, u);}
508:   if (user.runType == RUN_FULL) {
509:     PetscErrorCode (*initialGuess[2])(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void* ctx) = {zero_vector, zero_scalar};

511:     DMProjectFunction(dm, 0.0, initialGuess, NULL, INSERT_VALUES, u);
512:     if (user.debug) {
513:       PetscPrintf(PETSC_COMM_WORLD, "Initial guess\n");
514:       VecView(u, PETSC_VIEWER_STDOUT_WORLD);
515:     }
516:     SNESSolve(snes, NULL, u);
517:     SNESGetIterationNumber(snes, &its);
518:     PetscPrintf(PETSC_COMM_WORLD, "Number of SNES iterations = %D\n", its);
519:     DMComputeL2Diff(dm, 0.0, user.exactFuncs, NULL, u, &error);
520:     DMComputeL2FieldDiff(dm, 0.0, user.exactFuncs, NULL, u, ferrors);
521:     PetscPrintf(PETSC_COMM_WORLD, "L_2 Error: %.3g [%.3g, %.3g]\n", error, ferrors[0], ferrors[1]);
522:     if (user.showError) {
523:       Vec r;
524:       DMGetGlobalVector(dm, &r);
525:       DMProjectFunction(dm, 0.0, user.exactFuncs, NULL, INSERT_ALL_VALUES, r);
526:       VecAXPY(r, -1.0, u);
527:       PetscPrintf(PETSC_COMM_WORLD, "Solution Error\n");
528:       VecView(r, PETSC_VIEWER_STDOUT_WORLD);
529:       DMRestoreGlobalVector(dm, &r);
530:     }
531:     if (user.showSolution) {
532:       PetscPrintf(PETSC_COMM_WORLD, "Solution\n");
533:       VecChop(u, 3.0e-9);
534:       VecView(u, PETSC_VIEWER_STDOUT_WORLD);
535:     }
536:   } else {
537:     PetscReal res = 0.0;

539:     /* Check discretization error */
540:     PetscPrintf(PETSC_COMM_WORLD, "Initial guess\n");
541:     VecView(u, PETSC_VIEWER_STDOUT_WORLD);
542:     DMComputeL2Diff(dm, 0.0, user.exactFuncs, NULL, u, &error);
543:     if (error >= 1.0e-11) {PetscPrintf(PETSC_COMM_WORLD, "L_2 Error: %g\n", error);}
544:     else                  {PetscPrintf(PETSC_COMM_WORLD, "L_2 Error: < 1.0e-11\n");}
545:     /* Check residual */
546:     SNESComputeFunction(snes, u, r);
547:     PetscPrintf(PETSC_COMM_WORLD, "Initial Residual\n");
548:     VecChop(r, 1.0e-10);
549:     VecView(r, PETSC_VIEWER_STDOUT_WORLD);
550:     VecNorm(r, NORM_2, &res);
551:     PetscPrintf(PETSC_COMM_WORLD, "L_2 Residual: %g\n", res);
552:     /* Check Jacobian */
553:     {
554:       Vec          b;
555:       PetscBool    isNull;

557:       SNESComputeJacobian(snes, u, A, A);
558:       MatNullSpaceTest(nullSpace, J, &isNull);
559:       VecDuplicate(u, &b);
560:       VecSet(r, 0.0);
561:       SNESComputeFunction(snes, r, b);
562:       MatMult(A, u, r);
563:       VecAXPY(r, 1.0, b);
564:       VecDestroy(&b);
565:       PetscPrintf(PETSC_COMM_WORLD, "Au - b = Au + F(0)\n");
566:       VecChop(r, 1.0e-10);
567:       VecView(r, PETSC_VIEWER_STDOUT_WORLD);
568:       VecNorm(r, NORM_2, &res);
569:       PetscPrintf(PETSC_COMM_WORLD, "Linear L_2 Residual: %g\n", res);
570:     }
571:   }
572:   VecViewFromOptions(u, NULL, "-sol_vec_view");

574:   MatNullSpaceDestroy(&nullSpace);
575:   if (A != J) {MatDestroy(&A);}
576:   MatDestroy(&J);
577:   VecDestroy(&u);
578:   VecDestroy(&r);
579:   SNESDestroy(&snes);
580:   DMDestroy(&dm);
581:   PetscFree(user.exactFuncs);
582:   PetscFinalize();
583:   return ierr;
584: }

586: /*TEST

588:   # 2D serial P1 tests 0-3
589:   test:
590:     suffix: 0
591:     requires: triangle
592:     args: -run_type test -refinement_limit 0.0    -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
593:   test:
594:     suffix: 1
595:     requires: triangle
596:     args: -run_type test -refinement_limit 0.0    -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
597:   test:
598:     suffix: 2
599:     requires: triangle
600:     args: -run_type test -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
601:   test:
602:     suffix: 3
603:     requires: triangle
604:     args: -run_type test -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
605:   # 2D serial P2 tests 4-5
606:   test:
607:     suffix: 4
608:     requires: triangle
609:     args: -run_type test -refinement_limit 0.0    -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
610:   test:
611:     suffix: 5
612:     requires: triangle
613:     args: -run_type test -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
614:   # Parallel tests 6-17
615:   test:
616:     suffix: 6
617:     requires: triangle
618:     nsize: 2
619:     args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
620:   test:
621:     suffix: 7
622:     requires: triangle
623:     nsize: 3
624:     args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
625:   test:
626:     suffix: 8
627:     requires: triangle
628:     nsize: 5
629:     args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
630:   test:
631:     suffix: 9
632:     requires: triangle
633:     nsize: 2
634:     args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
635:   test:
636:     suffix: 10
637:     requires: triangle
638:     nsize: 3
639:     args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
640:   test:
641:     suffix: 11
642:     requires: triangle
643:     nsize: 5
644:     args: -run_type test -refinement_limit 0.0    -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
645:   test:
646:     suffix: 12
647:     requires: triangle
648:     nsize: 2
649:     args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
650:   test:
651:     suffix: 13
652:     requires: triangle
653:     nsize: 3
654:     args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
655:   test:
656:     suffix: 14
657:     requires: triangle
658:     nsize: 5
659:     args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
660:   test:
661:     suffix: 15
662:     requires: triangle
663:     nsize: 2
664:     args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
665:   test:
666:     suffix: 16
667:     requires: triangle
668:     nsize: 3
669:     args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
670:   test:
671:     suffix: 17
672:     requires: triangle
673:     nsize: 5
674:     args: -run_type test -refinement_limit 0.0625 -test_partition -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -dm_plex_print_fem 1
675:   # 3D serial P1 tests 43-46
676:   test:
677:     suffix: 43
678:     requires: ctetgen
679:     args: -run_type test -dim 3 -refinement_limit 0.0    -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
680:   test:
681:     suffix: 44
682:     requires: ctetgen
683:     args: -run_type test -dim 3 -refinement_limit 0.0    -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
684:   test:
685:     suffix: 45
686:     requires: ctetgen
687:     args: -run_type test -dim 3 -refinement_limit 0.0125 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
688:   test:
689:     suffix: 46
690:     requires: ctetgen
691:     args: -run_type test -dim 3 -refinement_limit 0.0125 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -show_initial -dm_plex_print_fem 1
692:   # Full solutions 18-29
693:   test:
694:     suffix: 18
695:     requires: triangle
696:     args: -run_type full -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
697:   test:
698:     suffix: 19
699:     requires: triangle
700:     nsize: 2
701:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
702:   test:
703:     suffix: 20
704:     requires: triangle
705:     nsize: 3
706:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
707:   test:
708:     suffix: 21
709:     requires: triangle
710:     nsize: 5
711:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 0 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
712:   test:
713:     suffix: 22
714:     requires: triangle
715:     args: -run_type full -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
716:   test:
717:     suffix: 23
718:     requires: triangle
719:     nsize: 2
720:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
721:   test:
722:     suffix: 24
723:     requires: triangle
724:     nsize: 3
725:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
726:   test:
727:     suffix: 25
728:     requires: triangle
729:     nsize: 5
730:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 1 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
731:   test:
732:     suffix: 26
733:     requires: triangle
734:     args: -run_type full -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
735:   test:
736:     suffix: 27
737:     requires: triangle
738:     nsize: 2
739:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
740:   test:
741:     suffix: 28
742:     requires: triangle
743:     nsize: 3
744:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
745:   test:
746:     suffix: 29
747:     requires: triangle
748:     nsize: 5
749:     args: -run_type full -petscpartitioner_type simple -refinement_limit 0.0625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view
750:   # Full solutions with quads
751:   #   FULL Schur with LU/Jacobi
752:   test:
753:     suffix: quad_q2q1_full
754:     requires: !single
755:     args: -run_type full -simplex 0 -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -vel_petscspace_poly_tensor -pres_petscspace_order 1 -pres_petscspace_poly_tensor -ksp_type fgmres -ksp_gmres_restart 10 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
756:   test:
757:     suffix: quad_q2p1_full
758:     requires: !single
759:     args: -run_type full -simplex 0 -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -vel_petscspace_poly_tensor -pres_petscspace_order 1 -pres_petscdualspace_lagrange_continuity 0 -ksp_type fgmres -ksp_gmres_restart 10 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
760:   # Stokes preconditioners 30-36
761:   #   Jacobi
762:   test:
763:     suffix: 30
764:     requires: triangle
765:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_gmres_restart 100 -pc_type jacobi -ksp_rtol 1.0e-9 -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
766:   #  Block diagonal \begin{pmatrix} A & 0 \\ 0 & I \end{pmatrix}
767:   test:
768:     suffix: 31
769:     requires: triangle
770:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-4 -pc_type fieldsplit -pc_fieldsplit_type additive -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
771:   #  Block triangular \begin{pmatrix} A & B \\ 0 & I \end{pmatrix}
772:   test:
773:     suffix: 32
774:     requires: triangle
775:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type multiplicative -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
776:   #  Diagonal Schur complement \begin{pmatrix} A & 0 \\ 0 & S \end{pmatrix}
777:   test:
778:     suffix: 33
779:     requires: triangle
780:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type diag -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
781:   #  Upper triangular Schur complement \begin{pmatrix} A & B \\ 0 & S \end{pmatrix}
782:   test:
783:     suffix: 34
784:     requires: triangle
785:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type upper -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
786:   #  Lower triangular Schur complement \begin{pmatrix} A & 0 \\ B^T & S \end{pmatrix}
787:   test:
788:     suffix: 35
789:     requires: triangle
790:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type lower -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
791:   #  Full Schur complement \begin{pmatrix} I & 0 \\ B^T A^{-1} & I \end{pmatrix} \begin{pmatrix} A & 0 \\ 0 & S \end{pmatrix} \begin{pmatrix} I & A^{-1} B \\ 0 & I \end{pmatrix}
792:   test:
793:     suffix: 36
794:     requires: triangle
795:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
796:   #  SIMPLE \begin{pmatrix} I & 0 \\ B^T A^{-1} & I \end{pmatrix} \begin{pmatrix} A & 0 \\ 0 & B^T diag(A)^{-1} B \end{pmatrix} \begin{pmatrix} I & diag(A)^{-1} B \\ 0 & I \end{pmatrix}
797:   test:
798:     suffix: pc_simple
799:     requires: triangle
800:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -fieldsplit_pressure_inner_ksp_type preonly -fieldsplit_pressure_inner_pc_type jacobi -fieldsplit_pressure_upper_ksp_type preonly -fieldsplit_pressure_upper_pc_type jacobi -snes_error_if_not_converged -ksp_error_if_not_converged -snes_view -show_solution 0
801:   #  SIMPLEC \begin{pmatrix} I & 0 \\ B^T A^{-1} & I \end{pmatrix} \begin{pmatrix} A & 0 \\ 0 & B^T rowsum(A)^{-1} B \end{pmatrix} \begin{pmatrix} I & rowsum(A)^{-1} B \\ 0 & I \end{pmatrix}
802:   test:
803:     suffix: pc_simplec
804:     requires: triangle
805:     args: -run_type full -refinement_limit 0.00625 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -ksp_type fgmres -ksp_max_it 5 -ksp_gmres_restart 100 -ksp_rtol 1.0e-9 -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_factorization_type full -fieldsplit_pressure_ksp_rtol 1e-10 -fieldsplit_velocity_ksp_type gmres -fieldsplit_velocity_pc_type lu -fieldsplit_pressure_pc_type jacobi -fieldsplit_pressure_inner_ksp_type preonly -fieldsplit_pressure_inner_pc_type jacobi -fieldsplit_pressure_inner_pc_jacobi_type rowsum -fieldsplit_pressure_upper_ksp_type preonly -fieldsplit_pressure_upper_pc_type jacobi -fieldsplit_pressure_upper_pc_jacobi_type rowsum -snes_converged_reason -ksp_converged_reason -snes_view -show_solution 0
806:   # FETI-DP solvers
807:   test:
808:     suffix: fetidp_2d_tri
809:     requires: triangle suitesparse
810:     nsize: 5
811:     args: -run_type full -dm_refine 2 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type fetidp -ksp_rtol 1.0e-8 -ksp_fetidp_saddlepoint -fetidp_ksp_type cg -fetidp_p_ksp_max_it 1 -fetidp_p_ksp_type richardson -fetidp_p_ksp_richardson_scale 200 -fetidp_p_pc_type none -ksp_fetidp_saddlepoint_flip 1 -fetidp_bddc_pc_bddc_vertex_size 2 -fetidp_bddc_pc_bddc_dirichlet_pc_factor_mat_solver_package umfpack -fetidp_bddc_pc_bddc_neumann_pc_factor_mat_solver_package umfpack -petscpartitioner_type simple
812:   test:
813:     suffix: fetidp_3d_tet
814:     requires: ctetgen mumps suitesparse
815:     nsize: 5
816:     args: -run_type full -dm_refine 2 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type fetidp -ksp_rtol 1.0e-8 -ksp_fetidp_saddlepoint -fetidp_ksp_type cg -fetidp_p_ksp_max_it 1 -fetidp_p_ksp_type richardson -fetidp_p_ksp_richardson_scale 1000 -fetidp_p_pc_type none -ksp_fetidp_saddlepoint_flip 1 -fetidp_bddc_pc_bddc_vertex_size 3 -fetidp_bddc_pc_bddc_use_deluxe_scaling -fetidp_bddc_pc_bddc_benign_trick -fetidp_bddc_pc_bddc_deluxe_singlemat -dim 3 -fetidp_pc_discrete_harmonic -fetidp_harmonic_pc_factor_mat_solver_package cholmod -fetidp_harmonic_pc_type cholesky -fetidp_bddelta_pc_factor_mat_solver_package umfpack
817:   test:
818:     suffix: fetidp_2d_quad
819:     requires: suitesparse
820:     filter: grep -v "CG or CGNE: variant" | sed -e "s/BDDC: Graph max count: 9223372036854775807/BDDC: Graph max count: 2147483647/g"
821:     nsize: 5
822:     args: -run_type full -dm_refine 2 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type fetidp -ksp_rtol 1.0e-8 -ksp_fetidp_saddlepoint -fetidp_ksp_type cg -fetidp_p_ksp_max_it 1 -fetidp_p_ksp_type richardson -fetidp_p_ksp_richardson_scale 200 -fetidp_p_pc_type none -ksp_fetidp_saddlepoint_flip 1 -fetidp_bddc_pc_bddc_vertex_size 2 -fetidp_bddc_pc_bddc_dirichlet_pc_factor_mat_solver_package umfpack -fetidp_bddc_pc_bddc_neumann_pc_factor_mat_solver_package umfpack -simplex 0 -pres_petscspace_poly_tensor -simplex 0 -vel_petscspace_poly_tensor -petscpartitioner_type simple
823:   test:
824:     suffix: fetidp_3d_hex
825:     requires: mumps suitesparse
826:     nsize: 5
827:     args: -run_type full -dm_refine 2 -bc_type dirichlet -interpolate 1 -vel_petscspace_order 2 -pres_petscspace_order 1 -snes_view -snes_error_if_not_converged -show_solution 0 -dm_mat_type is -ksp_type fetidp -ksp_rtol 1.0e-8 -ksp_fetidp_saddlepoint -fetidp_ksp_type cg -fetidp_p_ksp_max_it 1 -fetidp_p_ksp_type richardson -fetidp_p_ksp_richardson_scale 20000 -fetidp_p_pc_type none -ksp_fetidp_saddlepoint_flip 1 -fetidp_bddc_pc_bddc_vertex_size 3 -fetidp_bddc_pc_bddc_use_deluxe_scaling -fetidp_bddc_pc_bddc_benign_trick -fetidp_bddc_pc_bddc_deluxe_singlemat -dim 3 -simplex 0 -pres_petscspace_poly_tensor -simplex 0 -vel_petscspace_poly_tensor -fetidp_pc_discrete_harmonic -fetidp_harmonic_pc_factor_mat_solver_package cholmod -fetidp_harmonic_pc_type cholesky -fetidp_bddelta_pc_factor_mat_solver_package umfpack -petscpartitioner_type simple

829: TEST*/