Actual source code: wb.c

#include <petscdmda.h>
#include <petsc/private/pcmgimpl.h>
#include <petsc/private/hashmapi.h>

typedef struct {
  PCExoticType type;
  Mat          P;           /* the constructed interpolation matrix */
  PetscBool    directSolve; /* use direct LU factorization to construct interpolation */
  KSP          ksp;
} PC_Exotic;

const char *const PCExoticTypes[] = {"face", "wirebasket", "PCExoticType", "PC_Exotic", NULL};

/*
      DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket-based coarse space

*/
static PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
{
  PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
  PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[26], *globals, Ng, *IIint, *IIsurf;
  Mat                    Xint, Xsurf, Xint_tmp;
  IS                     isint, issurf, is, row, col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
  MatFactorInfo          info;
  PetscScalar           *xsurf, *xint;
  const PetscScalar     *rxint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar tmp;
#endif
  PetscHMapI ht = NULL;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
  PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
  PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
  PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;
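
  /*
     After this, istart/jstart/kstart are flags rather than corner indices: -1 when the process
     has a neighbor on that low side (so counts such as m - istart below include one extra plane
     of grid points, letting adjacent subdomains share the interface plane), 0 when the patch
     touches the physical boundary.
  */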

  /*
    the columns of P are the interpolation of each coarse grid point (one for each vertex, edge,
    and face of the subdomain, 26 in all) to all the local degrees of freedom (this includes the
    vertices, edges and faces).

    Xint is the subset of the interpolation into the interior

    Xface is the interpolation onto the faces but not into the interior

    Xsurf is the interpolation onto the vertices and edges (the wirebasket); in the code below the
    Xface rows are stored together with the wirebasket rows in the single matrix Xsurf
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  N     = (m - istart) * (n - jstart) * (p - kstart);
  Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
  Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
  Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
  Nsurf = Nface + Nwire;
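  /*
     A quick check of the counts: if, say, m - istart = n - jstart = p - kstart = 4 then
     N = 64, Nint = 2 * 2 * 2 = 8, Nface = 2 * (4 + 4 + 4) = 24, Nwire = 4 * (2 + 2 + 2) + 8 = 32,
     and Nsurf = 24 + 32 = 56 = N - Nint, so every local point is either interior or surface.
  */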
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 26, NULL, &Xint));
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 26, NULL, &Xsurf));
  PetscCall(MatDenseGetArray(Xsurf, &xsurf));

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
  PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
  PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");

  cnt = 0;

  xsurf[cnt++] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + Nsurf] = 1;
  xsurf[cnt++ + 2 * Nsurf] = 1;

  for (j = 1; j < n - 1 - jstart; j++) {
    xsurf[cnt++ + 3 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
    xsurf[cnt++ + 5 * Nsurf] = 1;
  }

  xsurf[cnt++ + 6 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 7 * Nsurf] = 1;
  xsurf[cnt++ + 8 * Nsurf] = 1;

  for (k = 1; k < p - 1 - kstart; k++) {
    xsurf[cnt++ + 9 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 10 * Nsurf] = 1;
    xsurf[cnt++ + 11 * Nsurf] = 1;

    for (j = 1; j < n - 1 - jstart; j++) {
      xsurf[cnt++ + 12 * Nsurf] = 1;
      /* in this row only the two x-direction face points are surface points; the interior nodes lie between them and are not rows of Xsurf */
      xsurf[cnt++ + 13 * Nsurf] = 1;
    }

    xsurf[cnt++ + 14 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 15 * Nsurf] = 1;
    xsurf[cnt++ + 16 * Nsurf] = 1;
  }

  xsurf[cnt++ + 17 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 18 * Nsurf] = 1;
  xsurf[cnt++ + 19 * Nsurf] = 1;

  for (j = 1; j < n - 1 - jstart; j++) {
    xsurf[cnt++ + 20 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 21 * Nsurf] = 1;
    xsurf[cnt++ + 22 * Nsurf] = 1;
  }

  xsurf[cnt++ + 23 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 24 * Nsurf] = 1;
  xsurf[cnt++ + 25 * Nsurf] = 1;
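
  /*
     The 26 columns just filled visit the surface points in lexicographic order (matching the
     ordering of Isurf below): columns 0-8 are the bottom plane (vertices 0, 2, 6, 8; edges
     1, 3, 5, 7; face 4), columns 9-16 the middle section (vertical edges 9, 11, 14, 16; side
     faces 10, 12, 13, 15), and columns 17-25 mirror the bottom plane on the top, giving
     8 vertices + 12 edges + 6 faces = 26 coarse degrees of freedom.
  */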

  /* interpolations only sum to 1 when using direct solver */
#if defined(PETSC_USE_DEBUG_foo)
  for (i = 0; i < Nsurf; i++) {
    tmp = 0.0;
    for (j = 0; j < 26; j++) tmp += xsurf[i + j * Nsurf];
    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
#endif
  PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
  /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/

  /*
       II are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, Isurf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as Iint and Isurf except they are in the global numbering
  */
#define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
  PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
  PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
  for (k = 0; k < p - kstart; k++) {
    for (j = 0; j < n - jstart; j++) {
      for (i = 0; i < m - istart; i++) {
        II[c++] = i + j * mwidth + k * mwidth * nwidth;

        if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
          IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
          Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        } else {
          IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
          Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        }
      }
    }
  }
#undef Endpoint
  PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
  PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
  PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
  PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
  PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
  PetscCall(PetscFree3(II, Iint, Isurf));

  PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
  A = *Aholder;
  PetscCall(PetscFree(Aholder));

  PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
  PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
  PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));

  /*
     Solve for the interpolation onto the interior, Xint = -inv(Aii)*Ais*Xsurf, so each
     column of Xint is the discrete harmonic extension of the corresponding surface values
  */
  PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
  PetscCall(MatScale(Xint_tmp, -1.0));
  if (exotic->directSolve) {
    PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
    PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
    PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
    PetscCall(MatDestroy(&iAii));
  } else {
    Vec          b, x;
    PetscScalar *xint_tmp;

    PetscCall(MatDenseGetArray(Xint, &xint));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
    PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
    PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
    for (i = 0; i < 26; i++) {
      PetscCall(VecPlaceArray(x, xint + i * Nint));
      PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
      PetscCall(KSPSolve(exotic->ksp, b, x));
      PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
      PetscCall(VecResetArray(x));
      PetscCall(VecResetArray(b));
    }
    PetscCall(MatDenseRestoreArray(Xint, &xint));
    PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(MatDestroy(&Xint_tmp));

#if defined(PETSC_USE_DEBUG_foo)
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  for (i = 0; i < Nint; i++) {
    tmp = 0.0;
    for (j = 0; j < 26; j++) tmp += rxint[i + j * Nint];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
#endif

  /*         total vertices             total faces                                  total edges */
  Ntotal = (mp + 1) * (np + 1) * (pp + 1) + mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1) + mp * (np + 1) * (pp + 1) + np * (mp + 1) * (pp + 1) + pp * (mp + 1) * (np + 1);
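
  /*
     For example, a 2 x 2 x 2 process grid (mp = np = pp = 2) gives
     Ntotal = 27 + 36 + 54 = 117 coarse grid points (27 vertices, 36 faces, 54 edges).
  */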

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;

  gl[cnt++] = 0;
  {
    gl[cnt++] = 1;
  }
  gl[cnt++] = m - istart - 1;
  {
    gl[cnt++] = mwidth;
    {
      gl[cnt++] = mwidth + 1;
    }
    gl[cnt++] = mwidth + m - istart - 1;
  }
  gl[cnt++] = mwidth * (n - jstart - 1);
  {
    gl[cnt++] = mwidth * (n - jstart - 1) + 1;
  }
  gl[cnt++] = mwidth * (n - jstart - 1) + m - istart - 1;
  {
    gl[cnt++] = mwidth * nwidth;
    {
      gl[cnt++] = mwidth * nwidth + 1;
    }
    gl[cnt++] = mwidth * nwidth + m - istart - 1;
    {
      gl[cnt++] = mwidth * nwidth + mwidth; /* the two x-direction face points; the interior nodes lie between them */
      gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
    }
    gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1);
    {
      gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
    }
    gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + m - istart - 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1);
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + m - istart - 1;
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth;
    {
      gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
    }
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + m - istart - 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1);
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + m - istart - 1;
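
  /*
     gl[] now holds, in the same order as the 26 columns of Xsurf, the ghosted local index of the
     first grid point of each vertex, edge, and face piece of the subdomain surface; the bare
     braces above group the entries the same way the Xsurf columns were filled.
  */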

  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  PetscCall(ISLocalToGlobalMappingApply(ltg, 26, gl, gl));
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  PetscCall(PetscMalloc1(26 * mp * np * pp, &globals));
  PetscCallMPI(MPI_Allgather(gl, 26, MPIU_INT, globals, 26, MPIU_INT, PetscObjectComm((PetscObject)da)));

  /*
     Number the coarse grid points from 0 to Ntotal - 1. The numbers are stored in the hash map
     1-based (and the keys offset by 1 in both the Put and the Get below) so that the default 0
     returned by PetscHMapIGetWithDefault() for a missing key cannot collide with a real number.
  */
  PetscCall(PetscHMapICreateWithSize(Ntotal / 3, &ht));
  for (i = 0, cnt = 0; i < 26 * mp * np * pp; i++) {
    PetscHashIter it      = 0;
    PetscBool     missing = PETSC_TRUE;

    PetscCall(PetscHMapIPut(ht, globals[i] + 1, &it, &missing));
    if (missing) {
      ++cnt;
      PetscCall(PetscHMapIIterSet(ht, it, cnt));
    }
  }
  PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number of coarse grid points %" PetscInt_FMT, cnt, Ntotal);
  PetscCall(PetscFree(globals));
  for (i = 0; i < 26; i++) {
    PetscCall(PetscHMapIGetWithDefault(ht, gl[i] + 1, 0, gl + i));
    --(gl[i]);
  }
  PetscCall(PetscHMapIDestroy(&ht));
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint + Nsurf, NULL, P));
  } else {
    PetscCall(MatZeroEntries(*P));
  }
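  /* the dense arrays of Xint and Xsurf are stored by column (one column per coarse grid point), so tell MatSetValues() the values are column oriented */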
  PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  PetscCall(MatSetValues(*P, Nint, IIint, 26, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
  PetscCall(MatSetValues(*P, Nsurf, IIsurf, 26, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree2(IIint, IIsurf));

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec          x, y;
    PetscScalar *yy;
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(*P, x, y));
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
  }
#endif

  PetscCall(MatDestroy(&Aii));
  PetscCall(MatDestroy(&Ais));
  PetscCall(MatDestroy(&Asi));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isint));
  PetscCall(ISDestroy(&issurf));
  PetscCall(MatDestroy(&Xint));
  PetscCall(MatDestroy(&Xsurf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
      DMDAGetFaceInterpolation - Gets the interpolation for a face-based coarse space

*/
static PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
{
  PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
  PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[6], *globals, Ng, *IIint, *IIsurf;
  Mat                    Xint, Xsurf, Xint_tmp;
  IS                     isint, issurf, is, row, col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
  MatFactorInfo          info;
  PetscScalar           *xsurf, *xint;
  const PetscScalar     *rxint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar tmp;
#endif
  PetscHMapI ht;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
  PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
  PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
  PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse grid point (one for each face of the
    subdomain, 6 in all) to all the local degrees of freedom (this includes the vertices, edges
    and faces).

    Xint is the subset of the interpolation into the interior

    Xsurf is the interpolation onto the surface; the face-based coarse space ignores the vertex
    and edge values (see PCExoticSetType())
                                      Xint
    Symbolically one could write P = (    ) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  N     = (m - istart) * (n - jstart) * (p - kstart);
  Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
  Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
  Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
  Nsurf = Nface + Nwire;
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 6, NULL, &Xint));
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 6, NULL, &Xsurf));
  PetscCall(MatDenseGetArray(Xsurf, &xsurf));

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
  PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
  PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");

  cnt = 0;
  for (j = 1; j < n - 1 - jstart; j++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 0 * Nsurf] = 1;
  }

  for (k = 1; k < p - 1 - kstart; k++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 1 * Nsurf] = 1;
    for (j = 1; j < n - 1 - jstart; j++) {
      xsurf[cnt++ + 2 * Nsurf] = 1;
      /* the two x-direction face points; the interior nodes lie between them */
      xsurf[cnt++ + 3 * Nsurf] = 1;
    }
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
  }
  for (j = 1; j < n - 1 - jstart; j++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 5 * Nsurf] = 1;
  }
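
  /*
     The six columns of Xsurf correspond to the six faces of the subdomain in the order
     z-min (column 0), y-min (1), x-min (2), x-max (3), y-max (4), z-max (5); only the
     interior points of each face receive a 1.
  */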

#if defined(PETSC_USE_DEBUG_foo)
  for (i = 0; i < Nsurf; i++) {
    tmp = 0.0;
    for (j = 0; j < 6; j++) tmp += xsurf[i + j * Nsurf];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
#endif
  PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
  /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/

  /*
       II are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, Isurf for the surface values
            (This is just for the part of the global matrix obtained with MatCreateSubMatrix(), it
             is NOT the local DMDA ordering.)
       IIint and IIsurf are the same as Iint and Isurf except they are in the global numbering
  */
#define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
  PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
  PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
  for (k = 0; k < p - kstart; k++) {
    for (j = 0; j < n - jstart; j++) {
      for (i = 0; i < m - istart; i++) {
        II[c++] = i + j * mwidth + k * mwidth * nwidth;

        if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
          IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
          Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        } else {
          IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
          Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        }
      }
    }
  }
#undef Endpoint
  PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
  PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
  PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
  PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
  PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
  PetscCall(PetscFree3(II, Iint, Isurf));

  PetscCall(ISSort(is));
  PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
  A = *Aholder;
  PetscCall(PetscFree(Aholder));

  PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
  PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
  PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));

  /*
     Solve for the interpolation onto the interior, Xint = -inv(Aii)*Ais*Xsurf, so each
     column of Xint is the discrete harmonic extension of the corresponding surface values
  */
  PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
  PetscCall(MatScale(Xint_tmp, -1.0));

  if (exotic->directSolve) {
    PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
    PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
    PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
    PetscCall(MatDestroy(&iAii));
  } else {
    Vec          b, x;
    PetscScalar *xint_tmp;

    PetscCall(MatDenseGetArray(Xint, &xint));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
    PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
    PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
    for (i = 0; i < 6; i++) {
      PetscCall(VecPlaceArray(x, xint + i * Nint));
      PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
      PetscCall(KSPSolve(exotic->ksp, b, x));
      PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
      PetscCall(VecResetArray(x));
      PetscCall(VecResetArray(b));
    }
    PetscCall(MatDenseRestoreArray(Xint, &xint));
    PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(MatDestroy(&Xint_tmp));

#if defined(PETSC_USE_DEBUG_foo)
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  for (i = 0; i < Nint; i++) {
    tmp = 0.0;
    for (j = 0; j < 6; j++) tmp += rxint[i + j * Nint];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
#endif

  /*         total faces    */
  Ntotal = mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1);
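
  /* for example, a 2 x 2 x 2 process grid gives Ntotal = 12 + 12 + 12 = 36 coarse grid points, one per face */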

  /*
      For each face on process (in the same ordering as used above) determine its local number including ghost points
  */
  cnt = 0;
  {
    gl[cnt++] = mwidth + 1;
  }
  {
    {
      gl[cnt++] = mwidth * nwidth + 1;
    }
    {
      gl[cnt++] = mwidth * nwidth + mwidth; /* the two x-direction face points; the interior nodes lie between them */
      gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
    }
    {
      gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
    }
  }
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
  }

  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  PetscCall(ISLocalToGlobalMappingApply(ltg, 6, gl, gl));
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  PetscCall(PetscMalloc1(6 * mp * np * pp, &globals));
  PetscCallMPI(MPI_Allgather(gl, 6, MPIU_INT, globals, 6, MPIU_INT, PetscObjectComm((PetscObject)da)));

  /* Number the coarse grid points from 0 to Ntotal - 1 (same 1-based hash map offsets as in the wirebasket routine above) */
  PetscCall(PetscHMapICreateWithSize(Ntotal / 3, &ht));
  for (i = 0, cnt = 0; i < 6 * mp * np * pp; i++) {
    PetscHashIter it      = 0;
    PetscBool     missing = PETSC_TRUE;

    PetscCall(PetscHMapIPut(ht, globals[i] + 1, &it, &missing));
    if (missing) {
      ++cnt;
      PetscCall(PetscHMapIIterSet(ht, it, cnt));
    }
  }
  PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number of coarse grid points %" PetscInt_FMT, cnt, Ntotal);
  PetscCall(PetscFree(globals));
  for (i = 0; i < 6; i++) {
    PetscCall(PetscHMapIGetWithDefault(ht, gl[i] + 1, 0, gl + i));
    --(gl[i]);
  }
  PetscCall(PetscHMapIDestroy(&ht));
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint, NULL, P));
  } else {
    PetscCall(MatZeroEntries(*P));
  }
  PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  PetscCall(MatSetValues(*P, Nint, IIint, 6, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
  PetscCall(MatSetValues(*P, Nsurf, IIsurf, 6, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree2(IIint, IIsurf));

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec          x, y;
    PetscScalar *yy;
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(*P, x, y));
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
  }
#endif

  PetscCall(MatDestroy(&Aii));
  PetscCall(MatDestroy(&Ais));
  PetscCall(MatDestroy(&Asi));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isint));
  PetscCall(ISDestroy(&issurf));
  PetscCall(MatDestroy(&Xint));
  PetscCall(MatDestroy(&Xsurf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCExoticSetType - Sets the type of coarse grid interpolation to use

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - either `PC_EXOTIC_FACE` or `PC_EXOTIC_WIREBASKET` (defaults to face)
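
  Options Database Key:
. -pc_exotic_type <face,wirebasket> - sets the type of coarse grid interpolation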

  Notes:
  The face-based interpolation has 1 degree of freedom per face and ignores the
  edge and vertex values completely in the coarse problem. For any seven-point
  stencil the interpolation of a constant on all faces into the interior is that constant.

  The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
  per face. A constant on the subdomain boundary is interpolated as that constant
  in the interior of the domain.

  The coarse grid matrix is obtained via the Galerkin computation A_c = R A R^T, hence
  if A is nonsingular then A_c is also nonsingular.

  Both interpolations are suitable only for scalar problems.

  Level: intermediate

.seealso: [](ch_ksp), `PCEXOTIC`, `PCExoticType`
@*/
PetscErrorCode PCExoticSetType(PC pc, PCExoticType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCExoticSetType_C", (PC, PCExoticType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCExoticSetType_Exotic(PC pc, PCExoticType type)
{
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  ctx->type = type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetUp_Exotic(PC pc)
{
  Mat        A;
  PC_MG     *mg    = (PC_MG *)pc->data;
  PC_Exotic *ex    = (PC_Exotic *)mg->innerctx;
  MatReuse   reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;

  PetscFunctionBegin;
  PetscCheck(pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Need to call PCSetDM() before using this PC");
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCheck(ex->type == PC_EXOTIC_FACE || ex->type == PC_EXOTIC_WIREBASKET, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unknown exotic coarse space %d", ex->type);
  if (ex->type == PC_EXOTIC_FACE) {
    PetscCall(DMDAGetFaceInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  } else /* if (ex->type == PC_EXOTIC_WIREBASKET) */ {
    PetscCall(DMDAGetWireBasketInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  }
  PetscCall(PCMGSetInterpolation(pc, 1, ex->P));
  /* if the PC has an attached DM we must remove it, otherwise PCMG will use it to compute incorrectly sized vectors and interpolations */
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Exotic(PC pc)
{
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(MatDestroy(&ctx->P));
  PetscCall(KSPDestroy(&ctx->ksp));
  PetscCall(PetscFree(ctx));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", NULL));
  PetscCall(PCDestroy_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_Exotic(PC pc, PetscViewer viewer)
{
  PC_MG     *mg = (PC_MG *)pc->data;
  PetscBool  iascii;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Exotic type = %s\n", PCExoticTypes[ctx->type]));
    if (ctx->directSolve) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "      Using direct solver to construct interpolation\n"));
    } else {
      PetscViewer sviewer;
      PetscMPIInt rank;

      PetscCall(PetscViewerASCIIPrintf(viewer, "      Using iterative solver to construct interpolation\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPushTab(viewer)); /* should not need to push this twice? */
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
      if (rank == 0) PetscCall(KSPView(ctx->ksp, sviewer));
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
  }
  PetscCall(PCView_MG(pc, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Exotic(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscBool    flg;
  PC_MG       *mg = (PC_MG *)pc->data;
  PCExoticType mgctype;
  PC_Exotic   *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Exotic coarse space options");
  PetscCall(PetscOptionsEnum("-pc_exotic_type", "face or wirebasket", "PCExoticSetType", PCExoticTypes, (PetscEnum)ctx->type, (PetscEnum *)&mgctype, &flg));
  if (flg) PetscCall(PCExoticSetType(pc, mgctype));
  PetscCall(PetscOptionsBool("-pc_exotic_direct_solver", "use direct solver to construct interpolation", "None", ctx->directSolve, &ctx->directSolve, NULL));
  if (!ctx->directSolve) {
    if (!ctx->ksp) {
      const char *prefix;
      PetscCall(KSPCreate(PETSC_COMM_SELF, &ctx->ksp));
      PetscCall(KSPSetNestLevel(ctx->ksp, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(ctx->ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ctx->ksp, (PetscObject)pc, 1));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(ctx->ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(ctx->ksp, "exotic_"));
    }
    PetscCall(KSPSetFromOptions(ctx->ksp));
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
     PCEXOTIC - Two-level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces

     This uses the `PCMG` infrastructure restricted to two levels and the face-based and wirebasket-based coarse
   grid spaces.

   Level: advanced

   Notes:
   Must be used with `DMDA`

   By default this uses `KSPGMRES` as the fine grid smoother, so the outer solver should be `KSPFGMRES`, or the smoother should be changed to not use `KSPGMRES`

   These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz {cite}`bramble1989construction`.
   They were generalized slightly in "Domain Decomposition Method for Linear Elasticity", Ph. D. thesis, Barry Smith, {cite}`smith1990domain`.
   They were then explored in great detail in Dryja, Smith, Widlund {cite}`dryja1994schwarz`. These were developed in the context of iterative substructuring preconditioners.

   They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
   They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example, {cite}`dohrmann2008extending`, {cite}`dohrmann2008family`,
   {cite}`dohrmann2008domain`, {cite}`dohrmann2009overlapping`.

   The usual `PCMG` options are supported, such as -mg_levels_pc_type <type>, -mg_coarse_pc_type <type>, -mg_fine_pc_type <type> and -pc_mg_type <type>
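
   For example, -pc_type exotic -pc_exotic_type wirebasket selects the wirebasket coarse space; the `KSP` used to
   construct the interpolation (when -pc_exotic_direct_solver is not given) is controlled with the exotic_ options
   prefix, for example -exotic_ksp_type cg -exotic_pc_type jacobi.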

.seealso: [](ch_ksp), `PCMG`, `PCSetDM()`, `PCExoticType`, `PCExoticSetType()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
{
  PC_Exotic *ex;
  PC_MG     *mg;

  PetscFunctionBegin;
  /* if the type was previously PCMG we must destroy it manually because the call to PCSetType(pc,PCMG) below will not */
  PetscTryTypeMethod(pc, destroy);
  pc->data = NULL;

  PetscCall(PetscFree(((PetscObject)pc)->type_name));
  ((PetscObject)pc)->type_name = NULL;

  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 2, NULL));
  PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
  PetscCall(PetscNew(&ex));
  ex->type     = PC_EXOTIC_FACE;
  mg           = (PC_MG *)pc->data;
  mg->innerctx = ex;

  pc->ops->setfromoptions = PCSetFromOptions_Exotic;
  pc->ops->view           = PCView_Exotic;
  pc->ops->destroy        = PCDestroy_Exotic;
  pc->ops->setup          = PCSetUp_Exotic;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", PCExoticSetType_Exotic));
  PetscFunctionReturn(PETSC_SUCCESS);
}