Actual source code: fetidp.c
petsc-3.8.0 2017-09-26
1: #include <petsc/private/kspimpl.h> /*I <petscksp.h> I*/
2: #include <../src/ksp/pc/impls/bddc/bddc.h>
3: #include <../src/ksp/pc/impls/bddc/bddcprivate.h>
5: static PetscBool cited = PETSC_FALSE;
6: static PetscBool cited2 = PETSC_FALSE;
7: static const char citation[] =
8: "@article{ZampiniPCBDDC,\n"
9: "author = {Stefano Zampini},\n"
10: "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETS}c},\n"
11: "journal = {SIAM Journal on Scientific Computing},\n"
12: "volume = {38},\n"
13: "number = {5},\n"
14: "pages = {S282-S306},\n"
15: "year = {2016},\n"
16: "doi = {10.1137/15M1025785},\n"
17: "URL = {http://dx.doi.org/10.1137/15M1025785},\n"
18: "eprint = {http://dx.doi.org/10.1137/15M1025785}\n"
19: "}\n"
20: "@article{ZampiniDualPrimal,\n"
21: "author = {Stefano Zampini},\n"
22: "title = {{D}ual-{P}rimal methods for the cardiac {B}idomain model},\n"
"journal = {Mathematical Models and Methods in Applied Sciences},\n"
 23: "volume = {24},\n"
24: "number = {04},\n"
25: "pages = {667-696},\n"
26: "year = {2014},\n"
27: "doi = {10.1142/S0218202513500632},\n"
28: "URL = {http://www.worldscientific.com/doi/abs/10.1142/S0218202513500632},\n"
29: "eprint = {http://www.worldscientific.com/doi/pdf/10.1142/S0218202513500632}\n"
30: "}\n";
31: static const char citation2[] =
32: "@article{li2013nonoverlapping,\n"
33: "title={A nonoverlapping domain decomposition method for incompressible Stokes equations with continuous pressures},\n"
34: "author={Li, Jing and Tu, Xuemin},\n"
35: "journal={SIAM Journal on Numerical Analysis},\n"
36: "volume={51},\n"
37: "number={2},\n"
38: "pages={1235--1253},\n"
39: "year={2013},\n"
40: "publisher={Society for Industrial and Applied Mathematics}\n"
41: "}\n";
43: /*
44: This file implements the FETI-DP method in PETSc as part of KSP.
45: */
46: typedef struct {
47: KSP parentksp;
48: } KSP_FETIDPMon;
50: typedef struct {
51: KSP innerksp; /* the KSP for the Lagrange multipliers */
52: PC innerbddc; /* the inner BDDC object */
53: PetscBool fully_redundant; /* true for using a fully redundant set of multipliers */
54: PetscBool userbddc; /* true if the user provided the PCBDDC object */
55: PetscBool saddlepoint; /* support for saddle point problems */
56: IS pP; /* index set for pressure variables */
57: Vec rhs_flip; /* see KSPFETIDPSetUpOperators */
58: KSP_FETIDPMon *monctx; /* monitor context, used to pass user defined monitors
59: in the physical space */
60: PetscObjectState matstate; /* these are needed just in the saddle point case */
61: PetscObjectState matnnzstate; /* where we are going to use MatZeroRows on pmat */
62: PetscBool statechanged;
63: PetscBool check;
64: } KSP_FETIDP;
66: static PetscErrorCode KSPFETIDPSetPressureOperator_FETIDP(KSP ksp, Mat P)
67: {
68: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
72: if (P) fetidp->saddlepoint = PETSC_TRUE;
73: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_PPmat",(PetscObject)P);
74: return(0);
75: }
77: /*@
 78:    KSPFETIDPSetPressureOperator - Sets the operator used to set up the pressure preconditioner for saddle point FETI-DP.
80: Collective on KSP
82: Input Parameters:
83: + ksp - the FETI-DP Krylov solver
84: - P - the linear operator to be preconditioned, usually the mass matrix.
86: Level: advanced
 88:    Notes: The operator can be passed in either a) the monolithic global ordering, b) the pressure-only global ordering,
 89:    or c) the interface pressure ordering (if -ksp_fetidp_pressure_all false).
 90:    In cases b) and c), the pressure ordering of the dofs needs to satisfy
 91:           pid_1 < pid_2  iff  gid_1 < gid_2
 92:    where pid_1 and pid_2 are two different pressure dof numbers and gid_1 and gid_2 are the corresponding
 93:    ids in the monolithic global ordering.
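    A minimal usage sketch for case a) (error checking omitted; Mp denotes a user-assembled pressure mass
    matrix and is purely illustrative):
.vb
   KSPSetType(ksp,KSPFETIDP);
   KSPSetOperators(ksp,A,A);                /* A is the MATIS matrix of the saddle point problem */
   KSPFETIDPSetPressureOperator(ksp,Mp);    /* Mp is used to build the pressure preconditioner */
.ve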
95: .seealso: MATIS, PCBDDC, KSPFETIDPGetInnerBDDC, KSPFETIDPGetInnerKSP, KSPSetOperators
96: @*/
97: PetscErrorCode KSPFETIDPSetPressureOperator(KSP ksp, Mat P)
98: {
104: PetscTryMethod(ksp,"KSPFETIDPSetPressureOperator_C",(KSP,Mat),(ksp,P));
105: return(0);
106: }
108: static PetscErrorCode KSPFETIDPGetInnerKSP_FETIDP(KSP ksp, KSP* innerksp)
109: {
110: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
113: *innerksp = fetidp->innerksp;
114: return(0);
115: }
117: /*@
118: KSPFETIDPGetInnerKSP - Gets the KSP object for the Lagrange multipliers
 120:    Input Parameter:
 121: .  ksp - the FETI-DP KSP
 122:    Output Parameter:
 123: .  innerksp - the KSP for the Lagrange multipliers
124: Level: advanced
126: Notes:
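    The returned KSP is the solver that actually iterates on the Lagrange multipliers.
    A minimal sketch of a possible programmatic customization (the type chosen here is just an example;
    the same effect is obtained on the command line with -fetidp_ksp_type gmres):
.vb
   KSP innerksp;
   KSPFETIDPGetInnerKSP(ksp,&innerksp);
   KSPSetType(innerksp,KSPGMRES);
.ve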
128: .seealso: MATIS, PCBDDC, KSPFETIDPSetInnerBDDC, KSPFETIDPGetInnerBDDC
129: @*/
130: PetscErrorCode KSPFETIDPGetInnerKSP(KSP ksp, KSP* innerksp)
131: {
137: PetscUseMethod(ksp,"KSPFETIDPGetInnerKSP_C",(KSP,KSP*),(ksp,innerksp));
138: return(0);
139: }
141: static PetscErrorCode KSPFETIDPGetInnerBDDC_FETIDP(KSP ksp, PC* pc)
142: {
143: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
146: *pc = fetidp->innerbddc;
147: return(0);
148: }
150: /*@
 151:    KSPFETIDPGetInnerBDDC - Gets the BDDC preconditioner used to set up the FETI-DP matrix for the Lagrange multipliers
 153:    Input Parameter:
 154: .  ksp - the FETI-DP Krylov solver
 155:    Output Parameter:
 156: .  pc - the BDDC preconditioner
157: Level: advanced
159: Notes:
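    A minimal sketch showing how the inner BDDC can be customized programmatically before the setup phase
    (nfields and fields are illustrative arguments for the dofs splitting; the same object can also be
    customized from the command line via the -fetidp_bddc_ prefix):
.vb
   PC pcbddc;
   KSPFETIDPGetInnerBDDC(ksp,&pcbddc);
   PCBDDCSetDofsSplitting(pcbddc,nfields,fields);
.ve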
161: .seealso: MATIS, PCBDDC, KSPFETIDPSetInnerBDDC, KSPFETIDPGetInnerKSP
162: @*/
163: PetscErrorCode KSPFETIDPGetInnerBDDC(KSP ksp, PC* pc)
164: {
170: PetscUseMethod(ksp,"KSPFETIDPGetInnerBDDC_C",(KSP,PC*),(ksp,pc));
171: return(0);
172: }
174: static PetscErrorCode KSPFETIDPSetInnerBDDC_FETIDP(KSP ksp, PC pc)
175: {
176: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
180: PetscObjectReference((PetscObject)pc);
181: PCDestroy(&fetidp->innerbddc);
182: fetidp->innerbddc = pc;
183: fetidp->userbddc = PETSC_TRUE;
184: return(0);
185: }
187: /*@
 188:    KSPFETIDPSetInnerBDDC - Sets the BDDC preconditioner used to set up the FETI-DP matrix for the Lagrange multipliers
190: Collective on KSP
192: Input Parameters:
193: + ksp - the FETI-DP Krylov solver
194: - pc - the BDDC preconditioner
196: Level: advanced
198: Notes:
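    A minimal sketch, assuming the user wants to pass a pre-configured BDDC (error checking omitted;
    dirichletIS is an illustrative IS of essential dofs):
.vb
   PC pcbddc;
   PCCreate(PetscObjectComm((PetscObject)ksp),&pcbddc);
   PCSetType(pcbddc,PCBDDC);
   PCBDDCSetDirichletBoundaries(pcbddc,dirichletIS);
   KSPFETIDPSetInnerBDDC(ksp,pcbddc);
   PCDestroy(&pcbddc);                      /* the KSP keeps its own reference to the PC */
.ve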
200: .seealso: MATIS, PCBDDC, KSPFETIDPGetInnerBDDC, KSPFETIDPGetInnerKSP
201: @*/
202: PetscErrorCode KSPFETIDPSetInnerBDDC(KSP ksp, PC pc)
203: {
204: PetscBool isbddc;
210: PetscObjectTypeCompare((PetscObject)pc,PCBDDC,&isbddc);
 211:   if (!isbddc) SETERRQ(PetscObjectComm((PetscObject)ksp),PETSC_ERR_ARG_WRONG,"KSPFETIDPSetInnerBDDC needs a PCBDDC preconditioner");
212: PetscTryMethod(ksp,"KSPFETIDPSetInnerBDDC_C",(KSP,PC),(ksp,pc));
213: return(0);
214: }
216: static PetscErrorCode KSPBuildSolution_FETIDP(KSP ksp,Vec v,Vec *V)
217: {
218: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
219: Mat F;
220: Vec Xl;
224: KSPGetOperators(fetidp->innerksp,&F,NULL);
225: KSPBuildSolution(fetidp->innerksp,NULL,&Xl);
226: if (v) {
227: PCBDDCMatFETIDPGetSolution(F,Xl,v);
228: *V = v;
229: } else {
230: PCBDDCMatFETIDPGetSolution(F,Xl,*V);
231: }
232: return(0);
233: }
235: static PetscErrorCode KSPMonitor_FETIDP(KSP ksp,PetscInt it,PetscReal rnorm,void* ctx)
236: {
237: KSP_FETIDPMon *monctx = (KSP_FETIDPMon*)ctx;
241: KSPMonitor(monctx->parentksp,it,rnorm);
242: return(0);
243: }
245: static PetscErrorCode KSPComputeEigenvalues_FETIDP(KSP ksp,PetscInt nmax,PetscReal *r,PetscReal *c,PetscInt *neig)
246: {
247: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
251: KSPComputeEigenvalues(fetidp->innerksp,nmax,r,c,neig);
252: return(0);
253: }
255: static PetscErrorCode KSPComputeExtremeSingularValues_FETIDP(KSP ksp,PetscReal *emax,PetscReal *emin)
256: {
257: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
261: KSPComputeExtremeSingularValues(fetidp->innerksp,emax,emin);
262: return(0);
263: }
265: static PetscErrorCode KSPFETIDPCheckOperators(KSP ksp, PetscViewer viewer)
266: {
267: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
268: PC_BDDC *pcbddc = (PC_BDDC*)fetidp->innerbddc->data;
269: PC_IS *pcis = (PC_IS*)fetidp->innerbddc->data;
270: Mat_IS *matis = (Mat_IS*)fetidp->innerbddc->pmat->data;
271: Mat F;
272: FETIDPMat_ctx fetidpmat_ctx;
273: Vec test_vec,test_vec_p = NULL,fetidp_global;
274: IS dirdofs,isvert;
275: MPI_Comm comm = PetscObjectComm((PetscObject)ksp);
276: PetscScalar sval,*array;
277: PetscReal val,rval;
278: const PetscInt *vertex_indices;
279: PetscInt i,n_vertices;
280: PetscBool isascii;
285: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
286: if (!isascii) SETERRQ(comm,PETSC_ERR_SUP,"Unsupported viewer");
287: PetscViewerASCIIPrintf(viewer,"----------FETI-DP MAT --------------\n");
288: PetscViewerASCIIAddTab(viewer,2);
289: KSPGetOperators(fetidp->innerksp,&F,NULL);
290: PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
291: MatView(F,viewer);
292: PetscViewerPopFormat(viewer);
293: PetscViewerASCIISubtractTab(viewer,2);
294: MatShellGetContext(F,(void**)&fetidpmat_ctx);
295: PetscViewerASCIIPrintf(viewer,"----------FETI-DP TESTS--------------\n");
296: PetscViewerASCIIPrintf(viewer,"All tests should return zero!\n");
297: PetscViewerASCIIPrintf(viewer,"FETIDP MAT context in the ");
298: if (fetidp->fully_redundant) {
 299:     PetscViewerASCIIPrintf(viewer,"fully redundant case for Lagrange multipliers.\n");
 300:   } else {
 301:     PetscViewerASCIIPrintf(viewer,"non-fully redundant case for Lagrange multipliers.\n");
302: }
303: PetscViewerFlush(viewer);
305: /* Get Vertices used to define the BDDC */
306: PCBDDCGraphGetCandidatesIS(pcbddc->mat_graph,NULL,NULL,NULL,NULL,&isvert);
307: ISGetLocalSize(isvert,&n_vertices);
308: ISGetIndices(isvert,&vertex_indices);
310: /******************************************************************/
311: /* TEST A/B: Test numbering of global fetidp dofs */
312: /******************************************************************/
313: MatCreateVecs(F,&fetidp_global,NULL);
314: VecDuplicate(fetidpmat_ctx->lambda_local,&test_vec);
315: VecSet(fetidp_global,1.0);
316: VecSet(test_vec,1.);
317: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidp_global,fetidpmat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
318: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidp_global,fetidpmat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
319: if (fetidpmat_ctx->l2g_p) {
320: VecDuplicate(fetidpmat_ctx->vP,&test_vec_p);
321: VecSet(test_vec_p,1.);
322: VecScatterBegin(fetidpmat_ctx->l2g_p,fetidp_global,fetidpmat_ctx->vP,INSERT_VALUES,SCATTER_REVERSE);
323: VecScatterEnd(fetidpmat_ctx->l2g_p,fetidp_global,fetidpmat_ctx->vP,INSERT_VALUES,SCATTER_REVERSE);
324: }
325: VecAXPY(test_vec,-1.0,fetidpmat_ctx->lambda_local);
326: VecNorm(test_vec,NORM_INFINITY,&val);
327: VecDestroy(&test_vec);
328: MPI_Reduce(&val,&rval,1,MPIU_REAL,MPI_MAX,0,comm);
329: PetscViewerASCIIPrintf(viewer,"A: CHECK glob to loc: % 1.14e\n",rval);
331: if (fetidpmat_ctx->l2g_p) {
332: VecAXPY(test_vec_p,-1.0,fetidpmat_ctx->vP);
333: VecNorm(test_vec_p,NORM_INFINITY,&val);
334: MPI_Reduce(&val,&rval,1,MPIU_REAL,MPI_MAX,0,comm);
335: PetscViewerASCIIPrintf(viewer,"A: CHECK glob to loc (p): % 1.14e\n",rval);
336: }
338: if (fetidp->fully_redundant) {
339: VecSet(fetidp_global,0.0);
340: VecSet(fetidpmat_ctx->lambda_local,0.5);
341: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
342: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
343: VecSum(fetidp_global,&sval);
344: val = PetscRealPart(sval)-fetidpmat_ctx->n_lambda;
345: MPI_Reduce(&val,&rval,1,MPIU_REAL,MPI_MAX,0,comm);
346: PetscViewerASCIIPrintf(viewer,"B: CHECK loc to glob: % 1.14e\n",rval);
347: }
349: if (fetidpmat_ctx->l2g_p) {
350: VecSet(pcis->vec1_N,1.0);
351: VecSet(pcis->vec1_global,0.0);
352: VecScatterBegin(matis->rctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);
353: VecScatterEnd(matis->rctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);
355: VecSet(fetidp_global,0.0);
356: VecSet(fetidpmat_ctx->vP,-1.0);
357: VecScatterBegin(fetidpmat_ctx->l2g_p,fetidpmat_ctx->vP,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
358: VecScatterEnd(fetidpmat_ctx->l2g_p,fetidpmat_ctx->vP,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
359: VecScatterBegin(fetidpmat_ctx->g2g_p,fetidp_global,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);
360: VecScatterEnd(fetidpmat_ctx->g2g_p,fetidp_global,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);
361: VecScatterBegin(fetidpmat_ctx->g2g_p,pcis->vec1_global,fetidp_global,INSERT_VALUES,SCATTER_FORWARD);
362: VecScatterEnd(fetidpmat_ctx->g2g_p,pcis->vec1_global,fetidp_global,INSERT_VALUES,SCATTER_FORWARD);
363: VecSum(fetidp_global,&sval);
364: val = PetscRealPart(sval);
365: MPI_Reduce(&val,&rval,1,MPIU_REAL,MPI_MAX,0,comm);
366: PetscViewerASCIIPrintf(viewer,"B: CHECK loc to glob (p): % 1.14e\n",rval);
367: }
369: /******************************************************************/
370: /* TEST C: It should hold B_delta*w=0, w\in\widehat{W} */
371: /* This is the meaning of the B matrix */
372: /******************************************************************/
374: VecSetRandom(pcis->vec1_N,NULL);
375: VecSet(pcis->vec1_global,0.0);
376: VecScatterBegin(matis->rctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);
377: VecScatterEnd(matis->rctx,pcis->vec1_N,pcis->vec1_global,ADD_VALUES,SCATTER_REVERSE);
378: VecScatterBegin(matis->rctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);
379: VecScatterEnd(matis->rctx,pcis->vec1_global,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD);
380: VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
381: VecScatterEnd(pcis->N_to_B,pcis->vec1_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
382: /* Action of B_delta */
383: MatMult(fetidpmat_ctx->B_delta,pcis->vec1_B,fetidpmat_ctx->lambda_local);
384: VecSet(fetidp_global,0.0);
385: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
386: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
387: VecNorm(fetidp_global,NORM_INFINITY,&val);
388: PetscViewerASCIIPrintf(viewer,"C: CHECK infty norm of B_delta*w (w continuous): % 1.14e\n",val);
390: /******************************************************************/
391: /* TEST D: It should hold E_Dw = w - P_Dw w\in\widetilde{W} */
392: /* E_D = R_D^TR */
393: /* P_D = B_{D,delta}^T B_{delta} */
394: /* eq.44 Mandel Tezaur and Dohrmann 2005 */
395: /******************************************************************/
397: /* compute a random vector in \widetilde{W} */
398: VecSetRandom(pcis->vec1_N,NULL);
399: /* set zero at vertices and essential dofs */
400: VecGetArray(pcis->vec1_N,&array);
401: for (i=0;i<n_vertices;i++) array[vertex_indices[i]] = 0.0;
402: PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph,&dirdofs);
403: if (dirdofs) {
404: const PetscInt *idxs;
405: PetscInt ndir;
407: ISGetLocalSize(dirdofs,&ndir);
408: ISGetIndices(dirdofs,&idxs);
409: for (i=0;i<ndir;i++) array[idxs[i]] = 0.0;
410: ISRestoreIndices(dirdofs,&idxs);
411: }
412: VecRestoreArray(pcis->vec1_N,&array);
413: /* store w for final comparison */
414: VecDuplicate(pcis->vec1_B,&test_vec);
415: VecScatterBegin(pcis->N_to_B,pcis->vec1_N,test_vec,INSERT_VALUES,SCATTER_FORWARD);
416: VecScatterEnd(pcis->N_to_B,pcis->vec1_N,test_vec,INSERT_VALUES,SCATTER_FORWARD);
418: /* Jump operator P_D : results stored in pcis->vec1_B */
419: /* Action of B_delta */
420: MatMult(fetidpmat_ctx->B_delta,test_vec,fetidpmat_ctx->lambda_local);
421: VecSet(fetidp_global,0.0);
422: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
423: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
424: /* Action of B_Ddelta^T */
425: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidp_global,fetidpmat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
426: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidp_global,fetidpmat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
427: MatMultTranspose(fetidpmat_ctx->B_Ddelta,fetidpmat_ctx->lambda_local,pcis->vec1_B);
429: /* Average operator E_D : results stored in pcis->vec2_B */
430: PCBDDCScalingExtension(fetidpmat_ctx->pc,test_vec,pcis->vec1_global);
431: VecScatterBegin(pcis->global_to_B,pcis->vec1_global,pcis->vec2_B,INSERT_VALUES,SCATTER_FORWARD);
432: VecScatterEnd(pcis->global_to_B,pcis->vec1_global,pcis->vec2_B,INSERT_VALUES,SCATTER_FORWARD);
434: /* test E_D=I-P_D */
435: VecAXPY(pcis->vec1_B,1.0,pcis->vec2_B);
436: VecAXPY(pcis->vec1_B,-1.0,test_vec);
437: VecNorm(pcis->vec1_B,NORM_INFINITY,&val);
438: VecDestroy(&test_vec);
439: MPI_Reduce(&val,&rval,1,MPIU_REAL,MPI_MAX,0,comm);
 440:   PetscViewerASCIIPrintf(viewer,"D: CHECK infty norm of E_D + P_D - I: % 1.14e\n",rval);
442: /******************************************************************/
443: /* TEST E: It should hold R_D^TP_Dw=0 w\in\widetilde{W} */
444: /* eq.48 Mandel Tezaur and Dohrmann 2005 */
445: /******************************************************************/
447: VecSetRandom(pcis->vec1_N,NULL);
448: /* set zero at vertices and essential dofs */
449: VecGetArray(pcis->vec1_N,&array);
450: for (i=0;i<n_vertices;i++) array[vertex_indices[i]] = 0.0;
451: if (dirdofs) {
452: const PetscInt *idxs;
453: PetscInt ndir;
455: ISGetLocalSize(dirdofs,&ndir);
456: ISGetIndices(dirdofs,&idxs);
457: for (i=0;i<ndir;i++) array[idxs[i]] = 0.0;
458: ISRestoreIndices(dirdofs,&idxs);
459: }
460: VecRestoreArray(pcis->vec1_N,&array);
462: /* Jump operator P_D : results stored in pcis->vec1_B */
464: VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
465: VecScatterEnd(pcis->N_to_B,pcis->vec1_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);
466: /* Action of B_delta */
467: MatMult(fetidpmat_ctx->B_delta,pcis->vec1_B,fetidpmat_ctx->lambda_local);
468: VecSet(fetidp_global,0.0);
469: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
470: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,fetidp_global,ADD_VALUES,SCATTER_FORWARD);
471: /* Action of B_Ddelta^T */
472: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidp_global,fetidpmat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
473: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidp_global,fetidpmat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
474: MatMultTranspose(fetidpmat_ctx->B_Ddelta,fetidpmat_ctx->lambda_local,pcis->vec1_B);
475: /* scaling */
476: PCBDDCScalingExtension(fetidpmat_ctx->pc,pcis->vec1_B,pcis->vec1_global);
477: VecNorm(pcis->vec1_global,NORM_INFINITY,&val);
478: PetscViewerASCIIPrintf(viewer,"E: CHECK infty norm of R^T_D P_D: % 1.14e\n",val);
480: if (!fetidp->fully_redundant) {
481: /******************************************************************/
 482:     /* TEST F: It should hold B_{delta}B^T_{D,delta}=I             */
483: /* Corollary thm 14 Mandel Tezaur and Dohrmann 2005 */
484: /******************************************************************/
485: VecDuplicate(fetidp_global,&test_vec);
486: VecSetRandom(fetidp_global,NULL);
487: if (fetidpmat_ctx->l2g_p) {
488: VecSet(fetidpmat_ctx->vP,0.);
489: VecScatterBegin(fetidpmat_ctx->l2g_p,fetidpmat_ctx->vP,fetidp_global,INSERT_VALUES,SCATTER_FORWARD);
490: VecScatterEnd(fetidpmat_ctx->l2g_p,fetidpmat_ctx->vP,fetidp_global,INSERT_VALUES,SCATTER_FORWARD);
491: }
492: /* Action of B_Ddelta^T */
493: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidp_global,fetidpmat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
494: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidp_global,fetidpmat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE);
495: MatMultTranspose(fetidpmat_ctx->B_Ddelta,fetidpmat_ctx->lambda_local,pcis->vec1_B);
496: /* Action of B_delta */
497: MatMult(fetidpmat_ctx->B_delta,pcis->vec1_B,fetidpmat_ctx->lambda_local);
498: VecSet(test_vec,0.0);
499: VecScatterBegin(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,test_vec,ADD_VALUES,SCATTER_FORWARD);
500: VecScatterEnd(fetidpmat_ctx->l2g_lambda,fetidpmat_ctx->lambda_local,test_vec,ADD_VALUES,SCATTER_FORWARD);
501: VecAXPY(fetidp_global,-1.,test_vec);
502: VecNorm(fetidp_global,NORM_INFINITY,&val);
 503:     PetscViewerASCIIPrintf(viewer,"F: CHECK infty norm of B_delta*B^T_{D,delta} - I: % 1.14e\n",val);
504: VecDestroy(&test_vec);
505: }
506: PetscViewerASCIIPrintf(viewer,"-------------------------------------\n");
507: PetscViewerFlush(viewer);
508: VecDestroy(&test_vec_p);
509: ISDestroy(&dirdofs);
510: VecDestroy(&fetidp_global);
511: ISRestoreIndices(isvert,&vertex_indices);
512: PCBDDCGraphRestoreCandidatesIS(pcbddc->mat_graph,NULL,NULL,NULL,NULL,&isvert);
513: return(0);
514: }
516: static PetscErrorCode KSPFETIDPSetUpOperators(KSP ksp)
517: {
518: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
519: PC_BDDC *pcbddc = (PC_BDDC*)fetidp->innerbddc->data;
520: Mat A,Ap;
521: PetscInt fid = -1;
522: PetscBool ismatis,pisz,allp;
523: PetscBool flip; /* Usually, Stokes is written (B = -\int_\Omega \nabla \cdot u q)
524: | A B'| | v | = | f |
525: | B 0 | | p | = | g |
526: If -ksp_fetidp_saddlepoint_flip is true, the code assumes it is written as
527: | A B'| | v | = | f |
528: |-B 0 | | p | = |-g |
529: */
530: PetscObjectState matstate, matnnzstate;
531: PetscErrorCode ierr;
534: pisz = PETSC_TRUE;
535: flip = PETSC_FALSE;
536: allp = PETSC_FALSE;
537: PetscOptionsBegin(PetscObjectComm((PetscObject)ksp),((PetscObject)ksp)->prefix,"FETI-DP options","PC");
538: PetscOptionsInt("-ksp_fetidp_pressure_field","Field id for pressures for saddle-point problems",NULL,fid,&fid,NULL);
539: PetscOptionsBool("-ksp_fetidp_pressure_iszero","Zero pressure block",NULL,pisz,&pisz,NULL);
540: PetscOptionsBool("-ksp_fetidp_pressure_all","Use the whole pressure set instead of just that at the interface",NULL,allp,&allp,NULL);
541: PetscOptionsBool("-ksp_fetidp_saddlepoint_flip","Flip the sign of the pressure-velocity (lower-left) block",NULL,flip,&flip,NULL);
542: PetscOptionsEnd();
544: fetidp->saddlepoint = (fid >= 0 ? PETSC_TRUE : fetidp->saddlepoint);
546: KSPGetOperators(ksp,&A,&Ap);
547: PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
548: if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)ksp),PETSC_ERR_USER,"Amat should be of type MATIS");
549: PetscObjectTypeCompare((PetscObject)Ap,MATIS,&ismatis);
550: if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)ksp),PETSC_ERR_USER,"Pmat should be of type MATIS");
552: /* Quiet return if the matrix states are unchanged.
553: Needed only for the saddle point case since it uses MatZeroRows
554: on a matrix that may not have changed */
555: PetscObjectStateGet((PetscObject)A,&matstate);
556: MatGetNonzeroState(A,&matnnzstate);
557: if (matstate == fetidp->matstate && matnnzstate == fetidp->matnnzstate) return(0);
558: fetidp->matstate = matstate;
559: fetidp->matnnzstate = matnnzstate;
560: fetidp->statechanged = fetidp->saddlepoint;
562: /* see if MATIS has same fields attached */
563: if (!pcbddc->n_ISForDofsLocal && !pcbddc->n_ISForDofs) {
564: PetscContainer c;
566: PetscObjectQuery((PetscObject)A,"_convert_nest_lfields",(PetscObject*)&c);
567: if (c) {
568: MatISLocalFields lf;
569: PetscContainerGetPointer(c,(void**)&lf);
570: PCBDDCSetDofsSplittingLocal(fetidp->innerbddc,lf->nr,lf->rf);
571: }
572: }
574: if (!fetidp->saddlepoint) {
575: PCSetOperators(fetidp->innerbddc,A,Ap);
576: } else {
577: Mat nA,lA;
578: Mat PPmat;
579: IS pP;
580: PetscInt totP;
582: MatISGetLocalMat(A,&lA);
583: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_lA",(PetscObject)lA);
585: pP = fetidp->pP;
586: if (!pP) { /* first time, need to compute pressure dofs */
587: PC_IS *pcis = (PC_IS*)fetidp->innerbddc->data;
588: Mat_IS *matis = (Mat_IS*)(A->data);
589: ISLocalToGlobalMapping l2g;
590: IS lP = NULL,II,pII,lPall,Pall,is1,is2;
591: const PetscInt *idxs;
592: PetscInt nl,ni,*widxs;
593: PetscInt i,j,n_neigh,*neigh,*n_shared,**shared,*count;
594: PetscInt rst,ren,n;
595: PetscBool ploc;
597: MatGetLocalSize(A,&nl,NULL);
598: MatGetOwnershipRange(A,&rst,&ren);
599: MatGetLocalSize(lA,&n,NULL);
600: MatGetLocalToGlobalMapping(A,&l2g,NULL);
602: if (!pcis->is_I_local) { /* need to compute interior dofs */
603: PetscCalloc1(n,&count);
604: ISLocalToGlobalMappingGetInfo(l2g,&n_neigh,&neigh,&n_shared,&shared);
605: for (i=1;i<n_neigh;i++)
606: for (j=0;j<n_shared[i];j++)
607: count[shared[i][j]] += 1;
608: for (i=0,j=0;i<n;i++) if (!count[i]) count[j++] = i;
609: ISLocalToGlobalMappingRestoreInfo(l2g,&n_neigh,&neigh,&n_shared,&shared);
610: ISCreateGeneral(PETSC_COMM_SELF,j,count,PETSC_OWN_POINTER,&II);
611: } else {
612: PetscObjectReference((PetscObject)pcis->is_I_local);
613: II = pcis->is_I_local;
614: }
616: /* interior dofs in layout */
617: MatISSetUpSF(A);
618: PetscMemzero(matis->sf_leafdata,n*sizeof(PetscInt));
619: PetscMemzero(matis->sf_rootdata,nl*sizeof(PetscInt));
620: ISGetLocalSize(II,&ni);
621: ISGetIndices(II,&idxs);
622: for (i=0;i<ni;i++) matis->sf_leafdata[idxs[i]] = 1;
623: ISRestoreIndices(II,&idxs);
624: PetscSFReduceBegin(matis->sf,MPIU_INT,matis->sf_leafdata,matis->sf_rootdata,MPIU_REPLACE);
625: PetscSFReduceEnd(matis->sf,MPIU_INT,matis->sf_leafdata,matis->sf_rootdata,MPIU_REPLACE);
626: PetscMalloc1(PetscMax(nl,n),&widxs);
627: for (i=0,ni=0;i<nl;i++) if (matis->sf_rootdata[i]) widxs[ni++] = i+rst;
628: ISCreateGeneral(PetscObjectComm((PetscObject)ksp),ni,widxs,PETSC_COPY_VALUES,&pII);
630: /* pressure dofs */
631: Pall = NULL;
632: lPall = NULL;
633: ploc = PETSC_FALSE;
634: if (fid >= 0) {
635: if (pcbddc->n_ISForDofsLocal) {
636: PetscInt np;
638: if (fid >= pcbddc->n_ISForDofsLocal) SETERRQ2(PetscObjectComm((PetscObject)ksp),PETSC_ERR_USER,"Invalid field id for pressure %D, max %D",fid,pcbddc->n_ISForDofsLocal);
639: /* need a sequential IS */
640: ISGetLocalSize(pcbddc->ISForDofsLocal[fid],&np);
641: ISGetIndices(pcbddc->ISForDofsLocal[fid],&idxs);
642: ISCreateGeneral(PETSC_COMM_SELF,np,idxs,PETSC_COPY_VALUES,&lPall);
643: ISRestoreIndices(pcbddc->ISForDofsLocal[fid],&idxs);
644: ploc = PETSC_TRUE;
645: } else if (pcbddc->n_ISForDofs) {
646: if (fid >= pcbddc->n_ISForDofs) SETERRQ2(PetscObjectComm((PetscObject)ksp),PETSC_ERR_USER,"Invalid field id for pressure %D, max %D",fid,pcbddc->n_ISForDofs);
647: PetscObjectReference((PetscObject)pcbddc->ISForDofs[fid]);
648: Pall = pcbddc->ISForDofs[fid];
649: } else SETERRQ(PetscObjectComm((PetscObject)ksp),PETSC_ERR_USER,"Missing fields! Use PCBDDCSetDofsSplitting/Local");
650: } else { /* fallback to zero pressure block */
651: IS list[2];
653: MatFindZeroDiagonals(A,&list[1]);
654: ISComplement(list[1],rst,ren,&list[0]);
655: PCBDDCSetDofsSplitting(fetidp->innerbddc,2,list);
656: ISDestroy(&list[0]);
657: Pall = list[1];
658: }
659: /* if the user requested the entire pressure,
660: remove the interior pressure dofs from II (or pII) */
661: if (allp) {
662: if (ploc) {
663: IS nII;
664: ISDifference(II,lPall,&nII);
665: ISDestroy(&II);
666: II = nII;
667: } else {
668: IS nII;
669: ISDifference(pII,Pall,&nII);
670: ISDestroy(&pII);
671: pII = nII;
672: }
673: }
674: if (ploc) {
675: ISDifference(lPall,II,&lP);
676: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_lP",(PetscObject)lP);
677: } else {
678: ISDifference(Pall,pII,&pP);
679: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_pP",(PetscObject)pP);
680: /* need all local pressure dofs */
681: PetscMemzero(matis->sf_leafdata,n*sizeof(PetscInt));
682: PetscMemzero(matis->sf_rootdata,nl*sizeof(PetscInt));
683: ISGetLocalSize(Pall,&ni);
684: ISGetIndices(Pall,&idxs);
685: for (i=0;i<ni;i++) matis->sf_rootdata[idxs[i]-rst] = 1;
686: ISRestoreIndices(Pall,&idxs);
687: PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
688: PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
689: for (i=0,ni=0;i<n;i++) if (matis->sf_leafdata[i]) widxs[ni++] = i;
690: ISCreateGeneral(PETSC_COMM_SELF,ni,widxs,PETSC_COPY_VALUES,&lPall);
691: }
693: if (!Pall) {
694: PetscMemzero(matis->sf_leafdata,n*sizeof(PetscInt));
695: PetscMemzero(matis->sf_rootdata,nl*sizeof(PetscInt));
696: ISGetLocalSize(lPall,&ni);
697: ISGetIndices(lPall,&idxs);
698: for (i=0;i<ni;i++) matis->sf_leafdata[idxs[i]] = 1;
699: ISRestoreIndices(lPall,&idxs);
700: PetscSFReduceBegin(matis->sf,MPIU_INT,matis->sf_leafdata,matis->sf_rootdata,MPIU_REPLACE);
701: PetscSFReduceEnd(matis->sf,MPIU_INT,matis->sf_leafdata,matis->sf_rootdata,MPIU_REPLACE);
702: for (i=0,ni=0;i<nl;i++) if (matis->sf_rootdata[i]) widxs[ni++] = i+rst;
703: ISCreateGeneral(PetscObjectComm((PetscObject)ksp),ni,widxs,PETSC_COPY_VALUES,&Pall);
704: }
705: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_aP",(PetscObject)Pall);
707: if (flip) {
708: PetscInt npl;
709: ISGetLocalSize(Pall,&npl);
710: ISGetIndices(Pall,&idxs);
711: MatCreateVecs(A,NULL,&fetidp->rhs_flip);
712: VecSet(fetidp->rhs_flip,1.);
713: VecSetOption(fetidp->rhs_flip,VEC_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);
714: for (i=0;i<npl;i++) {
715: VecSetValue(fetidp->rhs_flip,idxs[i],-1.,INSERT_VALUES);
716: }
717: VecAssemblyBegin(fetidp->rhs_flip);
718: VecAssemblyEnd(fetidp->rhs_flip);
719: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_flip",(PetscObject)fetidp->rhs_flip);
720: ISRestoreIndices(Pall,&idxs);
721: }
722: ISDestroy(&Pall);
723: ISDestroy(&pII);
725: /* local selected pressures in subdomain-wise and global ordering */
726: PetscMemzero(matis->sf_leafdata,n*sizeof(PetscInt));
727: PetscMemzero(matis->sf_rootdata,nl*sizeof(PetscInt));
728: if (!ploc) {
729: if (!pP) SETERRQ(PetscObjectComm((PetscObject)ksp),PETSC_ERR_PLIB,"Missing parallel pressure IS");
730: ISGetLocalSize(pP,&ni);
731: ISGetIndices(pP,&idxs);
732: for (i=0;i<ni;i++) matis->sf_rootdata[idxs[i]-rst] = 1;
733: ISRestoreIndices(pP,&idxs);
734: PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
735: PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
736: for (i=0,ni=0;i<n;i++) if (matis->sf_leafdata[i]) widxs[ni++] = i;
737: ISLocalToGlobalMappingApply(l2g,ni,widxs,widxs+ni);
738: ISCreateGeneral(PETSC_COMM_SELF,ni,widxs,PETSC_COPY_VALUES,&lP);
739: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_lP",(PetscObject)lP);
740: ISCreateGeneral(PetscObjectComm((PetscObject)ksp),ni,widxs+ni,PETSC_COPY_VALUES,&is1);
741: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_gP",(PetscObject)is1);
742: ISDestroy(&is1);
743: } else {
744: if (!lP) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing sequential pressure IS");
745: ISGetLocalSize(lP,&ni);
746: ISGetIndices(lP,&idxs);
747: for (i=0;i<ni;i++)
748: if (idxs[i] >=0 && idxs[i] < n)
749: matis->sf_leafdata[idxs[i]] = 1;
 750:       PetscSFReduceBegin(matis->sf,MPIU_INT,matis->sf_leafdata,matis->sf_rootdata,MPIU_REPLACE);
 751:       ISLocalToGlobalMappingApply(l2g,ni,idxs,widxs); /* map to global numbering while idxs is still valid */
 752:       ISRestoreIndices(lP,&idxs);
753: ISCreateGeneral(PetscObjectComm((PetscObject)ksp),ni,widxs,PETSC_COPY_VALUES,&is1);
754: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_gP",(PetscObject)is1);
755: ISDestroy(&is1);
756: PetscSFReduceEnd(matis->sf,MPIU_INT,matis->sf_leafdata,matis->sf_rootdata,MPIU_REPLACE);
757: for (i=0,ni=0;i<nl;i++) if (matis->sf_rootdata[i]) widxs[ni++] = i+rst;
758: ISCreateGeneral(PetscObjectComm((PetscObject)ksp),ni,widxs,PETSC_COPY_VALUES,&pP);
759: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_pP",(PetscObject)pP);
760: }
761: PetscFree(widxs);
763: /* If there's any "interior pressure",
764: we may want to use a discrete harmonic solver instead
 765:         of a Stokes harmonic for the Dirichlet preconditioner.
 766:         We need to extract the interior velocity dofs in the interior dofs ordering (iV)
 767:         and the interior pressure dofs in the local ordering (iP) */
768: if (!allp) {
769: ISLocalToGlobalMapping l2g_t;
771: ISDifference(lPall,lP,&is1);
772: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_iP",(PetscObject)is1);
773: ISDifference(II,is1,&is2);
774: ISDestroy(&is1);
775: ISLocalToGlobalMappingCreateIS(II,&l2g_t);
776: ISGlobalToLocalMappingApplyIS(l2g_t,IS_GTOLM_DROP,is2,&is1);
777: ISGetLocalSize(is1,&i);
778: ISGetLocalSize(is2,&j);
779: if (i != j) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inconsistent local sizes %D and %D for iV",i,j);
780: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_iV",(PetscObject)is1);
781: ISLocalToGlobalMappingDestroy(&l2g_t);
782: ISDestroy(&is1);
783: ISDestroy(&is2);
784: }
785: ISDestroy(&lPall);
786: ISDestroy(&II);
788: /* exclude selected pressures from the inner BDDC */
789: if (pcbddc->DirichletBoundariesLocal) {
790: IS list[2],plP,isout;
791: PetscInt np;
793: /* need a parallel IS */
794: ISGetLocalSize(lP,&np);
795: ISGetIndices(lP,&idxs);
796: ISCreateGeneral(PetscObjectComm((PetscObject)ksp),np,idxs,PETSC_USE_POINTER,&plP);
797: list[0] = plP;
798: list[1] = pcbddc->DirichletBoundariesLocal;
799: ISConcatenate(PetscObjectComm((PetscObject)ksp),2,list,&isout);
800: ISDestroy(&plP);
801: ISRestoreIndices(lP,&idxs);
802: PCBDDCSetDirichletBoundariesLocal(fetidp->innerbddc,isout);
803: ISDestroy(&isout);
804: } else if (pcbddc->DirichletBoundaries) {
805: IS list[2],isout;
807: list[0] = pP;
808: list[1] = pcbddc->DirichletBoundaries;
809: ISConcatenate(PetscObjectComm((PetscObject)ksp),2,list,&isout);
810: PCBDDCSetDirichletBoundaries(fetidp->innerbddc,isout);
811: ISDestroy(&isout);
812: } else {
813: IS plP;
814: PetscInt np;
816: /* need a parallel IS */
817: ISGetLocalSize(lP,&np);
818: ISGetIndices(lP,&idxs);
819: ISCreateGeneral(PetscObjectComm((PetscObject)ksp),np,idxs,PETSC_COPY_VALUES,&plP);
820: PCBDDCSetDirichletBoundariesLocal(fetidp->innerbddc,plP);
821: ISDestroy(&plP);
822: ISRestoreIndices(lP,&idxs);
823: }
824: ISDestroy(&lP);
825: fetidp->pP = pP;
826: }
828: /* total number of selected pressure dofs */
829: ISGetSize(fetidp->pP,&totP);
831: /* Set operator for inner BDDC */
832: if (totP || fetidp->rhs_flip) {
833: MatDuplicate(A,MAT_COPY_VALUES,&nA);
834: } else {
835: PetscObjectReference((PetscObject)A);
836: nA = A;
837: }
838: if (fetidp->rhs_flip) {
839: MatDiagonalScale(nA,fetidp->rhs_flip,NULL);
840: if (totP) {
841: Mat lA2;
843: MatISGetLocalMat(nA,&lA);
844: MatDuplicate(lA,MAT_COPY_VALUES,&lA2);
845: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_lA",(PetscObject)lA2);
846: MatDestroy(&lA2);
847: }
848: }
850: if (totP) {
851: MatSetOption(nA,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);
852: MatZeroRowsColumnsIS(nA,fetidp->pP,1.,NULL,NULL);
853: } else {
854: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_lA",NULL);
855: }
856: PCSetOperators(fetidp->innerbddc,nA,nA);
857: MatDestroy(&nA);
859: /* non-zero rhs on interior dofs when applying the preconditioner */
860: if (totP) pcbddc->switch_static = PETSC_TRUE;
862: /* if there are no pressures, set inner bddc flag for benign saddle point */
863: if (!totP) {
864: pcbddc->benign_saddle_point = PETSC_TRUE;
865: pcbddc->compute_nonetflux = PETSC_TRUE;
866: }
868: /* Divergence mat */
869: if (totP) {
870: Mat B;
871: IS P;
872: PetscBool save;
874: PetscObjectQuery((PetscObject)fetidp->innerbddc,"__KSPFETIDP_aP",(PetscObject*)&P);
875: MatCreateSubMatrix(A,P,NULL,MAT_INITIAL_MATRIX,&B);
876: save = pcbddc->compute_nonetflux; /* SetDivergenceMat activates nonetflux computation */
877: PCBDDCSetDivergenceMat(fetidp->innerbddc,B,PETSC_FALSE,NULL);
878: pcbddc->compute_nonetflux = save;
879: MatDestroy(&B);
880: }
882: /* Operators for pressure preconditioner */
883: if (totP) {
885: /* Extract pressure block */
886: if (!pisz) {
887: Mat C;
889: MatCreateSubMatrix(A,fetidp->pP,fetidp->pP,MAT_INITIAL_MATRIX,&C);
890: MatScale(C,-1.);
891: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_C",(PetscObject)C);
892: MatDestroy(&C);
893: } else if (A != Ap) { /* user has provided a different Pmat, use it to extract the pressure preconditioner */
894: Mat C;
896: MatCreateSubMatrix(Ap,fetidp->pP,fetidp->pP,MAT_INITIAL_MATRIX,&C);
897: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_PPmat",(PetscObject)C);
898: MatDestroy(&C);
899: }
900: PetscObjectQuery((PetscObject)fetidp->innerbddc,"__KSPFETIDP_PPmat",(PetscObject*)&PPmat);
902: /* Preconditioned operator for the pressure block */
903: if (PPmat) {
904: Mat C;
905: IS Pall;
906: PetscInt AM,PAM,PAN,pam,pan,am,an,pl,pIl,pAg,pIg;
907: PetscBool ismatis;
909: PetscObjectTypeCompare((PetscObject)PPmat,MATIS,&ismatis);
910: if (ismatis) {
911: MatISGetMPIXAIJ(PPmat,MAT_INITIAL_MATRIX,&C);
912: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_PPmat",(PetscObject)C);
913: MatDestroy(&C);
914: PetscObjectQuery((PetscObject)fetidp->innerbddc,"__KSPFETIDP_PPmat",(PetscObject*)&PPmat);
915: }
916: PetscObjectQuery((PetscObject)fetidp->innerbddc,"__KSPFETIDP_aP",(PetscObject*)&Pall);
917: MatGetSize(A,&AM,NULL);
918: MatGetSize(PPmat,&PAM,&PAN);
919: ISGetSize(Pall,&pAg);
920: ISGetSize(fetidp->pP,&pIg);
921: MatGetLocalSize(PPmat,&pam,&pan);
922: MatGetLocalSize(A,&am,&an);
923: ISGetLocalSize(Pall,&pIl);
924: ISGetLocalSize(fetidp->pP,&pl);
925: if (PAM != PAN) SETERRQ2(PetscObjectComm((PetscObject)ksp),PETSC_ERR_USER,"Pressure matrix must be square, unsupported %D x %D",PAM,PAN);
926: if (pam != pan) SETERRQ2(PetscObjectComm((PetscObject)ksp),PETSC_ERR_USER,"Local sizes of pressure matrix must be equal, unsupported %D x %D",pam,pan);
927: if (pam != am && pam != pl && pam != pIl) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_USER,"Invalid number of local rows %D for pressure matrix! Supported are %D, %D or %D",pam,am,pl,pIl);
928: if (pan != an && pan != pl && pan != pIl) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_USER,"Invalid number of local columns %D for pressure matrix! Supported are %D, %D or %D",pan,an,pl,pIl);
929: if (PAM == AM) { /* monolithic ordering, restrict to pressure */
930: MatCreateSubMatrix(PPmat,fetidp->pP,fetidp->pP,MAT_INITIAL_MATRIX,&C);
931: } else if (pAg == PAM) { /* global ordering for pressure only */
932: if (!allp) { /* solving for interface pressure only */
933: IS restr;
935: ISRenumber(fetidp->pP,NULL,NULL,&restr);
936: MatCreateSubMatrix(PPmat,restr,restr,MAT_INITIAL_MATRIX,&C);
937: ISDestroy(&restr);
938: } else {
939: PetscObjectReference((PetscObject)PPmat);
940: C = PPmat;
941: }
942: } else if (pIg == PAM) { /* global ordering for selected pressure only */
943: PetscObjectReference((PetscObject)PPmat);
944: C = PPmat;
945: } else {
946: SETERRQ(PetscObjectComm((PetscObject)ksp),PETSC_ERR_USER,"Unable to use the pressure matrix");
947: }
948: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_PPmat",(PetscObject)C);
949: MatDestroy(&C);
950: } else {
951: Mat C;
953: PetscObjectQuery((PetscObject)fetidp->innerbddc,"__KSPFETIDP_C",(PetscObject*)&C);
954: if (C) { /* non-zero pressure block, most likely Almost Incompressible Elasticity */
955: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_PPmat",(PetscObject)C);
 956:     } else { /* identity (needs to be scaled properly by the user, e.g. using a Richardson method) */
957: PetscInt nl;
959: ISGetLocalSize(fetidp->pP,&nl);
960: MatCreate(PetscObjectComm((PetscObject)ksp),&PPmat);
961: MatSetSizes(PPmat,nl,nl,totP,totP);
962: MatSetType(PPmat,MATAIJ);
963: MatMPIAIJSetPreallocation(PPmat,1,NULL,0,NULL);
964: MatSeqAIJSetPreallocation(PPmat,1,NULL);
965: MatAssemblyBegin(PPmat,MAT_FINAL_ASSEMBLY);
966: MatAssemblyEnd(PPmat,MAT_FINAL_ASSEMBLY);
967: MatShift(PPmat,1.);
968: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_PPmat",(PetscObject)PPmat);
969: MatDestroy(&PPmat);
970: }
971: }
972: } else { /* totP == 0 */
973: PetscObjectCompose((PetscObject)fetidp->innerbddc,"__KSPFETIDP_pP",NULL);
974: }
975: }
976: return(0);
977: }
979: static PetscErrorCode KSPSetUp_FETIDP(KSP ksp)
980: {
981: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
982: PC_BDDC *pcbddc = (PC_BDDC*)fetidp->innerbddc->data;
983: PetscBool flg;
987: KSPFETIDPSetUpOperators(ksp);
988: /* set up BDDC */
989: PCSetErrorIfFailure(fetidp->innerbddc,ksp->errorifnotconverged);
990: PCSetUp(fetidp->innerbddc);
991: /* FETI-DP as it is implemented needs an exact coarse solver */
992: if (pcbddc->coarse_ksp) {
993: KSPSetTolerances(pcbddc->coarse_ksp,PETSC_SMALL,PETSC_SMALL,PETSC_DEFAULT,1000);
994: KSPSetNormType(pcbddc->coarse_ksp,KSP_NORM_DEFAULT);
995: }
996: /* FETI-DP as it is implemented needs exact local Neumann solvers */
997: KSPSetTolerances(pcbddc->ksp_R,PETSC_SMALL,PETSC_SMALL,PETSC_DEFAULT,1000);
998: KSPSetNormType(pcbddc->ksp_R,KSP_NORM_DEFAULT);
1000: /* setup FETI-DP operators
 1001:       If fetidp->statechanged is true, we need to update the operators
1002: that are needed in the saddle-point case. This should be replaced
1003: by a better logic when the FETI-DP matrix and preconditioner will
1004: have their own classes */
1005: if (pcbddc->new_primal_space || fetidp->statechanged) {
1006: Mat F; /* the FETI-DP matrix */
1007: PC D; /* the FETI-DP preconditioner */
1008: KSPReset(fetidp->innerksp);
1009: PCBDDCCreateFETIDPOperators(fetidp->innerbddc,fetidp->fully_redundant,((PetscObject)ksp)->prefix,&F,&D);
1010: KSPSetOperators(fetidp->innerksp,F,F);
1011: KSPSetTolerances(fetidp->innerksp,ksp->rtol,ksp->abstol,ksp->divtol,ksp->max_it);
1012: KSPSetPC(fetidp->innerksp,D);
1013: KSPSetFromOptions(fetidp->innerksp);
1014: MatCreateVecs(F,&(fetidp->innerksp)->vec_rhs,&(fetidp->innerksp)->vec_sol);
1015: MatDestroy(&F);
1016: PCDestroy(&D);
1017: if (fetidp->check) {
1018: PetscViewer viewer;
1020: if (!pcbddc->dbg_viewer) {
1021: viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)ksp));
1022: } else {
1023: viewer = pcbddc->dbg_viewer;
1024: }
1025: KSPFETIDPCheckOperators(ksp,viewer);
1026: }
1027: }
1028: fetidp->statechanged = PETSC_FALSE;
1029: pcbddc->new_primal_space = PETSC_FALSE;
1031: /* propagate settings to the inner solve */
1032: KSPGetComputeSingularValues(ksp,&flg);
1033: KSPSetComputeSingularValues(fetidp->innerksp,flg);
1034: if (ksp->res_hist) {
1035: KSPSetResidualHistory(fetidp->innerksp,ksp->res_hist,ksp->res_hist_max,ksp->res_hist_reset);
1036: }
1037: KSPSetErrorIfNotConverged(fetidp->innerksp,ksp->errorifnotconverged);
1038: KSPSetUp(fetidp->innerksp);
1039: return(0);
1040: }
1042: static PetscErrorCode KSPSolve_FETIDP(KSP ksp)
1043: {
1045: Mat F,A;
1046: MatNullSpace nsp;
1047: Vec X,B,Xl,Bl;
1048: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
1049: PC_BDDC *pcbddc = (PC_BDDC*)fetidp->innerbddc->data;
1052: PetscCitationsRegister(citation,&cited);
1053: if (fetidp->saddlepoint) {
1054: PetscCitationsRegister(citation2,&cited2);
1055: }
1056: KSPGetOperators(ksp,&A,NULL);
1057: KSPGetRhs(ksp,&B);
1058: KSPGetSolution(ksp,&X);
1059: KSPGetOperators(fetidp->innerksp,&F,NULL);
1060: KSPGetRhs(fetidp->innerksp,&Bl);
1061: KSPGetSolution(fetidp->innerksp,&Xl);
1062: PCBDDCMatFETIDPGetRHS(F,B,Bl);
1063: if (ksp->transpose_solve) {
1064: KSPSolveTranspose(fetidp->innerksp,Bl,Xl);
1065: } else {
1066: KSPSolve(fetidp->innerksp,Bl,Xl);
1067: }
1068: PCBDDCMatFETIDPGetSolution(F,Xl,X);
1069: MatGetNullSpace(A,&nsp);
1070: if (nsp) {
1071: MatNullSpaceRemove(nsp,X);
1072: }
1073: /* update ksp with stats from inner ksp */
1074: KSPGetConvergedReason(fetidp->innerksp,&ksp->reason);
1075: KSPGetIterationNumber(fetidp->innerksp,&ksp->its);
1076: ksp->totalits += ksp->its;
1077: KSPGetResidualHistory(fetidp->innerksp,NULL,&ksp->res_hist_len);
1078: /* restore defaults for inner BDDC (Pre/PostSolve flags) */
1079: pcbddc->temp_solution_used = PETSC_FALSE;
1080: pcbddc->rhs_change = PETSC_FALSE;
1081: pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
1082: return(0);
1083: }
1085: static PetscErrorCode KSPReset_FETIDP(KSP ksp)
1086: {
1087: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
1088: PC_BDDC *pcbddc;
1092: ISDestroy(&fetidp->pP);
1093: VecDestroy(&fetidp->rhs_flip);
1094: /* avoid PCReset that does not take into account ref counting */
1095: PCDestroy(&fetidp->innerbddc);
1096: PCCreate(PetscObjectComm((PetscObject)ksp),&fetidp->innerbddc);
1097: PCSetType(fetidp->innerbddc,PCBDDC);
1098: pcbddc = (PC_BDDC*)fetidp->innerbddc->data;
1099: pcbddc->symmetric_primal = PETSC_FALSE;
1100: PetscLogObjectParent((PetscObject)ksp,(PetscObject)fetidp->innerbddc);
1101: KSPDestroy(&fetidp->innerksp);
1102: fetidp->saddlepoint = PETSC_FALSE;
1103: fetidp->matstate = -1;
1104: fetidp->matnnzstate = -1;
1105: fetidp->statechanged = PETSC_TRUE;
1106: return(0);
1107: }
1109: static PetscErrorCode KSPDestroy_FETIDP(KSP ksp)
1110: {
1111: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
1115: KSPReset_FETIDP(ksp);
1116: PCDestroy(&fetidp->innerbddc);
1117: KSPDestroy(&fetidp->innerksp);
1118: PetscFree(fetidp->monctx);
1119: PetscObjectComposeFunction((PetscObject)ksp,"KSPFETIDPSetInnerBDDC_C",NULL);
1120: PetscObjectComposeFunction((PetscObject)ksp,"KSPFETIDPGetInnerBDDC_C",NULL);
1121: PetscObjectComposeFunction((PetscObject)ksp,"KSPFETIDPGetInnerKSP_C",NULL);
1122: PetscObjectComposeFunction((PetscObject)ksp,"KSPFETIDPSetPressureOperator_C",NULL);
1123: PetscFree(ksp->data);
1124: return(0);
1125: }
1127: static PetscErrorCode KSPView_FETIDP(KSP ksp,PetscViewer viewer)
1128: {
1129: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
1131: PetscBool iascii;
1134: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1135: if (iascii) {
1136: PetscViewerASCIIPrintf(viewer," fully redundant: %D\n",fetidp->fully_redundant);
1137: PetscViewerASCIIPrintf(viewer," saddle point: %D\n",fetidp->saddlepoint);
1138: PetscViewerASCIIPrintf(viewer," inner solver details\n");
1139: PetscViewerASCIIAddTab(viewer,2);
1140: }
1141: KSPView(fetidp->innerksp,viewer);
1142: if (iascii) {
1143: PetscViewerASCIISubtractTab(viewer,2);
1144: PetscViewerASCIIPrintf(viewer," BDDC solver details\n");
1145: PetscViewerASCIIAddTab(viewer,2);
1146: }
1147: PCView(fetidp->innerbddc,viewer);
1148: if (iascii) {
1149: PetscViewerASCIISubtractTab(viewer,2);
1150: }
1151: return(0);
1152: }
1154: static PetscErrorCode KSPSetFromOptions_FETIDP(PetscOptionItems *PetscOptionsObject,KSP ksp)
1155: {
1156: KSP_FETIDP *fetidp = (KSP_FETIDP*)ksp->data;
1160: /* set options prefixes for the inner objects, since the parent prefix will be valid at this point */
1161: PetscObjectSetOptionsPrefix((PetscObject)fetidp->innerksp,((PetscObject)ksp)->prefix);
1162: PetscObjectAppendOptionsPrefix((PetscObject)fetidp->innerksp,"fetidp_");
1163: if (!fetidp->userbddc) {
1164: PetscObjectSetOptionsPrefix((PetscObject)fetidp->innerbddc,((PetscObject)ksp)->prefix);
1165: PetscObjectAppendOptionsPrefix((PetscObject)fetidp->innerbddc,"fetidp_bddc_");
1166: }
1167: PetscOptionsHead(PetscOptionsObject,"KSP FETIDP options");
1168: PetscOptionsBool("-ksp_fetidp_fullyredundant","Use fully redundant multipliers","none",fetidp->fully_redundant,&fetidp->fully_redundant,NULL);
1169: PetscOptionsBool("-ksp_fetidp_saddlepoint","Activates support for saddle-point problems",NULL,fetidp->saddlepoint,&fetidp->saddlepoint,NULL);
 1170:   PetscOptionsBool("-ksp_fetidp_check","Activates verbose debugging output for the FETI-DP operators",NULL,fetidp->check,&fetidp->check,NULL);
1171: PetscOptionsTail();
1172: PCSetFromOptions(fetidp->innerbddc);
1173: return(0);
1174: }
1176: /*MC
1177: KSPFETIDP - The FETI-DP method
1179: This class implements the FETI-DP method [1].
1180: The matrix for the KSP must be of type MATIS.
 1181:    The FETI-DP linear system (automatically generated by constructing an internal PCBDDC object) is solved using an internal KSP object.
1183: Options Database Keys:
1184: + -ksp_fetidp_fullyredundant <false> : use a fully redundant set of Lagrange multipliers
1185: . -ksp_fetidp_saddlepoint <false> : activates support for saddle point problems, see [2]
1186: . -ksp_fetidp_saddlepoint_flip <false> : usually, an incompressible Stokes problem is written as
1187: | A B^T | | v | = | f |
1188: | B 0 | | p | = | g |
1189: with B representing -\int_\Omega \nabla \cdot u q.
1190: If -ksp_fetidp_saddlepoint_flip is true, the code assumes that the user provides it as
1191: | A B^T | | v | = | f |
1192: |-B 0 | | p | = |-g |
1193: . -ksp_fetidp_pressure_field <-1> : activates support for saddle point problems, and identifies the pressure field id.
1194: If this information is not provided, the pressure field is detected by using MatFindZeroDiagonals().
 1195: .   -ksp_fetidp_pressure_iszero <true> : if false, extracts the pressure block from the matrix (e.g. for Almost Incompressible Elasticity)
1196: - -ksp_fetidp_pressure_all <false> : if false, uses the interface pressures, as described in [2]. If true, uses the entire pressure field.
 1198:    Level: advanced
 1200:    Notes: Options for the inner KSP and for the customization of the PCBDDC object can be specified on the command line using the prefixes -fetidp_ and -fetidp_bddc_, e.g.,
1201: .vb
1202: -fetidp_ksp_type gmres -fetidp_bddc_pc_bddc_symmetric false
1203: .ve
1204: will use GMRES for the solution of the linear system on the Lagrange multipliers, generated using a non-symmetric PCBDDC.
1206: For saddle point problems with continuous pressures, the preconditioned operator for the pressure solver can be specified with KSPFETIDPSetPressureOperator().
 1207:    If it is not provided, an identity matrix will be created; the user then needs to scale it through a Richardson solver.
 1208:    Options for the pressure solver can be prefixed with -fetidp_fielsplit_p_, e.g.
1209: .vb
1210: -fetidp_fielsplit_p_ksp_type preonly -fetidp_fielsplit_p_pc_type lu -fetidp_fielsplit_p_pc_factor_mat_solver_package mumps
1211: .ve
1212: In order to use the deluxe version of FETI-DP, you must customize the inner BDDC operator with -fetidp_bddc_pc_bddc_use_deluxe_scaling -fetidp_bddc_pc_bddc_deluxe_singlemat and use
 1213:    non-redundant multipliers, i.e. -ksp_fetidp_fullyredundant false. Options for the scaling solver are prefixed by -fetidp_bddelta_, e.g.
1214: .vb
 1215:    -fetidp_bddelta_pc_factor_mat_solver_package mumps -fetidp_bddelta_pc_type lu
1216: .ve
1218: Some of the basic options such as maximum number of iterations and tolerances are automatically passed from this KSP to the inner KSP that actually performs the iterations.
1220: The converged reason and number of iterations computed are passed from the inner KSP to this KSP at the end of the solution.
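    A minimal usage sketch (error checking omitted; A is assumed to be an already assembled MATIS matrix,
    and b, x conforming vectors):
.vb
   KSP ksp;
   KSPCreate(PETSC_COMM_WORLD,&ksp);
   KSPSetType(ksp,KSPFETIDP);
   KSPSetOperators(ksp,A,A);       /* A must be of type MATIS */
   KSPSetFromOptions(ksp);
   KSPSolve(ksp,b,x);
   KSPDestroy(&ksp);
.ve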
1222: Developer Notes: Even though this method does not directly use any norms, the user is allowed to set the KSPNormType to any value.
1223: This is so users do not have to change KSPNormType options when they switch from other KSP methods to this one.
1225: References:
1226: .vb
1227: . [1] - C. Farhat, M. Lesoinne, P. LeTallec, K. Pierson, and D. Rixen, FETI-DP: a dual-primal unified FETI method. I. A faster alternative to the two-level FETI method, Internat. J. Numer. Methods Engrg., 50 (2001), pp. 1523--1544
1228: . [2] - X. Tu, J. Li, A FETI-DP type domain decomposition algorithm for three-dimensional incompressible Stokes equations, SIAM J. Numer. Anal., 53 (2015), pp. 720-742
1229: .ve
1231: .seealso: MATIS, PCBDDC, KSPFETIDPSetInnerBDDC(), KSPFETIDPGetInnerBDDC(), KSPFETIDPGetInnerKSP()
1232: M*/
1233: PETSC_EXTERN PetscErrorCode KSPCreate_FETIDP(KSP ksp)
1234: {
1236: KSP_FETIDP *fetidp;
1237: KSP_FETIDPMon *monctx;
1238: PC_BDDC *pcbddc;
1239: PC pc;
1242: KSPSetSupportedNorm(ksp,KSP_NORM_NONE,PC_LEFT,3);
1243: KSPSetSupportedNorm(ksp,KSP_NORM_NONE,PC_RIGHT,2);
1244: KSPSetSupportedNorm(ksp,KSP_NORM_PRECONDITIONED,PC_LEFT,2);
1245: KSPSetSupportedNorm(ksp,KSP_NORM_PRECONDITIONED,PC_RIGHT,2);
1246: KSPSetSupportedNorm(ksp,KSP_NORM_UNPRECONDITIONED,PC_LEFT,2);
1247: KSPSetSupportedNorm(ksp,KSP_NORM_UNPRECONDITIONED,PC_RIGHT,2);
1248: KSPSetSupportedNorm(ksp,KSP_NORM_NATURAL,PC_LEFT,2);
1250: PetscNewLog(ksp,&fetidp);
1251: fetidp->matstate = -1;
1252: fetidp->matnnzstate = -1;
1253: fetidp->statechanged = PETSC_TRUE;
1255: ksp->data = (void*)fetidp;
1256: ksp->ops->setup = KSPSetUp_FETIDP;
1257: ksp->ops->solve = KSPSolve_FETIDP;
1258: ksp->ops->destroy = KSPDestroy_FETIDP;
1259: ksp->ops->computeeigenvalues = KSPComputeEigenvalues_FETIDP;
1260: ksp->ops->computeextremesingularvalues = KSPComputeExtremeSingularValues_FETIDP;
1261: ksp->ops->view = KSPView_FETIDP;
1262: ksp->ops->setfromoptions = KSPSetFromOptions_FETIDP;
1263: ksp->ops->buildsolution = KSPBuildSolution_FETIDP;
1264: ksp->ops->buildresidual = KSPBuildResidualDefault;
1265: KSPGetPC(ksp,&pc);
1266: PCSetType(pc,PCNONE);
1267: /* create the inner KSP for the Lagrange multipliers */
1268: KSPCreate(PetscObjectComm((PetscObject)ksp),&fetidp->innerksp);
1269: KSPGetPC(fetidp->innerksp,&pc);
1270: PCSetType(pc,PCNONE);
1271: PetscLogObjectParent((PetscObject)ksp,(PetscObject)fetidp->innerksp);
1272: /* monitor */
1273: PetscNew(&monctx);
1274: monctx->parentksp = ksp;
1275: fetidp->monctx = monctx;
1276: KSPMonitorSet(fetidp->innerksp,KSPMonitor_FETIDP,fetidp->monctx,NULL);
1277: /* create the inner BDDC */
1278: PCCreate(PetscObjectComm((PetscObject)ksp),&fetidp->innerbddc);
1279: PCSetType(fetidp->innerbddc,PCBDDC);
 1280:   /* make sure we always obtain a consistent FETI-DP matrix
 1281:      for symmetric problems; the user can always customize it through the command line */
1282: pcbddc = (PC_BDDC*)fetidp->innerbddc->data;
1283: pcbddc->symmetric_primal = PETSC_FALSE;
1284: PetscLogObjectParent((PetscObject)ksp,(PetscObject)fetidp->innerbddc);
1285: /* composed functions */
1286: PetscObjectComposeFunction((PetscObject)ksp,"KSPFETIDPSetInnerBDDC_C",KSPFETIDPSetInnerBDDC_FETIDP);
1287: PetscObjectComposeFunction((PetscObject)ksp,"KSPFETIDPGetInnerBDDC_C",KSPFETIDPGetInnerBDDC_FETIDP);
1288: PetscObjectComposeFunction((PetscObject)ksp,"KSPFETIDPGetInnerKSP_C",KSPFETIDPGetInnerKSP_FETIDP);
1289: PetscObjectComposeFunction((PetscObject)ksp,"KSPFETIDPSetPressureOperator_C",KSPFETIDPSetPressureOperator_FETIDP);
1290: /* need to call KSPSetUp_FETIDP even with KSP_SETUP_NEWMATRIX */
1291: ksp->setupnewmatrix = PETSC_TRUE;
1292: return(0);
1293: }