/*
   Source: ex52.c (PETSc master snapshot, 2016-08-26).
   Report typos and errors to the PETSc developers.
*/
  2: static char help[] = "Solves a linear system in parallel with KSP. Modified from ex2.c \n\
  3:                       Illustrate how to use external packages MUMPS, SUPERLU and STRUMPACK \n\
  4: Input parameters include:\n\
  5:   -random_exact_sol : use a random exact solution vector\n\
  6:   -view_exact_sol   : write exact solution vector to stdout\n\
  7:   -m <mesh_x>       : number of mesh points in x-direction\n\
  8:   -n <mesh_n>       : number of mesh points in y-direction\n\n";

 10:  #include <petscksp.h>

 14: int main(int argc,char **args)
 15: {
 16:   Vec            x,b,u;    /* approx solution, RHS, exact solution */
 17:   Mat            A,F;
 18:   KSP            ksp;      /* linear solver context */
 19:   PC             pc;
 20:   PetscRandom    rctx;     /* random number generator context */
 21:   PetscReal      norm;     /* norm of solution error */
 22:   PetscInt       i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
 24:   PetscBool      flg=PETSC_FALSE,flg_ilu=PETSC_FALSE,flg_ch=PETSC_FALSE;
 25: #if defined(PETSC_HAVE_MUMPS)
 26:   PetscBool      flg_mumps=PETSC_FALSE,flg_mumps_ch=PETSC_FALSE;
 27: #endif
 28: #if defined(PETSC_HAVE_SUPERLU) || defined(PETSC_HAVE_SUPERLU_DIST)
 29:   PetscBool      flg_superlu=PETSC_FALSE;
 30: #endif
 31: #if defined(PETSC_HAVE_STRUMPACK)
 32:   PetscBool      flg_strumpack=PETSC_FALSE;
 33: #endif
 34:   PetscScalar    v;
 35:   PetscMPIInt    rank,size;
 36: #if defined(PETSC_USE_LOG)
 37:   PetscLogStage  stage;
 38: #endif

 40:   PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
 41:   MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
 42:   MPI_Comm_size(PETSC_COMM_WORLD,&size);
 43:   PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
 44:   PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
 45:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 46:          Compute the matrix and right-hand-side vector that define
 47:          the linear system, Ax = b.
 48:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
 49:   MatCreate(PETSC_COMM_WORLD,&A);
 50:   MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
 51:   MatSetFromOptions(A);
 52:   MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);
 53:   MatSeqAIJSetPreallocation(A,5,NULL);
 54:   MatSetUp(A);

 56:   /*
 57:      Currently, all PETSc parallel matrix formats are partitioned by
 58:      contiguous chunks of rows across the processors.  Determine which
 59:      rows of the matrix are locally owned.
 60:   */
 61:   MatGetOwnershipRange(A,&Istart,&Iend);

 63:   /*
 64:      Set matrix elements for the 2-D, five-point stencil in parallel.
 65:       - Each processor needs to insert only elements that it owns
 66:         locally (but any non-local elements will be sent to the
 67:         appropriate processor during matrix assembly).
 68:       - Always specify global rows and columns of matrix entries.

 70:      Note: this uses the less common natural ordering that orders first
 71:      all the unknowns for x = h then for x = 2h etc; Hence you see J = Ii +- n
 72:      instead of J = I +- m as you might expect. The more standard ordering
 73:      would first do all variables for y = h, then y = 2h etc.

 75:    */
 76:   PetscLogStageRegister("Assembly", &stage);
 77:   PetscLogStagePush(stage);
 78:   for (Ii=Istart; Ii<Iend; Ii++) {
 79:     v = -1.0; i = Ii/n; j = Ii - i*n;
 80:     if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
 81:     if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
 82:     if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
 83:     if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
 84:     v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
 85:   }

 87:   /*
 88:      Assemble matrix, using the 2-step process:
 89:        MatAssemblyBegin(), MatAssemblyEnd()
 90:      Computations can be done while messages are in transition
 91:      by placing code between these two statements.
 92:   */
 93:   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
 94:   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
 95:   PetscLogStagePop();

 97:   /* A is symmetric. Set symmetric flag to enable ICC/Cholesky preconditioner */
 98:   MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);

100:   /*
101:      Create parallel vectors.
102:       - We form 1 vector from scratch and then duplicate as needed.
103:       - When using VecCreate(), VecSetSizes and VecSetFromOptions()
104:         in this example, we specify only the
105:         vector's global dimension; the parallel partitioning is determined
106:         at runtime.
107:       - When solving a linear system, the vectors and matrices MUST
108:         be partitioned accordingly.  PETSc automatically generates
109:         appropriately partitioned matrices and vectors when MatCreate()
110:         and VecCreate() are used with the same communicator.
111:       - The user can alternatively specify the local vector and matrix
112:         dimensions when more sophisticated partitioning is needed
113:         (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
114:         below).
115:   */
116:   VecCreate(PETSC_COMM_WORLD,&u);
117:   VecSetSizes(u,PETSC_DECIDE,m*n);
118:   VecSetFromOptions(u);
119:   VecDuplicate(u,&b);
120:   VecDuplicate(b,&x);

122:   /*
123:      Set exact solution; then compute right-hand-side vector.
124:      By default we use an exact solution of a vector with all
125:      elements of 1.0;  Alternatively, using the runtime option
126:      -random_sol forms a solution vector with random components.
127:   */
128:   PetscOptionsGetBool(NULL,NULL,"-random_exact_sol",&flg,NULL);
129:   if (flg) {
130:     PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
131:     PetscRandomSetFromOptions(rctx);
132:     VecSetRandom(u,rctx);
133:     PetscRandomDestroy(&rctx);
134:   } else {
135:     VecSet(u,1.0);
136:   }
137:   MatMult(A,u,b);

139:   /*
140:      View the exact solution vector if desired
141:   */
142:   flg  = PETSC_FALSE;
143:   PetscOptionsGetBool(NULL,NULL,"-view_exact_sol",&flg,NULL);
144:   if (flg) {VecView(u,PETSC_VIEWER_STDOUT_WORLD);}

146:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
147:                 Create the linear solver and set various options
148:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

150:   /*
151:      Create linear solver context
152:   */
153:   KSPCreate(PETSC_COMM_WORLD,&ksp);
154:   KSPSetOperators(ksp,A,A);

156:   /*
157:     Example of how to use external package MUMPS
158:     Note: runtime options
159:           '-ksp_type preonly -pc_type lu -pc_factor_mat_solver_package mumps -mat_mumps_icntl_7 2 -mat_mumps_icntl_1 0.0'
160:           are equivalent to these procedural calls
161:   */
162: #if defined(PETSC_HAVE_MUMPS)
163:   flg_mumps    = PETSC_FALSE;
164:   flg_mumps_ch = PETSC_FALSE;
165:   PetscOptionsGetBool(NULL,NULL,"-use_mumps_lu",&flg_mumps,NULL);
166:   PetscOptionsGetBool(NULL,NULL,"-use_mumps_ch",&flg_mumps_ch,NULL);
167:   if (flg_mumps || flg_mumps_ch) {
168:     KSPSetType(ksp,KSPPREONLY);
169:     PetscInt  ival,icntl;
170:     PetscReal val;
171:     KSPGetPC(ksp,&pc);
172:     if (flg_mumps) {
173:       PCSetType(pc,PCLU);
174:     } else if (flg_mumps_ch) {
175:       MatSetOption(A,MAT_SPD,PETSC_TRUE); /* set MUMPS id%SYM=1 */
176:       PCSetType(pc,PCCHOLESKY);
177:     }
178:     PCFactorSetMatSolverPackage(pc,MATSOLVERMUMPS);
179:     PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
180:     PCFactorGetMatrix(pc,&F);

182:     /* sequential ordering */
183:     icntl = 7; ival = 2;
184:     MatMumpsSetIcntl(F,icntl,ival);

186:     /* threshhold for row pivot detection */
187:     MatMumpsSetIcntl(F,24,1);
188:     icntl = 3; val = 1.e-6;
189:     MatMumpsSetCntl(F,icntl,val);

191:     /* compute determinant of A */
192:     MatMumpsSetIcntl(F,33,1);
193:   }
194: #endif

196:   /*
197:     Example of how to use external package SuperLU
198:     Note: runtime options
199:           '-ksp_type preonly -pc_type ilu -pc_factor_mat_solver_package superlu -mat_superlu_ilu_droptol 1.e-8'
200:           are equivalent to these procedual calls
201:   */
202: #if defined(PETSC_HAVE_SUPERLU) || defined(PETSC_HAVE_SUPERLU_DIST)
203:   flg_ilu     = PETSC_FALSE;
204:   flg_superlu = PETSC_FALSE;
205:   PetscOptionsGetBool(NULL,NULL,"-use_superlu_lu",&flg_superlu,NULL);
206:   PetscOptionsGetBool(NULL,NULL,"-use_superlu_ilu",&flg_ilu,NULL);
207:   if (flg_superlu || flg_ilu) {
208:     KSPSetType(ksp,KSPPREONLY);
209:     KSPGetPC(ksp,&pc);
210:     if (flg_superlu) {
211:       PCSetType(pc,PCLU);
212:     } else if (flg_ilu) {
213:       PCSetType(pc,PCILU);
214:     }
215:     if (size == 1) {
216: #if !defined(PETSC_HAVE_SUPERLU)
217:       SETERRQ(PETSC_COMM_WORLD,1,"This test requires SUPERLU");
218: #endif
219:       PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU);
220:     } else {
221: #if !defined(PETSC_HAVE_SUPERLU_DIST)
222:       SETERRQ(PETSC_COMM_WORLD,1,"This test requires SUPERLU_DIST");
223: #endif
224:       PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU_DIST);
225:     }
226:     PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
227:     PCFactorGetMatrix(pc,&F);
228: #if defined(PETSC_HAVE_SUPERLU)
229:     if (size == 1) {
230:       MatSuperluSetILUDropTol(F,1.e-8);
231:     }
232: #endif
233:   }
234: #endif


237:   /*
238:     Example of how to use external package STRUMPACK
239:     Note: runtime options
240:           '-ksp_type preonly -pc_type lu/ilu -pc_factor_mat_solver_package strumpack \
241:               -mat_strumpack_rctol 1.e-3 -mat_strumpack_hssminsize 50 -mat_strumpack_colperm 0'
242:           are equivalent to these procedural calls

244:     We refer to the STRUMPACK-sparse manual, section 5, on more info on how to tune the preconditioner.
245:   */
246: #if defined(PETSC_HAVE_STRUMPACK)
247:   flg_ilu       = PETSC_FALSE;
248:   flg_strumpack = PETSC_FALSE;
249:   PetscOptionsGetBool(NULL,NULL,"-use_strumpack_lu",&flg_strumpack,NULL);
250:   PetscOptionsGetBool(NULL,NULL,"-use_strumpack_ilu",&flg_ilu,NULL);
251:   if (flg_strumpack || flg_ilu) {
252:     KSPSetType(ksp,KSPPREONLY);
253:     KSPGetPC(ksp,&pc);
254:     if (flg_strumpack) {
255:       PCSetType(pc,PCLU);
256:     } else if (flg_ilu) {
257:       PCSetType(pc,PCILU);
258:     }
259: #if !defined(PETSC_HAVE_STRUMPACK)
260:     SETERRQ(PETSC_COMM_WORLD,1,"This test requires STRUMPACK");
261: #endif
262:     PCFactorSetMatSolverPackage(pc,MATSOLVERSTRUMPACK);
263:     PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
264:     PCFactorGetMatrix(pc,&F);
265: #if defined(PETSC_HAVE_STRUMPACK)
266:     /* The compression tolerance used when doing low-rank compression */
267:     /* in the preconditioner. This is problem specific!               */
268:     MatSTRUMPACKSetHSSRelCompTol(F,1.e-3);
269:     /* Set minimum matrix size for HSS compression to 50 in order to  */
270:     /* demonstrate preconditioner on small problems. For performance  */
271:     /* a value of say 500 (the default) is better.                    */
272:     MatSTRUMPACKSetHSSMinSize(F,50);
273:     /* Since this is a simple discretization, the diagonal is always  */
274:     /* nonzero, and there is no need for the extra MC64 permutation.  */
275:     MatSTRUMPACKSetColPerm(F,PETSC_FALSE);
276: #endif
277:   }
278: #endif

280:   /*
281:     Example of how to use procedural calls that are equivalent to
282:           '-ksp_type preonly -pc_type lu/ilu -pc_factor_mat_solver_package petsc'
283:   */
284:   flg     = PETSC_FALSE;
285:   flg_ilu = PETSC_FALSE;
286:   flg_ch  = PETSC_FALSE;
287:   PetscOptionsGetBool(NULL,NULL,"-use_petsc_lu",&flg,NULL);
288:   PetscOptionsGetBool(NULL,NULL,"-use_petsc_ilu",&flg_ilu,NULL);
289:   PetscOptionsGetBool(NULL,NULL,"-use_petsc_ch",&flg_ch,NULL);
290:   if (flg || flg_ilu || flg_ch) {
291:     KSPSetType(ksp,KSPPREONLY);
292:     KSPGetPC(ksp,&pc);
293:     if (flg) {
294:       PCSetType(pc,PCLU);
295:     } else if (flg_ilu) {
296:       PCSetType(pc,PCILU);
297:     } else if (flg_ch) {
298:       PCSetType(pc,PCCHOLESKY);
299:     }
300:     PCFactorSetMatSolverPackage(pc,MATSOLVERPETSC);
301:     PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
302:     PCFactorGetMatrix(pc,&F);
303: 
304:     /* Test MatGetDiagonal() */
305:     Vec diag;
306:     KSPSetUp(ksp);
307:     VecDuplicate(x,&diag);
308:     MatGetDiagonal(F,diag);
309:     /* VecView(diag,PETSC_VIEWER_STDOUT_WORLD); */
310:     VecDestroy(&diag);
311:   }

313:   KSPSetFromOptions(ksp);

315:   /* Get info from matrix factors */
316:   KSPSetUp(ksp);

318: #if defined(PETSC_HAVE_MUMPS)
319:   if (flg_mumps || flg_mumps_ch) {
320:     PetscInt  icntl,infog34;
321:     PetscReal cntl,rinfo12,rinfo13;
322:     icntl = 3;
323:     MatMumpsGetCntl(F,icntl,&cntl);
324: 
325:     /* compute determinant */
326:     if (!rank) {
327:       MatMumpsGetInfog(F,34,&infog34);
328:       MatMumpsGetRinfog(F,12,&rinfo12);
329:       MatMumpsGetRinfog(F,13,&rinfo13);
330:       PetscPrintf(PETSC_COMM_SELF,"  Mumps row pivot threshhold = %g\n",cntl);
331:       PetscPrintf(PETSC_COMM_SELF,"  Mumps determinant = (%g, %g) * 2^%D \n",(double)rinfo12,(double)rinfo13,infog34);
332:     }
333:   }
334: #endif

336:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
337:                       Solve the linear system
338:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
339:   KSPSolve(ksp,b,x);

341:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
342:                       Check solution and clean up
343:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
344:   VecAXPY(x,-1.0,u);
345:   VecNorm(x,NORM_2,&norm);
346:   KSPGetIterationNumber(ksp,&its);

348:   /*
349:      Print convergence information.  PetscPrintf() produces a single
350:      print statement from all processes that share a communicator.
351:      An alternative is PetscFPrintf(), which prints to a file.
352:   */
353:   if (norm < 1.e-12) {
354:     PetscPrintf(PETSC_COMM_WORLD,"Norm of error < 1.e-12 iterations %D\n",its);
355:   } else {
356:     PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);
357:  }

359:   /*
360:      Free work space.  All PETSc objects should be destroyed when they
361:      are no longer needed.
362:   */
363:   KSPDestroy(&ksp);
364:   VecDestroy(&u);  VecDestroy(&x);
365:   VecDestroy(&b);  MatDestroy(&A);

367:   /*
368:      Always call PetscFinalize() before exiting a program.  This routine
369:        - finalizes the PETSc libraries as well as MPI
370:        - provides summary and diagnostic information if certain runtime
371:          options are chosen (e.g., -log_summary).
372:   */
373:   PetscFinalize();
374:   return ierr;
375: }