/* ex52.c — from petsc-3.3-p7 (2013-05-11) */
2: /* Program usage: mpiexec -n <procs> ex52 [-help] [all PETSc options] */
4: static char help[] = "Solves a linear system in parallel with KSP. Modified from ex2.c \n\
5: Illustrate how to use external packages MUMPS and SUPERLU \n\
6: Input parameters include:\n\
7: -random_exact_sol : use a random exact solution vector\n\
8: -view_exact_sol : write exact solution vector to stdout\n\
9: -m <mesh_x> : number of mesh points in x-direction\n\
10: -n <mesh_n> : number of mesh points in y-direction\n\n";
12: #include <petscksp.h>
16: int main(int argc,char **args)
17: {
18: Vec x,b,u; /* approx solution, RHS, exact solution */
19: Mat A; /* linear system matrix */
20: KSP ksp; /* linear solver context */
21: PetscRandom rctx; /* random number generator context */
22: PetscReal norm; /* norm of solution error */
23: PetscInt i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
25: PetscBool flg,flg_ilu,flg_ch;
26: PetscScalar v;
27: #if defined(PETSC_USE_LOG)
28: PetscLogStage stage;
29: #endif
31: PetscInitialize(&argc,&args,(char *)0,help);
32: PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
33: PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
34: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
35: Compute the matrix and right-hand-side vector that define
36: the linear system, Ax = b.
37: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
38: MatCreate(PETSC_COMM_WORLD,&A);
39: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
40: MatSetFromOptions(A);
41: MatMPIAIJSetPreallocation(A,5,PETSC_NULL,5,PETSC_NULL);
42: MatSeqAIJSetPreallocation(A,5,PETSC_NULL);
44: /*
45: Currently, all PETSc parallel matrix formats are partitioned by
46: contiguous chunks of rows across the processors. Determine which
47: rows of the matrix are locally owned.
48: */
49: MatGetOwnershipRange(A,&Istart,&Iend);
51: /*
52: Set matrix elements for the 2-D, five-point stencil in parallel.
53: - Each processor needs to insert only elements that it owns
54: locally (but any non-local elements will be sent to the
55: appropriate processor during matrix assembly).
56: - Always specify global rows and columns of matrix entries.
58: Note: this uses the less common natural ordering that orders first
59: all the unknowns for x = h then for x = 2h etc; Hence you see J = Ii +- n
60: instead of J = I +- m as you might expect. The more standard ordering
61: would first do all variables for y = h, then y = 2h etc.
63: */
64: PetscLogStageRegister("Assembly", &stage);
65: PetscLogStagePush(stage);
66: for (Ii=Istart; Ii<Iend; Ii++) {
67: v = -1.0; i = Ii/n; j = Ii - i*n;
68: if (i>0) {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
69: if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
70: if (j>0) {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
71: if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
72: v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
73: }
75: /*
76: Assemble matrix, using the 2-step process:
77: MatAssemblyBegin(), MatAssemblyEnd()
78: Computations can be done while messages are in transition
79: by placing code between these two statements.
80: */
81: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
82: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
83: PetscLogStagePop();
85: /* A is symmetric. Set symmetric flag to enable ICC/Cholesky preconditioner */
86: MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
88: /*
89: Create parallel vectors.
90: - We form 1 vector from scratch and then duplicate as needed.
91: - When using VecCreate(), VecSetSizes and VecSetFromOptions()
92: in this example, we specify only the
93: vector's global dimension; the parallel partitioning is determined
94: at runtime.
95: - When solving a linear system, the vectors and matrices MUST
96: be partitioned accordingly. PETSc automatically generates
97: appropriately partitioned matrices and vectors when MatCreate()
98: and VecCreate() are used with the same communicator.
99: - The user can alternatively specify the local vector and matrix
100: dimensions when more sophisticated partitioning is needed
101: (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
102: below).
103: */
104: VecCreate(PETSC_COMM_WORLD,&u);
105: VecSetSizes(u,PETSC_DECIDE,m*n);
106: VecSetFromOptions(u);
107: VecDuplicate(u,&b);
108: VecDuplicate(b,&x);
110: /*
111: Set exact solution; then compute right-hand-side vector.
112: By default we use an exact solution of a vector with all
113: elements of 1.0; Alternatively, using the runtime option
114: -random_sol forms a solution vector with random components.
115: */
116: PetscOptionsGetBool(PETSC_NULL,"-random_exact_sol",&flg,PETSC_NULL);
117: if (flg) {
118: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
119: PetscRandomSetFromOptions(rctx);
120: VecSetRandom(u,rctx);
121: PetscRandomDestroy(&rctx);
122: } else {
123: VecSet(u,1.0);
124: }
125: MatMult(A,u,b);
127: /*
128: View the exact solution vector if desired
129: */
130: flg = PETSC_FALSE;
131: PetscOptionsGetBool(PETSC_NULL,"-view_exact_sol",&flg,PETSC_NULL);
132: if (flg) {VecView(u,PETSC_VIEWER_STDOUT_WORLD);}
134: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
135: Create the linear solver and set various options
136: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
138: /*
139: Create linear solver context
140: */
141: KSPCreate(PETSC_COMM_WORLD,&ksp);
142: KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
144: /*
145: Example of how to use external package MUMPS
146: Note: runtime options
147: '-ksp_type preonly -pc_type lu -pc_factor_mat_solver_package mumps -mat_mumps_icntl_7 2'
148: are equivalent to these procedual calls
149: */
150: #ifdef PETSC_HAVE_MUMPS
151: flg = PETSC_FALSE;
152: flg_ch = PETSC_FALSE;
153: PetscOptionsGetBool(PETSC_NULL,"-use_mumps_lu",&flg,PETSC_NULL);
154: PetscOptionsGetBool(PETSC_NULL,"-use_mumps_ch",&flg_ch,PETSC_NULL);
155: if (flg || flg_ch){
156: KSPSetType(ksp,KSPPREONLY);
157: PC pc;
158: Mat F;
159: PetscInt ival,icntl;
160: KSPGetPC(ksp,&pc);
161: if (flg){
162: PCSetType(pc,PCLU);
163: } else if (flg_ch) {
164: MatSetOption(A,MAT_SPD,PETSC_TRUE); /* set MUMPS id%SYM=1 */
165: PCSetType(pc,PCCHOLESKY);
166: }
167: PCFactorSetMatSolverPackage(pc,MATSOLVERMUMPS);
168: PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
169: PCFactorGetMatrix(pc,&F);
170: icntl=7; ival = 2;
171: MatMumpsSetIcntl(F,icntl,ival);
172: }
173: #endif
175: /*
176: Example of how to use external package SuperLU
177: Note: runtime options
178: '-ksp_type preonly -pc_type ilu -pc_factor_mat_solver_package superlu -mat_superlu_ilu_droptol 1.e-8'
179: are equivalent to these procedual calls
180: */
181: #ifdef PETSC_HAVE_SUPERLU
182: flg_ilu = PETSC_FALSE;
183: flg = PETSC_FALSE;
184: PetscOptionsGetBool(PETSC_NULL,"-use_superlu_lu",&flg,PETSC_NULL);
185: PetscOptionsGetBool(PETSC_NULL,"-use_superlu_ilu",&flg_ilu,PETSC_NULL);
186: if (flg || flg_ilu){
187: KSPSetType(ksp,KSPPREONLY);
188: PC pc;
189: Mat F;
190: KSPGetPC(ksp,&pc);
191: if (flg){
192: PCSetType(pc,PCLU);
193: } else if (flg_ilu) {
194: PCSetType(pc,PCILU);
195: }
196: PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU);
197: PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
198: PCFactorGetMatrix(pc,&F);
199: MatSuperluSetILUDropTol(F,1.e-8);
200: }
201: #endif
203: /*
204: Example of how to use procedural calls that are equivalent to
205: '-ksp_type preonly -pc_type lu/ilu -pc_factor_mat_solver_package petsc'
206: */
207: flg = PETSC_FALSE;
208: flg_ilu = PETSC_FALSE;
209: flg_ch = PETSC_FALSE;
210: PetscOptionsGetBool(PETSC_NULL,"-use_petsc_lu",&flg,PETSC_NULL);
211: PetscOptionsGetBool(PETSC_NULL,"-use_petsc_ilu",&flg_ilu,PETSC_NULL);
212: PetscOptionsGetBool(PETSC_NULL,"-use_petsc_ch",&flg_ch,PETSC_NULL);
213: if (flg || flg_ilu || flg_ch){
214: KSPSetType(ksp,KSPPREONLY);
215: PC pc;
216: Mat F;
217: KSPGetPC(ksp,&pc);
218: if (flg){
219: PCSetType(pc,PCLU);
220: } else if (flg_ilu) {
221: PCSetType(pc,PCILU);
222: } else if (flg_ch) {
223: PCSetType(pc,PCCHOLESKY);
224: }
225: PCFactorSetMatSolverPackage(pc,MATSOLVERPETSC);
226: PCFactorSetUpMatSolverPackage(pc); /* call MatGetFactor() to create F */
227: PCFactorGetMatrix(pc,&F);
229: /* Test MatGetDiagonal() */
230: Vec diag;
231: KSPSetUp(ksp);
232: MatView(F,PETSC_VIEWER_STDOUT_WORLD);
233: VecDuplicate(x,&diag);
234: MatGetDiagonal(F,diag);
235: VecView(diag,PETSC_VIEWER_STDOUT_WORLD);
236: VecDestroy(&diag);
237: }
238:
239: KSPSetFromOptions(ksp);
241: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
242: Solve the linear system
243: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
245: KSPSolve(ksp,b,x);
247: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
248: Check solution and clean up
249: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
251: /*
252: Check the error
253: */
254: VecAXPY(x,-1.0,u);
255: VecNorm(x,NORM_2,&norm);
256: KSPGetIterationNumber(ksp,&its);
257: /* Scale the norm */
258: /* norm *= sqrt(1.0/((m+1)*(n+1))); */
260: /*
261: Print convergence information. PetscPrintf() produces a single
262: print statement from all processes that share a communicator.
263: An alternative is PetscFPrintf(), which prints to a file.
264: */
265: PetscPrintf(PETSC_COMM_WORLD,"Norm of error %G iterations %D\n",
266: norm,its);
268: /*
269: Free work space. All PETSc objects should be destroyed when they
270: are no longer needed.
271: */
272: KSPDestroy(&ksp);
273: VecDestroy(&u); VecDestroy(&x);
274: VecDestroy(&b); MatDestroy(&A);
276: /*
277: Always call PetscFinalize() before exiting a program. This routine
278: - finalizes the PETSc libraries as well as MPI
279: - provides summary and diagnostic information if certain runtime
280: options are chosen (e.g., -log_summary).
281: */
282: PetscFinalize();
283: return 0;
284: }