/* Actual source code: ex23.c — from petsc-3.3-p7 (2013-05-11) */
2: /* Program usage: mpiexec ex23 [-help] [all PETSc options] */
4: static char help[] = "Solves a tridiagonal linear system.\n\n";
6: /*T
7: Concepts: KSP^basic parallel example;
8: Processors: n
9: T*/
11: /*
12: Include "petscksp.h" so that we can use KSP solvers. Note that this file
13: automatically includes:
14: petscsys.h - base PETSc routines petscvec.h - vectors
15: petscmat.h - matrices
16: petscis.h - index sets petscksp.h - Krylov subspace methods
17: petscviewer.h - viewers petscpc.h - preconditioners
19: Note: The corresponding uniprocessor example is ex1.c
20: */
21: #include <petscksp.h>
25: int main(int argc,char **args)
26: {
27: Vec x, b, u; /* approx solution, RHS, exact solution */
28: Mat A; /* linear system matrix */
29: KSP ksp; /* linear solver context */
30: PC pc; /* preconditioner context */
31: PetscReal norm,tol=1.e-11; /* norm of solution error */
33: PetscInt i,n = 10,col[3],its,rstart,rend,nlocal;
34: PetscScalar neg_one = -1.0,one = 1.0,value[3];
36: PetscInitialize(&argc,&args,(char *)0,help);
37: PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
39: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
40: Compute the matrix and right-hand-side vector that define
41: the linear system, Ax = b.
42: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
44: /*
45: Create vectors. Note that we form 1 vector from scratch and
46: then duplicate as needed. For this simple case let PETSc decide how
47: many elements of the vector are stored on each processor. The second
48: argument to VecSetSizes() below causes PETSc to decide.
49: */
50: VecCreate(PETSC_COMM_WORLD,&x);
51: VecSetSizes(x,PETSC_DECIDE,n);
52: VecSetFromOptions(x);
53: VecDuplicate(x,&b);
54: VecDuplicate(x,&u);
56: /* Identify the starting and ending mesh points on each
57: processor for the interior part of the mesh. We let PETSc decide
58: above. */
60: VecGetOwnershipRange(x,&rstart,&rend);
61: VecGetLocalSize(x,&nlocal);
63: /*
64: Create matrix. When using MatCreate(), the matrix format can
65: be specified at runtime.
67: Performance tuning note: For problems of substantial size,
68: preallocation of matrix memory is crucial for attaining good
69: performance. See the matrix chapter of the users manual for details.
71: We pass in nlocal as the "local" size of the matrix to force it
72: to have the same parallel layout as the vector created above.
73: */
74: MatCreate(PETSC_COMM_WORLD,&A);
75: MatSetSizes(A,nlocal,nlocal,n,n);
76: MatSetFromOptions(A);
77: MatSetUp(A);
79: /*
80: Assemble matrix.
82: The linear system is distributed across the processors by
83: chunks of contiguous rows, which correspond to contiguous
84: sections of the mesh on which the problem is discretized.
85: For matrix assembly, each processor contributes entries for
86: the part that it owns locally.
87: */
90: if (!rstart) {
91: rstart = 1;
92: i = 0; col[0] = 0; col[1] = 1; value[0] = 2.0; value[1] = -1.0;
93: MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
94: }
95: if (rend == n) {
96: rend = n-1;
97: i = n-1; col[0] = n-2; col[1] = n-1; value[0] = -1.0; value[1] = 2.0;
98: MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
99: }
101: /* Set entries corresponding to the mesh interior */
102: value[0] = -1.0; value[1] = 2.0; value[2] = -1.0;
103: for (i=rstart; i<rend; i++) {
104: col[0] = i-1; col[1] = i; col[2] = i+1;
105: MatSetValues(A,1,&i,3,col,value,INSERT_VALUES);
106: }
108: /* Assemble the matrix */
109: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
110: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
112: /*
113: Set exact solution; then compute right-hand-side vector.
114: */
115: VecSet(u,one);
116: MatMult(A,u,b);
118: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
119: Create the linear solver and set various options
120: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
121: /*
122: Create linear solver context
123: */
124: KSPCreate(PETSC_COMM_WORLD,&ksp);
126: /*
127: Set operators. Here the matrix that defines the linear system
128: also serves as the preconditioning matrix.
129: */
130: KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
132: /*
133: Set linear solver defaults for this problem (optional).
134: - By extracting the KSP and PC contexts from the KSP context,
135: we can then directly call any KSP and PC routines to set
136: various options.
137: - The following four statements are optional; all of these
138: parameters could alternatively be specified at runtime via
139: KSPSetFromOptions();
140: */
141: KSPGetPC(ksp,&pc);
142: PCSetType(pc,PCJACOBI);
143: KSPSetTolerances(ksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
145: /*
146: Set runtime options, e.g.,
147: -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
148: These options will override those specified above as long as
149: KSPSetFromOptions() is called _after_ any other customization
150: routines.
151: */
152: KSPSetFromOptions(ksp);
153:
154: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
155: Solve the linear system
156: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
157: /*
158: Solve linear system
159: */
160: KSPSolve(ksp,b,x);
162: /*
163: View solver info; we could instead use the option -ksp_view to
164: print this info to the screen at the conclusion of KSPSolve().
165: */
166: KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);
168: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
169: Check solution and clean up
170: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
171: /*
172: Check the error
173: */
174: VecAXPY(x,neg_one,u);
175: VecNorm(x,NORM_2,&norm);
176: KSPGetIterationNumber(ksp,&its);
177: if (norm > tol){
178: PetscPrintf(PETSC_COMM_WORLD,"Norm of error %G, Iterations %D\n",norm,its);
179: }
181: /*
182: Free work space. All PETSc objects should be destroyed when they
183: are no longer needed.
184: */
185: VecDestroy(&x); VecDestroy(&u);
186: VecDestroy(&b); MatDestroy(&A);
187: KSPDestroy(&ksp);
189: /*
190: Always call PetscFinalize() before exiting a program. This routine
191: - finalizes the PETSc libraries as well as MPI
192: - provides summary and diagnostic information if certain runtime
193: options are chosen (e.g., -log_summary).
194: */
195: PetscFinalize();
196: return 0;
197: }