Actual source code: ex12.c
petsc-3.3-p7 2013-05-11
2: /* Program usage: mpiexec -n <procs> ex12 [-help] [all PETSc options] */
4: static char help[] = "Solves a linear system in parallel with KSP.\n\
5: Input parameters include:\n\
6: -m <mesh_x> : number of mesh points in x-direction\n\
7: -n <mesh_n> : number of mesh points in y-direction\n\n";
9: /*T
10: Concepts: KSP^solving a system of linear equations
11: Concepts: KSP^Laplacian, 2d
12: Concepts: PC^registering preconditioners
13: Processors: n
14: T*/
16: /*
17: Demonstrates registering a new preconditioner (PC) type.
19: To register a PC type whose code is linked into the executable,
20: use PCRegister(). To register a PC type in a dynamic library use PCRegisterDynamic()
22: Also provide the prototype for your PCCreate_XXX() function. In
23: this example we use the PETSc implementation of the Jacobi method,
24: PCCreate_Jacobi() just as an example.
26: See the file src/ksp/pc/impls/jacobi/jacobi.c for details on how to
27: write a new PC component.
29: See the manual page PCRegisterDynamic() for details on how to register a method.
30: */
32: /*
33: Include "petscksp.h" so that we can use KSP solvers. Note that this file
34: automatically includes:
35: petscsys.h - base PETSc routines petscvec.h - vectors
36: petscmat.h - matrices
37: petscis.h - index sets petscksp.h - Krylov subspace methods
38: petscviewer.h - viewers petscpc.h - preconditioners
39: */
40: #include <petscksp.h>
42: EXTERN_C_BEGIN
43: extern PetscErrorCode PCCreate_Jacobi(PC);
44: EXTERN_C_END
48: int main(int argc,char **args)
49: {
50: Vec x,b,u; /* approx solution, RHS, exact solution */
51: Mat A; /* linear system matrix */
52: KSP ksp; /* linear solver context */
53: PetscReal norm; /* norm of solution error */
54: PetscInt i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
56: PetscScalar v,one = 1.0,neg_one = -1.0;
57: PC pc; /* preconditioner context */
59: PetscInitialize(&argc,&args,(char *)0,help);
60: PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
61: PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
63: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
64: Compute the matrix and right-hand-side vector that define
65: the linear system, Ax = b.
66: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
67: /*
68: Create parallel matrix, specifying only its global dimensions.
69: When using MatCreate(), the matrix format can be specified at
70: runtime. Also, the parallel partitioning of the matrix can be
71: determined by PETSc at runtime.
72: */
73: MatCreate(PETSC_COMM_WORLD,&A);
74: MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
75: MatSetFromOptions(A);
76: MatSetUp(A);
78: /*
79: Currently, all PETSc parallel matrix formats are partitioned by
80: contiguous chunks of rows across the processors. Determine which
81: rows of the matrix are locally owned.
82: */
83: MatGetOwnershipRange(A,&Istart,&Iend);
85: /*
86: Set matrix elements for the 2-D, five-point stencil in parallel.
87: - Each processor needs to insert only elements that it owns
88: locally (but any non-local elements will be sent to the
89: appropriate processor during matrix assembly).
90: - Always specify global rows and columns of matrix entries.
91: */
92: for (Ii=Istart; Ii<Iend; Ii++) {
93: v = -1.0; i = Ii/n; j = Ii - i*n;
94: if (i>0) {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
95: if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
96: if (j>0) {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
97: if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
98: v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
99: }
101: /*
102: Assemble matrix, using the 2-step process:
103: MatAssemblyBegin(), MatAssemblyEnd()
104: Computations can be done while messages are in transition
105: by placing code between these two statements.
106: */
107: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
108: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
110: /*
111: Create parallel vectors.
112: - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
113: we specify only the vector's global
114: dimension; the parallel partitioning is determined at runtime.
115: - When solving a linear system, the vectors and matrices MUST
116: be partitioned accordingly. PETSc automatically generates
117: appropriately partitioned matrices and vectors when MatCreate()
118: and VecCreate() are used with the same communicator.
119: - Note: We form 1 vector from scratch and then duplicate as needed.
120: */
121: VecCreate(PETSC_COMM_WORLD,&u);
122: VecSetSizes(u,PETSC_DECIDE,m*n);
123: VecSetFromOptions(u);
124: VecDuplicate(u,&b);
125: VecDuplicate(b,&x);
127: /*
128: Set exact solution; then compute right-hand-side vector.
129: Use an exact solution of a vector with all elements of 1.0;
130: */
131: VecSet(u,one);
132: MatMult(A,u,b);
134: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
135: Create the linear solver and set various options
136: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
138: /*
139: Create linear solver context
140: */
141: KSPCreate(PETSC_COMM_WORLD,&ksp);
143: /*
144: Set operators. Here the matrix that defines the linear system
145: also serves as the preconditioning matrix.
146: */
147: KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
149: /*
150: First register a new PC type with the command PCRegister()
151: */
152: PCRegister("ourjacobi",0,"PCCreate_Jacobi",PCCreate_Jacobi);
153:
154: /*
155: Set the PC type to be the new method
156: */
157: KSPGetPC(ksp,&pc);
158: PCSetType(pc,"ourjacobi");
160: /*
161: Set runtime options, e.g.,
162: -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
163: These options will override those specified above as long as
164: KSPSetFromOptions() is called _after_ any other customization
165: routines.
166: */
167: KSPSetFromOptions(ksp);
169: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
170: Solve the linear system
171: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
173: KSPSolve(ksp,b,x);
175: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
176: Check solution and clean up
177: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
179: /*
180: Check the error
181: */
182: VecAXPY(x,neg_one,u);
183: VecNorm(x,NORM_2,&norm);
184: KSPGetIterationNumber(ksp,&its);
185: /* Scale the norm */
186: /* norm *= sqrt(1.0/((m+1)*(n+1))); */
188: /*
189: Print convergence information. PetscPrintf() produces a single
190: print statement from all processes that share a communicator.
191: */
192: PetscPrintf(PETSC_COMM_WORLD,"Norm of error %G iterations %D\n",norm,its);
194: /*
195: Free work space. All PETSc objects should be destroyed when they
196: are no longer needed.
197: */
198: KSPDestroy(&ksp);
199: VecDestroy(&u); VecDestroy(&x);
200: VecDestroy(&b); MatDestroy(&A);
202: /*
203: Always call PetscFinalize() before exiting a program. This routine
204: - finalizes the PETSc libraries as well as MPI
205: - provides summary and diagnostic information if certain runtime
206: options are chosen (e.g., -log_summary).
207: */
208: PetscFinalize();
209: return 0;
210: }