Actual source code: ex23.c

static char help[] = "Solves a tridiagonal linear system.\n\n";

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h - vectors
     petscmat.h    - matrices                 petscis.h  - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h  - preconditioners
     petscviewer.h - viewers

  Note:  The corresponding uniprocessor example is ex1.c
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u;                                   /* approx solution, RHS, exact solution */
  Mat         A;                                         /* linear system matrix */
  KSP         ksp;                                       /* linear solver context */
  PC          pc;                                        /* preconditioner context */
  PetscReal   norm, tol = 1000. * PETSC_MACHINE_EPSILON; /* norm of solution error */
  PetscInt    i, n      = 10, col[3], its, rstart, rend, nlocal;
  PetscScalar one = 1.0, value[3];

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors.  Note that we form one vector from scratch and
     then duplicate it as needed. For this simple case we let PETSc decide
     how many elements of the vector are stored on each processor: passing
     PETSC_DECIDE as the second argument to VecSetSizes() below causes
     PETSc to choose the local sizes.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
  PetscCall(VecSetSizes(x, PETSC_DECIDE, n));
  PetscCall(VecSetFromOptions(x));
  PetscCall(VecDuplicate(x, &b));
  PetscCall(VecDuplicate(x, &u));
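
  /*
     Because VecSetFromOptions() was called above, the vector type can also
     be selected at runtime from the options database; for instance
     (assuming a PETSc build configured with the corresponding backend),

         ./ex23 -vec_type cuda

     would store the vectors on the GPU. This is an illustrative sketch of
     the options mechanism, not something this example requires.
  */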

  /* Identify the starting and ending mesh points on each
     processor for the interior part of the mesh. Since we let PETSc
     choose the layout above, we must query it here. */

  PetscCall(VecGetOwnershipRange(x, &rstart, &rend));
  PetscCall(VecGetLocalSize(x, &nlocal));
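
  /*
     For example, the default PETSC_DECIDE layout splits the rows as evenly
     as possible: with n = 10 on 2 processes, rank 0 owns rows [0,5) and
     rank 1 owns rows [5,10), so rstart/rend are 0/5 and 5/10 and nlocal
     is 5 on both ranks.
  */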

  /*
     Create matrix.  When using MatCreate(), the matrix format can
     be specified at runtime.

     Performance tuning note:  For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.

     We pass in nlocal as the "local" size of the matrix to force it
     to have the same parallel layout as the vector created above.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, nlocal, nlocal, n, n));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
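
  /*
     A minimal preallocation sketch for this tridiagonal system: each row
     has at most 3 nonzeros, at most 1 of which falls in the off-diagonal
     block of the MPIAIJ format. Such calls would replace MatSetUp() above;
     preallocation routines whose matrix type does not match the actual
     type are safely ignored, so both may be given:

         PetscCall(MatSeqAIJSetPreallocation(A, 3, NULL));
         PetscCall(MatMPIAIJSetPreallocation(A, 3, NULL, 1, NULL));
  */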

  /*
     Assemble matrix.

     The linear system is distributed across the processors by
     chunks of contiguous rows, which correspond to contiguous
     sections of the mesh on which the problem is discretized.
     For matrix assembly, each processor contributes entries for
     the part that it owns locally.
  */

  if (!rstart) {
    rstart   = 1;
    i        = 0;
    col[0]   = 0;
    col[1]   = 1;
    value[0] = 2.0;
    value[1] = -1.0;
    PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  }
  if (rend == n) {
    rend     = n - 1;
    i        = n - 1;
    col[0]   = n - 2;
    col[1]   = n - 1;
    value[0] = -1.0;
    value[1] = 2.0;
    PetscCall(MatSetValues(A, 1, &i, 2, col, value, INSERT_VALUES));
  }

  /* Set entries corresponding to the mesh interior */
  value[0] = -1.0;
  value[1] = 2.0;
  value[2] = -1.0;
  for (i = rstart; i < rend; i++) {
    col[0] = i - 1;
    col[1] = i;
    col[2] = i + 1;
    PetscCall(MatSetValues(A, 1, &i, 3, col, value, INSERT_VALUES));
  }
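
  /*
     Taken together, the boundary and interior entries produce the standard
     tridiagonal stencil; for n = 5 the assembled matrix is

         [  2 -1  0  0  0 ]
         [ -1  2 -1  0  0 ]
         [  0 -1  2 -1  0 ]
         [  0  0 -1  2 -1 ]
         [  0  0  0 -1  2 ]
  */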

  /* Assemble the matrix */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  PetscCall(VecSet(u, one));
  PetscCall(MatMult(A, u, b));
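
  /*
     With u = (1, ..., 1)^T, each interior row of A u contributes
     -1 + 2 - 1 = 0 and the two boundary rows contribute 2 - 1 = 1,
     so b = (1, 0, ..., 0, 1)^T.
  */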

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));
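
  /*
     The second Mat argument is the matrix from which the preconditioner
     is built. Passing a different matrix B there, as in
     KSPSetOperators(ksp, A, B), is a common pattern when a cheaper
     approximation of A is preferable for preconditioning (B here is a
     hypothetical matrix, not part of this example).
  */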

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following three statements are optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions().
  */
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCJACOBI));
  PetscCall(KSPSetTolerances(ksp, 1.e-7, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
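
  /*
     For reference, the runtime equivalents of the two hard-coded choices
     above are

         -pc_type jacobi -ksp_rtol 1.0e-7
  */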

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    KSPSetFromOptions() is called _after_ any other customization
    routines.
  */
  PetscCall(KSPSetFromOptions(ksp));
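
  /*
     For instance, since this matrix is symmetric positive definite, an
     illustrative run (this command line is an assumed example, not part
     of the program) might be:

         mpiexec -n 2 ./ex23 -ksp_type cg -ksp_monitor
  */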

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Solve linear system
  */
  PetscCall(KSPSolve(ksp, b, x));

  /*
     View solver info; we could instead use the option -ksp_view to
     print this info to the screen at the conclusion of KSPSolve().
  */
  PetscCall(KSPView(ksp, PETSC_VIEWER_STDOUT_WORLD));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error.  Note that VecAXPY() overwrites the computed
     solution x with the error x - u, so x must not be needed afterwards.
  */
  PetscCall(VecAXPY(x, -1.0, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  if (norm > tol) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g, Iterations %" PetscInt_FMT "\n", (double)norm, its));

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(KSPDestroy(&ksp));

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !complex !single

   test:
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 2
      nsize: 3
      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   test:
      suffix: 3
      nsize: 2
      args: -ksp_monitor_short -ksp_rtol 1e-6 -ksp_type pipefgmres

TEST*/