Actual source code: ex11.c

static char help[] = "Solves a linear system in parallel with KSP.\n\n";

/*
   Description: Solves a complex linear system in parallel with KSP.

   The model problem:
      Solve the Helmholtz equation on the unit square: (0,1) x (0,1)
          -delta u - sigma1*u + i*sigma2*u = f,
           where delta = Laplace operator
      Dirichlet b.c.'s on all sides
      Use the 2-D, five-point finite difference stencil.

   Compiling the code:
      This code uses the complex-numbers version of PETSc, so PETSc must
      be configured with complex scalars (see the sketch below).
*/
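
/*
   A typical configure invocation for a complex build looks like the
   following (a sketch; the exact option set depends on your installation):

     ./configure --with-scalar-type=complex

   With a real-scalar build, PETSC_i and the complex arithmetic used
   below are unavailable and this example will not compile.
*/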

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

int main(int argc, char **args)
{
  Vec         x, b, u; /* approx solution, RHS, exact solution */
  Mat         A;       /* linear system matrix */
  KSP         ksp;     /* linear solver context */
  PetscReal   norm;    /* norm of solution error */
  PetscInt    dim, i, j, Ii, J, Istart, Iend, n = 6, its, use_random;
  PetscScalar v, none = -1.0, sigma2, pfive = 0.5, *xa;
  PetscRandom rctx;
  PetscReal   h2, sigma1 = 100.0;
  PetscBool   flg = PETSC_FALSE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &args, (char *)0, help));
  PetscCall(PetscOptionsGetReal(NULL, NULL, "-sigma1", &sigma1, NULL));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));
  dim = n * n;
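  /* For the default n = 6 the grid has 6 x 6 = 36 interior points, so the
     system matrix A below is 36 x 36. */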

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, dim, dim));
  PetscCall(MatSetFromOptions(A));
  PetscCall(MatSetUp(A));
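
  /*
     Performance note (a sketch, not part of this example): for AIJ
     formats one could preallocate instead of calling MatSetUp(), using
     the known five-point-stencil row lengths (at most 5 nonzeros per
     row, of which at most 2 fall outside a process's block of rows,
     assuming each process owns complete grid lines):

       PetscCall(MatSeqAIJSetPreallocation(A, 5, NULL));
       PetscCall(MatMPIAIJSetPreallocation(A, 5, NULL, 2, NULL));

     These calls are ignored for matrix types to which they do not apply.
  */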

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  PetscCall(MatGetOwnershipRange(A, &Istart, &Iend));
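  /* For example, with dim = 36 on 2 processes, PETSC_DECIDE typically
     gives rank 0 rows 0..17 (Istart = 0, Iend = 18) and rank 1 rows
     18..35 (Istart = 18, Iend = 36). */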

  /*
     Set matrix elements in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
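
  /*
     The loop below fills row Ii = i*n + j, corresponding to grid point
     (i,j) of the n x n interior grid (0-based, with j varying fastest).
     The five-point stencil couples Ii to its grid neighbors:

                    Ii-n                 (i-1,j)
            Ii-1     Ii     Ii+1         (i,j-1), (i,j), (i,j+1)
                    Ii+n                 (i+1,j)

     Each off-diagonal entry is -1; the diagonal entry is
     4 - sigma1*h2 + sigma2*h2, where sigma2 is either random (purely
     imaginary, in [0, i]) or fixed at 10i.
  */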

  PetscCall(PetscOptionsGetBool(NULL, NULL, "-norandom", &flg, NULL));
  use_random = flg ? 0 : 1;
  if (use_random) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(PetscRandomSetInterval(rctx, 0.0, PETSC_i));
  } else {
    sigma2 = 10.0 * PETSC_i;
  }
  h2 = 1.0 / ((n + 1) * (n + 1));
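  /* h2 is the square of the mesh width h = 1/(n+1); for the default
     n = 6, h = 1/7 and h2 = 1/49 ~ 0.0204. */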
  for (Ii = Istart; Ii < Iend; Ii++) {
    v = -1.0;
    i = Ii / n;
    j = Ii - i * n;
    if (i > 0) {
      J = Ii - n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (i < n - 1) {
      J = Ii + n;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (j > 0) {
      J = Ii - 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (j < n - 1) {
      J = Ii + 1;
      PetscCall(MatSetValues(A, 1, &Ii, 1, &J, &v, ADD_VALUES));
    }
    if (use_random) PetscCall(PetscRandomGetValue(rctx, &sigma2));
    v = 4.0 - sigma1 * h2 + sigma2 * h2;
    PetscCall(MatSetValues(A, 1, &Ii, 1, &Ii, &v, ADD_VALUES));
  }
  if (use_random) PetscCall(PetscRandomDestroy(&rctx));

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
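
  /*
     A sketch of that overlap (hypothetical; this example has no extra
     work to hide behind the communication):

       PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
       ... purely local computation, touching no unassembled entries ...
       PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  */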

  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  PetscCall(VecCreate(PETSC_COMM_WORLD, &u));
  PetscCall(VecSetSizes(u, PETSC_DECIDE, dim));
  PetscCall(VecSetFromOptions(u));
  PetscCall(VecDuplicate(u, &b));
  PetscCall(VecDuplicate(b, &x));
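
  /*
     Equivalently (a sketch, not used here), u could be created in a
     single call, at the cost of fixing the vector type in the code
     instead of allowing runtime selection via VecSetFromOptions():

       PetscCall(VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, dim, &u));
  */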

  /*
     Set exact solution; then compute right-hand-side vector b = A*u,
     so that u is by construction the exact solution of Ax = b.
  */

  if (use_random) {
    PetscCall(PetscRandomCreate(PETSC_COMM_WORLD, &rctx));
    PetscCall(PetscRandomSetFromOptions(rctx));
    PetscCall(VecSetRandom(u, rctx));
  } else {
    PetscCall(VecSet(u, pfive));
  }
  PetscCall(MatMult(A, u, b));

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  PetscCall(KSPSetOperators(ksp, A, A));
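
  /*
     If a cheaper matrix P were available to build the preconditioner
     from (a sketch; no such matrix is defined in this example), it
     would be passed as the third argument instead:

       PetscCall(KSPSetOperators(ksp, A, P));
  */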

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
  */
  PetscCall(KSPSetFromOptions(ksp));
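
  /*
     For example, one might run (all options shown are standard PETSc
     options):

       mpiexec -n 4 ./ex11 -n 24 -ksp_type gmres -pc_type jacobi -ksp_monitor_short

     Tolerances could instead be set in code before this point, e.g.:

       PetscCall(KSPSetTolerances(ksp, 1.e-8, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT));
  */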

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscCall(KSPSolve(ksp, b, x));
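
  /*
     Convergence could be checked explicitly (a sketch; 'reason' is a
     hypothetical local variable not declared above):

       KSPConvergedReason reason;
       PetscCall(KSPGetConvergedReason(ksp, &reason));
       if (reason < 0) { ... the solve diverged ... }
  */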

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
      Print the first 3 entries of x; this demonstrates extraction of the
      real and imaginary components of the complex vector, x.
  */
  flg = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-print_x3", &flg, NULL));
  if (flg) {
    PetscCall(VecGetArray(x, &xa));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "The first three entries of x are:\n"));
    for (i = 0; i < 3; i++) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "x[%" PetscInt_FMT "] = %g + %g i\n", i, (double)PetscRealPart(xa[i]), (double)PetscImaginaryPart(xa[i])));
    PetscCall(VecRestoreArray(x, &xa));
  }
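
  /*
     Note that VecGetArray() exposes only the locally owned entries, so
     the loop above reads global entries 0..2 only because rank 0 (the
     rank from which PetscPrintf() prints) owns them. The local range
     could be queried explicitly (a sketch; 'low' and 'high' are
     hypothetical variables not declared above):

       PetscCall(VecGetOwnershipRange(x, &low, &high));
  */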

  /*
     Check the error: VecAXPY() forms x <- x - u, and the 2-norm of the
     difference is then the error ||x - u||_2.
  */
  PetscCall(VecAXPY(x, none, u));
  PetscCall(VecNorm(x, NORM_2, &norm));
  PetscCall(KSPGetIterationNumber(ksp, &its));
  if (norm < 1.e-12) {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error < 1.e-12 iterations %" PetscInt_FMT "\n", its));
  } else {
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Norm of error %g iterations %" PetscInt_FMT "\n", (double)norm, its));
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  PetscCall(KSPDestroy(&ksp));
  if (use_random) PetscCall(PetscRandomDestroy(&rctx));
  PetscCall(VecDestroy(&u));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&b));
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: complex

   test:
      args: -n 6 -norandom -pc_type none -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always

   testset:
      suffix: deflation
      args: -norandom -pc_type deflation -ksp_monitor_short
      requires: superlu_dist

      test:
        nsize: 6

      test:
        nsize: 3
        args: -pc_deflation_compute_space {{db2 aggregation}}

      test:
        suffix: pc_deflation_init_only-0
        nsize: 4
        args: -ksp_type fgmres -pc_deflation_compute_space db4 -pc_deflation_compute_space_size 2 -pc_deflation_levels 2 -deflation_ksp_max_it 10
        #TODO remove suffix and next test when this works
        #args: -pc_deflation_init_only {{0 1}separate output}
        args: -pc_deflation_init_only 0

      test:
        suffix: pc_deflation_init_only-1
        nsize: 4
        args: -ksp_type fgmres -pc_deflation_compute_space db4 -pc_deflation_compute_space_size 2 -pc_deflation_levels 2 -deflation_ksp_max_it 10
        args: -pc_deflation_init_only 1

TEST*/