/* Actual source code: ex3.c */

/* One-line program description; PETSc prints this when run with -help. */
static char help[] = "Parallel vector layout.\n\n";

  3: /*
  4:   Include "petscvec.h" so that we can use vectors.  Note that this file
  5:   automatically includes:
  6:      petscsys.h       - base PETSc routines   petscis.h     - index sets
  7:      petscviewer.h - viewers
  8: */
  9: #include <petscvec.h>

 11: int main(int argc, char **argv)
 12: {
 13:   PetscMPIInt rank;
 14:   PetscInt    i, istart, iend, n = 6, nlocal;
 15:   PetscScalar v, *array;
 16:   Vec         x;
 17:   PetscViewer viewer;

 19:   PetscFunctionBeginUser;
 20:   PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
 21:   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));

 23:   PetscCall(PetscOptionsGetInt(NULL, NULL, "-n", &n, NULL));

 25:   /*
 26:      Create a vector, specifying only its global dimension.
 27:      When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
 28:      the vector format (currently parallel or sequential) is
 29:      determined at runtime.  Also, the parallel partitioning of
 30:      the vector is determined by PETSc at runtime.
 31:   */
 32:   PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
 33:   PetscCall(VecSetSizes(x, PETSC_DECIDE, n));
 34:   PetscCall(VecSetFromOptions(x));

 36:   /*
 37:      PETSc parallel vectors are partitioned by
 38:      contiguous chunks of rows across the processors.  Determine
 39:      which vector are locally owned.
 40:   */
 41:   PetscCall(VecGetOwnershipRange(x, &istart, &iend));

 43:   /* --------------------------------------------------------------------
 44:      Set the vector elements.
 45:       - Always specify global locations of vector entries.
 46:       - Each processor can insert into any location, even ones it does not own
 47:       - In this case each processor adds values to all the entries,
 48:          this is not practical, but is merely done as an example
 49:    */
 50:   for (i = 0; i < n; i++) {
 51:     v = (PetscReal)(rank * i);
 52:     PetscCall(VecSetValues(x, 1, &i, &v, ADD_VALUES));
 53:   }

 55:   /*
 56:      Assemble vector, using the 2-step process:
 57:        VecAssemblyBegin(), VecAssemblyEnd()
 58:      Computations can be done while messages are in transition
 59:      by placing code between these two statements.
 60:   */
 61:   PetscCall(VecAssemblyBegin(x));
 62:   PetscCall(VecAssemblyEnd(x));

 64:   /*
 65:      Open an X-window viewer.  Note that we specify the same communicator
 66:      for the viewer as we used for the distributed vector (PETSC_COMM_WORLD).
 67:        - Helpful runtime option:
 68:             -draw_pause <pause> : sets time (in seconds) that the
 69:                   program pauses after PetscDrawPause() has been called
 70:                   (0 is default, -1 implies until user input).

 72:   */
 73:   PetscCall(PetscViewerDrawOpen(PETSC_COMM_WORLD, NULL, NULL, 0, 0, 300, 300, &viewer));
 74:   PetscCall(PetscObjectSetName((PetscObject)viewer, "Line graph Plot"));
 75:   PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_DRAW_LG));
 76:   /*
 77:      View the vector
 78:   */
 79:   PetscCall(VecView(x, viewer));

 81:   /* --------------------------------------------------------------------
 82:        Access the vector values directly. Each processor has access only
 83:     to its portion of the vector. For default PETSc vectors VecGetArray()
 84:     does NOT involve a copy
 85:   */
 86:   PetscCall(VecGetLocalSize(x, &nlocal));
 87:   PetscCall(VecGetArray(x, &array));
 88:   for (i = 0; i < nlocal; i++) array[i] = rank + 1;
 89:   PetscCall(VecRestoreArray(x, &array));

 91:   /*
 92:      View the vector
 93:   */
 94:   PetscCall(VecView(x, viewer));

 96:   /*
 97:      Free work space.  All PETSc objects should be destroyed when they
 98:      are no longer needed.
 99:   */
100:   PetscCall(PetscViewerPopFormat(viewer));
101:   PetscCall(PetscViewerDestroy(&viewer));
102:   PetscCall(VecDestroy(&x));

104:   PetscCall(PetscFinalize());
105:   return 0;
106: }

108: /*TEST

110:      test:
111:        nsize: 2

113: TEST*/