Actual source code: pcis.h

petsc-3.3-p7 2013-05-11

#if !defined(__pcis_h)
#define __pcis_h

#include <petsc-private/pcimpl.h>
#include <../src/mat/impls/is/matis.h>
#include <petscksp.h>

/*
   Context (data structure) common to all Iterative Substructuring preconditioners.
*/

typedef struct {

   /* In naming the variables, we adopted the following convention: */
   /* * B - stands for interface nodes;                             */
   /* * I - stands for interior nodes;                              */
   /* * D - stands for Dirichlet (by extension, refers to interior  */
   /*       nodes) and                                              */
   /* * N - stands for Neumann (by extension, refers to all local   */
   /*       nodes, interior plus interface).                        */
   /* In some cases, I or D would apply equally well (e.g. vec1_D). */

  PetscInt   n;              /* number of nodes (interior+interface) in this subdomain */
  PetscInt   n_B;            /* number of interface nodes in this subdomain */
  IS         is_B_local,     /* local (sequential) index sets for interface (B) and interior (I) nodes */
             is_I_local,
             is_B_global,
             is_I_global;
  Mat        A_II, A_IB,     /* local (sequential) submatrices: interior (I) and interface (B) blocks of the local matrix */
             A_BI, A_BB;
  Vec        D;              /* diagonal scaling "matrix" (stored as a vector, since it's diagonal) */
  KSP        ksp_N,          /* linear solver contexts: ksp_N for the local Neumann problem (all local nodes), ksp_D for the local Dirichlet problem (interior nodes) */
             ksp_D;
  Vec        vec1_N,         /* local (sequential) work vectors */
             vec2_N,
             vec1_D,
             vec2_D,
             vec3_D,
             vec1_B,
             vec2_B,
             vec3_B,
             vec1_global;
  PetscScalar*    work_N;
  VecScatter      global_to_D;    /* scattering context from global to local interior nodes */
  VecScatter      N_to_B;         /* scattering context from all local nodes to local interface nodes */
  VecScatter      global_to_B;    /* scattering context from global to local interface nodes */
  PetscBool       pure_neumann;
  PetscScalar     scaling_factor;

  PetscBool  ISLocalToGlobalMappingGetInfoWasCalled;
  PetscInt   n_neigh;    /* number of neighbours this subdomain has (at present, this may or may not INCLUDE the subdomain itself). */
                         /* Once this is definitively decided, the code can be simplified and some if's eliminated.                 */
  PetscInt   *neigh;     /* list of neighbouring subdomains                                                          */
  PetscInt   *n_shared;  /* n_shared[j] is the number of nodes shared with subdomain neigh[j]                        */
  PetscInt   **shared;   /* shared[j][i] is the local index of the i-th node shared with subdomain neigh[j]          */
                  /* The numbering of the shared nodes must be consistent                                     */
                  /* on the two sides of each interface (a consistency check                                  */
                  /* is sketched right after this struct).                                                    */
                  /* For instance:                                                                            */
                  /*                                                                                          */
                  /* +-------+-------+                                                                        */
                  /* |   k   |   l   | subdomains k and l are neighbours                                      */
                  /* +-------+-------+                                                                        */
                  /*                                                                                          */
                  /* Let i and j be s.t. proc[k].neigh[i]==l and                                              */
                  /*                     proc[l].neigh[j]==k.                                                 */
                  /*                                                                                          */
                  /* We need:                                                                                 */
                  /* proc[k].loc_to_glob(proc[k].shared[i][m]) == proc[l].loc_to_glob(proc[l].shared[j][m])   */
                  /* for all 0 <= m < proc[k].n_shared[i] or, equivalently, for all 0 <= m < proc[l].n_shared[j] */
} PC_IS;
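
/*
   Illustration only (a sketch, not part of PETSc): a hypothetical helper showing how the
   consistency requirement stated above could be verified.  Each side maps its list of
   shared local indices to the global numbering and exchanges the result with the
   corresponding neighbour; the two global lists must then agree entry by entry.  The
   helper name, and the assumption that the caller supplies the subdomain's
   local-to-global mapping and a communicator whose ranks match the subdomain numbering,
   are made up for this sketch.
*/
static PetscErrorCode PCISCheckSharedNumbering_Sketch(PC_IS *pcis,ISLocalToGlobalMapping map,MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscInt       i,m,*mine,*theirs;

  PetscFunctionBegin;
  for (i=0; i<pcis->n_neigh; i++) {
    ierr = PetscMalloc(pcis->n_shared[i]*sizeof(PetscInt),&mine);CHKERRQ(ierr);
    ierr = PetscMalloc(pcis->n_shared[i]*sizeof(PetscInt),&theirs);CHKERRQ(ierr);
    /* global indices of the nodes shared with subdomain neigh[i], in this side's ordering */
    ierr = ISLocalToGlobalMappingApply(map,pcis->n_shared[i],pcis->shared[i],mine);CHKERRQ(ierr);
    /* both sides hold the same number of shared nodes, so a single exchange suffices
       (if neigh[i] happens to be this subdomain itself, the send-receive is just a local copy) */
    ierr = MPI_Sendrecv(mine,(PetscMPIInt)pcis->n_shared[i],MPIU_INT,(PetscMPIInt)pcis->neigh[i],0,
                        theirs,(PetscMPIInt)pcis->n_shared[i],MPIU_INT,(PetscMPIInt)pcis->neigh[i],0,
                        comm,MPI_STATUS_IGNORE);CHKERRQ(ierr);
    for (m=0; m<pcis->n_shared[i]; m++) {
      if (mine[m] != theirs[m]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Shared nodes are not numbered consistently across the interface");
    }
    ierr = PetscFree(mine);CHKERRQ(ierr);
    ierr = PetscFree(theirs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}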

extern PetscErrorCode  PCISSetUp(PC pc);
extern PetscErrorCode  PCISDestroy(PC pc);
extern PetscErrorCode  PCISCreate(PC pc);
extern PetscErrorCode  PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D);
extern PetscErrorCode  PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc);
extern PetscErrorCode  PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N);
extern PetscErrorCode  PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal);
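
/*
   Illustration only (a sketch, not part of PETSc): the blocks and solvers stored in PC_IS
   support the action of the interface Schur complement

      S x_B = (A_BB - A_BI * inv(A_II) * A_IB) x_B,

   which is essentially what PCISApplySchur() above provides.  A concrete preconditioner
   built on this context is expected to have called PCISCreate() and PCISSetUp() first
   (and PCISDestroy() when done).  The helper name below is hypothetical; pc->data is
   assumed to point to a PC_IS, ksp_D is assumed to solve with A_II, and vec1_D, vec2_D,
   vec1_B are used as scratch space.
*/
static PetscErrorCode PCISApplySchur_Sketch(PC pc,Vec x_B,Vec y_B)
{
  PC_IS          *pcis = (PC_IS*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMult(pcis->A_BB,x_B,y_B);CHKERRQ(ierr);                     /* y_B    = A_BB x_B         */
  ierr = MatMult(pcis->A_IB,x_B,pcis->vec1_D);CHKERRQ(ierr);            /* vec1_D = A_IB x_B         */
  ierr = KSPSolve(pcis->ksp_D,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr); /* vec2_D = inv(A_II) vec1_D */
  ierr = MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B);CHKERRQ(ierr);   /* vec1_B = A_BI vec2_D      */
  ierr = VecAXPY(y_B,-1.0,pcis->vec1_B);CHKERRQ(ierr);                  /* y_B   -= vec1_B           */
  PetscFunctionReturn(0);
}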

#endif /* __pcis_h */