Actual source code: pcis.h


#if !defined(__pcis_h)
#define __pcis_h

#include "private/pcimpl.h"
#include "src/mat/impls/is/matis.h"
#include "petscksp.h"

/*
   Context (data structure) common to all Iterative Substructuring preconditioners.
*/

typedef struct {

  /* In naming the variables, we adopted the following convention: */
  /* * B - stands for interface nodes;                             */
  /* * I - stands for interior nodes;                              */
  /* * D - stands for Dirichlet (by extension, refers to interior  */
  /*       nodes); and                                             */
  /* * N - stands for Neumann (by extension, refers to all local   */
  /*       nodes, interior plus interface).                        */
  /* In some cases, I or D would apply equally well (e.g. vec1_D). */

  PetscInt   n;              /* number of nodes (interior+interface) in this subdomain */
  PetscInt   n_B;            /* number of interface nodes in this subdomain */
  IS         is_B_local,     /* local (sequential) index sets for interface (B) and interior (I) nodes */
             is_I_local,
             is_B_global,
             is_I_global;
  Mat        A_II, A_IB,     /* local (sequential) submatrices */
             A_BI, A_BB;
  Vec        D;              /* diagonal scaling "matrix" (stored as a vector, since it's diagonal) */
  KSP        ksp_N,          /* linear solver contexts */
             ksp_D;
  Vec        vec1_N,         /* local (sequential) work vectors */
             vec2_N,
             vec1_D,
             vec2_D,
             vec3_D,
             vec1_B,
             vec2_B,
             vec3_B,
             vec1_global;
  PetscScalar *work_N;
  VecScatter  global_to_D;   /* scattering context from global to local interior nodes */
  VecScatter  N_to_B;        /* scattering context from all local nodes to local interface nodes */
  VecScatter  global_to_B;   /* scattering context from global to local interface nodes */
  PetscTruth  pure_neumann;

  PetscTruth ISLocalToGlobalMappingGetInfoWasCalled;
  PetscInt   n_neigh;    /* number of neighbours this subdomain has (at present it is not yet settled    */
                         /* whether this count includes the subdomain itself; once that is definitively  */
                         /* decided, the code can be simplified and some if's eliminated).                */
  PetscInt   *neigh;     /* list of neighbouring subdomains */
  PetscInt   *n_shared;  /* n_shared[j] is the number of nodes shared with subdomain neigh[j] */
  PetscInt   **shared;   /* shared[j][i] is the local index of the i-th node shared with subdomain neigh[j] */
                  /* The numbering of the shared nodes must be consistent between the two sides
                     of each shared edge (see the illustrative sketch after this structure).
                     For instance:

                     +-------+-------+
                     |   k   |   l   |   subdomains k and l are neighbours
                     +-------+-------+

                     Let i and j be such that proc[k].neigh[i]==l and
                                              proc[l].neigh[j]==k.

                     We need:
                     proc[k].loc_to_glob(proc[k].shared[i][m]) == proc[l].loc_to_glob(proc[l].shared[j][m])
                     for all 0 <= m < proc[k].n_shared[i], or equivalently, for all 0 <= m < proc[l].n_shared[j]. */
} PC_IS;
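
/*
   Illustrative sketch (NOT part of the PETSc sources): one way in which the
   ordering requirement stated above could be checked at run time.  Each process
   maps its shared local indices to global numbering (through the subdomain's
   local-to-global mapping, here passed in as "mapping") and exchanges the result
   with the corresponding neighbour; consistent ordering means the two arrays
   match entry by entry.  The helper name PCISCheckSharedOrdering and the direct
   use of MPI_Sendrecv are assumptions made for this example only.
*/
static PetscErrorCode PCISCheckSharedOrdering(PC pc, ISLocalToGlobalMapping mapping)
{
  PC_IS          *pcis = (PC_IS*)pc->data;
  MPI_Comm       comm  = ((PetscObject)pc)->comm;
  PetscErrorCode ierr;
  PetscInt       i, m, *mine, *theirs;

  PetscFunctionBegin;
  for (i=0; i<pcis->n_neigh; i++) {
    PetscInt ns = pcis->n_shared[i];
    ierr = PetscMalloc2(ns,PetscInt,&mine,ns,PetscInt,&theirs);CHKERRQ(ierr);
    /* Global numbers of the nodes shared with subdomain neigh[i], in the order given by shared[i] */
    ierr = ISLocalToGlobalMappingApply(mapping,ns,pcis->shared[i],mine);CHKERRQ(ierr);
    /* Exchange the global numbers with neighbour neigh[i] and compare */
    ierr = MPI_Sendrecv(mine,ns,MPIU_INT,pcis->neigh[i],0,theirs,ns,MPIU_INT,pcis->neigh[i],0,comm,MPI_STATUS_IGNORE);CHKERRQ(ierr);
    for (m=0; m<ns; m++) {
      if (mine[m] != theirs[m]) SETERRQ1(PETSC_ERR_PLIB,"Shared node %D is ordered inconsistently",m);
    }
    ierr = PetscFree2(mine,theirs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}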

EXTERN PetscErrorCode  PCISSetUp(PC pc);
EXTERN PetscErrorCode  PCISDestroy(PC pc);
EXTERN PetscErrorCode  PCISCreate(PC pc);
EXTERN PetscErrorCode  PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D);
EXTERN PetscErrorCode  PCISScatterArrayNToVecB(PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode, PC pc);
EXTERN PetscErrorCode  PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N);
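
/*
   Illustrative sketch (NOT part of the PETSc sources) of how a PCIS-based
   preconditioner such as PCNN typically drives the routines declared above,
   as it would appear in a preconditioner implementation file.  The wrapper
   names MySetUp_IS and MyApplySchur_IS are hypothetical; the real call sites
   are in pcis.c and nn.c.
*/
static PetscErrorCode MySetUp_IS(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* PCISCreate() is assumed to have been called when the PC was created;
     PCISSetUp() then builds the index sets, submatrices, scatters and
     work vectors stored in the PC_IS context above                       */
  ierr = PCISSetUp(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MyApplySchur_IS(PC pc, Vec r)
{
  PC_IS          *pcis = (PC_IS*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Apply the local Schur complement to the interface vector r, passing the
     context's own vectors for the remaining (work and output) arguments    */
  ierr = PCISApplySchur(pc,r,pcis->vec1_B,pcis->vec2_B,pcis->vec1_D,pcis->vec2_D);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}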

#endif /* __pcis_h */