Actual source code: pcisimpl.h
#ifndef PETSC_PCISIMPL_H
#define PETSC_PCISIMPL_H

#include <petsc/private/pcimpl.h>
#include <petsc/private/matisimpl.h>
#include <petscksp.h>

/*
   Context (data structure) common to all Iterative Substructuring preconditioners.
*/
typedef struct {
  /* In naming the variables, we adopted the following convention: */
  /* * B - stands for interface nodes; */
  /* * I - stands for interior nodes; */
  /* * D - stands for Dirichlet (by extension, refers to interior nodes) and */
  /* * N - stands for Neumann (by extension, refers to all local nodes, interior plus interface). */
  /* In some cases, I or D would apply equally well (e.g. vec1_D). */

  PetscInt n;   /* number of nodes (interior+interface) in this subdomain */
  PetscInt n_B; /* number of interface nodes in this subdomain */
  IS       is_B_local, /* local (sequential) index sets for interface (B) and interior (I) nodes */
           is_I_local, is_B_global, is_I_global;

  Mat A_II, A_IB, /* local (sequential) submatrices */
      A_BI, A_BB;
  Mat pA_II;
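  /* With respect to the I/B splitting above, the local (Neumann) matrix is handled as the
     2x2 block matrix [A_II A_IB; A_BI A_BB]; PCISApplySchur() below applies the interface
     Schur complement S = A_BB - A_BI * inv(A_II) * A_IB built from these blocks. */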
  Vec D;     /* diagonal scaling "matrix" (stored as a vector, since it's diagonal) */
  KSP ksp_N, /* linear solver contexts */
      ksp_D;
  Vec vec1_N, /* local (sequential) work vectors */
      vec2_N, vec1_D, vec2_D, vec3_D, vec4_D, vec1_B, vec2_B, vec3_B, vec1_global;

  PetscScalar *work_N;
  VecScatter   N_to_D;      /* scattering context from all local nodes to local interior nodes */
  VecScatter   global_to_D; /* scattering context from global to local interior nodes */
  VecScatter   N_to_B;      /* scattering context from all local nodes to local interface nodes */
  VecScatter   global_to_B; /* scattering context from global to local interface nodes */
  PetscBool    pure_neumann;
  PetscScalar  scaling_factor;
  PetscBool    use_stiffness_scaling;
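  /* Roughly: scaling_factor is the constant value used to fill D when stiffness scaling is
     not requested; with use_stiffness_scaling, D is instead derived from the diagonal of the
     local (Neumann) operator. */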
  ISLocalToGlobalMapping mapping;
  PetscInt               n_neigh;  /* number of neighbours this subdomain has (INCLUDING the subdomain itself). */
  PetscInt              *neigh;    /* list of neighbouring subdomains */
  PetscInt              *n_shared; /* n_shared[j] is the number of nodes shared with subdomain neigh[j] */
  PetscInt             **shared;   /* shared[j][i] is the local index of the i-th node shared with subdomain neigh[j] */
  /* The numbering of the nodes shared with each neighbour must be consistent
     between the two sides. For instance:

            +-------+-------+
            |   k   |   l   |   subdomains k and l are neighbours
            +-------+-------+

     Let i and j be such that proc[k].neigh[i] == l and proc[l].neigh[j] == k.
     We need

       proc[k].loc_to_glob(proc[k].shared[i][m]) == proc[l].loc_to_glob(proc[l].shared[j][m])

     for all 0 <= m < proc[k].n_shared[i] (equivalently, for all 0 <= m < proc[l].n_shared[j]). */
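  /* Concrete (hypothetical) example: if subdomains k and l share the nodes with global
     indices {40, 41, 42}, then proc[k].shared[i] and proc[l].shared[j] must list the local
     indices of those three nodes in the same order, i.e. position m = 0, 1, 2 corresponds to
     global 40, 41, 42 on both sides. The n_neigh/neigh/n_shared/shared arrays hold the kind
     of connectivity information returned by ISLocalToGlobalMappingGetInfo() applied to
     'mapping' above. */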
  ISLocalToGlobalMapping BtoNmap;
  PetscBool              reusesubmatrices;
} PC_IS;

PETSC_EXTERN PetscErrorCode PCISSetUp(PC, PetscBool, PetscBool);
PETSC_EXTERN PetscErrorCode PCISDestroy(PC);
PETSC_EXTERN PetscErrorCode PCISCreate(PC);
PETSC_EXTERN PetscErrorCode PCISApplySchur(PC, Vec, Vec, Vec, Vec, Vec);
PETSC_EXTERN PetscErrorCode PCISScatterArrayNToVecB(PetscScalar *, Vec, InsertMode, ScatterMode, PC);
PETSC_EXTERN PetscErrorCode PCISApplyInvSchur(PC, Vec, Vec, Vec, Vec);

#endif // PETSC_PCISIMPL_H
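For context, below is a minimal sketch (not part of pcisimpl.h) of how a preconditioner implementation whose private data begins with a PC_IS might drive these routines. The names PCCreate_MyIS, PCSetUp_MyIS, PCApply_MyIS and PCDestroy_MyIS are hypothetical, and the apply step is deliberately simplified to a single interface correction; a real iterative-substructuring preconditioner such as PCNN layers scaling and balancing work on top of calls like these.

#include <petsc/private/pcisimpl.h>

/* Hypothetical setup routine: asks PCISSetUp() to build the I/B index sets, the local
   submatrices A_II, A_IB, A_BI, A_BB, the scatter contexts, and the local solvers
   ksp_D and ksp_N stored in the PC_IS context. */
static PetscErrorCode PCSetUp_MyIS(PC pc)
{
  PetscFunctionBegin;
  PetscCall(PCISSetUp(pc, PETSC_TRUE, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hypothetical (and intentionally simplified) apply routine: restrict the global residual
   to the local interface, apply the inverse of the local Schur complement using the work
   vectors held in PC_IS, and sum the local results back into the global vector. */
static PetscErrorCode PCApply_MyIS(PC pc, Vec r, Vec z)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(pcis->global_to_B, r, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, r, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(PCISApplyInvSchur(pc, pcis->vec1_B, pcis->vec2_B, pcis->vec1_N, pcis->vec2_N));
  PetscCall(VecSet(z, 0.0));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec2_B, z, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec2_B, z, ADD_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hypothetical destroy routine: releases what PCISCreate()/PCISSetUp() allocated, then the
   context itself. */
static PetscErrorCode PCDestroy_MyIS(PC pc)
{
  PetscFunctionBegin;
  PetscCall(PCISDestroy(pc));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hypothetical registration point: allocate a context whose first (here, only) member is a
   PC_IS, initialize it with PCISCreate(), and install the methods above. */
PETSC_EXTERN PetscErrorCode PCCreate_MyIS(PC pc)
{
  PC_IS *pcis;

  PetscFunctionBegin;
  PetscCall(PetscNew(&pcis));
  pc->data = (void *)pcis;
  PetscCall(PCISCreate(pc));
  pc->ops->setup   = PCSetUp_MyIS;
  pc->ops->apply   = PCApply_MyIS;
  pc->ops->destroy = PCDestroy_MyIS;
  PetscFunctionReturn(PETSC_SUCCESS);
}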