Actual source code: mpibaij.h


#ifndef __MPIBAIJ_H
#define __MPIBAIJ_H
#include <../src/mat/impls/baij/seq/baij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscctable.h>

#if defined(PETSC_USE_CTABLE)
  #define PETSCTABLE PetscTable
#else
  #define PETSCTABLE PetscInt *
#endif

#define MPIBAIJHEADER \
  PetscInt   *rangebs;                            /* rmap->range/bs */ \
  PetscInt    rstartbs, rendbs, cstartbs, cendbs; /* map values / bs  */ \
  Mat         A, B;                               /* local submatrices: A (diag part), B (off-diag part) */ \
  PetscMPIInt size;                               /* size of communicator */ \
  PetscMPIInt rank;                               /* rank of proc in communicator */ \
  PetscInt    bs2;                                /* block size, bs2 = bs*bs */ \
  PetscInt    Mbs, Nbs;                           /* number block rows/cols in matrix; M/bs, N/bs */ \
  PetscInt    mbs, nbs;                           /* number block rows/cols on processor; m/bs, n/bs */ \
\
  /* The following variables are used for matrix assembly */ \
\
  PetscBool    donotstash;              /* if 1, off processor entries dropped */ \
  PetscBool    subset_off_proc_entries; /* PETSC_TRUE if assembly will always communicate a subset of the entries communicated the first time */ \
  MPI_Request *send_waits;              /* array of send requests */ \
  MPI_Request *recv_waits;              /* array of receive requests */ \
  PetscInt     nsends, nrecvs;          /* numbers of sends and receives */ \
  MatScalar   *svalues, *rvalues;       /* sending and receiving data */ \
  PetscInt     rmax;                    /* maximum message length */ \
  PETSCTABLE   colmap;                  /* local col number of off-diag col */ \
\
  PetscInt *garray; /* work array */ \
\
  /* The following variable is used by blocked matrix assembly */ \
  MatScalar *barray; /* Block array of size bs2 */ \
\
  /* The following variables are used for matrix-vector products */ \
\
  Vec        lvec;        /* local vector */ \
  VecScatter Mvctx;       /* scatter context for vector */ \
  PetscBool  roworiented; /* if true, row-oriented input, default true */ \
\
  /* The following variables are for MatGetRow() */ \
\
  PetscInt    *rowindices;   /* column indices for row */ \
  PetscScalar *rowvalues;    /* nonzero values in row */ \
  PetscBool    getrowactive; /* indicates MatGetRow(), not restored */ \
\
  /* Some variables to make MatSetValues and others more efficient */ \
  PetscInt    rstart_bs, rend_bs; \
  PetscInt    cstart_bs, cend_bs; \
  PetscInt   *ht; /* Hash table to speed up matrix assembly */ \
  MatScalar **hd; /* Hash table data */ \
  PetscInt    ht_size; \
  PetscInt    ht_total_ct, ht_insert_ct; /* Hash table statistics */ \
  PetscBool   ht_flag;                   /* Flag to indicate if hash tables are used */ \
  double      ht_fact;                   /* Factor to determine the HT size */ \
\
  PetscInt   setvalueslen;  /* only used for single precision computations */ \
  MatScalar *setvaluescopy; /* area where double precision values in MatSetValuesXXX() are copied */ \
                            /* before calling MatSetValuesXXX_MPIBAIJ_MatScalar() */ \
  PetscBool ijonly          /* used in MatCreateSubMatrices_MPIBAIJ_local() for getting ij structure only */

typedef struct {
  MPIBAIJHEADER;
} Mat_MPIBAIJ;
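
/* Notes on the fields above: A and B are sequential BAIJ matrices holding this
   process's block rows; A contains the columns the process owns (the diagonal
   block) and B the remaining, off-process columns.  B is stored with compressed
   column indices: local block column i of B corresponds to global block column
   garray[i], and colmap is the inverse lookup used during assembly (entries are
   shifted by one, so 0 means the column does not occur in B); colmap is built
   lazily by MatCreateColmap_MPIBAIJ_Private().  lvec and Mvctx gather the
   off-process vector entries that B needs during MatMult(). */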

PETSC_INTERN PetscErrorCode MatView_MPIBAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIBAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatView_MPIBAIJ_Binary(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIBAIJ_Binary(Mat, PetscViewer);

PETSC_INTERN PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIBAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIBAIJ_local(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIBAIJ_Private(Mat, IS, IS, PetscInt, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIBAIJ(Mat, MPI_Comm, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat, PetscInt, IS *);
PETSC_INTERN PetscErrorCode MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz);
PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat, const PetscInt *, Mat, const PetscInt *, PetscInt *);

PETSC_INTERN PetscErrorCode MatConjugate_SeqBAIJ(Mat);
#endif
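
The routines declared above back the public MATMPIBAIJ interface (MatMPIBAIJSetPreallocation(), MatSetValuesBlocked(), and friends). The following is a minimal sketch of how that interface is typically driven from user code; it is illustrative only and not part of mpibaij.h. It assumes a PETSc version that provides the PetscCall() macro (3.18 or later), and the matrix sizes, block values, and the name usage_sketch.c are invented for the example.

/* usage_sketch.c: build a 16x16 MATMPIBAIJ matrix with block size 2 and one
   2x2 block per block row, then assemble it.  Illustrative values only. */
#include <petscmat.h>

int main(int argc, char **argv)
{
  Mat         A;
  PetscInt    bs = 2, Mbs = 8; /* 8 block rows/cols -> 16x16 point matrix */
  PetscInt    rstart, rend;
  PetscScalar vals[] = {4.0, -1.0, -1.0, 4.0}; /* one bs*bs block, row-oriented */

  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, bs * Mbs, bs * Mbs));
  PetscCall(MatSetType(A, MATMPIBAIJ));
  /* Preallocate: at most 1 block per block row in the diagonal part (A above)
     and 0 blocks in the off-diagonal part (B above) */
  PetscCall(MatMPIBAIJSetPreallocation(A, bs, 1, NULL, 0, NULL));

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend)); /* point rows, multiples of bs */
  for (PetscInt i = rstart / bs; i < rend / bs; i++) {
    /* MatSetValuesBlocked() takes *block* row/column indices */
    PetscCall(MatSetValuesBlocked(A, 1, &i, 1, &i, vals, INSERT_VALUES));
  }
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));

  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}

Because each process inserts blocks only into rows and columns it owns, the off-diagonal part B and the assembly-time communication buffers (svalues/rvalues) stay empty here; setting entries destined for another process is what exercises that machinery.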