
#if !defined(__MPIAIJ_H)
#define __MPIAIJ_H

#include <../src/mat/impls/aij/seq/aij.h>

typedef struct { /* used by MatCreateMPIAIJSumSeqAIJ for reusing the merged matrix */
  PetscLayout rowmap;
  PetscInt    **buf_ri,**buf_rj;
  PetscMPIInt *len_s,*len_r,*id_r;     /* arrays of length comm->size; message lengths and ranks for sending/receiving matrix values */
  PetscMPIInt nsend,nrecv;
  PetscInt    *bi,*bj;                 /* i and j arrays of the local portion of the MPI matrix product C - rename to ci, cj! */
  PetscInt    *owners_co,*coi,*coj;    /* i and j arrays of (p->B)^T*A*P - used in the communication */
  PetscErrorCode (*destroy)(Mat);
  PetscErrorCode (*duplicate)(Mat,MatDuplicateOption,Mat*);
} Mat_Merge_SeqsToMPI;

typedef struct { /* used by MatPtAP_MPIAIJ_MPIAIJ() and MatMatMult_MPIAIJ_MPIAIJ() */
  PetscInt    *startsj_s,*startsj_r;    /* used by MatGetBrowsOfAoCols_MPIAIJ */
  PetscScalar *bufa;                    /* used by MatGetBrowsOfAoCols_MPIAIJ */
  Mat         P_loc,P_oth;     /* partial B_seq -- intended to replace B_seq */
  PetscInt    *api,*apj;       /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscScalar *apv;
  MatReuse    reuse;           /* flag to skip MatGetBrowsOfAoCols_MPIAIJ() and MatMPIAIJGetLocalMat() in 1st call of MatPtAPNumeric_MPIAIJ_MPIAIJ() */
  PetscScalar *apa;            /* tmp array to store a row of A*P, used in MatMatMult() */
  Mat         A_loc;           /* used by MatTransposeMatMult(), contains api and apj */
  Mat         Pt;              /* used by MatTransposeMatMult(), Pt = P^T */
  PetscBool   scalable;        /* flag that selects the scalable or non-scalable implementation */
  Mat         Rd,Ro,AP_loc,C_loc,C_oth;
  PetscInt    algType;         /* implementation algorithm */

  Mat_Merge_SeqsToMPI *merge;
  PetscErrorCode (*destroy)(Mat);
  PetscErrorCode (*duplicate)(Mat,MatDuplicateOption,Mat*);
  PetscErrorCode (*view)(Mat,PetscViewer);
} Mat_PtAPMPI;
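
/*
   The reuse flag above mirrors the MatReuse argument that callers pass to the public
   interfaces MatPtAP() and MatMatMult(): the first call (MAT_INITIAL_MATRIX) builds the
   symbolic data kept in this struct, and later calls (MAT_REUSE_MATRIX) keep it and only
   redo the numeric phase.  A minimal caller-side sketch, assuming A and P are assembled
   MPIAIJ matrices (the names A, P, C and the fill value 2.0 are illustrative):

     Mat            C;
     PetscErrorCode ierr;
     ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);  // symbolic + numeric, allocates the ptap data
     // ... update numerical values of A, keeping the same nonzero pattern ...
     ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);    // numeric only, reuses P_loc/P_oth and api/apj
*/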

typedef struct {
  Mat A,B;                              /* local submatrices: A (diag part),
                                           B (off-diag part) */
  PetscMPIInt size;                     /* size of communicator */
  PetscMPIInt rank;                     /* rank of proc in communicator */

  /* The following variables are used for matrix assembly */
  PetscBool   donotstash;               /* PETSC_TRUE if off-processor entries are dropped */
  MPI_Request *send_waits;              /* array of send requests */
  MPI_Request *recv_waits;              /* array of receive requests */
  PetscInt    nsends,nrecvs;            /* numbers of sends and receives */
  PetscScalar *svalues,*rvalues;        /* sending and receiving data */
  PetscInt    rmax;                     /* maximum message length */
#if defined(PETSC_USE_CTABLE)
  PetscTable colmap;
#else
  PetscInt *colmap;                     /* local column numbers of off-diagonal columns */
#endif
  PetscInt *garray;                     /* global indices of all off-processor columns */

  /* The following variables are used for matrix-vector products */
  Vec        lvec;                      /* local vector */
  Vec        diag;
  VecScatter Mvctx,Mvctx_mpi1;          /* scatter contexts for vector operations */
  PetscBool  Mvctx_mpi1_flg;            /* if true, an additional Mvctx_mpi1 is requested for mat-mat ops, default false */
  PetscBool  roworiented;               /* if true, row-oriented input, default true */

  /* The following variables are for MatGetRow() */
  PetscInt    *rowindices;              /* column indices for row */
  PetscScalar *rowvalues;               /* nonzero values in row */
  PetscBool   getrowactive;             /* indicates a MatGetRow() is active and has not been restored */

  /* Used by MatDistribute_MPIAIJ() to allow reuse of previous matrix allocation and nonzero pattern */
  PetscInt *ld;                         /* number of entries per row left of the diagonal block */

  /* Used by MatMatMult() and MatPtAP() */
  Mat_PtAPMPI *ptap;

  /* used by MatMatMatMult() */
  Mat_MatMatMatMult *matmatmatmult;

  /* Used by MPICUSP and MPICUSPARSE classes */
  void *spptr;

} Mat_MPIAIJ;
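
/*
   The members above split the parallel matrix into the diagonal block A (columns owned by
   this process) and the off-diagonal block B (remaining columns, renumbered through garray),
   with lvec/Mvctx supplying the ghost values needed by products.  A rough sketch, not the
   literal implementation, of how a matrix-vector product can use these fields, assuming
   a = (Mat_MPIAIJ*)mat->data for an assembled matrix and xx,yy are conforming parallel vectors:

     ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = MatMult(a->A,xx,yy);CHKERRQ(ierr);                 // diagonal block times owned part of xx
     ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = MatMultAdd(a->B,a->lvec,yy,yy);CHKERRQ(ierr);      // off-diagonal block times gathered ghost values
*/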

PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat,MatAssemblyType);

PETSC_INTERN PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDisAssemble_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDuplicate_MPIAIJ(Mat,MatDuplicateOption,Mat*);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ(Mat,PetscInt,IS[],PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ_Scalable(Mat,PetscInt,IS[],PetscInt);
PETSC_INTERN PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat,ISColoring,MatFDColoring);
PETSC_INTERN PetscErrorCode MatFDColoringSetUp_MPIXAIJ(Mat,ISColoring,MatFDColoring);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIAIJ(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat*[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatricesMPI_MPIAIJ(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat*[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_All(Mat,MatCreateSubMatrixOption,MatReuse,Mat*[]);


PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ(Mat,IS,IS,MatReuse,Mat*);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_nonscalable(Mat,IS,IS,PetscInt,MatReuse,Mat*);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowDist(Mat,IS,IS,IS,MatReuse,Mat*);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowColDist(Mat,IS,IS,MatReuse,Mat*);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,MatReuse,Mat*);

PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ(Mat,PetscViewer);
PETSC_INTERN PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat);
PETSC_INTERN PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatMatMult_MPIAIJ_MPIAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat,Mat,Mat);

PETSC_INTERN PetscErrorCode MatMatMatMult_MPIAIJ_MPIAIJ_MPIAIJ(Mat,Mat,Mat,MatReuse,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ(Mat,Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ(Mat,Mat,Mat,Mat);

PETSC_INTERN PetscErrorCode MatPtAP_MPIAIJ_MPIAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat,Mat,Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat,Mat,Mat);


#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat,Mat,PetscReal,Mat*);
#endif

PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_PtAP(Mat);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatRARt_MPIAIJ_MPIAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);

PETSC_INTERN PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(Mat);
PETSC_INTERN PetscErrorCode PetscContainerDestroy_Mat_MatMatMultMPI(void*);
PETSC_INTERN PetscErrorCode MatSetOption_MPIAIJ(Mat,MatOption,PetscBool);

PETSC_INTERN PetscErrorCode MatTransposeMatMult_MPIAIJ_MPIAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMult_MPIAIJ_MPIDense(Mat,Mat,MatReuse,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIDense(Mat,Mat,PetscReal,Mat*);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIDense(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat,Mat*);

PETSC_INTERN PetscErrorCode MatSetFromOptions_MPIAIJ(PetscOptionItems*,Mat);
PETSC_INTERN PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);

#if !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128) && !defined(PETSC_USE_REAL___FP16)
PETSC_INTERN PetscErrorCode MatLUFactorSymbolic_MPIAIJ_TFS(Mat,IS,IS,const MatFactorInfo*,Mat*);
#endif
PETSC_INTERN PetscErrorCode MatSolve_MPIAIJ(Mat,Vec,Vec);
PETSC_INTERN PetscErrorCode MatILUFactor_MPIAIJ(Mat,IS,IS,const MatFactorInfo*);

PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIX_private(PetscInt,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*,PetscInt*);

extern PetscErrorCode MatGetDiagonalBlock_MPIAIJ(Mat,Mat*);
extern PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat,Vec);

PETSC_INTERN PetscErrorCode MatGetSeqMats_MPIAIJ(Mat,Mat*,Mat*);
PETSC_INTERN PetscErrorCode MatSetSeqMats_MPIAIJ(Mat,IS,IS,IS,MatStructure,Mat,Mat);

/* compute apa = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth using sparse axpy */
#define AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa) \
{\
  PetscInt    _anz,_pnz,_j,_k,*_ai,*_aj,_row,*_pi,*_pj,_nextp,*_apJ;   \
  PetscScalar *_aa,_valtmp,*_pa;                                       \
  _apJ = apj + api[i];                                                 \
  /* diagonal portion of A */                                          \
  _ai  = ad->i;                                                        \
  _anz = _ai[i+1] - _ai[i];                                            \
  _aj  = ad->j + _ai[i];                                               \
  _aa  = ad->a + _ai[i];                                               \
  for (_j=0; _j<_anz; _j++) {                                          \
    _row = _aj[_j];                                                    \
    _pi  = p_loc->i;                                                   \
    _pnz = _pi[_row+1] - _pi[_row];                                    \
    _pj  = p_loc->j + _pi[_row];                                       \
    _pa  = p_loc->a + _pi[_row];                                       \
    /* perform sparse axpy */                                          \
    _valtmp = _aa[_j];                                                 \
    _nextp  = 0;                                                       \
    for (_k=0; _nextp<_pnz; _k++) {                                    \
      if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
        apa[_k] += _valtmp*_pa[_nextp++];                              \
      }                                                                \
    }                                                                  \
    (void)PetscLogFlops(2.0*_pnz);                                     \
  }                                                                    \
  /* off-diagonal portion of A */                                      \
  _ai  = ao->i;                                                        \
  _anz = _ai[i+1] - _ai[i];                                            \
  _aj  = ao->j + _ai[i];                                               \
  _aa  = ao->a + _ai[i];                                               \
  for (_j=0; _j<_anz; _j++) {                                          \
    _row = _aj[_j];                                                    \
    _pi  = p_oth->i;                                                   \
    _pnz = _pi[_row+1] - _pi[_row];                                    \
    _pj  = p_oth->j + _pi[_row];                                       \
    _pa  = p_oth->a + _pi[_row];                                       \
    /* perform sparse axpy */                                          \
    _valtmp = _aa[_j];                                                 \
    _nextp  = 0;                                                       \
    for (_k=0; _nextp<_pnz; _k++) {                                    \
      if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
        apa[_k] += _valtmp*_pa[_nextp++];                              \
      }                                                                \
    }                                                                  \
    (void)PetscLogFlops(2.0*_pnz);                                     \
  }                                                                    \
}
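
/*
   A hedged usage sketch for AProw_scalable(): the caller supplies the symbolic pattern
   api/apj of A*P (built in a symbolic phase) and a value buffer aligned with row i of that
   pattern.  The names below (a, ptap, apv) are illustrative; ad/ao and p_loc/p_oth are the
   Mat_SeqAIJ data of the diagonal/off-diagonal parts of A and of P_loc/P_oth:

     Mat_MPIAIJ     *a     = (Mat_MPIAIJ*)A->data;
     Mat_PtAPMPI    *ptap  = a->ptap;
     Mat_SeqAIJ     *ad    = (Mat_SeqAIJ*)a->A->data;    // diagonal part of A
     Mat_SeqAIJ     *ao    = (Mat_SeqAIJ*)a->B->data;    // off-diagonal part of A
     Mat_SeqAIJ     *p_loc = (Mat_SeqAIJ*)ptap->P_loc->data;
     Mat_SeqAIJ     *p_oth = (Mat_SeqAIJ*)ptap->P_oth->data;
     PetscInt       i,am   = A->rmap->n;                 // locally owned rows of A
     PetscErrorCode ierr;
     for (i=0; i<am; i++) {
       PetscScalar *apa = ptap->apv + ptap->api[i];      // values aligned with apj + api[i]
       ierr = PetscMemzero(apa,(ptap->api[i+1]-ptap->api[i])*sizeof(PetscScalar));CHKERRQ(ierr);
       AProw_scalable(i,ad,ao,p_loc,p_oth,ptap->api,ptap->apj,apa);
       // row i of A*P is now available as (apj+api[i], apa)
     }
*/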

/* compute apa = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth using dense axpy; apa is a dense accumulator indexed by the columns of P */
#define AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa) \
{\
  PetscInt    _anz,_pnz,_j,_k,*_ai,*_aj,_row,*_pi,*_pj; \
  PetscScalar *_aa,_valtmp,*_pa;                        \
  /* diagonal portion of A */                           \
  _ai  = ad->i;                                         \
  _anz = _ai[i+1] - _ai[i];                             \
  _aj  = ad->j + _ai[i];                                \
  _aa  = ad->a + _ai[i];                                \
  for (_j=0; _j<_anz; _j++) {                           \
    _row = _aj[_j];                                     \
    _pi  = p_loc->i;                                    \
    _pnz = _pi[_row+1] - _pi[_row];                     \
    _pj  = p_loc->j + _pi[_row];                        \
    _pa  = p_loc->a + _pi[_row];                        \
    /* perform dense axpy */                            \
    _valtmp = _aa[_j];                                  \
    for (_k=0; _k<_pnz; _k++) {                         \
      apa[_pj[_k]] += _valtmp*_pa[_k];                  \
    }                                                   \
    (void)PetscLogFlops(2.0*_pnz);                      \
  }                                                     \
  /* off-diagonal portion of A */                       \
  _ai  = ao->i;                                         \
  _anz = _ai[i+1] - _ai[i];                             \
  _aj  = ao->j + _ai[i];                                \
  _aa  = ao->a + _ai[i];                                \
  for (_j=0; _j<_anz; _j++) {                           \
    _row = _aj[_j];                                     \
    _pi  = p_oth->i;                                    \
    _pnz = _pi[_row+1] - _pi[_row];                     \
    _pj  = p_oth->j + _pi[_row];                        \
    _pa  = p_oth->a + _pi[_row];                        \
    /* perform dense axpy */                            \
    _valtmp = _aa[_j];                                  \
    for (_k=0; _k<_pnz; _k++) {                         \
      apa[_pj[_k]] += _valtmp*_pa[_k];                  \
    }                                                   \
    (void)PetscLogFlops(2.0*_pnz);                      \
  }                                                     \
}
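
/*
   In contrast to AProw_scalable(), the macro above scatters into a dense accumulator indexed
   directly by the column numbers stored in P_loc/P_oth (in the nonscalable MatMatMult path
   this covers the whole column space of P).  A hedged sketch of the per-row pattern, where
   apj/apnz denote the column pattern of row i of A*P from the symbolic phase and rowvals is
   an illustrative buffer:

     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);      // apa must be zero on entry
     for (k=0; k<apnz; k++) {
       rowvals[k]  = apa[apj[k]];                     // gather the computed row
       apa[apj[k]] = 0.0;                             // re-zero only the touched entries for the next row
     }
*/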

#endif