#define PETSCMAT_DLL

/*
  Defines matrix-matrix product routines for pairs of MPIAIJ matrices
          C = A * B
*/
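
/*
   A minimal calling sketch (not code from this file; it assumes A and B are
   already assembled MPIAIJ matrices whose column/row layouts are compatible):

     Mat C;
     ierr = MatMatMult(A,B,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);
     ... use C ...
     ierr = MatDestroy(C);CHKERRQ(ierr);

   For this matrix-type combination MatMatMult() is expected to dispatch to
   MatMatMult_MPIAIJ_MPIAIJ() below.
*/
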
#include "src/mat/impls/aij/seq/aij.h" /*I "petscmat.h" I*/
#include "src/mat/utils/freespace.h"
#include "src/mat/impls/aij/mpi/mpiaij.h"
#include "petscbt.h"
#include "src/mat/impls/dense/mpi/mpidense.h"

#undef __FUNCT__
#define __FUNCT__ "MatMatMult_MPIAIJ_MPIAIJ"
PetscErrorCode MatMatMult_MPIAIJ_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill, Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX){
    ierr = MatMatMultSymbolic_MPIAIJ_MPIAIJ(A,B,fill,C);CHKERRQ(ierr);/* numeric product is computed as well */
  } else if (scall == MAT_REUSE_MATRIX){
    ierr = MatMatMultNumeric_MPIAIJ_MPIAIJ(A,B,*C);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscObjectContainerDestroy_Mat_MatMatMultMPI"
PetscErrorCode PetscObjectContainerDestroy_Mat_MatMatMultMPI(void *ptr)
{
  PetscErrorCode       ierr;
  Mat_MatMatMultMPI    *mult=(Mat_MatMatMultMPI*)ptr;

  PetscFunctionBegin;
  ierr = PetscFree(mult->startsj);CHKERRQ(ierr);
  ierr = PetscFree(mult->bufa);CHKERRQ(ierr);
  if (mult->isrowa){ierr = ISDestroy(mult->isrowa);CHKERRQ(ierr);}
  if (mult->isrowb){ierr = ISDestroy(mult->isrowb);CHKERRQ(ierr);}
  if (mult->iscolb){ierr = ISDestroy(mult->iscolb);CHKERRQ(ierr);}
  if (mult->C_seq){ierr = MatDestroy(mult->C_seq);CHKERRQ(ierr);}
  if (mult->A_loc){ierr = MatDestroy(mult->A_loc);CHKERRQ(ierr);}
  if (mult->B_seq){ierr = MatDestroy(mult->B_seq);CHKERRQ(ierr);}
  if (mult->B_loc){ierr = MatDestroy(mult->B_loc);CHKERRQ(ierr);}
  if (mult->B_oth){ierr = MatDestroy(mult->B_oth);CHKERRQ(ierr);}
  ierr = PetscFree(mult->abi);CHKERRQ(ierr);
  ierr = PetscFree(mult->abj);CHKERRQ(ierr);
  ierr = PetscFree(mult);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode MatDestroy_AIJ(Mat);
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ_MatMatMult"
PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(Mat A)
{
  PetscErrorCode       ierr;
  PetscObjectContainer container;
  Mat_MatMatMultMPI    *mult=PETSC_NULL;

  PetscFunctionBegin;
  ierr = PetscObjectQuery((PetscObject)A,"Mat_MatMatMultMPI",(PetscObject *)&container);CHKERRQ(ierr);
  if (container) {
    ierr = PetscObjectContainerGetPointer(container,(void **)&mult);CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_ERR_PLIB,"Container does not exist");
  }
  /* restore the destroy routine saved at the symbolic stage, detach the container,
     then let the original routine free the matrix before the container is destroyed */
  A->ops->destroy = mult->MatDestroy;
  ierr = PetscObjectCompose((PetscObject)A,"Mat_MatMatMultMPI",0);CHKERRQ(ierr);
  ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
  ierr = PetscObjectContainerDestroy(container);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MPIAIJ_MatMatMult"
PetscErrorCode MatDuplicate_MPIAIJ_MatMatMult(Mat A, MatDuplicateOption op, Mat *M)
{
  PetscErrorCode       ierr;
  Mat_MatMatMultMPI    *mult;
  PetscObjectContainer container;

  PetscFunctionBegin;
  ierr = PetscObjectQuery((PetscObject)A,"Mat_MatMatMultMPI",(PetscObject *)&container);CHKERRQ(ierr);
  if (container) {
    ierr = PetscObjectContainerGetPointer(container,(void **)&mult);CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_ERR_PLIB,"Container does not exist");
  }
  ierr = (*mult->MatDuplicate)(A,op,M);CHKERRQ(ierr);
  (*M)->ops->destroy   = mult->MatDestroy;   /* =MatDestroy_MPIAIJ; *M does not duplicate A's container! */
  (*M)->ops->duplicate = mult->MatDuplicate; /* =MatDuplicate_MPIAIJ */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMultSymbolic_MPIAIJ_MPIAIJ"
PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode       ierr;
  PetscInt             start,end;
  Mat_MatMatMultMPI    *mult;
  PetscObjectContainer container;

  PetscFunctionBegin;
  if (A->cmap.rstart != B->rmap.rstart || A->cmap.rend != B->rmap.rend){
    SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap.rstart,A->cmap.rend,B->rmap.rstart,B->rmap.rend);
  }
  ierr = PetscNew(Mat_MatMatMultMPI,&mult);CHKERRQ(ierr);

  /* create a seq matrix B_seq: the submatrix of B formed by the rows of B that correspond to the nonzero columns of A */
  ierr = MatGetBrowsOfAcols(A,B,MAT_INITIAL_MATRIX,&mult->isrowb,&mult->iscolb,&mult->brstart,&mult->B_seq);CHKERRQ(ierr);

  /* create a seq matrix A_loc: the submatrix of A consisting of all local rows of A */
  start = A->rmap.rstart; end = A->rmap.rend;
  ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&mult->isrowa);CHKERRQ(ierr);
  ierr = MatGetLocalMatCondensed(A,MAT_INITIAL_MATRIX,&mult->isrowa,&mult->isrowb,&mult->A_loc);CHKERRQ(ierr);

  /* compute C_seq = A_loc * B_seq */
  ierr = MatMatMult_SeqAIJ_SeqAIJ(mult->A_loc,mult->B_seq,MAT_INITIAL_MATRIX,fill,&mult->C_seq);CHKERRQ(ierr);

  /* create the mpi matrix C by concatenating the sequential products C_seq */
  ierr = PetscObjectReference((PetscObject)mult->C_seq);CHKERRQ(ierr); /* prevent C_seq from being destroyed by MatMerge() */
  ierr = MatMerge(A->comm,mult->C_seq,B->cmap.n,MAT_INITIAL_MATRIX,C);CHKERRQ(ierr);

  /* attach the supporting struct to C so the symbolic C can be reused */
  ierr = PetscObjectContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
  ierr = PetscObjectContainerSetPointer(container,mult);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)(*C),"Mat_MatMatMultMPI",(PetscObject)container);CHKERRQ(ierr);
  ierr = PetscObjectContainerSetUserDestroy(container,PetscObjectContainerDestroy_Mat_MatMatMultMPI);CHKERRQ(ierr);

  /* save the original destroy/duplicate routines and override them so C cleans up
     (or detaches from) the attached container correctly */
  mult->MatDestroy   = (*C)->ops->destroy;
  mult->MatDuplicate = (*C)->ops->duplicate;

  (*C)->ops->destroy   = MatDestroy_MPIAIJ_MatMatMult;
  (*C)->ops->duplicate = MatDuplicate_MPIAIJ_MatMatMult;
  PetscFunctionReturn(0);
}

/* This routine is called ONLY when a previously computed symbolic C is being reused */
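/*
   A sketch of that reuse pattern (an assumed typical calling sequence, not code
   from this file): after the numerical values of A and/or B change while their
   nonzero patterns stay fixed,

     ierr = MatMatMult(A,B,MAT_REUSE_MATRIX,fill,&C);CHKERRQ(ierr);

   recomputes the values of the existing C via the routine below.
*/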
#undef __FUNCT__
#define __FUNCT__ "MatMatMultNumeric_MPIAIJ_MPIAIJ"
PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat A,Mat B,Mat C)
{
  PetscErrorCode       ierr;
  Mat                  *seq;
  Mat_MatMatMultMPI    *mult;
  PetscObjectContainer container;

  PetscFunctionBegin;
  ierr = PetscObjectQuery((PetscObject)C,"Mat_MatMatMultMPI",(PetscObject *)&container);CHKERRQ(ierr);
  if (container) {
    ierr = PetscObjectContainerGetPointer(container,(void **)&mult);CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_ERR_PLIB,"Container does not exist");
  }

  /* update B_seq and A_loc in place, reusing the index sets saved at the symbolic stage */
  seq = &mult->B_seq;
  ierr = MatGetSubMatrices(B,1,&mult->isrowb,&mult->iscolb,MAT_REUSE_MATRIX,&seq);CHKERRQ(ierr);
  mult->B_seq = *seq;

  seq = &mult->A_loc;
  ierr = MatGetSubMatrices(A,1,&mult->isrowa,&mult->isrowb,MAT_REUSE_MATRIX,&seq);CHKERRQ(ierr);
  mult->A_loc = *seq;

  /* recompute C_seq = A_loc * B_seq and merge it back into the existing C */
  ierr = MatMatMult_SeqAIJ_SeqAIJ(mult->A_loc,mult->B_seq,MAT_REUSE_MATRIX,0.0,&mult->C_seq);CHKERRQ(ierr);

  ierr = PetscObjectReference((PetscObject)mult->C_seq);CHKERRQ(ierr); /* prevent C_seq from being destroyed by MatMerge() */
  ierr = MatMerge(A->comm,mult->C_seq,B->cmap.n,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMatMult_MPIAIJ_MPIDense"
PetscErrorCode MatMatMult_MPIAIJ_MPIDense(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX){
    ierr = MatMatMultSymbolic_MPIAIJ_MPIDense(A,B,fill,C);CHKERRQ(ierr);
  }
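  /* unlike the MPIAIJ-MPIAIJ case above, the symbolic stage for a dense B only
     allocates C and its work buffer, so the numeric product is computed on every call */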
  ierr = MatMatMultNumeric_MPIAIJ_MPIDense(A,B,*C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
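
/*
   A minimal calling sketch for the sparse-times-dense case (an assumption about
   typical use, not code from this file; A is an assembled MPIAIJ matrix and B an
   assembled MPIDENSE matrix with compatible layouts; note that the fill argument
   is not used by the symbolic routine below):

     Mat C;
     ierr = MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);CHKERRQ(ierr);
*/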

#undef __FUNCT__
#define __FUNCT__ "MatMatMultSymbolic_MPIAIJ_MPIDense"
PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIDense(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode         ierr;
  Mat                    workB;
  Mat_MPIAIJ             *aij = (Mat_MPIAIJ*) A->data;
  PetscInt               nz = aij->B->cmap.n;

  PetscFunctionBegin;
  ierr = MatMatMultSymbolic_MPIDense_MPIDense(A,B,0.0,C);CHKERRQ(ierr);

  /* create the work matrix used to store the off-process rows of B needed for the local product */
  ierr = MatCreateSeqDense(PETSC_COMM_SELF,nz,B->cmap.N,PETSC_NULL,&workB);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)(*C),"workB",(PetscObject)workB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
    Performs an efficient scatter of the rows of B needed by this process, reusing
    the communication pattern stored in the VecScatter aij->Mvctx of the MPIAIJ
    matrix A; the received rows are placed in the dense work matrix workB
*/
#undef __FUNCT__
#define __FUNCT__ "MatMPIDenseScatter"
PetscErrorCode MatMPIDenseScatter(Mat A,Mat B,Mat workB)
{
  Mat_MPIAIJ             *aij = (Mat_MPIAIJ*)A->data;
  PetscErrorCode         ierr;
  PetscScalar            *b,*w,*svalues,*rvalues;
  VecScatter             ctx = aij->Mvctx;
  VecScatter_MPI_General *from = (VecScatter_MPI_General*)ctx->fromdata;
  VecScatter_MPI_General *to   = (VecScatter_MPI_General*)ctx->todata;
  PetscInt               i,j,k;
  PetscMPIInt            *sindices,*sstarts,*sprocs,*rindices,*rstarts,*rprocs,nrecvs;
  MPI_Request            *swaits,*rwaits;
  MPI_Comm               comm = A->comm;
  PetscMPIInt            tag = ctx->tag,ncols = B->cmap.N,nrows = aij->B->cmap.n,imdex,nrowsB = B->rmap.n;
  MPI_Status             status;

  PetscFunctionBegin;
  if (nrows != workB->rmap.n) SETERRQ2(PETSC_ERR_PLIB,"Number of rows of workB %D not equal to columns of aij->B %D",workB->rmap.n,nrows);
  sindices  = to->indices;
  sstarts   = to->starts;
  sprocs    = to->procs;

  rindices  = from->indices;
  rstarts   = from->starts;
  rprocs    = from->procs;

  ierr = MatGetArray(B,&b);CHKERRQ(ierr);
  ierr = MatGetArray(workB,&w);CHKERRQ(ierr);

  /* post receives for the row blocks of B coming from other processes */
  ierr = PetscMalloc(from->n*sizeof(MPI_Request),&rwaits);CHKERRQ(ierr);
  ierr = PetscMalloc(ncols*rstarts[from->n]*sizeof(PetscScalar),&rvalues);CHKERRQ(ierr);
  for (i=0; i<from->n; i++) {
    ierr = MPI_Irecv(rvalues+ncols*rstarts[i],ncols*(rstarts[i+1]-rstarts[i]),MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
  }

  ierr = PetscMalloc(to->n*sizeof(MPI_Request),&swaits);CHKERRQ(ierr);
  ierr = PetscMalloc(ncols*(sstarts[to->n])*sizeof(PetscScalar),&svalues);CHKERRQ(ierr);
  for (i=0; i<to->n; i++) {
    /* pack a message at a time: the dense array b is stored column major with
       leading dimension nrowsB, so entry (row sindices[sstarts[i]+j], column k)
       of the local part of B is b[sindices[sstarts[i]+j] + nrowsB*k] */
    for (j=0; j<sstarts[i+1]-sstarts[i]; j++){
      for (k=0; k<ncols; k++) {
        svalues[ncols*(sstarts[i] + j) + k] = b[sindices[sstarts[i]+j] + nrowsB*k];
      }
    }
    ierr = MPI_Isend(svalues+ncols*sstarts[i],ncols*(sstarts[i+1]-sstarts[i]),MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
  }

  nrecvs = from->n;
  while (nrecvs) {
    ierr = MPI_Waitany(from->n,rwaits,&imdex,&status);CHKERRQ(ierr);
    nrecvs--;
    /* unpack a message at a time into workB, which is column major with leading dimension nrows */
    for (j=0; j<rstarts[imdex+1]-rstarts[imdex]; j++){
      for (k=0; k<ncols; k++) {
        w[rindices[rstarts[imdex]+j] + nrows*k] = rvalues[ncols*(rstarts[imdex] + j) + k];
      }
    }
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree(rwaits);CHKERRQ(ierr);

  if (to->n) {ierr = MPI_Waitall(to->n,swaits,to->sstatus);CHKERRQ(ierr);}

  ierr = PetscFree(svalues);CHKERRQ(ierr);
  ierr = PetscFree(swaits);CHKERRQ(ierr);

  ierr = MatRestoreArray(B,&b);CHKERRQ(ierr);
  ierr = MatRestoreArray(workB,&w);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(workB,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(workB,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
extern PetscErrorCode MatMatMultNumericAdd_SeqAIJ_SeqDense(Mat,Mat,Mat);
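/* MatMatMultNumericAdd_SeqAIJ_SeqDense() accumulates into its third argument
   (C += A*B) rather than overwriting it, which lets the off-diagonal contribution
   below be added to the diagonal-block product already stored in cdense->A */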

#undef __FUNCT__
#define __FUNCT__ "MatMatMultNumeric_MPIAIJ_MPIDense"
PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIDense(Mat A,Mat B,Mat C)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)A->data;
  Mat_MPIDense   *bdense = (Mat_MPIDense*)B->data;
  Mat_MPIDense   *cdense = (Mat_MPIDense*)C->data;
  Mat            workB;

  PetscFunctionBegin;
  /* diagonal block of A times all local rows of B */
  ierr = MatMatMultNumeric_SeqAIJ_SeqDense(aij->A,bdense->A,cdense->A);CHKERRQ(ierr);

  /* get the off-process rows of B needed to complete the product */
  ierr = PetscObjectQuery((PetscObject)C,"workB",(PetscObject*)&workB);CHKERRQ(ierr);
  ierr = MatMPIDenseScatter(A,B,workB);CHKERRQ(ierr);

  /* off-diagonal block of A times the scattered nonlocal rows of B, added to C */
  ierr = MatMatMultNumericAdd_SeqAIJ_SeqDense(aij->B,workB,cdense->A);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}