xref: /petsc/src/mat/impls/aij/mpi/mpiaij.c (revision e64afeac6eee7ee176942f4491c6cf7fd052eefb)
1be1d678aSKris Buschelman #define PETSCMAT_DLL
28a729477SBarry Smith 
3b47fd4b1SSatish Balay #include "src/mat/impls/aij/mpi/mpiaij.h"   /*I "petscmat.h" I*/
4d9942c19SSatish Balay #include "src/inline/spops.h"
58a729477SBarry Smith 
/*
  Local utility routine that creates a mapping from the global column
  number to the local number in the off-diagonal part of the local
  storage of the matrix.  When PETSC_USE_CTABLE is used this is scalable at
  a slightly higher hash-table cost; without it it is not scalable (each
  processor has an order-N integer array, but access is fast).
*/
134a2ae208SSatish Balay #undef __FUNCT__
144a2ae208SSatish Balay #define __FUNCT__ "CreateColmap_MPIAIJ_Private"
15dfbe8321SBarry Smith PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat)
169e25ed09SBarry Smith {
1744a69424SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
186849ba73SBarry Smith   PetscErrorCode ierr;
19899cda47SBarry Smith   PetscInt       n = aij->B->cmap.n,i;
20dbb450caSBarry Smith 
213a40ed3dSBarry Smith   PetscFunctionBegin;
22aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
23273d9f13SBarry Smith   ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr);
24b1fc9764SSatish Balay   for (i=0; i<n; i++){
250f5bd95cSBarry Smith     ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr);
26b1fc9764SSatish Balay   }
27b1fc9764SSatish Balay #else
28899cda47SBarry Smith   ierr = PetscMalloc((mat->cmap.N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr);
29899cda47SBarry Smith   ierr = PetscLogObjectMemory(mat,mat->cmap.N*sizeof(PetscInt));CHKERRQ(ierr);
30899cda47SBarry Smith   ierr = PetscMemzero(aij->colmap,mat->cmap.N*sizeof(PetscInt));CHKERRQ(ierr);
31905e6a2fSBarry Smith   for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1;
32b1fc9764SSatish Balay #endif
333a40ed3dSBarry Smith   PetscFunctionReturn(0);
349e25ed09SBarry Smith }
359e25ed09SBarry Smith 
36085a36d4SBarry Smith 
370520107fSSatish Balay #define CHUNKSIZE   15
#define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \
{ \
    /* Insert or add (row,col,value) into the "diagonal" block A.          \
       rp1/ap1 point at this row's column indices/values, nrow1 is the     \
       current entry count.  The [low1,high1) window together with the     \
       cached lastcol1 narrows the search when columns arrive in           \
       increasing order. */ \
    if (col <= lastcol1) low1 = 0; else high1 = nrow1; \
    lastcol1 = col;\
    /* binary search until the window is small, then scan linearly */ \
    while (high1-low1 > 5) { \
      t = (low1+high1)/2; \
      if (rp1[t] > col) high1 = t; \
      else             low1  = t; \
    } \
      for (_i=low1; _i<high1; _i++) { \
        if (rp1[_i] > col) break; \
        if (rp1[_i] == col) { \
          /* column already present: combine or overwrite in place */ \
          if (addv == ADD_VALUES) ap1[_i] += value;   \
          else                    ap1[_i] = value; \
          goto a_noinsert; \
        } \
      }  \
      /* new nonzero needed: honor drop/ignore policies first */ \
      if (value == 0.0 && ignorezeroentries) goto a_noinsert; \
      if (nonew == 1) goto a_noinsert; \
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      /* may reallocate the row's storage (updates rp1/ap1/rmax1) */ \
      MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \
      N = nrow1++ - 1; a->nz++; high1++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp1[ii+1] = rp1[ii]; \
        ap1[ii+1] = ap1[ii]; \
      } \
      rp1[_i] = col;  \
      ap1[_i] = value;  \
      a_noinsert: ; \
      ailen[row] = nrow1; \
}
700a198c4cSBarry Smith 
71085a36d4SBarry Smith 
#define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \
{ \
    /* Same insertion logic as MatSetValues_SeqAIJ_A_Private, but for the  \
       off-diagonal block B (rp2/ap2/nrow2/low2/high2/lastcol2 state). */ \
    if (col <= lastcol2) low2 = 0; else high2 = nrow2; \
    lastcol2 = col;\
    /* binary search until the window is small, then scan linearly */ \
    while (high2-low2 > 5) { \
      t = (low2+high2)/2; \
      if (rp2[t] > col) high2 = t; \
      else             low2  = t; \
    } \
       for (_i=low2; _i<high2; _i++) { \
        if (rp2[_i] > col) break; \
        if (rp2[_i] == col) { \
          /* column already present: combine or overwrite in place */ \
          if (addv == ADD_VALUES) ap2[_i] += value;   \
          else                    ap2[_i] = value; \
          goto b_noinsert; \
        } \
      }  \
      /* new nonzero needed: honor drop/ignore policies first */ \
      if (value == 0.0 && ignorezeroentries) goto b_noinsert; \
      if (nonew == 1) goto b_noinsert; \
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      /* may reallocate the row's storage (updates rp2/ap2/rmax2) */ \
      MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \
      N = nrow2++ - 1; b->nz++; high2++;\
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp2[ii+1] = rp2[ii]; \
        ap2[ii+1] = ap2[ii]; \
      } \
      rp2[_i] = col;  \
      ap2[_i] = value;  \
      b_noinsert: ; \
      bilen[row] = nrow2; \
}
10430770e4dSSatish Balay 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesRow_MPIAIJ"
/*
  Replaces the numerical values of one locally owned row, where v lists the
  row's existing nonzeros in global column order.  The sparsity pattern is
  not changed; values are split between the off-diagonal block (left part),
  the diagonal block, and the off-diagonal block again (right part).
*/
PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[])
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data;
  PetscErrorCode ierr;
  PetscInt       l,*garray = mat->garray,diag;

  PetscFunctionBegin;
  /* code only works for square matrices A */

  /* find size of row to the left of the diagonal part */
  /* diag = first locally owned row; for a square matrix this equals the
     first locally owned column, so it splits B's columns around A's block */
  ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr);
  row  = row - diag;  /* convert global row number to local */
  for (l=0; l<b->i[row+1]-b->i[row]; l++) {
    /* l = count of B entries whose global column precedes the diagonal block */
    if (garray[b->j[b->i[row]+l]] > diag) break;
  }
  ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr);

  /* diagonal part */
  ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr);

  /* right of diagonal part */
  ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1322fd7e33dSBarry Smith 
1332fd7e33dSBarry Smith #undef __FUNCT__
1344a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIAIJ"
135b1d57f15SBarry Smith PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
1368a729477SBarry Smith {
13744a69424SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
13887828ca2SBarry Smith   PetscScalar    value;
139dfbe8321SBarry Smith   PetscErrorCode ierr;
140899cda47SBarry Smith   PetscInt       i,j,rstart = mat->rmap.rstart,rend = mat->rmap.rend;
141899cda47SBarry Smith   PetscInt       cstart = mat->cmap.rstart,cend = mat->cmap.rend,row,col;
142273d9f13SBarry Smith   PetscTruth     roworiented = aij->roworiented;
1438a729477SBarry Smith 
1440520107fSSatish Balay   /* Some Variables required in the macro */
1454ee7247eSSatish Balay   Mat            A = aij->A;
1464ee7247eSSatish Balay   Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data;
14757809a77SBarry Smith   PetscInt       *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
14887828ca2SBarry Smith   PetscScalar    *aa = a->a;
149edb03aefSBarry Smith   PetscTruth     ignorezeroentries = a->ignorezeroentries;
15030770e4dSSatish Balay   Mat            B = aij->B;
15130770e4dSSatish Balay   Mat_SeqAIJ     *b = (Mat_SeqAIJ*)B->data;
152899cda47SBarry Smith   PetscInt       *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap.n,am = aij->A->rmap.n;
15387828ca2SBarry Smith   PetscScalar    *ba = b->a;
15430770e4dSSatish Balay 
155fd3458f5SBarry Smith   PetscInt       *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
156fd3458f5SBarry Smith   PetscInt       nonew = a->nonew;
157fd3458f5SBarry Smith   PetscScalar    *ap1,*ap2;
1584ee7247eSSatish Balay 
1593a40ed3dSBarry Smith   PetscFunctionBegin;
1608a729477SBarry Smith   for (i=0; i<m; i++) {
1615ef9f2a5SBarry Smith     if (im[i] < 0) continue;
1622515c552SBarry Smith #if defined(PETSC_USE_DEBUG)
163899cda47SBarry Smith     if (im[i] >= mat->rmap.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap.N-1);
1640a198c4cSBarry Smith #endif
1654b0e389bSBarry Smith     if (im[i] >= rstart && im[i] < rend) {
1664b0e389bSBarry Smith       row      = im[i] - rstart;
167fd3458f5SBarry Smith       lastcol1 = -1;
168fd3458f5SBarry Smith       rp1      = aj + ai[row];
169fd3458f5SBarry Smith       ap1      = aa + ai[row];
170fd3458f5SBarry Smith       rmax1    = aimax[row];
171fd3458f5SBarry Smith       nrow1    = ailen[row];
172fd3458f5SBarry Smith       low1     = 0;
173fd3458f5SBarry Smith       high1    = nrow1;
174fd3458f5SBarry Smith       lastcol2 = -1;
175fd3458f5SBarry Smith       rp2      = bj + bi[row];
176d498b1e9SBarry Smith       ap2      = ba + bi[row];
177fd3458f5SBarry Smith       rmax2    = bimax[row];
178d498b1e9SBarry Smith       nrow2    = bilen[row];
179fd3458f5SBarry Smith       low2     = 0;
180fd3458f5SBarry Smith       high2    = nrow2;
181fd3458f5SBarry Smith 
1821eb62cbbSBarry Smith       for (j=0; j<n; j++) {
1834b0e389bSBarry Smith         if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
184abc0a331SBarry Smith         if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
185fd3458f5SBarry Smith         if (in[j] >= cstart && in[j] < cend){
186fd3458f5SBarry Smith           col = in[j] - cstart;
18730770e4dSSatish Balay           MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
188273d9f13SBarry Smith         } else if (in[j] < 0) continue;
1892515c552SBarry Smith #if defined(PETSC_USE_DEBUG)
190899cda47SBarry Smith         else if (in[j] >= mat->cmap.N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap.N-1);}
1910a198c4cSBarry Smith #endif
1921eb62cbbSBarry Smith         else {
193227d817aSBarry Smith           if (mat->was_assembled) {
194905e6a2fSBarry Smith             if (!aij->colmap) {
195905e6a2fSBarry Smith               ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
196905e6a2fSBarry Smith             }
197aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
1980f5bd95cSBarry Smith             ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
199fa46199cSSatish Balay 	    col--;
200b1fc9764SSatish Balay #else
201905e6a2fSBarry Smith             col = aij->colmap[in[j]] - 1;
202b1fc9764SSatish Balay #endif
203ec8511deSBarry Smith             if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
2042493cbb0SBarry Smith               ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
2054b0e389bSBarry Smith               col =  in[j];
2069bf004c3SSatish Balay               /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
207f9508a3cSSatish Balay               B = aij->B;
208f9508a3cSSatish Balay               b = (Mat_SeqAIJ*)B->data;
209f9508a3cSSatish Balay               bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j;
210d498b1e9SBarry Smith               rp2      = bj + bi[row];
211d498b1e9SBarry Smith               ap2      = ba + bi[row];
212d498b1e9SBarry Smith               rmax2    = bimax[row];
213d498b1e9SBarry Smith               nrow2    = bilen[row];
214d498b1e9SBarry Smith               low2     = 0;
215d498b1e9SBarry Smith               high2    = nrow2;
216899cda47SBarry Smith               bm       = aij->B->rmap.n;
217f9508a3cSSatish Balay               ba = b->a;
218d6dfbf8fSBarry Smith             }
219c48de900SBarry Smith           } else col = in[j];
22030770e4dSSatish Balay           MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
2211eb62cbbSBarry Smith         }
2221eb62cbbSBarry Smith       }
2235ef9f2a5SBarry Smith     } else {
22490f02eecSBarry Smith       if (!aij->donotstash) {
225d36fbae8SSatish Balay         if (roworiented) {
2265b8514ebSBarry Smith           if (ignorezeroentries && v[i*n] == 0.0) continue;
2278798bf22SSatish Balay           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
228d36fbae8SSatish Balay         } else {
2295b8514ebSBarry Smith           if (ignorezeroentries && v[i] == 0.0) continue;
2308798bf22SSatish Balay           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
2314b0e389bSBarry Smith         }
2321eb62cbbSBarry Smith       }
2338a729477SBarry Smith     }
23490f02eecSBarry Smith   }
2353a40ed3dSBarry Smith   PetscFunctionReturn(0);
2368a729477SBarry Smith }
2378a729477SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIAIJ"
/*
  Retrieves an m-by-n block of values into v (row-major).  Only locally
  owned rows are supported; requesting an off-process row is an error.
  Columns outside the diagonal block are translated through aij->colmap;
  positions not stored in the local pattern are returned as 0.0.
*/
PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap.rstart,rend = mat->rmap.rend;
  PetscInt       cstart = mat->cmap.rstart,cend = mat->cmap.rend,row,col;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap.N-1);
    if (idxm[i] >= rstart && idxm[i] < rend) {
      row = idxm[i] - rstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap.N-1);
        if (idxn[j] >= cstart && idxn[j] < cend){
          /* column lies in the diagonal block */
          col = idxn[j] - cstart;
          ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          /* off-diagonal column: map global column to local B column */
          if (!aij->colmap) {
            ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr);
          col --;
#else
          col = aij->colmap[idxn[j]] - 1;
#endif
          /* garray check guards against a stale dense-colmap hit pointing
             at a different global column */
          if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0;
          else {
            ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else {
      SETERRQ(PETSC_ERR_SUP,"Only local values currently supported");
    }
  }
  PetscFunctionReturn(0);
}
281bc5ccf88SSatish Balay 
2824a2ae208SSatish Balay #undef __FUNCT__
2834a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ"
284dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode)
285bc5ccf88SSatish Balay {
286bc5ccf88SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
287dfbe8321SBarry Smith   PetscErrorCode ierr;
288b1d57f15SBarry Smith   PetscInt       nstash,reallocs;
289bc5ccf88SSatish Balay   InsertMode     addv;
290bc5ccf88SSatish Balay 
291bc5ccf88SSatish Balay   PetscFunctionBegin;
292bc5ccf88SSatish Balay   if (aij->donotstash) {
293bc5ccf88SSatish Balay     PetscFunctionReturn(0);
294bc5ccf88SSatish Balay   }
295bc5ccf88SSatish Balay 
296bc5ccf88SSatish Balay   /* make sure all processors are either in INSERTMODE or ADDMODE */
2977adad957SLisandro Dalcin   ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr);
298bc5ccf88SSatish Balay   if (addv == (ADD_VALUES|INSERT_VALUES)) {
29929bbc08cSBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
300bc5ccf88SSatish Balay   }
301bc5ccf88SSatish Balay   mat->insertmode = addv; /* in case this processor had no cache */
302bc5ccf88SSatish Balay 
303899cda47SBarry Smith   ierr = MatStashScatterBegin_Private(&mat->stash,mat->rmap.range);CHKERRQ(ierr);
3048798bf22SSatish Balay   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
305ae15b995SBarry Smith   ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
306bc5ccf88SSatish Balay   PetscFunctionReturn(0);
307bc5ccf88SSatish Balay }
308bc5ccf88SSatish Balay 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIAIJ"
/*
  Completes assembly: drains the stash of off-process values into the local
  blocks, assembles A and B, and synchronizes disassembly state across all
  processes so every rank can reassemble consistently.
*/
PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *a=(Mat_SeqAIJ *)aij->A->data;
  PetscErrorCode ierr;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col,other_disassembled;
  PetscScalar    *val;
  InsertMode     addv = mat->insertmode;

  /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!aij->donotstash) {
    /* receive stashed values from other processes until none remain */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
  }
  a->compressedrow.use     = PETSC_FALSE;
  ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselfs, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqAIJ*)aij->B->data)->nonew)  {
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
    }
  }
  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    /* first final assembly: build the scatter/local-column machinery */
    ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr);
  ((Mat_SeqAIJ *)aij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */
  ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr);

  /* drop any cached row workspace from MatGetRow-type calls */
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  aij->rowvalues = 0;

  /* used by MatAXPY() */
  a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0;  /* b->xtoy = 0 */
  a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0;  /* b->XtoY = 0 */

  PetscFunctionReturn(0);
}
374bc5ccf88SSatish Balay 
3754a2ae208SSatish Balay #undef __FUNCT__
3764a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ"
377dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
3781eb62cbbSBarry Smith {
37944a69424SLois Curfman McInnes   Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
380dfbe8321SBarry Smith   PetscErrorCode ierr;
3813a40ed3dSBarry Smith 
3823a40ed3dSBarry Smith   PetscFunctionBegin;
38378b31e54SBarry Smith   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
38478b31e54SBarry Smith   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
3853a40ed3dSBarry Smith   PetscFunctionReturn(0);
3861eb62cbbSBarry Smith }
3871eb62cbbSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_MPIAIJ"
/*
  Zeros the N global rows listed in rows[] (each row may be owned by any
  process) and optionally places diag on the diagonal.  Row indices are
  routed to their owning processes with hand-rolled MPI_Isend/MPI_Irecv
  traffic, then the local rows are zeroed via MatZeroRows on the blocks.
*/
PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscMPIInt    size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt       i,*owners = A->rmap.range;
  PetscInt       *nprocs,j,idx,nsends,row;
  PetscInt       nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt       *rvalues,count,base,slen,*source;
  PetscInt       *lens,*lrows,*values,rstart=A->rmap.rstart;
  MPI_Comm       comm = ((PetscObject)A)->comm;
  MPI_Request    *send_waits,*recv_waits;
  MPI_Status     recv_status,*send_status;
/* NOTE(review): elsewhere this file tests PETSC_USE_DEBUG; verify that
   PETSC_DEBUG is actually defined by the build, else this check is dead */
#if defined(PETSC_DEBUG)
  PetscTruth     found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  /*  first count number of contributors to each processor */
  /* nprocs[2*j] = #rows destined for proc j, nprocs[2*j+1] = 1 iff any */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  j = 0;
  for (i=0; i<N; i++) {
    /* resume the owner scan from j unless the indices went backwards */
    if (lastidx > (idx = rows[i])) j = 0;
    lastidx = idx;
    for (; j<size; j++) {
      if (idx >= owners[j] && idx < owners[j+1]) {
        nprocs[2*j]++;
        nprocs[2*j+1] = 1;
        owner[i] = j;
#if defined(PETSC_DEBUG)
        found = PETSC_TRUE;
#endif
        break;
      }
    }
#if defined(PETSC_DEBUG)
    if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
    found = PETSC_FALSE;
#endif
  }
  nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);

  /* post receives:   */
  ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
      1) starts[i] gives the starting index in svalues for stuff going to
         the ith processor
  */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  /* pack the outgoing row indices grouped by destination process */
  for (i=0; i<N; i++) {
    svalues[starts[owner[i]]++] = rows[i];
  }

  /* starts[] was consumed by the packing loop above; rebuild it */
  starts[0] = 0;
  for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  count = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i+1]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  base = owners[rank];

  /*  wait on receives */
  ierr   = PetscMalloc(2*(nrecvs+1)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
  source = lens + nrecvs;
  count  = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
    source[imdex]  = recv_status.MPI_SOURCE;
    lens[imdex]    = n;
    slen          += n;
    count--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* move the data into the send scatter */
  /* lrows = local (process-relative) row numbers of all rows to zero */
  ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
  count = 0;
  for (i=0; i<nrecvs; i++) {
    values = rvalues + i*nmax;
    for (j=0; j<lens[i]; j++) {
      lrows[count++] = values[j] - base;
    }
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree(lens);CHKERRQ(ierr);
  ierr = PetscFree(owner);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* actually zap the local rows */
  /*
        Zero the required rows. If the "diagonal block" of the matrix
     is square and the user wishes to set the diagonal we use separate
     code so that MatSetValues() is not called for each diagonal allocating
     new memory, thus calling lots of mallocs and slowing things down.

       Contributed by: Matthew Knepley
  */
  /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
  ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr);
  if ((diag != 0.0) && (l->A->rmap.N == l->A->cmap.N)) {
    /* square diagonal block: the block can set its own diagonal cheaply */
    ierr      = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr);
  } else if (diag != 0.0) {
    /* rectangular diagonal block: set diagonal entries one at a time */
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
    if (((Mat_SeqAIJ*)l->A->data)->nonew) {
      SETERRQ(PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\
MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
    }
    for (i = 0; i < slen; i++) {
      row  = lrows[i] + rstart;
      ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
  }
  ierr = PetscFree(lrows);CHKERRQ(ierr);

  /* wait on sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(svalues);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
5391eb62cbbSBarry Smith 
5404a2ae208SSatish Balay #undef __FUNCT__
5414a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ"
542dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy)
5431eb62cbbSBarry Smith {
544416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
545dfbe8321SBarry Smith   PetscErrorCode ierr;
546b1d57f15SBarry Smith   PetscInt       nt;
547416022c9SBarry Smith 
5483a40ed3dSBarry Smith   PetscFunctionBegin;
549a2ce50c7SBarry Smith   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
550899cda47SBarry Smith   if (nt != A->cmap.n) {
551899cda47SBarry Smith     SETERRQ2(PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap.n,nt);
552fbd6ef76SBarry Smith   }
553ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
554f830108cSBarry Smith   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
555ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
556f830108cSBarry Smith   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
5573a40ed3dSBarry Smith   PetscFunctionReturn(0);
5581eb62cbbSBarry Smith }
5591eb62cbbSBarry Smith 
5604a2ae208SSatish Balay #undef __FUNCT__
5614a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIAIJ"
562dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
563da3a660dSBarry Smith {
564416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
565dfbe8321SBarry Smith   PetscErrorCode ierr;
5663a40ed3dSBarry Smith 
5673a40ed3dSBarry Smith   PetscFunctionBegin;
568ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
569f830108cSBarry Smith   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
570ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
571f830108cSBarry Smith   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
5723a40ed3dSBarry Smith   PetscFunctionReturn(0);
573da3a660dSBarry Smith }
574da3a660dSBarry Smith 
5754a2ae208SSatish Balay #undef __FUNCT__
5764a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ"
577dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy)
578da3a660dSBarry Smith {
579416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
580dfbe8321SBarry Smith   PetscErrorCode ierr;
581a5ff213dSBarry Smith   PetscTruth     merged;
582da3a660dSBarry Smith 
5833a40ed3dSBarry Smith   PetscFunctionBegin;
584a5ff213dSBarry Smith   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
585da3a660dSBarry Smith   /* do nondiagonal part */
5867c922b88SBarry Smith   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
587a5ff213dSBarry Smith   if (!merged) {
588da3a660dSBarry Smith     /* send it on its way */
589ca9f406cSSatish Balay     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
590da3a660dSBarry Smith     /* do local part */
5917c922b88SBarry Smith     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
592da3a660dSBarry Smith     /* receive remote parts: note this assumes the values are not actually */
593a5ff213dSBarry Smith     /* added in yy until the next line, */
594ca9f406cSSatish Balay     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
595a5ff213dSBarry Smith   } else {
596a5ff213dSBarry Smith     /* do local part */
597a5ff213dSBarry Smith     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
598a5ff213dSBarry Smith     /* send it on its way */
599ca9f406cSSatish Balay     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
600a5ff213dSBarry Smith     /* values actually were received in the Begin() but we need to call this nop */
601ca9f406cSSatish Balay     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
602a5ff213dSBarry Smith   }
6033a40ed3dSBarry Smith   PetscFunctionReturn(0);
604da3a660dSBarry Smith }
605da3a660dSBarry Smith 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatIsTranspose_MPIAIJ"
/*
   MatIsTranspose_MPIAIJ - Tests whether Bmat is the transpose of Amat to
   within tolerance tol; the result is returned in *f.

   Strategy: first compare the local "diagonal" blocks (cheap, no
   communication); if those match and more than one process is involved,
   extract each process's off-diagonal rows/columns with
   MatGetSubMatrices() and compare those too.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscTruth *f)
{
  MPI_Comm       comm;
  Mat_MPIAIJ     *Aij = (Mat_MPIAIJ *) Amat->data, *Bij;
  Mat            Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs;
  IS             Me,Notme;
  PetscErrorCode ierr;
  PetscInt       M,N,first,last,*notme,i;
  PetscMPIInt    size;

  PetscFunctionBegin;

  /* Easy test: symmetric diagonal block */
  Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A;
  ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr);
  if (!*f) PetscFunctionReturn(0);
  ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) PetscFunctionReturn(0);

  /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */
  ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr);
  /* notme = all global indices outside this process's ownership range
     [first,last).  NOTE(review): the array is sized with N (global
     columns) but the second loop runs to M (global rows); this is only
     consistent when M == N -- confirm callers never pass rectangular
     layouts where M > N, which would overrun the allocation. */
  ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),&notme);CHKERRQ(ierr);
  for (i=0; i<first; i++) notme[i] = i;
  for (i=last; i<M; i++) notme[i-last+first] = i;
  ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,&Notme);CHKERRQ(ierr);
  ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr);
  /* Aoff = A(Me,Notme), Boff = B(Notme,Me); transposition holds iff
     Boff is the transpose of Aoff */
  ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr);
  Aoff = Aoffs[0];
  ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr);
  Boff = Boffs[0];
  ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr);
  ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr);
  ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr);
  ierr = ISDestroy(Me);CHKERRQ(ierr);
  ierr = ISDestroy(Notme);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END
650cd0d46ebSvictorle 
6514a2ae208SSatish Balay #undef __FUNCT__
6524a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ"
653dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
654da3a660dSBarry Smith {
655416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
656dfbe8321SBarry Smith   PetscErrorCode ierr;
657da3a660dSBarry Smith 
6583a40ed3dSBarry Smith   PetscFunctionBegin;
659da3a660dSBarry Smith   /* do nondiagonal part */
6607c922b88SBarry Smith   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
661da3a660dSBarry Smith   /* send it on its way */
662ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
663da3a660dSBarry Smith   /* do local part */
6647c922b88SBarry Smith   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
665a5ff213dSBarry Smith   /* receive remote parts */
666ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
6673a40ed3dSBarry Smith   PetscFunctionReturn(0);
668da3a660dSBarry Smith }
669da3a660dSBarry Smith 
6701eb62cbbSBarry Smith /*
6711eb62cbbSBarry Smith   This only works correctly for square matrices where the subblock A->A is the
6721eb62cbbSBarry Smith    diagonal block
6731eb62cbbSBarry Smith */
6744a2ae208SSatish Balay #undef __FUNCT__
6754a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ"
676dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v)
6771eb62cbbSBarry Smith {
678dfbe8321SBarry Smith   PetscErrorCode ierr;
679416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
6803a40ed3dSBarry Smith 
6813a40ed3dSBarry Smith   PetscFunctionBegin;
682899cda47SBarry Smith   if (A->rmap.N != A->cmap.N) SETERRQ(PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
683899cda47SBarry Smith   if (A->rmap.rstart != A->cmap.rstart || A->rmap.rend != A->cmap.rend) {
68429bbc08cSBarry Smith     SETERRQ(PETSC_ERR_ARG_SIZ,"row partition must equal col partition");
6853a40ed3dSBarry Smith   }
6863a40ed3dSBarry Smith   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
6873a40ed3dSBarry Smith   PetscFunctionReturn(0);
6881eb62cbbSBarry Smith }
6891eb62cbbSBarry Smith 
6904a2ae208SSatish Balay #undef __FUNCT__
6914a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ"
692f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa)
693052efed2SBarry Smith {
694052efed2SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
695dfbe8321SBarry Smith   PetscErrorCode ierr;
6963a40ed3dSBarry Smith 
6973a40ed3dSBarry Smith   PetscFunctionBegin;
698f4df32b1SMatthew Knepley   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
699f4df32b1SMatthew Knepley   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
7003a40ed3dSBarry Smith   PetscFunctionReturn(0);
701052efed2SBarry Smith }
702052efed2SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ"
/*
   MatDestroy_MPIAIJ - Frees all storage owned by an MPIAIJ matrix: the
   assembly stash, the two sequential sub-blocks A (diagonal) and B
   (off-diagonal), the global-to-local column map, the ghost-value work
   vector and scatter context, and the dynamically composed methods
   registered on the object.
*/
PetscErrorCode MatDestroy_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap.N,mat->cmap.N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatDestroy(aij->A);CHKERRQ(ierr);
  ierr = MatDestroy(aij->B);CHKERRQ(ierr);
  /* colmap is a hash table or a plain array depending on configuration */
#if defined (PETSC_USE_CTABLE)
  if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);}
#else
  ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(aij->garray);CHKERRQ(ierr);
  /* lvec/Mvctx may never have been created (e.g. matrix never assembled) */
  if (aij->lvec)   {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);}
  if (aij->Mvctx)  {ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);}
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(aij->ld);CHKERRQ(ierr);
  ierr = PetscFree(aij);CHKERRQ(ierr);

  /* clear the type name and unregister composed functions so the object
     may be given a new type afterwards */
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
739ee50ffe9SBarry Smith 
7404a2ae208SSatish Balay #undef __FUNCT__
7418e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary"
742dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer)
7438e2fed03SBarry Smith {
7448e2fed03SBarry Smith   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
7458e2fed03SBarry Smith   Mat_SeqAIJ*       A = (Mat_SeqAIJ*)aij->A->data;
7468e2fed03SBarry Smith   Mat_SeqAIJ*       B = (Mat_SeqAIJ*)aij->B->data;
7476849ba73SBarry Smith   PetscErrorCode    ierr;
74832dcc486SBarry Smith   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
7496f69ff64SBarry Smith   int               fd;
750a788621eSSatish Balay   PetscInt          nz,header[4],*row_lengths,*range=0,rlen,i;
751899cda47SBarry Smith   PetscInt          nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap.rstart,rnz;
7528e2fed03SBarry Smith   PetscScalar       *column_values;
7538e2fed03SBarry Smith 
7548e2fed03SBarry Smith   PetscFunctionBegin;
7557adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
7567adad957SLisandro Dalcin   ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr);
7578e2fed03SBarry Smith   nz   = A->nz + B->nz;
758958c9bccSBarry Smith   if (!rank) {
7598e2fed03SBarry Smith     header[0] = MAT_FILE_COOKIE;
760899cda47SBarry Smith     header[1] = mat->rmap.N;
761899cda47SBarry Smith     header[2] = mat->cmap.N;
7627adad957SLisandro Dalcin     ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
7638e2fed03SBarry Smith     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
7646f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
7658e2fed03SBarry Smith     /* get largest number of rows any processor has */
766899cda47SBarry Smith     rlen = mat->rmap.n;
767357abbc8SBarry Smith     range = mat->rmap.range;
7688e2fed03SBarry Smith     for (i=1; i<size; i++) {
7698e2fed03SBarry Smith       rlen = PetscMax(rlen,range[i+1] - range[i]);
7708e2fed03SBarry Smith     }
7718e2fed03SBarry Smith   } else {
7727adad957SLisandro Dalcin     ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
773899cda47SBarry Smith     rlen = mat->rmap.n;
7748e2fed03SBarry Smith   }
7758e2fed03SBarry Smith 
7768e2fed03SBarry Smith   /* load up the local row counts */
777b1d57f15SBarry Smith   ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr);
778899cda47SBarry Smith   for (i=0; i<mat->rmap.n; i++) {
7798e2fed03SBarry Smith     row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
7808e2fed03SBarry Smith   }
7818e2fed03SBarry Smith 
7828e2fed03SBarry Smith   /* store the row lengths to the file */
783958c9bccSBarry Smith   if (!rank) {
7848e2fed03SBarry Smith     MPI_Status status;
785899cda47SBarry Smith     ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap.n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
7868e2fed03SBarry Smith     for (i=1; i<size; i++) {
7878e2fed03SBarry Smith       rlen = range[i+1] - range[i];
7887adad957SLisandro Dalcin       ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
7896f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
7908e2fed03SBarry Smith     }
7918e2fed03SBarry Smith   } else {
7927adad957SLisandro Dalcin     ierr = MPI_Send(row_lengths,mat->rmap.n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
7938e2fed03SBarry Smith   }
7948e2fed03SBarry Smith   ierr = PetscFree(row_lengths);CHKERRQ(ierr);
7958e2fed03SBarry Smith 
7968e2fed03SBarry Smith   /* load up the local column indices */
7978e2fed03SBarry Smith   nzmax = nz; /* )th processor needs space a largest processor needs */
7987adad957SLisandro Dalcin   ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
799b1d57f15SBarry Smith   ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
8008e2fed03SBarry Smith   cnt  = 0;
801899cda47SBarry Smith   for (i=0; i<mat->rmap.n; i++) {
8028e2fed03SBarry Smith     for (j=B->i[i]; j<B->i[i+1]; j++) {
8038e2fed03SBarry Smith       if ( (col = garray[B->j[j]]) > cstart) break;
8048e2fed03SBarry Smith       column_indices[cnt++] = col;
8058e2fed03SBarry Smith     }
8068e2fed03SBarry Smith     for (k=A->i[i]; k<A->i[i+1]; k++) {
8078e2fed03SBarry Smith       column_indices[cnt++] = A->j[k] + cstart;
8088e2fed03SBarry Smith     }
8098e2fed03SBarry Smith     for (; j<B->i[i+1]; j++) {
8108e2fed03SBarry Smith       column_indices[cnt++] = garray[B->j[j]];
8118e2fed03SBarry Smith     }
8128e2fed03SBarry Smith   }
81377431f27SBarry Smith   if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);
8148e2fed03SBarry Smith 
8158e2fed03SBarry Smith   /* store the column indices to the file */
816958c9bccSBarry Smith   if (!rank) {
8178e2fed03SBarry Smith     MPI_Status status;
8186f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
8198e2fed03SBarry Smith     for (i=1; i<size; i++) {
8207adad957SLisandro Dalcin       ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
82177431f27SBarry Smith       if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
8227adad957SLisandro Dalcin       ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
8236f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
8248e2fed03SBarry Smith     }
8258e2fed03SBarry Smith   } else {
8267adad957SLisandro Dalcin     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
8277adad957SLisandro Dalcin     ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
8288e2fed03SBarry Smith   }
8298e2fed03SBarry Smith   ierr = PetscFree(column_indices);CHKERRQ(ierr);
8308e2fed03SBarry Smith 
8318e2fed03SBarry Smith   /* load up the local column values */
8328e2fed03SBarry Smith   ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
8338e2fed03SBarry Smith   cnt  = 0;
834899cda47SBarry Smith   for (i=0; i<mat->rmap.n; i++) {
8358e2fed03SBarry Smith     for (j=B->i[i]; j<B->i[i+1]; j++) {
8368e2fed03SBarry Smith       if ( garray[B->j[j]] > cstart) break;
8378e2fed03SBarry Smith       column_values[cnt++] = B->a[j];
8388e2fed03SBarry Smith     }
8398e2fed03SBarry Smith     for (k=A->i[i]; k<A->i[i+1]; k++) {
8408e2fed03SBarry Smith       column_values[cnt++] = A->a[k];
8418e2fed03SBarry Smith     }
8428e2fed03SBarry Smith     for (; j<B->i[i+1]; j++) {
8438e2fed03SBarry Smith       column_values[cnt++] = B->a[j];
8448e2fed03SBarry Smith     }
8458e2fed03SBarry Smith   }
84677431f27SBarry Smith   if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);
8478e2fed03SBarry Smith 
8488e2fed03SBarry Smith   /* store the column values to the file */
849958c9bccSBarry Smith   if (!rank) {
8508e2fed03SBarry Smith     MPI_Status status;
8516f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
8528e2fed03SBarry Smith     for (i=1; i<size; i++) {
8537adad957SLisandro Dalcin       ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
85477431f27SBarry Smith       if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
8557adad957SLisandro Dalcin       ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
8566f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
8578e2fed03SBarry Smith     }
8588e2fed03SBarry Smith   } else {
8597adad957SLisandro Dalcin     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
8607adad957SLisandro Dalcin     ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
8618e2fed03SBarry Smith   }
8628e2fed03SBarry Smith   ierr = PetscFree(column_values);CHKERRQ(ierr);
8638e2fed03SBarry Smith   PetscFunctionReturn(0);
8648e2fed03SBarry Smith }
8658e2fed03SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket"
/*
   MatView_MPIAIJ_ASCIIorDraworSocket - Views a parallel AIJ matrix via an
   ASCII, draw, binary, or socket viewer.

   The short info-only ASCII formats and the binary case are handled and
   returned from early; all remaining cases fall through to the generic
   path at the bottom, which (for more than one process) assembles a copy
   of the entire matrix on process 0 and views that.
*/
PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = aij->rank,size = aij->size;
  PetscTruth        isdraw,iascii,isbinary;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo    info;
      PetscTruth inodes;

      ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      /* NOTE(review): a PetscTruth* is passed where PetscInt** is expected;
         only the NULL/non-NULL distinction of the returned pointer is used */
      ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr);
      if (!inodes) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n",
					      rank,mat->rmap.n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n",
		    rank,mat->rmap.n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
      }
      ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscInt   inodecount,inodelimit,*inodes;
      ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr);
      if (inodes) {
        ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr);
      }
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      /* nothing to print for factored-matrix info */
      PetscFunctionReturn(0);
    }
  } else if (isbinary) {
    if (size == 1) {
      ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
      ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
    } else {
      ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  } else if (isdraw) {
    PetscDraw  draw;
    PetscTruth isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
  }

  if (size == 1) {
    ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
    ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
  } else {
    /* assemble the entire matrix onto first processor. */
    Mat         A;
    Mat_SeqAIJ  *Aloc;
    PetscInt    M = mat->rmap.N,N = mat->cmap.N,m,*ai,*aj,row,*cols,i,*ct;
    PetscScalar *a;

    ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */
    ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqAIJ*)aij->A->data;
    m = aij->A->rmap.n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
    row = mat->rmap.rstart;
    /* temporarily shift the stored column indices in place to global
       numbering; they are shifted back after the MatSetValues loop */
    for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap.rstart ;}
    for (i=0; i<m; i++) {
      ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr);
      row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i];
    }
    aj = Aloc->j;
    for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap.rstart;}

    /* copy over the B part */
    Aloc = (Mat_SeqAIJ*)aij->B->data;
    m    = aij->B->rmap.n;  ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
    row  = mat->rmap.rstart;
    /* B stores compacted local column indices; translate through garray
       into a scratch array rather than mutating B */
    ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
    ct   = cols;  /* keep the base pointer; cols is advanced in the loop */
    for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];}
    for (i=0; i<m; i++) {
      ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr);
      row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i];
    }
    ierr = PetscFree(ct);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
      ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
    ierr = MatDestroy(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
9931eb62cbbSBarry Smith 
9944a2ae208SSatish Balay #undef __FUNCT__
9954a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ"
996dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer)
997416022c9SBarry Smith {
998dfbe8321SBarry Smith   PetscErrorCode ierr;
99932077d6dSBarry Smith   PetscTruth     iascii,isdraw,issocket,isbinary;
1000416022c9SBarry Smith 
10013a40ed3dSBarry Smith   PetscFunctionBegin;
100232077d6dSBarry Smith   ierr  = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
1003fb9695e5SSatish Balay   ierr  = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
1004fb9695e5SSatish Balay   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
1005b0a32e0cSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr);
100632077d6dSBarry Smith   if (iascii || isdraw || isbinary || issocket) {
10077b2a1423SBarry Smith     ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
10085cd90555SBarry Smith   } else {
100979a5c55eSBarry Smith     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name);
1010416022c9SBarry Smith   }
10113a40ed3dSBarry Smith   PetscFunctionReturn(0);
1012416022c9SBarry Smith }
1013416022c9SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatRelax_MPIAIJ"
/*
   MatRelax_MPIAIJ - SOR/relaxation for parallel AIJ matrices.

   Only the "local" sweep variants are supported: each outer iteration
   gathers the current off-process values of xx into mat->lvec, forms
   bb1 = bb - B*x (B is the off-diagonal block), and then runs the
   requested sweep of the sequential relaxation on the diagonal block A.
   Truly parallel (global) SOR is not implemented and raises an error.

   Input:  matin - the MPIAIJ matrix; bb - right-hand side; omega - relaxation
           factor; flag - MatSORType sweep selector; fshift - diagonal shift;
           its - outer iterations; lits - local (inner) iterations
   Output: xx - updated approximate solution
*/
PetscErrorCode MatRelax_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  PetscErrorCode ierr;
  Vec            bb1;        /* work vector holding bb - B*x for the local sweep */

  PetscFunctionBegin;
  ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);

  if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      /* first iteration: x is zero, so B*x is zero and bb can be used directly */
      ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
      its--;
    }

    while (its--) {
      /* gather off-process entries of xx needed by the off-diagonal block */
      ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* update rhs: bb1 = bb - B*x  (done as bb1 = B*(-lvec) + bb) */
      ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
      ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);

      /* local sweep */
      ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
    }
  } else if (flag & SOR_LOCAL_FORWARD_SWEEP){
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
      its--;
    }
    while (its--) {
      ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* update rhs: bb1 = bb - B*x */
      ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
      ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);

      /* local sweep */
      ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
    }
  } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
      its--;
    }
    while (its--) {
      ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* update rhs: bb1 = bb - B*x */
      ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
      ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);

      /* local sweep */
      ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
    }
  } else {
    SETERRQ(PETSC_ERR_SUP,"Parallel SOR not supported");
  }

  ierr = VecDestroy(bb1);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1081a66be287SLois Curfman McInnes 
10824a2ae208SSatish Balay #undef __FUNCT__
108342e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ"
108442e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B)
108542e855d1Svictor {
108642e855d1Svictor   MPI_Comm       comm,pcomm;
108742e855d1Svictor   PetscInt       first,local_size,nrows,*rows;
108842e855d1Svictor   int            ntids;
108942e855d1Svictor   IS             crowp,growp,irowp,lrowp,lcolp,icolp;
109042e855d1Svictor   PetscErrorCode ierr;
109142e855d1Svictor 
109242e855d1Svictor   PetscFunctionBegin;
109342e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)A,&comm); CHKERRQ(ierr);
109442e855d1Svictor   /* make a collective version of 'rowp' */
109542e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm); CHKERRQ(ierr);
109642e855d1Svictor   if (pcomm==comm) {
109742e855d1Svictor     crowp = rowp;
109842e855d1Svictor   } else {
109942e855d1Svictor     ierr = ISGetSize(rowp,&nrows); CHKERRQ(ierr);
110042e855d1Svictor     ierr = ISGetIndices(rowp,&rows); CHKERRQ(ierr);
110142e855d1Svictor     ierr = ISCreateGeneral(comm,nrows,rows,&crowp); CHKERRQ(ierr);
110242e855d1Svictor     ierr = ISRestoreIndices(rowp,&rows); CHKERRQ(ierr);
110342e855d1Svictor   }
110442e855d1Svictor   /* collect the global row permutation and invert it */
110542e855d1Svictor   ierr = ISAllGather(crowp,&growp); CHKERRQ(ierr);
110642e855d1Svictor   ierr = ISSetPermutation(growp); CHKERRQ(ierr);
110742e855d1Svictor   if (pcomm!=comm) {
110842e855d1Svictor     ierr = ISDestroy(crowp); CHKERRQ(ierr);
110942e855d1Svictor   }
111042e855d1Svictor   ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr);
111142e855d1Svictor   /* get the local target indices */
111242e855d1Svictor   ierr = MatGetOwnershipRange(A,&first,PETSC_NULL); CHKERRQ(ierr);
111342e855d1Svictor   ierr = MatGetLocalSize(A,&local_size,PETSC_NULL); CHKERRQ(ierr);
111442e855d1Svictor   ierr = ISGetIndices(irowp,&rows); CHKERRQ(ierr);
111542e855d1Svictor   ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,&lrowp); CHKERRQ(ierr);
111642e855d1Svictor   ierr = ISRestoreIndices(irowp,&rows); CHKERRQ(ierr);
111742e855d1Svictor   ierr = ISDestroy(irowp); CHKERRQ(ierr);
111842e855d1Svictor   /* the column permutation is so much easier;
111942e855d1Svictor      make a local version of 'colp' and invert it */
112042e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm); CHKERRQ(ierr);
112142e855d1Svictor   ierr = MPI_Comm_size(pcomm,&ntids); CHKERRQ(ierr);
112242e855d1Svictor   if (ntids==1) {
112342e855d1Svictor     lcolp = colp;
112442e855d1Svictor   } else {
112542e855d1Svictor     ierr = ISGetSize(colp,&nrows); CHKERRQ(ierr);
112642e855d1Svictor     ierr = ISGetIndices(colp,&rows); CHKERRQ(ierr);
112742e855d1Svictor     ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,&lcolp); CHKERRQ(ierr);
112842e855d1Svictor   }
112942e855d1Svictor   ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp); CHKERRQ(ierr);
113042e855d1Svictor   ierr = ISSetPermutation(lcolp); CHKERRQ(ierr);
113142e855d1Svictor   if (ntids>1) {
113242e855d1Svictor     ierr = ISRestoreIndices(colp,&rows); CHKERRQ(ierr);
113342e855d1Svictor     ierr = ISDestroy(lcolp); CHKERRQ(ierr);
113442e855d1Svictor   }
113542e855d1Svictor   /* now we just get the submatrix */
113642e855d1Svictor   ierr = MatGetSubMatrix(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B); CHKERRQ(ierr);
113742e855d1Svictor   /* clean up */
113842e855d1Svictor   ierr = ISDestroy(lrowp); CHKERRQ(ierr);
113942e855d1Svictor   ierr = ISDestroy(icolp); CHKERRQ(ierr);
114042e855d1Svictor   PetscFunctionReturn(0);
114142e855d1Svictor }
114242e855d1Svictor 
114342e855d1Svictor #undef __FUNCT__
11444a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ"
1145dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1146a66be287SLois Curfman McInnes {
1147a66be287SLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
1148a66be287SLois Curfman McInnes   Mat            A = mat->A,B = mat->B;
1149dfbe8321SBarry Smith   PetscErrorCode ierr;
1150329f5518SBarry Smith   PetscReal      isend[5],irecv[5];
1151a66be287SLois Curfman McInnes 
11523a40ed3dSBarry Smith   PetscFunctionBegin;
11534e220ebcSLois Curfman McInnes   info->block_size     = 1.0;
11544e220ebcSLois Curfman McInnes   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
11554e220ebcSLois Curfman McInnes   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
11564e220ebcSLois Curfman McInnes   isend[3] = info->memory;  isend[4] = info->mallocs;
11574e220ebcSLois Curfman McInnes   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
11584e220ebcSLois Curfman McInnes   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
11594e220ebcSLois Curfman McInnes   isend[3] += info->memory;  isend[4] += info->mallocs;
1160a66be287SLois Curfman McInnes   if (flag == MAT_LOCAL) {
11614e220ebcSLois Curfman McInnes     info->nz_used      = isend[0];
11624e220ebcSLois Curfman McInnes     info->nz_allocated = isend[1];
11634e220ebcSLois Curfman McInnes     info->nz_unneeded  = isend[2];
11644e220ebcSLois Curfman McInnes     info->memory       = isend[3];
11654e220ebcSLois Curfman McInnes     info->mallocs      = isend[4];
1166a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_MAX) {
11677adad957SLisandro Dalcin     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr);
11684e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
11694e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
11704e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
11714e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
11724e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1173a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_SUM) {
11747adad957SLisandro Dalcin     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr);
11754e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
11764e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
11774e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
11784e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
11794e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1180a66be287SLois Curfman McInnes   }
11814e220ebcSLois Curfman McInnes   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
11824e220ebcSLois Curfman McInnes   info->fill_ratio_needed = 0;
11834e220ebcSLois Curfman McInnes   info->factor_mallocs    = 0;
1184899cda47SBarry Smith   info->rows_global       = (double)matin->rmap.N;
1185899cda47SBarry Smith   info->columns_global    = (double)matin->cmap.N;
1186899cda47SBarry Smith   info->rows_local        = (double)matin->rmap.n;
1187899cda47SBarry Smith   info->columns_local     = (double)matin->cmap.N;
11884e220ebcSLois Curfman McInnes 
11893a40ed3dSBarry Smith   PetscFunctionReturn(0);
1190a66be287SLois Curfman McInnes }
1191a66be287SLois Curfman McInnes 
11924a2ae208SSatish Balay #undef __FUNCT__
11934a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ"
11944e0d8c25SBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscTruth flg)
1195c74985f6SBarry Smith {
1196c0bbcb79SLois Curfman McInnes   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1197dfbe8321SBarry Smith   PetscErrorCode ierr;
1198c74985f6SBarry Smith 
11993a40ed3dSBarry Smith   PetscFunctionBegin;
120012c028f9SKris Buschelman   switch (op) {
1201512a5fc5SBarry Smith   case MAT_NEW_NONZERO_LOCATIONS:
120212c028f9SKris Buschelman   case MAT_NEW_NONZERO_ALLOCATION_ERR:
120312c028f9SKris Buschelman   case MAT_KEEP_ZEROED_ROWS:
120412c028f9SKris Buschelman   case MAT_NEW_NONZERO_LOCATION_ERR:
120512c028f9SKris Buschelman   case MAT_USE_INODES:
120612c028f9SKris Buschelman   case MAT_IGNORE_ZERO_ENTRIES:
12074e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
12084e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
120912c028f9SKris Buschelman     break;
121012c028f9SKris Buschelman   case MAT_ROW_ORIENTED:
12114e0d8c25SBarry Smith     a->roworiented = flg;
12124e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
12134e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
121412c028f9SKris Buschelman     break;
12154e0d8c25SBarry Smith   case MAT_NEW_DIAGONALS:
1216290bbb0aSBarry Smith     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
121712c028f9SKris Buschelman     break;
121812c028f9SKris Buschelman   case MAT_IGNORE_OFF_PROC_ENTRIES:
12197c922b88SBarry Smith     a->donotstash = PETSC_TRUE;
122012c028f9SKris Buschelman     break;
122177e54ba9SKris Buschelman   case MAT_SYMMETRIC:
12224e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
122325f421beSHong Zhang     break;
122477e54ba9SKris Buschelman   case MAT_STRUCTURALLY_SYMMETRIC:
1225bf108f30SBarry Smith   case MAT_HERMITIAN:
1226bf108f30SBarry Smith   case MAT_SYMMETRY_ETERNAL:
12274e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
122877e54ba9SKris Buschelman     break;
122912c028f9SKris Buschelman   default:
1230ad86a440SBarry Smith     SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op);
12313a40ed3dSBarry Smith   }
12323a40ed3dSBarry Smith   PetscFunctionReturn(0);
1233c74985f6SBarry Smith }
1234c74985f6SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAIJ"
/*
   MatGetRow_MPIAIJ - Returns one locally owned row of an MPIAIJ matrix,
   with column indices in increasing GLOBAL order and values to match.

   The row is the merge of the corresponding rows of the diagonal block A
   (columns in [cstart,cend)) and the off-diagonal block B (columns mapped
   to global numbers through garray).  Since both blocks store rows sorted,
   the merge splits B's entries at the first column >= cstart (index imark)
   and interleaves: B's left part, all of A, B's right part.

   Only one row may be "active" at a time; MatRestoreRow_MPIAIJ() clears
   the getrowactive flag.  Scratch space sized for the longest local row is
   allocated lazily on first use and reused afterwards.

   Input:  matin - the matrix; row - GLOBAL row number (must be local)
   Output: nz - number of nonzeros; idx - global column indices (optional);
           v - values (optional).  idx/v point into internal scratch space.
*/
PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
  PetscErrorCode ierr;
  PetscInt       i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap.rstart;
  PetscInt       nztot,nzA,nzB,lrow,rstart = matin->rmap.rstart,rend = matin->rmap.rend;
  PetscInt       *cmap,*idx_p;

  PetscFunctionBegin;
  if (mat->getrowactive) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data;
    PetscInt     max = 1,tmp;
    for (i=0; i<matin->rmap.n; i++) {
      tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
      if (max < tmp) { max = tmp; }
    }
    /* one buffer holds both values and (after them) the indices */
    ierr = PetscMalloc(max*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr);
    mat->rowindices = (PetscInt*)(mat->rowvalues + max);
  }

  if (row < rstart || row >= rend) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Only local rows")
  lrow = row - rstart;

  /* request from the blocks only what the caller asked for */
  pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
  if (!v)   {pvA = 0; pvB = 0;}
  if (!idx) {pcA = 0; if (!v) pcB = 0;}
  ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  nztot = nzA + nzB;

  cmap  = mat->garray;  /* local->global column map for the B block */
  if (v  || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      PetscInt imark = -1;   /* first B entry whose global column is >= cstart */
      if (v) {
        *v = v_p = mat->rowvalues;
        for (i=0; i<nzB; i++) {
          if (cmap[cworkB[i]] < cstart)   v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
        for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* split point already found while filling values */
          for (i=0; i<imark; i++) {
            idx_p[i] = cmap[cworkB[i]];
          }
        } else {
          for (i=0; i<nzB; i++) {
            if (cmap[cworkB[i]] < cstart)   idx_p[i] = cmap[cworkB[i]];
            else break;
          }
          imark = i;
        }
        for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart + cworkA[i];
        for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]];
      }
    } else {
      if (idx) *idx = 0;
      if (v)   *v   = 0;
    }
  }
  *nz = nztot;
  ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
131539e00950SLois Curfman McInnes 
13164a2ae208SSatish Balay #undef __FUNCT__
13174a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ"
1318b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
131939e00950SLois Curfman McInnes {
13207a0afa10SBarry Smith   Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data;
13213a40ed3dSBarry Smith 
13223a40ed3dSBarry Smith   PetscFunctionBegin;
1323abc0a331SBarry Smith   if (!aij->getrowactive) {
1324abc0a331SBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
13257a0afa10SBarry Smith   }
13267a0afa10SBarry Smith   aij->getrowactive = PETSC_FALSE;
13273a40ed3dSBarry Smith   PetscFunctionReturn(0);
132839e00950SLois Curfman McInnes }
132939e00950SLois Curfman McInnes 
#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIAIJ"
/*
   MatNorm_MPIAIJ - Computes a norm of an MPIAIJ matrix.

   Supported: NORM_FROBENIUS (sum of |a_ij|^2 over both local blocks, then
   MPI_SUM and sqrt), NORM_1 (max column sum: per-column absolute sums
   accumulated into a length-N array and MPI_SUM-reduced -- note this uses
   O(N) storage per process), and NORM_INFINITY (max row sum, MPI_MAX).
   NORM_2 is not supported.  On a single process the work is delegated to
   the sequential diagonal block.

   Input:  mat - the matrix; type - NORM_1, NORM_FROBENIUS or NORM_INFINITY
   Output: norm - the computed norm
*/
PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,cstart = mat->cmap.rstart;
  PetscReal      sum = 0.0;
  PetscScalar    *v;

  PetscFunctionBegin;
  if (aij->size == 1) {
    /* uniprocessor: the diagonal block IS the whole matrix */
    ierr =  MatNorm(aij->A,type,norm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v = amat->a;
      for (i=0; i<amat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      v = bmat->a;
      for (i=0; i<bmat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      *norm = sqrt(*norm);
    } else if (type == NORM_1) { /* max column norm */
      PetscReal *tmp,*tmp2;
      PetscInt    *jj,*garray = aij->garray;
      ierr = PetscMalloc((mat->cmap.N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr);
      ierr = PetscMalloc((mat->cmap.N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap.N*sizeof(PetscReal));CHKERRQ(ierr);
      *norm = 0.0;
      /* A stores local column indices: shift by cstart to get global */
      v = amat->a; jj = amat->j;
      for (j=0; j<amat->nz; j++) {
        tmp[cstart + *jj++ ] += PetscAbsScalar(*v);  v++;
      }
      /* B's indices map to global columns through garray */
      v = bmat->a; jj = bmat->j;
      for (j=0; j<bmat->nz; j++) {
        tmp[garray[*jj++]] += PetscAbsScalar(*v); v++;
      }
      ierr = MPI_Allreduce(tmp,tmp2,mat->cmap.N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      for (j=0; j<mat->cmap.N; j++) {
        if (tmp2[j] > *norm) *norm = tmp2[j];
      }
      ierr = PetscFree(tmp);CHKERRQ(ierr);
      ierr = PetscFree(tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row norm */
      PetscReal ntemp = 0.0;
      for (j=0; j<aij->A->rmap.n; j++) {
        v = amat->a + amat->i[j];
        sum = 0.0;
        for (i=0; i<amat->i[j+1]-amat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        v = bmat->a + bmat->i[j];
        for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        if (sum > ntemp) ntemp = sum;
      }
      ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
    } else {
      SETERRQ(PETSC_ERR_SUP,"No support for two norm");
    }
  }
  PetscFunctionReturn(0);
}
1406855ac2c5SLois Curfman McInnes 
#undef __FUNCT__
#define __FUNCT__ "MatTranspose_MPIAIJ"
/*
   MatTranspose_MPIAIJ - Forms the transpose of an MPIAIJ matrix.

   Builds a new matrix B of size N x M and inserts the rows of A (both the
   diagonal block Aloc and off-diagonal block Bloc) as COLUMNS of B via
   MatSetValues with 1 column per call.  d_nnz (column counts of Aloc) is
   used as the preallocation for both the diagonal and off-diagonal parts
   of B -- a deliberate approximation to avoid extra communication.

   NOTE: Aloc->j is temporarily shifted in place by cstart to convert local
   column indices to global ones for MatSetValues, then shifted back;
   statement order here is essential.

   Input:  A - the matrix
   Output: matout - the transpose; if matout is PETSC_NULL the transpose
           replaces A in place (square matrices only).
*/
PetscErrorCode MatTranspose_MPIAIJ(Mat A,Mat *matout)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       M = A->rmap.N,N = A->cmap.N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,i,*d_nnz;
  PetscInt       cstart=A->cmap.rstart,ncol;
  Mat            B;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (!matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");

  /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
  ma = A->rmap.n; na = A->cmap.n; mb = a->B->rmap.n;
  ai = Aloc->i; aj = Aloc->j;
  bi = Bloc->i; bj = Bloc->j;
  ierr = PetscMalloc((1+na+bi[mb])*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
  cols = d_nnz + na + 1; /* work space to be used by B part */
  ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<ai[ma]; i++){
    d_nnz[aj[i]] ++;
    aj[i] += cstart; /* global col index to be used by MatSetValues() */
  }

  ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->cmap.n,A->rmap.n,N,M);CHKERRQ(ierr);
  ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);

  /* copy over the A part */
  array = Aloc->a;
  row = A->rmap.rstart;
  for (i=0; i<ma; i++) {
    ncol = ai[i+1]-ai[i];
    /* row i of A becomes column (rstart+i) of B */
    ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; aj += ncol;
  }
  aj = Aloc->j;
  for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */

  /* copy over the B part */
  array = Bloc->a;
  row = A->rmap.rstart;
  for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
  for (i=0; i<mb; i++) {
    ncol = bi[i+1]-bi[i];
    ierr = MatSetValues(B,ncol,cols,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; cols += ncol;
  }
  ierr = PetscFree(d_nnz);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (matout) {
    *matout = B;
  } else {
    /* in-place: replace A's guts with B's, keeping A's header */
    ierr = MatHeaderCopy(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1469b7c46309SBarry Smith 
14704a2ae208SSatish Balay #undef __FUNCT__
14714a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ"
1472dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr)
1473a008b906SSatish Balay {
14744b967eb1SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
14754b967eb1SSatish Balay   Mat            a = aij->A,b = aij->B;
1476dfbe8321SBarry Smith   PetscErrorCode ierr;
1477b1d57f15SBarry Smith   PetscInt       s1,s2,s3;
1478a008b906SSatish Balay 
14793a40ed3dSBarry Smith   PetscFunctionBegin;
14804b967eb1SSatish Balay   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
14814b967eb1SSatish Balay   if (rr) {
1482e1311b90SBarry Smith     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
148329bbc08cSBarry Smith     if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
14844b967eb1SSatish Balay     /* Overlap communication with computation. */
1485ca9f406cSSatish Balay     ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1486a008b906SSatish Balay   }
14874b967eb1SSatish Balay   if (ll) {
1488e1311b90SBarry Smith     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
148929bbc08cSBarry Smith     if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1490f830108cSBarry Smith     ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr);
14914b967eb1SSatish Balay   }
14924b967eb1SSatish Balay   /* scale  the diagonal block */
1493f830108cSBarry Smith   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
14944b967eb1SSatish Balay 
14954b967eb1SSatish Balay   if (rr) {
14964b967eb1SSatish Balay     /* Do a scatter end and then right scale the off-diagonal block */
1497ca9f406cSSatish Balay     ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1498f830108cSBarry Smith     ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr);
14994b967eb1SSatish Balay   }
15004b967eb1SSatish Balay 
15013a40ed3dSBarry Smith   PetscFunctionReturn(0);
1502a008b906SSatish Balay }
1503a008b906SSatish Balay 
15044a2ae208SSatish Balay #undef __FUNCT__
1505521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ"
1506521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs)
15075a838052SSatish Balay {
1508521d7252SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
1509521d7252SBarry Smith   PetscErrorCode ierr;
1510521d7252SBarry Smith 
15113a40ed3dSBarry Smith   PetscFunctionBegin;
1512521d7252SBarry Smith   ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr);
1513521d7252SBarry Smith   ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr);
15143a40ed3dSBarry Smith   PetscFunctionReturn(0);
15155a838052SSatish Balay }
15164a2ae208SSatish Balay #undef __FUNCT__
15174a2ae208SSatish Balay #define __FUNCT__ "MatSetUnfactored_MPIAIJ"
1518dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A)
1519bb5a7306SBarry Smith {
1520bb5a7306SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
1521dfbe8321SBarry Smith   PetscErrorCode ierr;
15223a40ed3dSBarry Smith 
15233a40ed3dSBarry Smith   PetscFunctionBegin;
1524bb5a7306SBarry Smith   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
15253a40ed3dSBarry Smith   PetscFunctionReturn(0);
1526bb5a7306SBarry Smith }
1527bb5a7306SBarry Smith 
15284a2ae208SSatish Balay #undef __FUNCT__
15294a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ"
1530dfbe8321SBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscTruth *flag)
1531d4bb536fSBarry Smith {
1532d4bb536fSBarry Smith   Mat_MPIAIJ     *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data;
1533d4bb536fSBarry Smith   Mat            a,b,c,d;
1534d4bb536fSBarry Smith   PetscTruth     flg;
1535dfbe8321SBarry Smith   PetscErrorCode ierr;
1536d4bb536fSBarry Smith 
15373a40ed3dSBarry Smith   PetscFunctionBegin;
1538d4bb536fSBarry Smith   a = matA->A; b = matA->B;
1539d4bb536fSBarry Smith   c = matB->A; d = matB->B;
1540d4bb536fSBarry Smith 
1541d4bb536fSBarry Smith   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1542abc0a331SBarry Smith   if (flg) {
1543d4bb536fSBarry Smith     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1544d4bb536fSBarry Smith   }
15457adad957SLisandro Dalcin   ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
15463a40ed3dSBarry Smith   PetscFunctionReturn(0);
1547d4bb536fSBarry Smith }
1548d4bb536fSBarry Smith 
15494a2ae208SSatish Balay #undef __FUNCT__
15504a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ"
1551dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str)
1552cb5b572fSBarry Smith {
1553dfbe8321SBarry Smith   PetscErrorCode ierr;
1554cb5b572fSBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ *)A->data;
1555cb5b572fSBarry Smith   Mat_MPIAIJ     *b = (Mat_MPIAIJ *)B->data;
1556cb5b572fSBarry Smith 
1557cb5b572fSBarry Smith   PetscFunctionBegin;
155833f4a19fSKris Buschelman   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
155933f4a19fSKris Buschelman   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1560cb5b572fSBarry Smith     /* because of the column compression in the off-processor part of the matrix a->B,
1561cb5b572fSBarry Smith        the number of columns in a->B and b->B may be different, hence we cannot call
1562cb5b572fSBarry Smith        the MatCopy() directly on the two parts. If need be, we can provide a more
1563cb5b572fSBarry Smith        efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices
1564cb5b572fSBarry Smith        then copying the submatrices */
1565cb5b572fSBarry Smith     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1566cb5b572fSBarry Smith   } else {
1567cb5b572fSBarry Smith     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1568cb5b572fSBarry Smith     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1569cb5b572fSBarry Smith   }
1570cb5b572fSBarry Smith   PetscFunctionReturn(0);
1571cb5b572fSBarry Smith }
1572cb5b572fSBarry Smith 
15734a2ae208SSatish Balay #undef __FUNCT__
15744a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ"
1575dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A)
1576273d9f13SBarry Smith {
1577dfbe8321SBarry Smith   PetscErrorCode ierr;
1578273d9f13SBarry Smith 
1579273d9f13SBarry Smith   PetscFunctionBegin;
1580273d9f13SBarry Smith   ierr =  MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1581273d9f13SBarry Smith   PetscFunctionReturn(0);
1582273d9f13SBarry Smith }
1583273d9f13SBarry Smith 
1584ac90fabeSBarry Smith #include "petscblaslapack.h"
#undef __FUNCT__
#define __FUNCT__ "MatAXPY_MPIAIJ"
/*
   MatAXPY_MPIAIJ - Computes Y = a*X + Y for two parallel AIJ matrices.

   Three cases depending on str:
     SAME_NONZERO_PATTERN   - both local blocks of X and Y have identical
                              structure, so a raw BLAS axpy over the value
                              arrays suffices.
     SUBSET_NONZERO_PATTERN - the diagonal blocks go through MatAXPY_SeqAIJ();
                              for the off-diagonal blocks an index map
                              (y->xtoy) from X's entries into Y's value array
                              is built once and cached on Y.
     otherwise              - fall back to the generic MatAXPY_Basic().
*/
PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data;
  PetscBLASInt   bnz,one=1;
  Mat_SeqAIJ     *x,*y;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    PetscScalar alpha = a;
    /* diagonal blocks: y->a += alpha * x->a over all nz stored values */
    x = (Mat_SeqAIJ *)xx->A->data;
    y = (Mat_SeqAIJ *)yy->A->data;
    bnz = (PetscBLASInt)x->nz;
    BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
    /* off-diagonal blocks: same treatment */
    x = (Mat_SeqAIJ *)xx->B->data;
    y = (Mat_SeqAIJ *)yy->B->data;
    bnz = (PetscBLASInt)x->nz;
    BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
  } else if (str == SUBSET_NONZERO_PATTERN) {
    ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr);

    x = (Mat_SeqAIJ *)xx->B->data;
    y = (Mat_SeqAIJ *)yy->B->data;
    /* a map cached against a different X is stale: free it and drop the
       reference to the old off-diagonal block */
    if (y->xtoy && y->XtoY != xx->B) {
      ierr = PetscFree(y->xtoy);CHKERRQ(ierr);
      ierr = MatDestroy(y->XtoY);CHKERRQ(ierr);
    }
    if (!y->xtoy) { /* get xtoy: map each entry of X's B block into Y's */
      ierr = MatAXPYGetxtoy_Private(xx->B->rmap.n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr);
      y->XtoY = xx->B;
      /* keep xx->B alive while the cached map refers to it */
      ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr);
    }
    for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]);
  } else {
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1626ac90fabeSBarry Smith 
1627354c94deSBarry Smith EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat);
1628354c94deSBarry Smith 
1629354c94deSBarry Smith #undef __FUNCT__
1630354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ"
1631354c94deSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat)
1632354c94deSBarry Smith {
1633354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX)
1634354c94deSBarry Smith   PetscErrorCode ierr;
1635354c94deSBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
1636354c94deSBarry Smith 
1637354c94deSBarry Smith   PetscFunctionBegin;
1638354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr);
1639354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr);
1640354c94deSBarry Smith #else
1641354c94deSBarry Smith   PetscFunctionBegin;
1642354c94deSBarry Smith #endif
1643354c94deSBarry Smith   PetscFunctionReturn(0);
1644354c94deSBarry Smith }
1645354c94deSBarry Smith 
164699cafbc1SBarry Smith #undef __FUNCT__
164799cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ"
164899cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A)
164999cafbc1SBarry Smith {
165099cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
165199cafbc1SBarry Smith   PetscErrorCode ierr;
165299cafbc1SBarry Smith 
165399cafbc1SBarry Smith   PetscFunctionBegin;
165499cafbc1SBarry Smith   ierr = MatRealPart(a->A);CHKERRQ(ierr);
165599cafbc1SBarry Smith   ierr = MatRealPart(a->B);CHKERRQ(ierr);
165699cafbc1SBarry Smith   PetscFunctionReturn(0);
165799cafbc1SBarry Smith }
165899cafbc1SBarry Smith 
165999cafbc1SBarry Smith #undef __FUNCT__
166099cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ"
166199cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A)
166299cafbc1SBarry Smith {
166399cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
166499cafbc1SBarry Smith   PetscErrorCode ierr;
166599cafbc1SBarry Smith 
166699cafbc1SBarry Smith   PetscFunctionBegin;
166799cafbc1SBarry Smith   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
166899cafbc1SBarry Smith   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
166999cafbc1SBarry Smith   PetscFunctionReturn(0);
167099cafbc1SBarry Smith }
167199cafbc1SBarry Smith 
1672103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
1673103bf8bdSMatthew Knepley 
1674103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp>
1675a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp>
1676a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp>
1677a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp>
1678103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp>
1679a2c909beSMatthew Knepley #include <boost/multi_array.hpp>
1680a2c909beSMatthew Knepley #include <boost/parallel/distributed_property_map.hpp>
1681103bf8bdSMatthew Knepley 
1682103bf8bdSMatthew Knepley #undef __FUNCT__
1683103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ"
1684103bf8bdSMatthew Knepley /*
1685103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
1686103bf8bdSMatthew Knepley */
1687103bf8bdSMatthew Knepley PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat A, IS isrow, IS iscol, MatFactorInfo *info, Mat *fact)
1688103bf8bdSMatthew Knepley {
1689a2c909beSMatthew Knepley   namespace petsc = boost::distributed::petsc;
1690a2c909beSMatthew Knepley 
1691a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
1692a2c909beSMatthew Knepley   using boost::graph::distributed::ilu_default::process_group_type;
1693a2c909beSMatthew Knepley   using boost::graph::ilu_permuted;
1694a2c909beSMatthew Knepley 
1695103bf8bdSMatthew Knepley   PetscTruth      row_identity, col_identity;
1696776b82aeSLisandro Dalcin   PetscContainer  c;
1697103bf8bdSMatthew Knepley   PetscInt        m, n, M, N;
1698103bf8bdSMatthew Knepley   PetscErrorCode  ierr;
1699103bf8bdSMatthew Knepley 
1700103bf8bdSMatthew Knepley   PetscFunctionBegin;
1701103bf8bdSMatthew Knepley   if (info->levels != 0) SETERRQ(PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu");
1702103bf8bdSMatthew Knepley   ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr);
1703103bf8bdSMatthew Knepley   ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr);
1704103bf8bdSMatthew Knepley   if (!row_identity || !col_identity) {
1705103bf8bdSMatthew Knepley     SETERRQ(PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU");
1706103bf8bdSMatthew Knepley   }
1707103bf8bdSMatthew Knepley 
1708103bf8bdSMatthew Knepley   process_group_type pg;
1709a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
1710a2c909beSMatthew Knepley   lgraph_type*   lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg));
1711a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
1712a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
1713a2c909beSMatthew Knepley 
1714103bf8bdSMatthew Knepley   petsc::read_matrix(A, graph, get(boost::edge_weight, graph));
1715a2c909beSMatthew Knepley   ilu_permuted(level_graph);
1716103bf8bdSMatthew Knepley 
1717103bf8bdSMatthew Knepley   /* put together the new matrix */
17187adad957SLisandro Dalcin   ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr);
1719103bf8bdSMatthew Knepley   ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr);
1720103bf8bdSMatthew Knepley   ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr);
1721103bf8bdSMatthew Knepley   ierr = MatSetSizes(*fact, m, n, M, N);CHKERRQ(ierr);
17227adad957SLisandro Dalcin   ierr = MatSetType(*fact, ((PetscObject)A)->type_name);CHKERRQ(ierr);
172310bd6422SMatthew Knepley   ierr = MatAssemblyBegin(*fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
172410bd6422SMatthew Knepley   ierr = MatAssemblyEnd(*fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1725103bf8bdSMatthew Knepley   (*fact)->factor = FACTOR_LU;
1726103bf8bdSMatthew Knepley 
17277adad957SLisandro Dalcin   ierr = PetscContainerCreate(((PetscObject)A)->comm, &c);
1728776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(c, lgraph_p);
1729103bf8bdSMatthew Knepley   ierr = PetscObjectCompose((PetscObject) (*fact), "graph", (PetscObject) c);
1730103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1731103bf8bdSMatthew Knepley }
1732103bf8bdSMatthew Knepley 
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_MPIAIJ"
/*
   MatLUFactorNumeric_MPIAIJ - Intentional no-op: the parallel ILU is
   computed entirely in MatILUFactorSymbolic_MPIAIJ() (which already calls
   ilu_permuted() on the level graph), so no numeric work remains here.
*/
PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat A, MatFactorInfo *info, Mat *B)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1740103bf8bdSMatthew Knepley 
1741103bf8bdSMatthew Knepley #undef __FUNCT__
1742103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ"
1743103bf8bdSMatthew Knepley /*
1744103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
1745103bf8bdSMatthew Knepley */
1746103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x)
1747103bf8bdSMatthew Knepley {
1748a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
1749a2c909beSMatthew Knepley 
1750a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
1751a2c909beSMatthew Knepley   lgraph_type*   lgraph_p;
1752776b82aeSLisandro Dalcin   PetscContainer c;
1753103bf8bdSMatthew Knepley   PetscErrorCode ierr;
1754103bf8bdSMatthew Knepley 
1755103bf8bdSMatthew Knepley   PetscFunctionBegin;
1756103bf8bdSMatthew Knepley   ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr);
1757776b82aeSLisandro Dalcin   ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr);
1758103bf8bdSMatthew Knepley   ierr = VecCopy(b, x); CHKERRQ(ierr);
1759a2c909beSMatthew Knepley 
1760a2c909beSMatthew Knepley   PetscScalar* array_x;
1761a2c909beSMatthew Knepley   ierr = VecGetArray(x, &array_x);CHKERRQ(ierr);
1762a2c909beSMatthew Knepley   PetscInt sx;
1763a2c909beSMatthew Knepley   ierr = VecGetSize(x, &sx);CHKERRQ(ierr);
1764a2c909beSMatthew Knepley 
1765a2c909beSMatthew Knepley   PetscScalar* array_b;
1766a2c909beSMatthew Knepley   ierr = VecGetArray(b, &array_b);CHKERRQ(ierr);
1767a2c909beSMatthew Knepley   PetscInt sb;
1768a2c909beSMatthew Knepley   ierr = VecGetSize(b, &sb);CHKERRQ(ierr);
1769a2c909beSMatthew Knepley 
1770a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
1771a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
1772a2c909beSMatthew Knepley 
1773a2c909beSMatthew Knepley   typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type;
1774a2c909beSMatthew Knepley   array_ref_type                                 ref_b(array_b, boost::extents[num_vertices(graph)]),
1775a2c909beSMatthew Knepley                                                  ref_x(array_x, boost::extents[num_vertices(graph)]);
1776a2c909beSMatthew Knepley 
1777a2c909beSMatthew Knepley   typedef boost::iterator_property_map<array_ref_type::iterator,
1778a2c909beSMatthew Knepley                                 boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type>  gvector_type;
1779a2c909beSMatthew Knepley   gvector_type                                   vector_b(ref_b.begin(), get(boost::vertex_index, graph)),
1780a2c909beSMatthew Knepley                                                  vector_x(ref_x.begin(), get(boost::vertex_index, graph));
1781a2c909beSMatthew Knepley 
1782a2c909beSMatthew Knepley   ilu_set_solve(*lgraph_p, vector_b, vector_x);
1783a2c909beSMatthew Knepley 
1784103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1785103bf8bdSMatthew Knepley }
1786103bf8bdSMatthew Knepley #endif
1787103bf8bdSMatthew Knepley 
typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */
  PetscInt       nzlocal,nsends,nrecvs;    /* local nonzero count; number of sends/receives set up on the first call */
  PetscMPIInt    *send_rank;               /* destination ranks (recv ranks stored in the same allocation, after size entries) */
  PetscInt       *sbuf_nz,*sbuf_j,**rbuf_j; /* send sizes; send buffer of row ptrs + column indices; per-source receive buffers */
  PetscScalar    *sbuf_a,**rbuf_a;         /* send buffer of values; per-source receive buffers of values */
  PetscErrorCode (*MatDestroy)(Mat);       /* matrix's original destroy routine, restored by MatDestroy_MatRedundant() */
} Mat_Redundant;
179569db28dcSHong Zhang 
179669db28dcSHong Zhang #undef __FUNCT__
179769db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant"
179869db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr)
179969db28dcSHong Zhang {
180069db28dcSHong Zhang   PetscErrorCode       ierr;
180169db28dcSHong Zhang   Mat_Redundant        *redund=(Mat_Redundant*)ptr;
180269db28dcSHong Zhang   PetscInt             i;
180369db28dcSHong Zhang 
180469db28dcSHong Zhang   PetscFunctionBegin;
180569db28dcSHong Zhang   ierr = PetscFree(redund->send_rank);CHKERRQ(ierr);
180669db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr);
180769db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr);
180869db28dcSHong Zhang   for (i=0; i<redund->nrecvs; i++){
180969db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr);
181069db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr);
181169db28dcSHong Zhang   }
181269db28dcSHong Zhang   ierr = PetscFree3(redund->sbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr);
181369db28dcSHong Zhang   ierr = PetscFree(redund);CHKERRQ(ierr);
181469db28dcSHong Zhang   PetscFunctionReturn(0);
181569db28dcSHong Zhang }
181669db28dcSHong Zhang 
181769db28dcSHong Zhang #undef __FUNCT__
181869db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant"
181969db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A)
182069db28dcSHong Zhang {
182169db28dcSHong Zhang   PetscErrorCode  ierr;
182269db28dcSHong Zhang   PetscContainer  container;
182369db28dcSHong Zhang   Mat_Redundant   *redund=PETSC_NULL;
182469db28dcSHong Zhang 
182569db28dcSHong Zhang   PetscFunctionBegin;
182669db28dcSHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
182769db28dcSHong Zhang   if (container) {
182869db28dcSHong Zhang     ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
182969db28dcSHong Zhang   } else {
183069db28dcSHong Zhang     SETERRQ(PETSC_ERR_PLIB,"Container does not exit");
183169db28dcSHong Zhang   }
183269db28dcSHong Zhang   A->ops->destroy = redund->MatDestroy;
183369db28dcSHong Zhang   ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr);
183469db28dcSHong Zhang   ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
183569db28dcSHong Zhang   ierr = PetscContainerDestroy(container);CHKERRQ(ierr);
183669db28dcSHong Zhang   PetscFunctionReturn(0);
183769db28dcSHong Zhang }
183869db28dcSHong Zhang 
183969db28dcSHong Zhang #undef __FUNCT__
184069db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ"
184169db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant)
184269db28dcSHong Zhang {
184369db28dcSHong Zhang   PetscMPIInt    rank,size;
18447adad957SLisandro Dalcin   MPI_Comm       comm=((PetscObject)mat)->comm;
184569db28dcSHong Zhang   PetscErrorCode ierr;
184669db28dcSHong Zhang   PetscInt       nsends=0,nrecvs=0,i,rownz_max=0;
184769db28dcSHong Zhang   PetscMPIInt    *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL;
184869db28dcSHong Zhang   PetscInt       *rowrange=mat->rmap.range;
184969db28dcSHong Zhang   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
185069db28dcSHong Zhang   Mat            A=aij->A,B=aij->B,C=*matredundant;
185169db28dcSHong Zhang   Mat_SeqAIJ     *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data;
185269db28dcSHong Zhang   PetscScalar    *sbuf_a;
185369db28dcSHong Zhang   PetscInt       nzlocal=a->nz+b->nz;
185469db28dcSHong Zhang   PetscInt       j,cstart=mat->cmap.rstart,cend=mat->cmap.rend,row,nzA,nzB,ncols,*cworkA,*cworkB;
185569db28dcSHong Zhang   PetscInt       rstart=mat->rmap.rstart,rend=mat->rmap.rend,*bmap=aij->garray,M,N;
185669db28dcSHong Zhang   PetscInt       *cols,ctmp,lwrite,*rptr,l,*sbuf_j;
185769db28dcSHong Zhang   PetscScalar    *vals,*aworkA,*aworkB;
185869db28dcSHong Zhang   PetscMPIInt    tag1,tag2,tag3,imdex;
185969db28dcSHong Zhang   MPI_Request    *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL,
186069db28dcSHong Zhang                  *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL;
186169db28dcSHong Zhang   MPI_Status     recv_status,*send_status;
186269db28dcSHong Zhang   PetscInt       *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count;
186369db28dcSHong Zhang   PetscInt       **rbuf_j=PETSC_NULL;
186469db28dcSHong Zhang   PetscScalar    **rbuf_a=PETSC_NULL;
186569db28dcSHong Zhang   Mat_Redundant  *redund=PETSC_NULL;
186669db28dcSHong Zhang   PetscContainer container;
186769db28dcSHong Zhang 
186869db28dcSHong Zhang   PetscFunctionBegin;
186969db28dcSHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
187069db28dcSHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
187169db28dcSHong Zhang 
187269db28dcSHong Zhang   if (reuse == MAT_REUSE_MATRIX) {
187369db28dcSHong Zhang     ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
187469db28dcSHong Zhang     if (M != N || M != mat->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size");
187569db28dcSHong Zhang     ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr);
187669db28dcSHong Zhang     if (M != N || M != mlocal_sub) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size");
187769db28dcSHong Zhang     ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
187869db28dcSHong Zhang     if (container) {
187969db28dcSHong Zhang       ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
188069db28dcSHong Zhang     } else {
188169db28dcSHong Zhang       SETERRQ(PETSC_ERR_PLIB,"Container does not exit");
188269db28dcSHong Zhang     }
188369db28dcSHong Zhang     if (nzlocal != redund->nzlocal) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong nzlocal");
188469db28dcSHong Zhang 
188569db28dcSHong Zhang     nsends    = redund->nsends;
188669db28dcSHong Zhang     nrecvs    = redund->nrecvs;
188769db28dcSHong Zhang     send_rank = redund->send_rank; recv_rank = send_rank + size;
188869db28dcSHong Zhang     sbuf_nz   = redund->sbuf_nz;     rbuf_nz = sbuf_nz + nsends;
188969db28dcSHong Zhang     sbuf_j    = redund->sbuf_j;
189069db28dcSHong Zhang     sbuf_a    = redund->sbuf_a;
189169db28dcSHong Zhang     rbuf_j    = redund->rbuf_j;
189269db28dcSHong Zhang     rbuf_a    = redund->rbuf_a;
189369db28dcSHong Zhang   }
189469db28dcSHong Zhang 
189569db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
189669db28dcSHong Zhang     PetscMPIInt  subrank,subsize;
189769db28dcSHong Zhang     PetscInt     nleftover,np_subcomm;
189869db28dcSHong Zhang     /* get the destination processors' id send_rank, nsends and nrecvs */
189969db28dcSHong Zhang     ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
190069db28dcSHong Zhang     ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
190169db28dcSHong Zhang     ierr = PetscMalloc((2*size+1)*sizeof(PetscMPIInt),&send_rank);
190269db28dcSHong Zhang     recv_rank = send_rank + size;
190369db28dcSHong Zhang     np_subcomm = size/nsubcomm;
190469db28dcSHong Zhang     nleftover  = size - nsubcomm*np_subcomm;
190569db28dcSHong Zhang     nsends = 0; nrecvs = 0;
190669db28dcSHong Zhang     for (i=0; i<size; i++){ /* i=rank*/
190769db28dcSHong Zhang       if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */
190869db28dcSHong Zhang         send_rank[nsends] = i; nsends++;
190969db28dcSHong Zhang         recv_rank[nrecvs++] = i;
191069db28dcSHong Zhang       }
191169db28dcSHong Zhang     }
191269db28dcSHong Zhang     if (rank >= size - nleftover){/* this proc is a leftover processor */
191369db28dcSHong Zhang       i = size-nleftover-1;
191469db28dcSHong Zhang       j = 0;
191569db28dcSHong Zhang       while (j < nsubcomm - nleftover){
191669db28dcSHong Zhang         send_rank[nsends++] = i;
191769db28dcSHong Zhang         i--; j++;
191869db28dcSHong Zhang       }
191969db28dcSHong Zhang     }
192069db28dcSHong Zhang 
192169db28dcSHong Zhang     if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */
192269db28dcSHong Zhang       for (i=0; i<nleftover; i++){
192369db28dcSHong Zhang         recv_rank[nrecvs++] = size-nleftover+i;
192469db28dcSHong Zhang       }
192569db28dcSHong Zhang     }
192669db28dcSHong Zhang 
192769db28dcSHong Zhang     /* allocate sbuf_j, sbuf_a */
192869db28dcSHong Zhang     i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2;
192969db28dcSHong Zhang     ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr);
193069db28dcSHong Zhang     ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr);
193169db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
193269db28dcSHong Zhang 
193369db28dcSHong Zhang   /* copy mat's local entries into the buffers */
193469db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
193569db28dcSHong Zhang     rownz_max = 0;
193669db28dcSHong Zhang     rptr = sbuf_j;
193769db28dcSHong Zhang     cols = sbuf_j + rend-rstart + 1;
193869db28dcSHong Zhang     vals = sbuf_a;
193969db28dcSHong Zhang     rptr[0] = 0;
194069db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
194169db28dcSHong Zhang       row = i + rstart;
194269db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
194369db28dcSHong Zhang       ncols  = nzA + nzB;
194469db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
194569db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
194669db28dcSHong Zhang       /* load the column indices for this row into cols */
194769db28dcSHong Zhang       lwrite = 0;
194869db28dcSHong Zhang       for (l=0; l<nzB; l++) {
194969db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart){
195069db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
195169db28dcSHong Zhang           cols[lwrite++] = ctmp;
195269db28dcSHong Zhang         }
195369db28dcSHong Zhang       }
195469db28dcSHong Zhang       for (l=0; l<nzA; l++){
195569db28dcSHong Zhang         vals[lwrite]   = aworkA[l];
195669db28dcSHong Zhang         cols[lwrite++] = cstart + cworkA[l];
195769db28dcSHong Zhang       }
195869db28dcSHong Zhang       for (l=0; l<nzB; l++) {
195969db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend){
196069db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
196169db28dcSHong Zhang           cols[lwrite++] = ctmp;
196269db28dcSHong Zhang         }
196369db28dcSHong Zhang       }
196469db28dcSHong Zhang       vals += ncols;
196569db28dcSHong Zhang       cols += ncols;
196669db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
196769db28dcSHong Zhang       if (rownz_max < ncols) rownz_max = ncols;
196869db28dcSHong Zhang     }
196969db28dcSHong Zhang     if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(1, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz);
197069db28dcSHong Zhang   } else { /* only copy matrix values into sbuf_a */
197169db28dcSHong Zhang     rptr = sbuf_j;
197269db28dcSHong Zhang     vals = sbuf_a;
197369db28dcSHong Zhang     rptr[0] = 0;
197469db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
197569db28dcSHong Zhang       row = i + rstart;
197669db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
197769db28dcSHong Zhang       ncols  = nzA + nzB;
197869db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
197969db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
198069db28dcSHong Zhang       lwrite = 0;
198169db28dcSHong Zhang       for (l=0; l<nzB; l++) {
198269db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l];
198369db28dcSHong Zhang       }
198469db28dcSHong Zhang       for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l];
198569db28dcSHong Zhang       for (l=0; l<nzB; l++) {
198669db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l];
198769db28dcSHong Zhang       }
198869db28dcSHong Zhang       vals += ncols;
198969db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
199069db28dcSHong Zhang     }
199169db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
199269db28dcSHong Zhang 
199369db28dcSHong Zhang   /* send nzlocal to others, and recv other's nzlocal */
199469db28dcSHong Zhang   /*--------------------------------------------------*/
199569db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
199669db28dcSHong Zhang     ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
199769db28dcSHong Zhang     s_waits2 = s_waits3 + nsends;
199869db28dcSHong Zhang     s_waits1 = s_waits2 + nsends;
199969db28dcSHong Zhang     r_waits1 = s_waits1 + nsends;
200069db28dcSHong Zhang     r_waits2 = r_waits1 + nrecvs;
200169db28dcSHong Zhang     r_waits3 = r_waits2 + nrecvs;
200269db28dcSHong Zhang   } else {
200369db28dcSHong Zhang     ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
200469db28dcSHong Zhang     r_waits3 = s_waits3 + nsends;
200569db28dcSHong Zhang   }
200669db28dcSHong Zhang 
200769db28dcSHong Zhang   ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr);
200869db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
200969db28dcSHong Zhang     /* get new tags to keep the communication clean */
201069db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr);
201169db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr);
201269db28dcSHong Zhang     ierr = PetscMalloc3(nsends+nrecvs+1,PetscInt,&sbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr);
201369db28dcSHong Zhang     rbuf_nz = sbuf_nz + nsends;
201469db28dcSHong Zhang 
201569db28dcSHong Zhang     /* post receives of other's nzlocal */
201669db28dcSHong Zhang     for (i=0; i<nrecvs; i++){
201769db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr);
201869db28dcSHong Zhang     }
201969db28dcSHong Zhang     /* send nzlocal to others */
202069db28dcSHong Zhang     for (i=0; i<nsends; i++){
202169db28dcSHong Zhang       sbuf_nz[i] = nzlocal;
202269db28dcSHong Zhang       ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr);
202369db28dcSHong Zhang     }
202469db28dcSHong Zhang     /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */
202569db28dcSHong Zhang     count = nrecvs;
202669db28dcSHong Zhang     while (count) {
202769db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr);
202869db28dcSHong Zhang       recv_rank[imdex] = recv_status.MPI_SOURCE;
202969db28dcSHong Zhang       /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */
203069db28dcSHong Zhang       ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr);
203169db28dcSHong Zhang 
203269db28dcSHong Zhang       i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */
203369db28dcSHong Zhang       rbuf_nz[imdex] += i + 2;
203469db28dcSHong Zhang       ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr);
203569db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr);
203669db28dcSHong Zhang       count--;
203769db28dcSHong Zhang     }
203869db28dcSHong Zhang     /* wait on sends of nzlocal */
203969db28dcSHong Zhang     if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);}
204069db28dcSHong Zhang     /* send mat->i,j to others, and recv from other's */
204169db28dcSHong Zhang     /*------------------------------------------------*/
204269db28dcSHong Zhang     for (i=0; i<nsends; i++){
204369db28dcSHong Zhang       j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1;
204469db28dcSHong Zhang       ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr);
204569db28dcSHong Zhang     }
204669db28dcSHong Zhang     /* wait on receives of mat->i,j */
204769db28dcSHong Zhang     /*------------------------------*/
204869db28dcSHong Zhang     count = nrecvs;
204969db28dcSHong Zhang     while (count) {
205069db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr);
205169db28dcSHong Zhang       if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
205269db28dcSHong Zhang       count--;
205369db28dcSHong Zhang     }
205469db28dcSHong Zhang     /* wait on sends of mat->i,j */
205569db28dcSHong Zhang     /*---------------------------*/
205669db28dcSHong Zhang     if (nsends) {
205769db28dcSHong Zhang       ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr);
205869db28dcSHong Zhang     }
205969db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
206069db28dcSHong Zhang 
206169db28dcSHong Zhang   /* post receives, send and receive mat->a */
206269db28dcSHong Zhang   /*----------------------------------------*/
206369db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++) {
206469db28dcSHong Zhang     ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr);
206569db28dcSHong Zhang   }
206669db28dcSHong Zhang   for (i=0; i<nsends; i++){
206769db28dcSHong Zhang     ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr);
206869db28dcSHong Zhang   }
206969db28dcSHong Zhang   count = nrecvs;
207069db28dcSHong Zhang   while (count) {
207169db28dcSHong Zhang     ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr);
207269db28dcSHong Zhang     if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
207369db28dcSHong Zhang     count--;
207469db28dcSHong Zhang   }
207569db28dcSHong Zhang   if (nsends) {
207669db28dcSHong Zhang     ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr);
207769db28dcSHong Zhang   }
207869db28dcSHong Zhang 
207969db28dcSHong Zhang   ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr);
208069db28dcSHong Zhang 
208169db28dcSHong Zhang   /* create redundant matrix */
208269db28dcSHong Zhang   /*-------------------------*/
208369db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
208469db28dcSHong Zhang     /* compute rownz_max for preallocation */
208569db28dcSHong Zhang     for (imdex=0; imdex<nrecvs; imdex++){
208669db28dcSHong Zhang       j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]];
208769db28dcSHong Zhang       rptr = rbuf_j[imdex];
208869db28dcSHong Zhang       for (i=0; i<j; i++){
208969db28dcSHong Zhang         ncols = rptr[i+1] - rptr[i];
209069db28dcSHong Zhang         if (rownz_max < ncols) rownz_max = ncols;
209169db28dcSHong Zhang       }
209269db28dcSHong Zhang     }
209369db28dcSHong Zhang 
209469db28dcSHong Zhang     ierr = MatCreate(subcomm,&C);CHKERRQ(ierr);
209569db28dcSHong Zhang     ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
209669db28dcSHong Zhang     ierr = MatSetFromOptions(C);CHKERRQ(ierr);
209769db28dcSHong Zhang     ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr);
209869db28dcSHong Zhang     ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr);
209969db28dcSHong Zhang   } else {
210069db28dcSHong Zhang     C = *matredundant;
210169db28dcSHong Zhang   }
210269db28dcSHong Zhang 
210369db28dcSHong Zhang   /* insert local matrix entries */
210469db28dcSHong Zhang   rptr = sbuf_j;
210569db28dcSHong Zhang   cols = sbuf_j + rend-rstart + 1;
210669db28dcSHong Zhang   vals = sbuf_a;
210769db28dcSHong Zhang   for (i=0; i<rend-rstart; i++){
210869db28dcSHong Zhang     row   = i + rstart;
210969db28dcSHong Zhang     ncols = rptr[i+1] - rptr[i];
211069db28dcSHong Zhang     ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
211169db28dcSHong Zhang     vals += ncols;
211269db28dcSHong Zhang     cols += ncols;
211369db28dcSHong Zhang   }
211469db28dcSHong Zhang   /* insert received matrix entries */
211569db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++){
211669db28dcSHong Zhang     rstart = rowrange[recv_rank[imdex]];
211769db28dcSHong Zhang     rend   = rowrange[recv_rank[imdex]+1];
211869db28dcSHong Zhang     rptr = rbuf_j[imdex];
211969db28dcSHong Zhang     cols = rbuf_j[imdex] + rend-rstart + 1;
212069db28dcSHong Zhang     vals = rbuf_a[imdex];
212169db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
212269db28dcSHong Zhang       row   = i + rstart;
212369db28dcSHong Zhang       ncols = rptr[i+1] - rptr[i];
212469db28dcSHong Zhang       ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
212569db28dcSHong Zhang       vals += ncols;
212669db28dcSHong Zhang       cols += ncols;
212769db28dcSHong Zhang     }
212869db28dcSHong Zhang   }
212969db28dcSHong Zhang   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
213069db28dcSHong Zhang   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
213169db28dcSHong Zhang   ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
213269db28dcSHong Zhang   if (M != mat->rmap.N || N != mat->cmap.N) SETERRQ2(PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap.N);
213369db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
213469db28dcSHong Zhang     PetscContainer container;
213569db28dcSHong Zhang     *matredundant = C;
213669db28dcSHong Zhang     /* create a supporting struct and attach it to C for reuse */
213738f2d2fdSLisandro Dalcin     ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr);
213869db28dcSHong Zhang     ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
213969db28dcSHong Zhang     ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr);
214069db28dcSHong Zhang     ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr);
214169db28dcSHong Zhang     ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr);
214269db28dcSHong Zhang 
214369db28dcSHong Zhang     redund->nzlocal = nzlocal;
214469db28dcSHong Zhang     redund->nsends  = nsends;
214569db28dcSHong Zhang     redund->nrecvs  = nrecvs;
214669db28dcSHong Zhang     redund->send_rank = send_rank;
214769db28dcSHong Zhang     redund->sbuf_nz = sbuf_nz;
214869db28dcSHong Zhang     redund->sbuf_j  = sbuf_j;
214969db28dcSHong Zhang     redund->sbuf_a  = sbuf_a;
215069db28dcSHong Zhang     redund->rbuf_j  = rbuf_j;
215169db28dcSHong Zhang     redund->rbuf_a  = rbuf_a;
215269db28dcSHong Zhang 
215369db28dcSHong Zhang     redund->MatDestroy = C->ops->destroy;
215469db28dcSHong Zhang     C->ops->destroy    = MatDestroy_MatRedundant;
215569db28dcSHong Zhang   }
215669db28dcSHong Zhang   PetscFunctionReturn(0);
215769db28dcSHong Zhang }
215869db28dcSHong Zhang 
215903bc72f1SMatthew Knepley #undef __FUNCT__
216003bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ"
216103bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[])
216203bc72f1SMatthew Knepley {
216303bc72f1SMatthew Knepley   Mat_MPIAIJ    *mat    = (Mat_MPIAIJ *) A->data;
216403bc72f1SMatthew Knepley   PetscInt       n      = A->rmap.n;
216503bc72f1SMatthew Knepley   PetscInt       cstart = A->cmap.rstart;
216603bc72f1SMatthew Knepley   PetscInt      *cmap   = mat->garray;
216703bc72f1SMatthew Knepley   PetscInt      *diagIdx, *offdiagIdx;
216803bc72f1SMatthew Knepley   Vec            diagV, offdiagV;
216903bc72f1SMatthew Knepley   PetscScalar   *a, *diagA, *offdiagA;
217003bc72f1SMatthew Knepley   PetscInt       r;
217103bc72f1SMatthew Knepley   PetscErrorCode ierr;
217203bc72f1SMatthew Knepley 
217303bc72f1SMatthew Knepley   PetscFunctionBegin;
217403bc72f1SMatthew Knepley   ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr);
2175*e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr);
2176*e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr);
217703bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->A, diagV,    diagIdx);CHKERRQ(ierr);
217803bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr);
217903bc72f1SMatthew Knepley   ierr = VecGetArray(v,        &a);CHKERRQ(ierr);
218003bc72f1SMatthew Knepley   ierr = VecGetArray(diagV,    &diagA);CHKERRQ(ierr);
218103bc72f1SMatthew Knepley   ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr);
218203bc72f1SMatthew Knepley   for(r = 0; r < n; ++r) {
218303bc72f1SMatthew Knepley     if (diagA[r] <= offdiagA[r]) {
218403bc72f1SMatthew Knepley       a[r]   = diagA[r];
218503bc72f1SMatthew Knepley       idx[r] = cstart + diagIdx[r];
218603bc72f1SMatthew Knepley     } else {
218703bc72f1SMatthew Knepley       a[r]   = offdiagA[r];
218803bc72f1SMatthew Knepley       idx[r] = cmap[offdiagIdx[r]];
218903bc72f1SMatthew Knepley     }
219003bc72f1SMatthew Knepley   }
219103bc72f1SMatthew Knepley   ierr = VecRestoreArray(v,        &a);CHKERRQ(ierr);
219203bc72f1SMatthew Knepley   ierr = VecRestoreArray(diagV,    &diagA);CHKERRQ(ierr);
219303bc72f1SMatthew Knepley   ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr);
219403bc72f1SMatthew Knepley   ierr = VecDestroy(diagV);CHKERRQ(ierr);
219503bc72f1SMatthew Knepley   ierr = VecDestroy(offdiagV);CHKERRQ(ierr);
219603bc72f1SMatthew Knepley   ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr);
219703bc72f1SMatthew Knepley   PetscFunctionReturn(0);
219803bc72f1SMatthew Knepley }
219903bc72f1SMatthew Knepley 
22008a729477SBarry Smith /* -------------------------------------------------------------------*/
2201cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ,
2202cda55fadSBarry Smith        MatGetRow_MPIAIJ,
2203cda55fadSBarry Smith        MatRestoreRow_MPIAIJ,
2204cda55fadSBarry Smith        MatMult_MPIAIJ,
220597304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ,
22067c922b88SBarry Smith        MatMultTranspose_MPIAIJ,
22077c922b88SBarry Smith        MatMultTransposeAdd_MPIAIJ,
2208103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2209103bf8bdSMatthew Knepley        MatSolve_MPIAIJ,
2210103bf8bdSMatthew Knepley #else
2211cda55fadSBarry Smith        0,
2212103bf8bdSMatthew Knepley #endif
2213cda55fadSBarry Smith        0,
2214cda55fadSBarry Smith        0,
221597304618SKris Buschelman /*10*/ 0,
2216cda55fadSBarry Smith        0,
2217cda55fadSBarry Smith        0,
221844a69424SLois Curfman McInnes        MatRelax_MPIAIJ,
2219b7c46309SBarry Smith        MatTranspose_MPIAIJ,
222097304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ,
2221cda55fadSBarry Smith        MatEqual_MPIAIJ,
2222cda55fadSBarry Smith        MatGetDiagonal_MPIAIJ,
2223cda55fadSBarry Smith        MatDiagonalScale_MPIAIJ,
2224cda55fadSBarry Smith        MatNorm_MPIAIJ,
222597304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ,
2226cda55fadSBarry Smith        MatAssemblyEnd_MPIAIJ,
22271eb62cbbSBarry Smith        0,
2228cda55fadSBarry Smith        MatSetOption_MPIAIJ,
2229cda55fadSBarry Smith        MatZeroEntries_MPIAIJ,
223097304618SKris Buschelman /*25*/ MatZeroRows_MPIAIJ,
2231cda55fadSBarry Smith        0,
2232103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2233103bf8bdSMatthew Knepley        MatLUFactorNumeric_MPIAIJ,
2234103bf8bdSMatthew Knepley #else
2235cda55fadSBarry Smith        0,
2236103bf8bdSMatthew Knepley #endif
2237cda55fadSBarry Smith        0,
2238cda55fadSBarry Smith        0,
223997304618SKris Buschelman /*30*/ MatSetUpPreallocation_MPIAIJ,
2240103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2241103bf8bdSMatthew Knepley        MatILUFactorSymbolic_MPIAIJ,
2242103bf8bdSMatthew Knepley #else
2243cda55fadSBarry Smith        0,
2244103bf8bdSMatthew Knepley #endif
2245cda55fadSBarry Smith        0,
2246cda55fadSBarry Smith        0,
2247cda55fadSBarry Smith        0,
224897304618SKris Buschelman /*35*/ MatDuplicate_MPIAIJ,
2249cda55fadSBarry Smith        0,
2250cda55fadSBarry Smith        0,
2251cda55fadSBarry Smith        0,
2252cda55fadSBarry Smith        0,
225397304618SKris Buschelman /*40*/ MatAXPY_MPIAIJ,
2254cda55fadSBarry Smith        MatGetSubMatrices_MPIAIJ,
2255cda55fadSBarry Smith        MatIncreaseOverlap_MPIAIJ,
2256cda55fadSBarry Smith        MatGetValues_MPIAIJ,
2257cb5b572fSBarry Smith        MatCopy_MPIAIJ,
22588c07d4e3SBarry Smith /*45*/ 0,
2259cda55fadSBarry Smith        MatScale_MPIAIJ,
2260cda55fadSBarry Smith        0,
2261cda55fadSBarry Smith        0,
2262cda55fadSBarry Smith        0,
2263521d7252SBarry Smith /*50*/ MatSetBlockSize_MPIAIJ,
2264cda55fadSBarry Smith        0,
2265cda55fadSBarry Smith        0,
2266cda55fadSBarry Smith        0,
2267cda55fadSBarry Smith        0,
226897304618SKris Buschelman /*55*/ MatFDColoringCreate_MPIAIJ,
2269cda55fadSBarry Smith        0,
2270cda55fadSBarry Smith        MatSetUnfactored_MPIAIJ,
227142e855d1Svictor        MatPermute_MPIAIJ,
2272cda55fadSBarry Smith        0,
227397304618SKris Buschelman /*60*/ MatGetSubMatrix_MPIAIJ,
2274e03a110bSBarry Smith        MatDestroy_MPIAIJ,
2275e03a110bSBarry Smith        MatView_MPIAIJ,
2276357abbc8SBarry Smith        0,
2277a2243be0SBarry Smith        0,
227897304618SKris Buschelman /*65*/ 0,
2279a2243be0SBarry Smith        0,
2280a2243be0SBarry Smith        0,
2281a2243be0SBarry Smith        0,
2282a2243be0SBarry Smith        0,
228397304618SKris Buschelman /*70*/ 0,
2284a2243be0SBarry Smith        0,
2285a2243be0SBarry Smith        MatSetColoring_MPIAIJ,
2286dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC)
2287779c1a83SBarry Smith        MatSetValuesAdic_MPIAIJ,
2288dcf5cc72SBarry Smith #else
2289dcf5cc72SBarry Smith        0,
2290dcf5cc72SBarry Smith #endif
229197304618SKris Buschelman        MatSetValuesAdifor_MPIAIJ,
229297304618SKris Buschelman /*75*/ 0,
229397304618SKris Buschelman        0,
229497304618SKris Buschelman        0,
229597304618SKris Buschelman        0,
229697304618SKris Buschelman        0,
229797304618SKris Buschelman /*80*/ 0,
229897304618SKris Buschelman        0,
229997304618SKris Buschelman        0,
230097304618SKris Buschelman        0,
230141acf15aSKris Buschelman /*84*/ MatLoad_MPIAIJ,
23026284ec50SHong Zhang        0,
23036284ec50SHong Zhang        0,
23046284ec50SHong Zhang        0,
23056284ec50SHong Zhang        0,
2306865e5f61SKris Buschelman        0,
2307865e5f61SKris Buschelman /*90*/ MatMatMult_MPIAIJ_MPIAIJ,
230826be0446SHong Zhang        MatMatMultSymbolic_MPIAIJ_MPIAIJ,
230926be0446SHong Zhang        MatMatMultNumeric_MPIAIJ_MPIAIJ,
23107a7894deSKris Buschelman        MatPtAP_Basic,
23117a7894deSKris Buschelman        MatPtAPSymbolic_MPIAIJ,
23127a7894deSKris Buschelman /*95*/ MatPtAPNumeric_MPIAIJ,
23137a7894deSKris Buschelman        0,
23147a7894deSKris Buschelman        0,
23157a7894deSKris Buschelman        0,
23167a7894deSKris Buschelman        0,
23177a7894deSKris Buschelman /*100*/0,
2318865e5f61SKris Buschelman        MatPtAPSymbolic_MPIAIJ_MPIAIJ,
23197a7894deSKris Buschelman        MatPtAPNumeric_MPIAIJ_MPIAIJ,
23202fd7e33dSBarry Smith        MatConjugate_MPIAIJ,
23212fd7e33dSBarry Smith        0,
232299cafbc1SBarry Smith /*105*/MatSetValuesRow_MPIAIJ,
232399cafbc1SBarry Smith        MatRealPart_MPIAIJ,
232469db28dcSHong Zhang        MatImaginaryPart_MPIAIJ,
232569db28dcSHong Zhang        0,
232669db28dcSHong Zhang        0,
232769db28dcSHong Zhang /*110*/0,
232803bc72f1SMatthew Knepley        MatGetRedundantMatrix_MPIAIJ,
232903bc72f1SMatthew Knepley        MatGetRowMin_MPIAIJ};
233036ce4990SBarry Smith 
23312e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/
23322e8a6d31SBarry Smith 
2333fb2e594dSBarry Smith EXTERN_C_BEGIN
23344a2ae208SSatish Balay #undef __FUNCT__
23354a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ"
2336be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat)
23372e8a6d31SBarry Smith {
23382e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2339dfbe8321SBarry Smith   PetscErrorCode ierr;
23402e8a6d31SBarry Smith 
23412e8a6d31SBarry Smith   PetscFunctionBegin;
23422e8a6d31SBarry Smith   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
23432e8a6d31SBarry Smith   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
23442e8a6d31SBarry Smith   PetscFunctionReturn(0);
23452e8a6d31SBarry Smith }
2346fb2e594dSBarry Smith EXTERN_C_END
23472e8a6d31SBarry Smith 
2348fb2e594dSBarry Smith EXTERN_C_BEGIN
23494a2ae208SSatish Balay #undef __FUNCT__
23504a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ"
2351be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat)
23522e8a6d31SBarry Smith {
23532e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2354dfbe8321SBarry Smith   PetscErrorCode ierr;
23552e8a6d31SBarry Smith 
23562e8a6d31SBarry Smith   PetscFunctionBegin;
23572e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
23582e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
23592e8a6d31SBarry Smith   PetscFunctionReturn(0);
23602e8a6d31SBarry Smith }
2361fb2e594dSBarry Smith EXTERN_C_END
23628a729477SBarry Smith 
2363e090d566SSatish Balay #include "petscpc.h"
236427508adbSBarry Smith EXTERN_C_BEGIN
23654a2ae208SSatish Balay #undef __FUNCT__
2366a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ"
2367be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
2368a23d5eceSKris Buschelman {
2369a23d5eceSKris Buschelman   Mat_MPIAIJ     *b;
2370dfbe8321SBarry Smith   PetscErrorCode ierr;
2371b1d57f15SBarry Smith   PetscInt       i;
2372a23d5eceSKris Buschelman 
2373a23d5eceSKris Buschelman   PetscFunctionBegin;
2374a23d5eceSKris Buschelman   B->preallocated = PETSC_TRUE;
2375a23d5eceSKris Buschelman   if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
2376a23d5eceSKris Buschelman   if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
237777431f27SBarry Smith   if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
237877431f27SBarry Smith   if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);
2379899cda47SBarry Smith 
2380899cda47SBarry Smith   B->rmap.bs = B->cmap.bs = 1;
23816148ca0dSBarry Smith   ierr = PetscMapSetUp(&B->rmap);CHKERRQ(ierr);
23826148ca0dSBarry Smith   ierr = PetscMapSetUp(&B->cmap);CHKERRQ(ierr);
2383a23d5eceSKris Buschelman   if (d_nnz) {
2384899cda47SBarry Smith     for (i=0; i<B->rmap.n; i++) {
238577431f27SBarry Smith       if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
2386a23d5eceSKris Buschelman     }
2387a23d5eceSKris Buschelman   }
2388a23d5eceSKris Buschelman   if (o_nnz) {
2389899cda47SBarry Smith     for (i=0; i<B->rmap.n; i++) {
239077431f27SBarry Smith       if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
2391a23d5eceSKris Buschelman     }
2392a23d5eceSKris Buschelman   }
2393a23d5eceSKris Buschelman   b = (Mat_MPIAIJ*)B->data;
2394899cda47SBarry Smith 
2395899cda47SBarry Smith   /* Explicitly create 2 MATSEQAIJ matrices. */
2396899cda47SBarry Smith   ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
2397899cda47SBarry Smith   ierr = MatSetSizes(b->A,B->rmap.n,B->cmap.n,B->rmap.n,B->cmap.n);CHKERRQ(ierr);
2398899cda47SBarry Smith   ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr);
2399899cda47SBarry Smith   ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
2400899cda47SBarry Smith   ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
2401899cda47SBarry Smith   ierr = MatSetSizes(b->B,B->rmap.n,B->cmap.N,B->rmap.n,B->cmap.N);CHKERRQ(ierr);
2402899cda47SBarry Smith   ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr);
2403899cda47SBarry Smith   ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);
2404899cda47SBarry Smith 
2405c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr);
2406c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr);
2407a23d5eceSKris Buschelman 
2408a23d5eceSKris Buschelman   PetscFunctionReturn(0);
2409a23d5eceSKris Buschelman }
2410a23d5eceSKris Buschelman EXTERN_C_END
2411a23d5eceSKris Buschelman 
24124a2ae208SSatish Balay #undef __FUNCT__
24134a2ae208SSatish Balay #define __FUNCT__ "MatDuplicate_MPIAIJ"
2414dfbe8321SBarry Smith PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
2415d6dfbf8fSBarry Smith {
2416d6dfbf8fSBarry Smith   Mat            mat;
2417416022c9SBarry Smith   Mat_MPIAIJ     *a,*oldmat = (Mat_MPIAIJ*)matin->data;
2418dfbe8321SBarry Smith   PetscErrorCode ierr;
2419d6dfbf8fSBarry Smith 
24203a40ed3dSBarry Smith   PetscFunctionBegin;
2421416022c9SBarry Smith   *newmat       = 0;
24227adad957SLisandro Dalcin   ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr);
2423899cda47SBarry Smith   ierr = MatSetSizes(mat,matin->rmap.n,matin->cmap.n,matin->rmap.N,matin->cmap.N);CHKERRQ(ierr);
24247adad957SLisandro Dalcin   ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
24251d5dac46SHong Zhang   ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
2426273d9f13SBarry Smith   a    = (Mat_MPIAIJ*)mat->data;
2427e1b6402fSHong Zhang 
2428d6dfbf8fSBarry Smith   mat->factor       = matin->factor;
2429899cda47SBarry Smith   mat->rmap.bs      = matin->rmap.bs;
2430c456f294SBarry Smith   mat->assembled    = PETSC_TRUE;
2431e7641de0SSatish Balay   mat->insertmode   = NOT_SET_VALUES;
2432273d9f13SBarry Smith   mat->preallocated = PETSC_TRUE;
2433d6dfbf8fSBarry Smith 
243417699dbbSLois Curfman McInnes   a->size           = oldmat->size;
243517699dbbSLois Curfman McInnes   a->rank           = oldmat->rank;
2436e7641de0SSatish Balay   a->donotstash     = oldmat->donotstash;
2437e7641de0SSatish Balay   a->roworiented    = oldmat->roworiented;
2438e7641de0SSatish Balay   a->rowindices     = 0;
2439bcd2baecSBarry Smith   a->rowvalues      = 0;
2440bcd2baecSBarry Smith   a->getrowactive   = PETSC_FALSE;
2441d6dfbf8fSBarry Smith 
24427adad957SLisandro Dalcin   ierr = PetscMapCopy(((PetscObject)mat)->comm,&matin->rmap,&mat->rmap);CHKERRQ(ierr);
24437adad957SLisandro Dalcin   ierr = PetscMapCopy(((PetscObject)mat)->comm,&matin->cmap,&mat->cmap);CHKERRQ(ierr);
2444899cda47SBarry Smith 
24457adad957SLisandro Dalcin   ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr);
24462ee70a88SLois Curfman McInnes   if (oldmat->colmap) {
2447aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
24480f5bd95cSBarry Smith     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
2449b1fc9764SSatish Balay #else
2450899cda47SBarry Smith     ierr = PetscMalloc((mat->cmap.N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
2451899cda47SBarry Smith     ierr = PetscLogObjectMemory(mat,(mat->cmap.N)*sizeof(PetscInt));CHKERRQ(ierr);
2452899cda47SBarry Smith     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap.N)*sizeof(PetscInt));CHKERRQ(ierr);
2453b1fc9764SSatish Balay #endif
2454416022c9SBarry Smith   } else a->colmap = 0;
24553f41c07dSBarry Smith   if (oldmat->garray) {
2456b1d57f15SBarry Smith     PetscInt len;
2457899cda47SBarry Smith     len  = oldmat->B->cmap.n;
2458b1d57f15SBarry Smith     ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
245952e6d16bSBarry Smith     ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
2460b1d57f15SBarry Smith     if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); }
2461416022c9SBarry Smith   } else a->garray = 0;
2462d6dfbf8fSBarry Smith 
2463416022c9SBarry Smith   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
246452e6d16bSBarry Smith   ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
2465a56f8943SBarry Smith   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
246652e6d16bSBarry Smith   ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
24672e8a6d31SBarry Smith   ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
246852e6d16bSBarry Smith   ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
24692e8a6d31SBarry Smith   ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
247052e6d16bSBarry Smith   ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
24717adad957SLisandro Dalcin   ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
24728a729477SBarry Smith   *newmat = mat;
24733a40ed3dSBarry Smith   PetscFunctionReturn(0);
24748a729477SBarry Smith }
2475416022c9SBarry Smith 
2476e090d566SSatish Balay #include "petscsys.h"
2477416022c9SBarry Smith 
24784a2ae208SSatish Balay #undef __FUNCT__
24794a2ae208SSatish Balay #define __FUNCT__ "MatLoad_MPIAIJ"
2480f69a0ea3SMatthew Knepley PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, MatType type,Mat *newmat)
2481416022c9SBarry Smith {
2482d65a2f8fSBarry Smith   Mat            A;
248387828ca2SBarry Smith   PetscScalar    *vals,*svals;
248419bcc07fSBarry Smith   MPI_Comm       comm = ((PetscObject)viewer)->comm;
2485416022c9SBarry Smith   MPI_Status     status;
24866849ba73SBarry Smith   PetscErrorCode ierr;
2487dc231df0SBarry Smith   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,maxnz;
2488167e7480SBarry Smith   PetscInt       i,nz,j,rstart,rend,mmax;
2489b1d57f15SBarry Smith   PetscInt       header[4],*rowlengths = 0,M,N,m,*cols;
2490910ba992SMatthew Knepley   PetscInt       *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols;
2491dc231df0SBarry Smith   PetscInt       cend,cstart,n,*rowners;
2492b1d57f15SBarry Smith   int            fd;
2493416022c9SBarry Smith 
24943a40ed3dSBarry Smith   PetscFunctionBegin;
24951dab6e02SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
24961dab6e02SBarry Smith   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
249717699dbbSLois Curfman McInnes   if (!rank) {
2498b0a32e0cSBarry Smith     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
24990752156aSBarry Smith     ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
2500552e946dSBarry Smith     if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
25016c5fab8fSBarry Smith   }
25026c5fab8fSBarry Smith 
2503b1d57f15SBarry Smith   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
2504416022c9SBarry Smith   M = header[1]; N = header[2];
2505416022c9SBarry Smith   /* determine ownership of all rows */
250629cdbbc8SSatish Balay   m    = M/size + ((M % size) > rank);
2507dc231df0SBarry Smith   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
2508dc231df0SBarry Smith   ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
2509167e7480SBarry Smith 
2510167e7480SBarry Smith   /* First process needs enough room for process with most rows */
2511167e7480SBarry Smith   if (!rank) {
2512167e7480SBarry Smith     mmax       = rowners[1];
2513167e7480SBarry Smith     for (i=2; i<size; i++) {
2514167e7480SBarry Smith       mmax = PetscMax(mmax,rowners[i]);
2515167e7480SBarry Smith     }
2516167e7480SBarry Smith   } else mmax = m;
2517167e7480SBarry Smith 
2518416022c9SBarry Smith   rowners[0] = 0;
251917699dbbSLois Curfman McInnes   for (i=2; i<=size; i++) {
2520416022c9SBarry Smith     rowners[i] += rowners[i-1];
2521416022c9SBarry Smith   }
252217699dbbSLois Curfman McInnes   rstart = rowners[rank];
252317699dbbSLois Curfman McInnes   rend   = rowners[rank+1];
2524416022c9SBarry Smith 
2525416022c9SBarry Smith   /* distribute row lengths to all processors */
2526167e7480SBarry Smith   ierr    = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr);
252717699dbbSLois Curfman McInnes   if (!rank) {
2528dc231df0SBarry Smith     ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr);
2529dc231df0SBarry Smith     ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
2530b1d57f15SBarry Smith     ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
2531b1d57f15SBarry Smith     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
2532dc231df0SBarry Smith     for (j=0; j<m; j++) {
2533dc231df0SBarry Smith       procsnz[0] += ourlens[j];
2534dc231df0SBarry Smith     }
2535dc231df0SBarry Smith     for (i=1; i<size; i++) {
2536dc231df0SBarry Smith       ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr);
2537dc231df0SBarry Smith       /* calculate the number of nonzeros on each processor */
2538dc231df0SBarry Smith       for (j=0; j<rowners[i+1]-rowners[i]; j++) {
2539416022c9SBarry Smith         procsnz[i] += rowlengths[j];
2540416022c9SBarry Smith       }
2541dc231df0SBarry Smith       ierr = MPI_Send(rowlengths,rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
2542416022c9SBarry Smith     }
2543606d414cSSatish Balay     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
2544dc231df0SBarry Smith   } else {
2545dc231df0SBarry Smith     ierr = MPI_Recv(ourlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
2546dc231df0SBarry Smith   }
2547416022c9SBarry Smith 
2548dc231df0SBarry Smith   if (!rank) {
2549416022c9SBarry Smith     /* determine max buffer needed and allocate it */
2550416022c9SBarry Smith     maxnz = 0;
25518a8e0b3aSBarry Smith     for (i=0; i<size; i++) {
25520452661fSBarry Smith       maxnz = PetscMax(maxnz,procsnz[i]);
2553416022c9SBarry Smith     }
2554b1d57f15SBarry Smith     ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);
2555416022c9SBarry Smith 
2556416022c9SBarry Smith     /* read in my part of the matrix column indices  */
2557416022c9SBarry Smith     nz   = procsnz[0];
2558b1d57f15SBarry Smith     ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
25590752156aSBarry Smith     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
2560d65a2f8fSBarry Smith 
2561d65a2f8fSBarry Smith     /* read in every one elses and ship off */
256217699dbbSLois Curfman McInnes     for (i=1; i<size; i++) {
2563d65a2f8fSBarry Smith       nz   = procsnz[i];
25640752156aSBarry Smith       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
2565b1d57f15SBarry Smith       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
2566d65a2f8fSBarry Smith     }
2567606d414cSSatish Balay     ierr = PetscFree(cols);CHKERRQ(ierr);
25683a40ed3dSBarry Smith   } else {
2569416022c9SBarry Smith     /* determine buffer space needed for message */
2570416022c9SBarry Smith     nz = 0;
2571416022c9SBarry Smith     for (i=0; i<m; i++) {
2572416022c9SBarry Smith       nz += ourlens[i];
2573416022c9SBarry Smith     }
2574dc231df0SBarry Smith     ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
2575416022c9SBarry Smith 
2576416022c9SBarry Smith     /* receive message of column indices*/
2577b1d57f15SBarry Smith     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
2578b1d57f15SBarry Smith     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
257929bbc08cSBarry Smith     if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
2580416022c9SBarry Smith   }
2581416022c9SBarry Smith 
2582b362ba68SBarry Smith   /* determine column ownership if matrix is not square */
2583b362ba68SBarry Smith   if (N != M) {
2584b362ba68SBarry Smith     n      = N/size + ((N % size) > rank);
2585b1d57f15SBarry Smith     ierr   = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2586b362ba68SBarry Smith     cstart = cend - n;
2587b362ba68SBarry Smith   } else {
2588b362ba68SBarry Smith     cstart = rstart;
2589b362ba68SBarry Smith     cend   = rend;
2590fb2e594dSBarry Smith     n      = cend - cstart;
2591b362ba68SBarry Smith   }
2592b362ba68SBarry Smith 
2593416022c9SBarry Smith   /* loop over local rows, determining number of off diagonal entries */
2594b1d57f15SBarry Smith   ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr);
2595416022c9SBarry Smith   jj = 0;
2596416022c9SBarry Smith   for (i=0; i<m; i++) {
2597416022c9SBarry Smith     for (j=0; j<ourlens[i]; j++) {
2598b362ba68SBarry Smith       if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
2599416022c9SBarry Smith       jj++;
2600416022c9SBarry Smith     }
2601416022c9SBarry Smith   }
2602d65a2f8fSBarry Smith 
2603d65a2f8fSBarry Smith   /* create our matrix */
2604416022c9SBarry Smith   for (i=0; i<m; i++) {
2605416022c9SBarry Smith     ourlens[i] -= offlens[i];
2606416022c9SBarry Smith   }
2607f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,&A);CHKERRQ(ierr);
2608f69a0ea3SMatthew Knepley   ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr);
2609d10c748bSKris Buschelman   ierr = MatSetType(A,type);CHKERRQ(ierr);
2610d10c748bSKris Buschelman   ierr = MatMPIAIJSetPreallocation(A,0,ourlens,0,offlens);CHKERRQ(ierr);
2611d10c748bSKris Buschelman 
2612d65a2f8fSBarry Smith   for (i=0; i<m; i++) {
2613d65a2f8fSBarry Smith     ourlens[i] += offlens[i];
2614d65a2f8fSBarry Smith   }
2615416022c9SBarry Smith 
261617699dbbSLois Curfman McInnes   if (!rank) {
2617906b51c7SHong Zhang     ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
2618416022c9SBarry Smith 
2619416022c9SBarry Smith     /* read in my part of the matrix numerical values  */
2620416022c9SBarry Smith     nz   = procsnz[0];
26210752156aSBarry Smith     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2622d65a2f8fSBarry Smith 
2623d65a2f8fSBarry Smith     /* insert into matrix */
2624d65a2f8fSBarry Smith     jj      = rstart;
2625d65a2f8fSBarry Smith     smycols = mycols;
2626d65a2f8fSBarry Smith     svals   = vals;
2627d65a2f8fSBarry Smith     for (i=0; i<m; i++) {
2628dc231df0SBarry Smith       ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
2629d65a2f8fSBarry Smith       smycols += ourlens[i];
2630d65a2f8fSBarry Smith       svals   += ourlens[i];
2631d65a2f8fSBarry Smith       jj++;
2632416022c9SBarry Smith     }
2633416022c9SBarry Smith 
2634d65a2f8fSBarry Smith     /* read in other processors and ship out */
263517699dbbSLois Curfman McInnes     for (i=1; i<size; i++) {
2636416022c9SBarry Smith       nz   = procsnz[i];
26370752156aSBarry Smith       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
26387adad957SLisandro Dalcin       ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr);
2639416022c9SBarry Smith     }
2640606d414cSSatish Balay     ierr = PetscFree(procsnz);CHKERRQ(ierr);
26413a40ed3dSBarry Smith   } else {
2642d65a2f8fSBarry Smith     /* receive numeric values */
264387828ca2SBarry Smith     ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
2644416022c9SBarry Smith 
2645d65a2f8fSBarry Smith     /* receive message of values*/
26467adad957SLisandro Dalcin     ierr = MPI_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr);
2647ca161407SBarry Smith     ierr = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr);
264829bbc08cSBarry Smith     if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
2649d65a2f8fSBarry Smith 
2650d65a2f8fSBarry Smith     /* insert into matrix */
2651d65a2f8fSBarry Smith     jj      = rstart;
2652d65a2f8fSBarry Smith     smycols = mycols;
2653d65a2f8fSBarry Smith     svals   = vals;
2654d65a2f8fSBarry Smith     for (i=0; i<m; i++) {
2655dc231df0SBarry Smith       ierr     = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
2656d65a2f8fSBarry Smith       smycols += ourlens[i];
2657d65a2f8fSBarry Smith       svals   += ourlens[i];
2658d65a2f8fSBarry Smith       jj++;
2659d65a2f8fSBarry Smith     }
2660d65a2f8fSBarry Smith   }
2661dc231df0SBarry Smith   ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr);
2662606d414cSSatish Balay   ierr = PetscFree(vals);CHKERRQ(ierr);
2663606d414cSSatish Balay   ierr = PetscFree(mycols);CHKERRQ(ierr);
2664606d414cSSatish Balay   ierr = PetscFree(rowners);CHKERRQ(ierr);
2665d65a2f8fSBarry Smith 
26666d4a8577SBarry Smith   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
26676d4a8577SBarry Smith   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2668d10c748bSKris Buschelman   *newmat = A;
26693a40ed3dSBarry Smith   PetscFunctionReturn(0);
2670416022c9SBarry Smith }
2671a0ff6018SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAIJ"
/*
    Not great since it makes two copies of the submatrix, first an SeqAIJ
  in local and then by concatenating the local matrices the end result.
  Writing it directly would be much like MatGetSubMatrices_MPIAIJ()

    Extracts the parallel submatrix of mat selected by the row index set isrow
  and column index set iscol into *newmat.  csize is the requested number of
  local columns of the result (or PETSC_DECIDE); call is MAT_INITIAL_MATRIX to
  create the result or MAT_REUSE_MATRIX to refill a previously created one.
  Collective: contains MPI_Scan and MatGetSubMatrices(), so all ranks of the
  matrix communicator must participate with the same value of call.
*/
PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j;
  PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal;
  Mat            *local,M,Mreuse;          /* Mreuse: sequential copy of the requested rows/cols */
  PetscScalar    *vwork,*aa;
  MPI_Comm       comm = ((PetscObject)mat)->comm;
  Mat_SeqAIJ     *aij;


  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (call ==  MAT_REUSE_MATRIX) {
    /* Recover the sequential submatrix cached on *newmat by a previous
       MAT_INITIAL_MATRIX call (composed below under the key "SubMatrix")
       and refill it in place. */
    ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr);
    if (!Mreuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
    local = &Mreuse;
    ierr  = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr);
  } else {
    /* First time: gather the requested rows/columns into a fresh sequential matrix */
    ierr   = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr);
    Mreuse = *local;
    ierr   = PetscFree(local);CHKERRQ(ierr);  /* free only the one-element array of Mat, not the matrix */
  }

  /*
      m - number of local rows
      n - number of columns (same on all processors)
      rstart - first row in new global matrix generated
  */
  ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
  if (call == MAT_INITIAL_MATRIX) {
    aij = (Mat_SeqAIJ*)(Mreuse)->data;
    ii  = aij->i;
    jj  = aij->j;

    /*
        Determine the number of non-zeros in the diagonal and off-diagonal
        portions of the matrix in order to do correct preallocation
    */

    /* first get start and end of "diagonal" columns */
    if (csize == PETSC_DECIDE) {
      ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
      if (mglobal == n) { /* square matrix */
        nlocal = m;
      } else {
        /* spread the n columns as evenly as possible over the ranks */
        nlocal = n/size + ((n % size) > rank);
      }
    } else {
      nlocal = csize;
    }
    /* prefix-sum of local column counts gives this rank's column ownership range */
    ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart = rend - nlocal;
    if (rank == size - 1 && rend != n) {
      SETERRQ2(PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
    }

    /* next, compute all the lengths */
    ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
    olens = dlens + m;   /* olens shares the single allocation with dlens */
    for (i=0; i<m; i++) {
      jend = ii[i+1] - ii[i];
      olen = 0;
      dlen = 0;
      for (j=0; j<jend; j++) {
        /* columns outside [rstart,rend) belong to the off-diagonal block */
        if (*jj < rstart || *jj >= rend) olen++;
        else dlen++;
        jj++;
      }
      olens[i] = olen;
      dlens[i] = dlen;
    }
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr);
    ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr);
    ierr = PetscFree(dlens);CHKERRQ(ierr);
  } else {
    PetscInt ml,nl;

    M = *newmat;
    ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
    if (ml != m) SETERRQ(PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
    ierr = MatZeroEntries(M);CHKERRQ(ierr);
    /*
         The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
       rather than the slower MatSetValues().
    */
    M->was_assembled = PETSC_TRUE;
    M->assembled     = PETSC_FALSE;
  }
  /* Copy the rows of the sequential submatrix into the parallel result,
     one row at a time straight out of the SeqAIJ CSR arrays. */
  ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
  aij = (Mat_SeqAIJ*)(Mreuse)->data;
  ii  = aij->i;
  jj  = aij->j;
  aa  = aij->a;
  for (i=0; i<m; i++) {
    row   = rstart + i;
    nz    = ii[i+1] - ii[i];
    cwork = jj;     jj += nz;
    vwork = aa;     aa += nz;
    ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
  }

  ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = M;

  /* save submatrix used in processor for next request */
  if (call ==  MAT_INITIAL_MATRIX) {
    /* M takes ownership of Mreuse: compose bumps the reference count,
       the dereference drops our own reference */
    ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
2798273d9f13SBarry Smith 
2799e2e86b8fSSatish Balay EXTERN_C_BEGIN
28004a2ae208SSatish Balay #undef __FUNCT__
2801ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ"
2802b7940d39SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[])
2803ccd8e176SBarry Smith {
2804899cda47SBarry Smith   PetscInt       m,cstart, cend,j,nnz,i,d;
2805899cda47SBarry Smith   PetscInt       *d_nnz,*o_nnz,nnz_max = 0,rstart,ii;
2806ccd8e176SBarry Smith   const PetscInt *JJ;
2807ccd8e176SBarry Smith   PetscScalar    *values;
2808ccd8e176SBarry Smith   PetscErrorCode ierr;
2809ccd8e176SBarry Smith 
2810ccd8e176SBarry Smith   PetscFunctionBegin;
2811b7940d39SSatish Balay   if (Ii[0]) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]);
2812899cda47SBarry Smith 
2813899cda47SBarry Smith   B->rmap.bs = B->cmap.bs = 1;
28146148ca0dSBarry Smith   ierr = PetscMapSetUp(&B->rmap);CHKERRQ(ierr);
28156148ca0dSBarry Smith   ierr = PetscMapSetUp(&B->cmap);CHKERRQ(ierr);
2816899cda47SBarry Smith   m      = B->rmap.n;
2817899cda47SBarry Smith   cstart = B->cmap.rstart;
2818899cda47SBarry Smith   cend   = B->cmap.rend;
2819899cda47SBarry Smith   rstart = B->rmap.rstart;
2820899cda47SBarry Smith 
2821ccd8e176SBarry Smith   ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
2822ccd8e176SBarry Smith   o_nnz = d_nnz + m;
2823ccd8e176SBarry Smith 
2824ccd8e176SBarry Smith   for (i=0; i<m; i++) {
2825b7940d39SSatish Balay     nnz     = Ii[i+1]- Ii[i];
2826b7940d39SSatish Balay     JJ      = J + Ii[i];
2827ccd8e176SBarry Smith     nnz_max = PetscMax(nnz_max,nnz);
2828a1661176SMatthew Knepley     if (nnz < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz);
2829ccd8e176SBarry Smith     for (j=0; j<nnz; j++) {
2830ccd8e176SBarry Smith       if (*JJ >= cstart) break;
2831ccd8e176SBarry Smith       JJ++;
2832ccd8e176SBarry Smith     }
2833ccd8e176SBarry Smith     d = 0;
2834ccd8e176SBarry Smith     for (; j<nnz; j++) {
2835ccd8e176SBarry Smith       if (*JJ++ >= cend) break;
2836ccd8e176SBarry Smith       d++;
2837ccd8e176SBarry Smith     }
2838ccd8e176SBarry Smith     d_nnz[i] = d;
2839ccd8e176SBarry Smith     o_nnz[i] = nnz - d;
2840ccd8e176SBarry Smith   }
2841ccd8e176SBarry Smith   ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2842ccd8e176SBarry Smith   ierr = PetscFree(d_nnz);CHKERRQ(ierr);
2843ccd8e176SBarry Smith 
2844ccd8e176SBarry Smith   if (v) values = (PetscScalar*)v;
2845ccd8e176SBarry Smith   else {
2846ccd8e176SBarry Smith     ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr);
2847ccd8e176SBarry Smith     ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2848ccd8e176SBarry Smith   }
2849ccd8e176SBarry Smith 
2850ccd8e176SBarry Smith   for (i=0; i<m; i++) {
2851ccd8e176SBarry Smith     ii   = i + rstart;
2852b7940d39SSatish Balay     nnz  = Ii[i+1]- Ii[i];
2853b7940d39SSatish Balay     ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr);
2854ccd8e176SBarry Smith   }
2855ccd8e176SBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2856ccd8e176SBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2857ccd8e176SBarry Smith 
2858ccd8e176SBarry Smith   if (!v) {
2859ccd8e176SBarry Smith     ierr = PetscFree(values);CHKERRQ(ierr);
2860ccd8e176SBarry Smith   }
2861ccd8e176SBarry Smith   PetscFunctionReturn(0);
2862ccd8e176SBarry Smith }
2863e2e86b8fSSatish Balay EXTERN_C_END
2864ccd8e176SBarry Smith 
2865ccd8e176SBarry Smith #undef __FUNCT__
2866ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR"
28671eea217eSSatish Balay /*@
2868ccd8e176SBarry Smith    MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format
2869ccd8e176SBarry Smith    (the default parallel PETSc format).
2870ccd8e176SBarry Smith 
2871ccd8e176SBarry Smith    Collective on MPI_Comm
2872ccd8e176SBarry Smith 
2873ccd8e176SBarry Smith    Input Parameters:
2874a1661176SMatthew Knepley +  B - the matrix
2875ccd8e176SBarry Smith .  i - the indices into j for the start of each local row (starts with zero)
2876ccd8e176SBarry Smith .  j - the column indices for each local row (starts with zero) these must be sorted for each row
2877ccd8e176SBarry Smith -  v - optional values in the matrix
2878ccd8e176SBarry Smith 
2879ccd8e176SBarry Smith    Level: developer
2880ccd8e176SBarry Smith 
28812fb0ec9aSBarry Smith    Notes: this actually copies the values from i[], j[], and a[] to put them into PETSc's internal
28822fb0ec9aSBarry Smith      storage format. Thus changing the values in a[] after this call will not effect the matrix values.
28832fb0ec9aSBarry Smith 
2884ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
2885ccd8e176SBarry Smith 
28862fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ,
28878d7a6e47SBarry Smith           MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays()
2888ccd8e176SBarry Smith @*/
2889be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2890ccd8e176SBarry Smith {
2891ccd8e176SBarry Smith   PetscErrorCode ierr,(*f)(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
2892ccd8e176SBarry Smith 
2893ccd8e176SBarry Smith   PetscFunctionBegin;
2894ccd8e176SBarry Smith   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",(void (**)(void))&f);CHKERRQ(ierr);
2895ccd8e176SBarry Smith   if (f) {
2896ccd8e176SBarry Smith     ierr = (*f)(B,i,j,v);CHKERRQ(ierr);
2897ccd8e176SBarry Smith   }
2898ccd8e176SBarry Smith   PetscFunctionReturn(0);
2899ccd8e176SBarry Smith }
2900ccd8e176SBarry Smith 
2901ccd8e176SBarry Smith #undef __FUNCT__
29024a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation"
2903273d9f13SBarry Smith /*@C
2904ccd8e176SBarry Smith    MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format
2905273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
2906273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
2907273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2908273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
2909273d9f13SBarry Smith 
2910273d9f13SBarry Smith    Collective on MPI_Comm
2911273d9f13SBarry Smith 
2912273d9f13SBarry Smith    Input Parameters:
2913273d9f13SBarry Smith +  A - the matrix
2914273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
2915273d9f13SBarry Smith            (same value is used for all local rows)
2916273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
2917273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
2918273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
2919273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
2920273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
2921273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
2922273d9f13SBarry Smith            submatrix (same value is used for all local rows).
2923273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
2924273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
2925273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
2926273d9f13SBarry Smith            structure. The size of this array is equal to the number
2927273d9f13SBarry Smith            of local rows, i.e 'm'.
2928273d9f13SBarry Smith 
292949a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
293049a6f317SBarry Smith 
2931273d9f13SBarry Smith    The AIJ format (also called the Yale sparse matrix format or
2932ccd8e176SBarry Smith    compressed row storage (CSR)), is fully compatible with standard Fortran 77
2933ccd8e176SBarry Smith    storage.  The stored row and column indices begin with zero.  See the users manual for details.
2934273d9f13SBarry Smith 
2935273d9f13SBarry Smith    The parallel matrix is partitioned such that the first m0 rows belong to
2936273d9f13SBarry Smith    process 0, the next m1 rows belong to process 1, the next m2 rows belong
2937273d9f13SBarry Smith    to process 2 etc.. where m0,m1,m2... are the input parameter 'm'.
2938273d9f13SBarry Smith 
2939273d9f13SBarry Smith    The DIAGONAL portion of the local submatrix of a processor can be defined
2940273d9f13SBarry Smith    as the submatrix which is obtained by extraction the part corresponding
2941273d9f13SBarry Smith    to the rows r1-r2 and columns r1-r2 of the global matrix, where r1 is the
2942273d9f13SBarry Smith    first row that belongs to the processor, and r2 is the last row belonging
2943273d9f13SBarry Smith    to the this processor. This is a square mxm matrix. The remaining portion
2944273d9f13SBarry Smith    of the local submatrix (mxN) constitute the OFF-DIAGONAL portion.
2945273d9f13SBarry Smith 
2946273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
2947273d9f13SBarry Smith 
2948273d9f13SBarry Smith    Example usage:
2949273d9f13SBarry Smith 
2950273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
2951273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
2952273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
2953273d9f13SBarry Smith    as follows:
2954273d9f13SBarry Smith 
2955273d9f13SBarry Smith .vb
2956273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
2957273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
2958273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
2959273d9f13SBarry Smith     -------------------------------------
2960273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
2961273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
2962273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
2963273d9f13SBarry Smith     -------------------------------------
2964273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
2965273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
2966273d9f13SBarry Smith .ve
2967273d9f13SBarry Smith 
2968273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
2969273d9f13SBarry Smith 
2970273d9f13SBarry Smith .vb
2971273d9f13SBarry Smith       A B C
2972273d9f13SBarry Smith       D E F
2973273d9f13SBarry Smith       G H I
2974273d9f13SBarry Smith .ve
2975273d9f13SBarry Smith 
2976273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
2977273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
2978273d9f13SBarry Smith 
2979273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
2980273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
2981273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
2982273d9f13SBarry Smith 
2983273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
2984273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
2985273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
2986273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
2987273d9f13SBarry Smith    part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
2988273d9f13SBarry Smith    matrix, ans [DF] as another SeqAIJ matrix.
2989273d9f13SBarry Smith 
2990273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
2991273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
2992273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
2993273d9f13SBarry Smith    One way to choose d_nz and o_nz is to use the max nonzerors per local
2994273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
2995273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
2996273d9f13SBarry Smith .vb
2997273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
2998273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
2999273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3000273d9f13SBarry Smith .ve
3001273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
3002273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
3003273d9f13SBarry Smith    for proc3. i.e we are using 12+15+10=37 storage locations to store
3004273d9f13SBarry Smith    34 values.
3005273d9f13SBarry Smith 
3006273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
3007273d9f13SBarry Smith    for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3008273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3009273d9f13SBarry Smith .vb
3010273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3011273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3012273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3013273d9f13SBarry Smith .ve
3014273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3015273d9f13SBarry Smith    hence pre-allocation is perfect.
3016273d9f13SBarry Smith 
3017273d9f13SBarry Smith    Level: intermediate
3018273d9f13SBarry Smith 
3019273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3020273d9f13SBarry Smith 
3021ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(),
3022ccd8e176SBarry Smith           MPIAIJ
3023273d9f13SBarry Smith @*/
3024be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3025273d9f13SBarry Smith {
3026b1d57f15SBarry Smith   PetscErrorCode ierr,(*f)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
3027273d9f13SBarry Smith 
3028273d9f13SBarry Smith   PetscFunctionBegin;
3029a23d5eceSKris Buschelman   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
3030a23d5eceSKris Buschelman   if (f) {
3031a23d5eceSKris Buschelman     ierr = (*f)(B,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3032273d9f13SBarry Smith   }
3033273d9f13SBarry Smith   PetscFunctionReturn(0);
3034273d9f13SBarry Smith }
3035273d9f13SBarry Smith 
30364a2ae208SSatish Balay #undef __FUNCT__
30372fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays"
30382fb0ec9aSBarry Smith /*@C
30392fb0ec9aSBarry Smith      MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard
30402fb0ec9aSBarry Smith          CSR format the local rows.
30412fb0ec9aSBarry Smith 
30422fb0ec9aSBarry Smith    Collective on MPI_Comm
30432fb0ec9aSBarry Smith 
30442fb0ec9aSBarry Smith    Input Parameters:
30452fb0ec9aSBarry Smith +  comm - MPI communicator
30462fb0ec9aSBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
30472fb0ec9aSBarry Smith .  n - This value should be the same as the local size used in creating the
30482fb0ec9aSBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
30492fb0ec9aSBarry Smith        calculated if N is given) For square matrices n is almost always m.
30502fb0ec9aSBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
30512fb0ec9aSBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
30522fb0ec9aSBarry Smith .   i - row indices
30532fb0ec9aSBarry Smith .   j - column indices
30542fb0ec9aSBarry Smith -   a - matrix values
30552fb0ec9aSBarry Smith 
30562fb0ec9aSBarry Smith    Output Parameter:
30572fb0ec9aSBarry Smith .   mat - the matrix
305803bfb495SBarry Smith 
30592fb0ec9aSBarry Smith    Level: intermediate
30602fb0ec9aSBarry Smith 
30612fb0ec9aSBarry Smith    Notes:
30622fb0ec9aSBarry Smith        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
30632fb0ec9aSBarry Smith      thus you CANNOT change the matrix entries by changing the values of a[] after you have
30648d7a6e47SBarry Smith      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
30652fb0ec9aSBarry Smith 
30662fb0ec9aSBarry Smith        The i and j indices are 0 based
30672fb0ec9aSBarry Smith 
30682fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
30692fb0ec9aSBarry Smith 
30702fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
30718d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays()
30722fb0ec9aSBarry Smith @*/
307382b90586SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
30742fb0ec9aSBarry Smith {
30752fb0ec9aSBarry Smith   PetscErrorCode ierr;
30762fb0ec9aSBarry Smith 
30772fb0ec9aSBarry Smith  PetscFunctionBegin;
30782fb0ec9aSBarry Smith   if (i[0]) {
30792fb0ec9aSBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
30802fb0ec9aSBarry Smith   }
30812fb0ec9aSBarry Smith   if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
30822fb0ec9aSBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
30832fb0ec9aSBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
30842fb0ec9aSBarry Smith   ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr);
30852fb0ec9aSBarry Smith   PetscFunctionReturn(0);
30862fb0ec9aSBarry Smith }
30872fb0ec9aSBarry Smith 
30882fb0ec9aSBarry Smith #undef __FUNCT__
30894a2ae208SSatish Balay #define __FUNCT__ "MatCreateMPIAIJ"
3090273d9f13SBarry Smith /*@C
3091273d9f13SBarry Smith    MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format
3092273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3093273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3094273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3095273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3096273d9f13SBarry Smith 
3097273d9f13SBarry Smith    Collective on MPI_Comm
3098273d9f13SBarry Smith 
3099273d9f13SBarry Smith    Input Parameters:
3100273d9f13SBarry Smith +  comm - MPI communicator
3101273d9f13SBarry Smith .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3102273d9f13SBarry Smith            This value should be the same as the local size used in creating the
3103273d9f13SBarry Smith            y vector for the matrix-vector product y = Ax.
3104273d9f13SBarry Smith .  n - This value should be the same as the local size used in creating the
3105273d9f13SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3106273d9f13SBarry Smith        calculated if N is given) For square matrices n is almost always m.
3107273d9f13SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3108273d9f13SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3109273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3110273d9f13SBarry Smith            (same value is used for all local rows)
3111273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3112273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3113273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3114273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3115273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3116273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
3117273d9f13SBarry Smith            submatrix (same value is used for all local rows).
3118273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
3119273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
3120273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
3121273d9f13SBarry Smith            structure. The size of this array is equal to the number
3122273d9f13SBarry Smith            of local rows, i.e 'm'.
3123273d9f13SBarry Smith 
3124273d9f13SBarry Smith    Output Parameter:
3125273d9f13SBarry Smith .  A - the matrix
3126273d9f13SBarry Smith 
3127175b88e8SBarry Smith    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3128175b88e8SBarry Smith    MatXXXXSetPreallocation() paradigm instead of this routine directly. This is definitely
3129175b88e8SBarry Smith    true if you plan to use the external direct solvers such as SuperLU, MUMPS or Spooles.
3130175b88e8SBarry Smith    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3131175b88e8SBarry Smith 
3132273d9f13SBarry Smith    Notes:
313349a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
313449a6f317SBarry Smith 
3135273d9f13SBarry Smith    m,n,M,N parameters specify the size of the matrix, and its partitioning across
3136273d9f13SBarry Smith    processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate
3137273d9f13SBarry Smith    storage requirements for this matrix.
3138273d9f13SBarry Smith 
3139273d9f13SBarry Smith    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one
3140273d9f13SBarry Smith    processor than it must be used on all processors that share the object for
3141273d9f13SBarry Smith    that argument.
3142273d9f13SBarry Smith 
3143273d9f13SBarry Smith    The user MUST specify either the local or global matrix dimensions
3144273d9f13SBarry Smith    (possibly both).
3145273d9f13SBarry Smith 
314633a7c187SSatish Balay    The parallel matrix is partitioned across processors such that the
314733a7c187SSatish Balay    first m0 rows belong to process 0, the next m1 rows belong to
314833a7c187SSatish Balay    process 1, the next m2 rows belong to process 2 etc.. where
314933a7c187SSatish Balay    m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores
315033a7c187SSatish Balay    values corresponding to [m x N] submatrix.
3151273d9f13SBarry Smith 
315233a7c187SSatish Balay    The columns are logically partitioned with the n0 columns belonging
315333a7c187SSatish Balay    to 0th partition, the next n1 columns belonging to the next
315433a7c187SSatish Balay    partition etc.. where n0,n1,n2... are the input parameter 'n'.
315533a7c187SSatish Balay 
315633a7c187SSatish Balay    The DIAGONAL portion of the local submatrix on any given processor
315733a7c187SSatish Balay    is the submatrix corresponding to the rows and columns m,n
315833a7c187SSatish Balay    corresponding to the given processor. i.e diagonal matrix on
315933a7c187SSatish Balay    process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1]
316033a7c187SSatish Balay    etc. The remaining portion of the local submatrix [m x (N-n)]
316133a7c187SSatish Balay    constitute the OFF-DIAGONAL portion. The example below better
316233a7c187SSatish Balay    illustrates this concept.
316333a7c187SSatish Balay 
316433a7c187SSatish Balay    For a square global matrix we define each processor's diagonal portion
316533a7c187SSatish Balay    to be its local rows and the corresponding columns (a square submatrix);
316633a7c187SSatish Balay    each processor's off-diagonal portion encompasses the remainder of the
316733a7c187SSatish Balay    local matrix (a rectangular submatrix).
3168273d9f13SBarry Smith 
3169273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
3170273d9f13SBarry Smith 
317197d05335SKris Buschelman    When calling this routine with a single process communicator, a matrix of
317297d05335SKris Buschelman    type SEQAIJ is returned.  If a matrix of type MPIAIJ is desired for this
317397d05335SKris Buschelman    type of communicator, use the construction mechanism:
317497d05335SKris Buschelman      MatCreate(...,&A); MatSetType(A,MPIAIJ); MatMPIAIJSetPreallocation(A,...);
317597d05335SKris Buschelman 
3176273d9f13SBarry Smith    By default, this format uses inodes (identical nodes) when possible.
3177273d9f13SBarry Smith    We search for consecutive rows with the same nonzero structure, thereby
3178273d9f13SBarry Smith    reusing matrix information to achieve increased efficiency.
3179273d9f13SBarry Smith 
3180273d9f13SBarry Smith    Options Database Keys:
3181923f20ffSKris Buschelman +  -mat_no_inode  - Do not use inodes
3182923f20ffSKris Buschelman .  -mat_inode_limit <limit> - Sets inode limit (max limit=5)
3183273d9f13SBarry Smith -  -mat_aij_oneindex - Internally use indexing starting at 1
3184273d9f13SBarry Smith         rather than 0.  Note that when calling MatSetValues(),
3185273d9f13SBarry Smith         the user still MUST index entries starting at 0!
3186273d9f13SBarry Smith 
3187273d9f13SBarry Smith 
3188273d9f13SBarry Smith    Example usage:
3189273d9f13SBarry Smith 
3190273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
3191273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
3192273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
3193273d9f13SBarry Smith    as follows:
3194273d9f13SBarry Smith 
3195273d9f13SBarry Smith .vb
3196273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
3197273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
3198273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
3199273d9f13SBarry Smith     -------------------------------------
3200273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
3201273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
3202273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
3203273d9f13SBarry Smith     -------------------------------------
3204273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
3205273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
3206273d9f13SBarry Smith .ve
3207273d9f13SBarry Smith 
3208273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
3209273d9f13SBarry Smith 
3210273d9f13SBarry Smith .vb
3211273d9f13SBarry Smith       A B C
3212273d9f13SBarry Smith       D E F
3213273d9f13SBarry Smith       G H I
3214273d9f13SBarry Smith .ve
3215273d9f13SBarry Smith 
3216273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
3217273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
3218273d9f13SBarry Smith 
3219273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3220273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3221273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
3222273d9f13SBarry Smith 
3223273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
3224273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
3225273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
3226273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
3227273d9f13SBarry Smith    part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
3228273d9f13SBarry Smith    matrix, and [DF] as another SeqAIJ matrix.
3229273d9f13SBarry Smith 
3230273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
3231273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
3232273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
3233273d9f13SBarry Smith    One way to choose d_nz and o_nz is to use the max nonzeros per local
3234273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
3235273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
3236273d9f13SBarry Smith .vb
3237273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
3238273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
3239273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3240273d9f13SBarry Smith .ve
3241273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
3242273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
3243273d9f13SBarry Smith    for proc2. i.e we are using 12+15+10=37 storage locations to store
3244273d9f13SBarry Smith    34 values.
3245273d9f13SBarry Smith 
3246273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
3247273d9f13SBarry Smith    for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3248273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3249273d9f13SBarry Smith .vb
3250273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3251273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3252273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3253273d9f13SBarry Smith .ve
3254273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3255273d9f13SBarry Smith    hence pre-allocation is perfect.
3256273d9f13SBarry Smith 
3257273d9f13SBarry Smith    Level: intermediate
3258273d9f13SBarry Smith 
3259273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3260273d9f13SBarry Smith 
3261ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
32622fb0ec9aSBarry Smith           MPIAIJ, MatCreateMPIAIJWithArrays()
3263273d9f13SBarry Smith @*/
3264be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3265273d9f13SBarry Smith {
32666849ba73SBarry Smith   PetscErrorCode ierr;
3267b1d57f15SBarry Smith   PetscMPIInt    size;
3268273d9f13SBarry Smith 
3269273d9f13SBarry Smith   PetscFunctionBegin;
3270f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3271f69a0ea3SMatthew Knepley   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3272273d9f13SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3273273d9f13SBarry Smith   if (size > 1) {
3274273d9f13SBarry Smith     ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr);
3275273d9f13SBarry Smith     ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3276273d9f13SBarry Smith   } else {
3277273d9f13SBarry Smith     ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr);
3278273d9f13SBarry Smith     ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr);
3279273d9f13SBarry Smith   }
3280273d9f13SBarry Smith   PetscFunctionReturn(0);
3281273d9f13SBarry Smith }
3282195d93cdSBarry Smith 
32834a2ae208SSatish Balay #undef __FUNCT__
32844a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ"
3285be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[])
3286195d93cdSBarry Smith {
3287195d93cdSBarry Smith   Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;
3288b1d57f15SBarry Smith 
3289195d93cdSBarry Smith   PetscFunctionBegin;
3290195d93cdSBarry Smith   *Ad     = a->A;
3291195d93cdSBarry Smith   *Ao     = a->B;
3292195d93cdSBarry Smith   *colmap = a->garray;
3293195d93cdSBarry Smith   PetscFunctionReturn(0);
3294195d93cdSBarry Smith }
3295a2243be0SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatSetColoring_MPIAIJ"
/*
   MatSetColoring_MPIAIJ - Attaches a coloring to an MPIAIJ matrix by building
   per-block colorings for the diagonal (a->A) and off-diagonal (a->B)
   sequential parts and handing each to MatSetColoring_SeqAIJ().

   For IS_COLORING_GLOBAL the colors are indexed by global column number, so
   the off-diagonal colors are gathered from all processes and then selected
   through a->garray (the global column numbers of B's local columns).
   For IS_COLORING_GHOSTED the colors are indexed by local (ghosted) number,
   so global indices are first translated through A->mapping.
*/
PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;

  PetscFunctionBegin;
  if (coloring->ctype == IS_COLORING_GLOBAL) {
    ISColoringValue *allcolors,*colors;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion */
    /* gather the full global color array so any global column can be looked up */
    ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap.n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap.n; i++) {
      /* garray[i] is the global column number of local column i of B */
      colors[i] = allcolors[a->garray[i]];
    }
    ierr = PetscFree(allcolors);CHKERRQ(ierr);
    /* ocoloring takes ownership of colors */
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap.n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
  } else if (coloring->ctype == IS_COLORING_GHOSTED) {
    ISColoringValue *colors;
    PetscInt        *larray;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    /* build the global row/column numbers owned by this process ... */
    ierr = PetscMalloc((a->A->cmap.n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap.n; i++) {
      larray[i] = i + A->cmap.rstart;
    }
    /* ... and translate them (in place) to local ghosted numbering */
    ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap.n,larray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->A->cmap.n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap.n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap.n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion */
    /* map B's global column numbers (garray) to local ghosted numbering */
    ierr = PetscMalloc((a->B->cmap.n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap.n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap.n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap.n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap.n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype);
  }

  PetscFunctionReturn(0);
}
3359a2243be0SBarry Smith 
#if defined(PETSC_HAVE_ADIC)
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesAdic_MPIAIJ"
/* Insert ADIC-differentiated values into a parallel AIJ matrix by forwarding
   to the diagonal (A) and off-diagonal (B) sequential blocks in turn. */
PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues)
{
  Mat_MPIAIJ     *mpiaij = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetValuesAdic_SeqAIJ(mpiaij->A,advalues);CHKERRQ(ierr);
  ierr = MatSetValuesAdic_SeqAIJ(mpiaij->B,advalues);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
3374779c1a83SBarry Smith 
3375779c1a83SBarry Smith #undef __FUNCT__
3376779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ"
3377b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues)
3378779c1a83SBarry Smith {
3379779c1a83SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
3380dfbe8321SBarry Smith   PetscErrorCode ierr;
3381779c1a83SBarry Smith 
3382779c1a83SBarry Smith   PetscFunctionBegin;
3383779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr);
3384779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr);
3385a2243be0SBarry Smith   PetscFunctionReturn(0);
3386a2243be0SBarry Smith }
3387c5d6d63eSBarry Smith 
3388c5d6d63eSBarry Smith #undef __FUNCT__
338951dd7536SBarry Smith #define __FUNCT__ "MatMerge"
3390c5d6d63eSBarry Smith /*@C
339151dd7536SBarry Smith       MatMerge - Creates a single large PETSc matrix by concatenating sequential
339251dd7536SBarry Smith                  matrices from each processor
3393c5d6d63eSBarry Smith 
3394c5d6d63eSBarry Smith     Collective on MPI_Comm
3395c5d6d63eSBarry Smith 
3396c5d6d63eSBarry Smith    Input Parameters:
339751dd7536SBarry Smith +    comm - the communicators the parallel matrix will live on
3398d6bb3c2dSHong Zhang .    inmat - the input sequential matrices
33990e36024fSHong Zhang .    n - number of local columns (or PETSC_DECIDE)
3400d6bb3c2dSHong Zhang -    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
340151dd7536SBarry Smith 
340251dd7536SBarry Smith    Output Parameter:
340351dd7536SBarry Smith .    outmat - the parallel matrix generated
3404c5d6d63eSBarry Smith 
34057e25d530SSatish Balay     Level: advanced
34067e25d530SSatish Balay 
3407f08fae4eSHong Zhang    Notes: The number of columns of the matrix in EACH processor MUST be the same.
3408c5d6d63eSBarry Smith 
3409c5d6d63eSBarry Smith @*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
{
  PetscErrorCode ierr;
  PetscInt       m,N,i,rstart,nnz,Ii,*dnz,*onz;
  PetscInt       *indx;
  PetscScalar    *values;

  PetscFunctionBegin;
  /* inmat is sequential, so its global size IS its local contribution: m local
     rows, N columns (which must agree across processes -- see man page Notes) */
  ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */
    if (n == PETSC_DECIDE){
      ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
    }
    /* prefix-sum of local row counts gives this process's first global row */
    ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart -= m;

    ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
    for (i=0;i<m;i++) {
      ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr);
      ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
    }
    /* This routine will ONLY return MPIAIJ type matrix */
    ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
    ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr);
    ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  } else if (scall == MAT_REUSE_MATRIX){
    /* reuse an existing parallel matrix; only its row offset is needed */
    ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  /* copy the local rows of inmat into the owned rows of *outmat */
  for (i=0;i<m;i++) {
    ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
    Ii    = i + rstart;
    ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
  }
  /* NOTE: the input matrix is consumed -- callers must not use inmat afterwards */
  ierr = MatDestroy(inmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
3458c5d6d63eSBarry Smith 
3459c5d6d63eSBarry Smith #undef __FUNCT__
3460c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit"
3461dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile)
3462c5d6d63eSBarry Smith {
3463dfbe8321SBarry Smith   PetscErrorCode    ierr;
346432dcc486SBarry Smith   PetscMPIInt       rank;
3465b1d57f15SBarry Smith   PetscInt          m,N,i,rstart,nnz;
3466de4209c5SBarry Smith   size_t            len;
3467b1d57f15SBarry Smith   const PetscInt    *indx;
3468c5d6d63eSBarry Smith   PetscViewer       out;
3469c5d6d63eSBarry Smith   char              *name;
3470c5d6d63eSBarry Smith   Mat               B;
3471b3cc6726SBarry Smith   const PetscScalar *values;
3472c5d6d63eSBarry Smith 
3473c5d6d63eSBarry Smith   PetscFunctionBegin;
3474c5d6d63eSBarry Smith   ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr);
3475c5d6d63eSBarry Smith   ierr = MatGetSize(A,0,&N);CHKERRQ(ierr);
3476f204ca49SKris Buschelman   /* Should this be the type of the diagonal block of A? */
3477f69a0ea3SMatthew Knepley   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
3478f69a0ea3SMatthew Knepley   ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr);
3479f204ca49SKris Buschelman   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
3480f204ca49SKris Buschelman   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
3481c5d6d63eSBarry Smith   ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr);
3482c5d6d63eSBarry Smith   for (i=0;i<m;i++) {
3483c5d6d63eSBarry Smith     ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
3484c5d6d63eSBarry Smith     ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
3485c5d6d63eSBarry Smith     ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
3486c5d6d63eSBarry Smith   }
3487c5d6d63eSBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3488c5d6d63eSBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3489c5d6d63eSBarry Smith 
34907adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);
3491c5d6d63eSBarry Smith   ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr);
3492c5d6d63eSBarry Smith   ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr);
3493c5d6d63eSBarry Smith   sprintf(name,"%s.%d",outfile,rank);
3494852598b0SBarry Smith   ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr);
3495c5d6d63eSBarry Smith   ierr = PetscFree(name);
3496c5d6d63eSBarry Smith   ierr = MatView(B,out);CHKERRQ(ierr);
3497c5d6d63eSBarry Smith   ierr = PetscViewerDestroy(out);CHKERRQ(ierr);
3498c5d6d63eSBarry Smith   ierr = MatDestroy(B);CHKERRQ(ierr);
3499c5d6d63eSBarry Smith   PetscFunctionReturn(0);
3500c5d6d63eSBarry Smith }
3501e5f2cdd8SHong Zhang 
350251a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
350351a7d1a8SHong Zhang #undef __FUNCT__
350451a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI"
3505be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A)
350651a7d1a8SHong Zhang {
350751a7d1a8SHong Zhang   PetscErrorCode       ierr;
3508671beff6SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
3509776b82aeSLisandro Dalcin   PetscContainer       container;
351051a7d1a8SHong Zhang 
351151a7d1a8SHong Zhang   PetscFunctionBegin;
3512671beff6SHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
3513671beff6SHong Zhang   if (container) {
3514776b82aeSLisandro Dalcin     ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
351551a7d1a8SHong Zhang     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
35163e06a4e6SHong Zhang     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
35173e06a4e6SHong Zhang     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
351851a7d1a8SHong Zhang     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
351951a7d1a8SHong Zhang     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
352002c68681SHong Zhang     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
352102c68681SHong Zhang     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
352205b42c5fSBarry Smith     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
352305b42c5fSBarry Smith     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
352405b42c5fSBarry Smith     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
35252c72b5baSSatish Balay     ierr = PetscFree(merge->rowmap.range);CHKERRQ(ierr);
3526671beff6SHong Zhang 
3527776b82aeSLisandro Dalcin     ierr = PetscContainerDestroy(container);CHKERRQ(ierr);
3528671beff6SHong Zhang     ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr);
3529671beff6SHong Zhang   }
353051a7d1a8SHong Zhang   ierr = PetscFree(merge);CHKERRQ(ierr);
353151a7d1a8SHong Zhang 
353251a7d1a8SHong Zhang   ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
353351a7d1a8SHong Zhang   PetscFunctionReturn(0);
353451a7d1a8SHong Zhang }
353551a7d1a8SHong Zhang 
353658cb9c82SHong Zhang #include "src/mat/utils/freespace.h"
3537be0fcf8dSHong Zhang #include "petscbt.h"
353838f152feSBarry Smith static PetscEvent logkey_seqstompinum = 0;
3539e5f2cdd8SHong Zhang #undef __FUNCT__
354038f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric"
3541e5f2cdd8SHong Zhang /*@C
3542f08fae4eSHong Zhang       MatMerge_SeqsToMPI - Creates a MPIAIJ matrix by adding sequential
3543e5f2cdd8SHong Zhang                  matrices from each processor
3544e5f2cdd8SHong Zhang 
3545e5f2cdd8SHong Zhang     Collective on MPI_Comm
3546e5f2cdd8SHong Zhang 
3547e5f2cdd8SHong Zhang    Input Parameters:
3548e5f2cdd8SHong Zhang +    comm - the communicators the parallel matrix will live on
3549f08fae4eSHong Zhang .    seqmat - the input sequential matrices
35500e36024fSHong Zhang .    m - number of local rows (or PETSC_DECIDE)
35510e36024fSHong Zhang .    n - number of local columns (or PETSC_DECIDE)
3552e5f2cdd8SHong Zhang -    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
3553e5f2cdd8SHong Zhang 
3554e5f2cdd8SHong Zhang    Output Parameter:
3555f08fae4eSHong Zhang .    mpimat - the parallel matrix generated
3556e5f2cdd8SHong Zhang 
3557e5f2cdd8SHong Zhang     Level: advanced
3558e5f2cdd8SHong Zhang 
3559affca5deSHong Zhang    Notes:
3560affca5deSHong Zhang      The dimensions of the sequential matrix in each processor MUST be the same.
3561affca5deSHong Zhang      The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be
3562affca5deSHong Zhang      destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat.
3563e5f2cdd8SHong Zhang @*/
3564be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat)
356555d1abb9SHong Zhang {
356655d1abb9SHong Zhang   PetscErrorCode       ierr;
35677adad957SLisandro Dalcin   MPI_Comm             comm=((PetscObject)mpimat)->comm;
356855d1abb9SHong Zhang   Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
3569b1d57f15SBarry Smith   PetscMPIInt          size,rank,taga,*len_s;
3570899cda47SBarry Smith   PetscInt             N=mpimat->cmap.N,i,j,*owners,*ai=a->i,*aj=a->j;
3571b1d57f15SBarry Smith   PetscInt             proc,m;
3572b1d57f15SBarry Smith   PetscInt             **buf_ri,**buf_rj;
3573b1d57f15SBarry Smith   PetscInt             k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj;
3574b1d57f15SBarry Smith   PetscInt             nrows,**buf_ri_k,**nextrow,**nextai;
357555d1abb9SHong Zhang   MPI_Request          *s_waits,*r_waits;
357655d1abb9SHong Zhang   MPI_Status           *status;
3577ce805ee4SHong Zhang   MatScalar            *aa=a->a,**abuf_r,*ba_i;
357855d1abb9SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
3579776b82aeSLisandro Dalcin   PetscContainer       container;
358055d1abb9SHong Zhang 
358155d1abb9SHong Zhang   PetscFunctionBegin;
35823c2c1871SHong Zhang   if (!logkey_seqstompinum) {
35833c2c1871SHong Zhang     ierr = PetscLogEventRegister(&logkey_seqstompinum,"MatMerge_SeqsToMPINumeric",MAT_COOKIE);
35843c2c1871SHong Zhang   }
35853c2c1871SHong Zhang   ierr = PetscLogEventBegin(logkey_seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
35863c2c1871SHong Zhang 
358755d1abb9SHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
358855d1abb9SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
358955d1abb9SHong Zhang 
359055d1abb9SHong Zhang   ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
359155d1abb9SHong Zhang   if (container) {
3592776b82aeSLisandro Dalcin     ierr  = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
359355d1abb9SHong Zhang   }
359455d1abb9SHong Zhang   bi     = merge->bi;
359555d1abb9SHong Zhang   bj     = merge->bj;
359655d1abb9SHong Zhang   buf_ri = merge->buf_ri;
359755d1abb9SHong Zhang   buf_rj = merge->buf_rj;
359855d1abb9SHong Zhang 
359955d1abb9SHong Zhang   ierr   = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
3600357abbc8SBarry Smith   owners = merge->rowmap.range;
360155d1abb9SHong Zhang   len_s  = merge->len_s;
360255d1abb9SHong Zhang 
360355d1abb9SHong Zhang   /* send and recv matrix values */
360455d1abb9SHong Zhang   /*-----------------------------*/
3605357abbc8SBarry Smith   ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr);
360655d1abb9SHong Zhang   ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr);
360755d1abb9SHong Zhang 
360855d1abb9SHong Zhang   ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr);
360955d1abb9SHong Zhang   for (proc=0,k=0; proc<size; proc++){
361055d1abb9SHong Zhang     if (!len_s[proc]) continue;
361155d1abb9SHong Zhang     i = owners[proc];
361255d1abb9SHong Zhang     ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr);
361355d1abb9SHong Zhang     k++;
361455d1abb9SHong Zhang   }
361555d1abb9SHong Zhang 
36160c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);}
36170c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);}
361855d1abb9SHong Zhang   ierr = PetscFree(status);CHKERRQ(ierr);
361955d1abb9SHong Zhang 
362055d1abb9SHong Zhang   ierr = PetscFree(s_waits);CHKERRQ(ierr);
362155d1abb9SHong Zhang   ierr = PetscFree(r_waits);CHKERRQ(ierr);
362255d1abb9SHong Zhang 
362355d1abb9SHong Zhang   /* insert mat values of mpimat */
362455d1abb9SHong Zhang   /*----------------------------*/
362555d1abb9SHong Zhang   ierr = PetscMalloc(N*sizeof(MatScalar),&ba_i);CHKERRQ(ierr);
3626b1d57f15SBarry Smith   ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
362755d1abb9SHong Zhang   nextrow = buf_ri_k + merge->nrecv;
362855d1abb9SHong Zhang   nextai  = nextrow + merge->nrecv;
362955d1abb9SHong Zhang 
363055d1abb9SHong Zhang   for (k=0; k<merge->nrecv; k++){
363155d1abb9SHong Zhang     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
363255d1abb9SHong Zhang     nrows = *(buf_ri_k[k]);
363355d1abb9SHong Zhang     nextrow[k]  = buf_ri_k[k]+1;  /* next row number of k-th recved i-structure */
363455d1abb9SHong Zhang     nextai[k]   = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure  */
363555d1abb9SHong Zhang   }
363655d1abb9SHong Zhang 
363755d1abb9SHong Zhang   /* set values of ba */
3638357abbc8SBarry Smith   m = merge->rowmap.n;
363955d1abb9SHong Zhang   for (i=0; i<m; i++) {
364055d1abb9SHong Zhang     arow = owners[rank] + i;
364155d1abb9SHong Zhang     bj_i = bj+bi[i];  /* col indices of the i-th row of mpimat */
364255d1abb9SHong Zhang     bnzi = bi[i+1] - bi[i];
364355d1abb9SHong Zhang     ierr = PetscMemzero(ba_i,bnzi*sizeof(MatScalar));CHKERRQ(ierr);
364455d1abb9SHong Zhang 
364555d1abb9SHong Zhang     /* add local non-zero vals of this proc's seqmat into ba */
364655d1abb9SHong Zhang     anzi = ai[arow+1] - ai[arow];
364755d1abb9SHong Zhang     aj   = a->j + ai[arow];
364855d1abb9SHong Zhang     aa   = a->a + ai[arow];
364955d1abb9SHong Zhang     nextaj = 0;
365055d1abb9SHong Zhang     for (j=0; nextaj<anzi; j++){
365155d1abb9SHong Zhang       if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
365255d1abb9SHong Zhang         ba_i[j] += aa[nextaj++];
365355d1abb9SHong Zhang       }
365455d1abb9SHong Zhang     }
365555d1abb9SHong Zhang 
365655d1abb9SHong Zhang     /* add received vals into ba */
365755d1abb9SHong Zhang     for (k=0; k<merge->nrecv; k++){ /* k-th received message */
365855d1abb9SHong Zhang       /* i-th row */
365955d1abb9SHong Zhang       if (i == *nextrow[k]) {
366055d1abb9SHong Zhang         anzi = *(nextai[k]+1) - *nextai[k];
366155d1abb9SHong Zhang         aj   = buf_rj[k] + *(nextai[k]);
366255d1abb9SHong Zhang         aa   = abuf_r[k] + *(nextai[k]);
366355d1abb9SHong Zhang         nextaj = 0;
366455d1abb9SHong Zhang         for (j=0; nextaj<anzi; j++){
366555d1abb9SHong Zhang           if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
366655d1abb9SHong Zhang             ba_i[j] += aa[nextaj++];
366755d1abb9SHong Zhang           }
366855d1abb9SHong Zhang         }
366955d1abb9SHong Zhang         nextrow[k]++; nextai[k]++;
367055d1abb9SHong Zhang       }
367155d1abb9SHong Zhang     }
367255d1abb9SHong Zhang     ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr);
367355d1abb9SHong Zhang   }
367455d1abb9SHong Zhang   ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
367555d1abb9SHong Zhang   ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
367655d1abb9SHong Zhang 
367755d1abb9SHong Zhang   ierr = PetscFree(abuf_r);CHKERRQ(ierr);
367855d1abb9SHong Zhang   ierr = PetscFree(ba_i);CHKERRQ(ierr);
367955d1abb9SHong Zhang   ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);
36803c2c1871SHong Zhang   ierr = PetscLogEventEnd(logkey_seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
368155d1abb9SHong Zhang   PetscFunctionReturn(0);
368255d1abb9SHong Zhang }
368338f152feSBarry Smith 
36843c2c1871SHong Zhang static PetscEvent logkey_seqstompisym = 0;
368538f152feSBarry Smith #undef __FUNCT__
368638f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic"
3687be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat)
3688e5f2cdd8SHong Zhang {
3689f08fae4eSHong Zhang   PetscErrorCode       ierr;
369055a3bba9SHong Zhang   Mat                  B_mpi;
3691c2234fe3SHong Zhang   Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
3692b1d57f15SBarry Smith   PetscMPIInt          size,rank,tagi,tagj,*len_s,*len_si,*len_ri;
3693b1d57f15SBarry Smith   PetscInt             **buf_rj,**buf_ri,**buf_ri_k;
3694899cda47SBarry Smith   PetscInt             M=seqmat->rmap.n,N=seqmat->cmap.n,i,*owners,*ai=a->i,*aj=a->j;
3695b1d57f15SBarry Smith   PetscInt             len,proc,*dnz,*onz;
3696b1d57f15SBarry Smith   PetscInt             k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0;
3697b1d57f15SBarry Smith   PetscInt             nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai;
369855d1abb9SHong Zhang   MPI_Request          *si_waits,*sj_waits,*ri_waits,*rj_waits;
369958cb9c82SHong Zhang   MPI_Status           *status;
3700a1a86e44SBarry Smith   PetscFreeSpaceList   free_space=PETSC_NULL,current_space=PETSC_NULL;
3701be0fcf8dSHong Zhang   PetscBT              lnkbt;
370251a7d1a8SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
3703776b82aeSLisandro Dalcin   PetscContainer       container;
370402c68681SHong Zhang 
3705e5f2cdd8SHong Zhang   PetscFunctionBegin;
37063c2c1871SHong Zhang   if (!logkey_seqstompisym) {
37073c2c1871SHong Zhang     ierr = PetscLogEventRegister(&logkey_seqstompisym,"MatMerge_SeqsToMPISymbolic",MAT_COOKIE);
37083c2c1871SHong Zhang   }
37093c2c1871SHong Zhang   ierr = PetscLogEventBegin(logkey_seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
37103c2c1871SHong Zhang 
371138f152feSBarry Smith   /* make sure it is a PETSc comm */
371238f152feSBarry Smith   ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr);
3713e5f2cdd8SHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3714e5f2cdd8SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
371555d1abb9SHong Zhang 
371651a7d1a8SHong Zhang   ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr);
3717c2234fe3SHong Zhang   ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
3718e5f2cdd8SHong Zhang 
37196abd8857SHong Zhang   /* determine row ownership */
3720f08fae4eSHong Zhang   /*---------------------------------------------------------*/
3721b167c4dbSHong Zhang   ierr = PetscMapInitialize(comm,&merge->rowmap);CHKERRQ(ierr);
3722899cda47SBarry Smith   merge->rowmap.n = m;
3723899cda47SBarry Smith   merge->rowmap.N = M;
3724fc42d0c8SSatish Balay   merge->rowmap.bs = 1;
37256148ca0dSBarry Smith   ierr = PetscMapSetUp(&merge->rowmap);CHKERRQ(ierr);
3726b1d57f15SBarry Smith   ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr);
3727b1d57f15SBarry Smith   ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr);
372855d1abb9SHong Zhang 
3729357abbc8SBarry Smith   m      = merge->rowmap.n;
3730357abbc8SBarry Smith   M      = merge->rowmap.N;
3731357abbc8SBarry Smith   owners = merge->rowmap.range;
37326abd8857SHong Zhang 
37336abd8857SHong Zhang   /* determine the number of messages to send, their lengths */
37346abd8857SHong Zhang   /*---------------------------------------------------------*/
37353e06a4e6SHong Zhang   len_s  = merge->len_s;
373651a7d1a8SHong Zhang 
37372257cef7SHong Zhang   len = 0;  /* length of buf_si[] */
3738c2234fe3SHong Zhang   merge->nsend = 0;
3739409913e3SHong Zhang   for (proc=0; proc<size; proc++){
37402257cef7SHong Zhang     len_si[proc] = 0;
37413e06a4e6SHong Zhang     if (proc == rank){
37426abd8857SHong Zhang       len_s[proc] = 0;
37433e06a4e6SHong Zhang     } else {
374402c68681SHong Zhang       len_si[proc] = owners[proc+1] - owners[proc] + 1;
37453e06a4e6SHong Zhang       len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */
37463e06a4e6SHong Zhang     }
37473e06a4e6SHong Zhang     if (len_s[proc]) {
3748c2234fe3SHong Zhang       merge->nsend++;
37492257cef7SHong Zhang       nrows = 0;
37502257cef7SHong Zhang       for (i=owners[proc]; i<owners[proc+1]; i++){
37512257cef7SHong Zhang         if (ai[i+1] > ai[i]) nrows++;
37522257cef7SHong Zhang       }
37532257cef7SHong Zhang       len_si[proc] = 2*(nrows+1);
37542257cef7SHong Zhang       len += len_si[proc];
3755409913e3SHong Zhang     }
375658cb9c82SHong Zhang   }
3757409913e3SHong Zhang 
37582257cef7SHong Zhang   /* determine the number and length of messages to receive for ij-structure */
37592257cef7SHong Zhang   /*-------------------------------------------------------------------------*/
376051a7d1a8SHong Zhang   ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr);
376155d1abb9SHong Zhang   ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr);
3762671beff6SHong Zhang 
37633e06a4e6SHong Zhang   /* post the Irecv of j-structure */
37643e06a4e6SHong Zhang   /*-------------------------------*/
37652c72b5baSSatish Balay   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
37663e06a4e6SHong Zhang   ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr);
376702c68681SHong Zhang 
37683e06a4e6SHong Zhang   /* post the Isend of j-structure */
3769affca5deSHong Zhang   /*--------------------------------*/
37702257cef7SHong Zhang   ierr = PetscMalloc((2*merge->nsend+1)*sizeof(MPI_Request),&si_waits);CHKERRQ(ierr);
377102c68681SHong Zhang   sj_waits = si_waits + merge->nsend;
37723e06a4e6SHong Zhang 
37732257cef7SHong Zhang   for (proc=0, k=0; proc<size; proc++){
3774409913e3SHong Zhang     if (!len_s[proc]) continue;
377502c68681SHong Zhang     i = owners[proc];
3776b1d57f15SBarry Smith     ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr);
377751a7d1a8SHong Zhang     k++;
377851a7d1a8SHong Zhang   }
377951a7d1a8SHong Zhang 
37803e06a4e6SHong Zhang   /* receives and sends of j-structure are complete */
37813e06a4e6SHong Zhang   /*------------------------------------------------*/
37820c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);}
37830c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);}
378402c68681SHong Zhang 
378502c68681SHong Zhang   /* send and recv i-structure */
378602c68681SHong Zhang   /*---------------------------*/
37872c72b5baSSatish Balay   ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
378802c68681SHong Zhang   ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr);
378902c68681SHong Zhang 
3790b1d57f15SBarry Smith   ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr);
37913e06a4e6SHong Zhang   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
37922257cef7SHong Zhang   for (proc=0,k=0; proc<size; proc++){
379302c68681SHong Zhang     if (!len_s[proc]) continue;
37943e06a4e6SHong Zhang     /* form outgoing message for i-structure:
37953e06a4e6SHong Zhang          buf_si[0]:                 nrows to be sent
37963e06a4e6SHong Zhang                [1:nrows]:           row index (global)
37973e06a4e6SHong Zhang                [nrows+1:2*nrows+1]: i-structure index
37983e06a4e6SHong Zhang     */
37993e06a4e6SHong Zhang     /*-------------------------------------------*/
38002257cef7SHong Zhang     nrows = len_si[proc]/2 - 1;
38013e06a4e6SHong Zhang     buf_si_i    = buf_si + nrows+1;
38023e06a4e6SHong Zhang     buf_si[0]   = nrows;
38033e06a4e6SHong Zhang     buf_si_i[0] = 0;
38043e06a4e6SHong Zhang     nrows = 0;
38053e06a4e6SHong Zhang     for (i=owners[proc]; i<owners[proc+1]; i++){
38063e06a4e6SHong Zhang       anzi = ai[i+1] - ai[i];
38073e06a4e6SHong Zhang       if (anzi) {
38083e06a4e6SHong Zhang         buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */
38093e06a4e6SHong Zhang         buf_si[nrows+1] = i-owners[proc]; /* local row index */
38103e06a4e6SHong Zhang         nrows++;
38113e06a4e6SHong Zhang       }
38123e06a4e6SHong Zhang     }
3813b1d57f15SBarry Smith     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr);
381402c68681SHong Zhang     k++;
38152257cef7SHong Zhang     buf_si += len_si[proc];
381602c68681SHong Zhang   }
38172257cef7SHong Zhang 
38180c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);}
38190c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);}
382002c68681SHong Zhang 
3821ae15b995SBarry Smith   ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr);
38223e06a4e6SHong Zhang   for (i=0; i<merge->nrecv; i++){
3823ae15b995SBarry Smith     ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr);
38243e06a4e6SHong Zhang   }
38253e06a4e6SHong Zhang 
38263e06a4e6SHong Zhang   ierr = PetscFree(len_si);CHKERRQ(ierr);
382702c68681SHong Zhang   ierr = PetscFree(len_ri);CHKERRQ(ierr);
382802c68681SHong Zhang   ierr = PetscFree(rj_waits);CHKERRQ(ierr);
38293e06a4e6SHong Zhang   ierr = PetscFree(si_waits);CHKERRQ(ierr);
38302257cef7SHong Zhang   ierr = PetscFree(ri_waits);CHKERRQ(ierr);
38313e06a4e6SHong Zhang   ierr = PetscFree(buf_s);CHKERRQ(ierr);
3832bcc1bcd5SHong Zhang   ierr = PetscFree(status);CHKERRQ(ierr);
383358cb9c82SHong Zhang 
3834bcc1bcd5SHong Zhang   /* compute a local seq matrix in each processor */
3835bcc1bcd5SHong Zhang   /*----------------------------------------------*/
383658cb9c82SHong Zhang   /* allocate bi array and free space for accumulating nonzero column info */
3837b1d57f15SBarry Smith   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr);
383858cb9c82SHong Zhang   bi[0] = 0;
383958cb9c82SHong Zhang 
3840be0fcf8dSHong Zhang   /* create and initialize a linked list */
3841be0fcf8dSHong Zhang   nlnk = N+1;
3842be0fcf8dSHong Zhang   ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
384358cb9c82SHong Zhang 
3844bcc1bcd5SHong Zhang   /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */
384558cb9c82SHong Zhang   len = 0;
3846bcc1bcd5SHong Zhang   len  = ai[owners[rank+1]] - ai[owners[rank]];
3847a1a86e44SBarry Smith   ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr);
384858cb9c82SHong Zhang   current_space = free_space;
384958cb9c82SHong Zhang 
3850bcc1bcd5SHong Zhang   /* determine symbolic info for each local row */
3851b1d57f15SBarry Smith   ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
38523e06a4e6SHong Zhang   nextrow = buf_ri_k + merge->nrecv;
38533e06a4e6SHong Zhang   nextai  = nextrow + merge->nrecv;
38543e06a4e6SHong Zhang   for (k=0; k<merge->nrecv; k++){
38552257cef7SHong Zhang     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
38563e06a4e6SHong Zhang     nrows = *buf_ri_k[k];
38573e06a4e6SHong Zhang     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
38582257cef7SHong Zhang     nextai[k]   = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure  */
38593e06a4e6SHong Zhang   }
38602257cef7SHong Zhang 
3861bcc1bcd5SHong Zhang   ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
3862bcc1bcd5SHong Zhang   len = 0;
386358cb9c82SHong Zhang   for (i=0;i<m;i++) {
386458cb9c82SHong Zhang     bnzi   = 0;
386558cb9c82SHong Zhang     /* add local non-zero cols of this proc's seqmat into lnk */
386658cb9c82SHong Zhang     arow   = owners[rank] + i;
386758cb9c82SHong Zhang     anzi   = ai[arow+1] - ai[arow];
386858cb9c82SHong Zhang     aj     = a->j + ai[arow];
3869be0fcf8dSHong Zhang     ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
387058cb9c82SHong Zhang     bnzi += nlnk;
387158cb9c82SHong Zhang     /* add received col data into lnk */
387251a7d1a8SHong Zhang     for (k=0; k<merge->nrecv; k++){ /* k-th received message */
387355d1abb9SHong Zhang       if (i == *nextrow[k]) { /* i-th row */
38743e06a4e6SHong Zhang         anzi = *(nextai[k]+1) - *nextai[k];
38753e06a4e6SHong Zhang         aj   = buf_rj[k] + *nextai[k];
38763e06a4e6SHong Zhang         ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
38773e06a4e6SHong Zhang         bnzi += nlnk;
38783e06a4e6SHong Zhang         nextrow[k]++; nextai[k]++;
38793e06a4e6SHong Zhang       }
388058cb9c82SHong Zhang     }
3881bcc1bcd5SHong Zhang     if (len < bnzi) len = bnzi;  /* =max(bnzi) */
388258cb9c82SHong Zhang 
388358cb9c82SHong Zhang     /* if free space is not available, make more free space */
388458cb9c82SHong Zhang     if (current_space->local_remaining<bnzi) {
3885a1a86e44SBarry Smith       ierr = PetscFreeSpaceGet(current_space->total_array_size,&current_space);CHKERRQ(ierr);
388658cb9c82SHong Zhang       nspacedouble++;
388758cb9c82SHong Zhang     }
388858cb9c82SHong Zhang     /* copy data into free space, then initialize lnk */
3889be0fcf8dSHong Zhang     ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr);
3890bcc1bcd5SHong Zhang     ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr);
3891bcc1bcd5SHong Zhang 
389258cb9c82SHong Zhang     current_space->array           += bnzi;
389358cb9c82SHong Zhang     current_space->local_used      += bnzi;
389458cb9c82SHong Zhang     current_space->local_remaining -= bnzi;
389558cb9c82SHong Zhang 
389658cb9c82SHong Zhang     bi[i+1] = bi[i] + bnzi;
389758cb9c82SHong Zhang   }
3898bcc1bcd5SHong Zhang 
3899bcc1bcd5SHong Zhang   ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);
3900bcc1bcd5SHong Zhang 
3901b1d57f15SBarry Smith   ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr);
3902a1a86e44SBarry Smith   ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
3903be0fcf8dSHong Zhang   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
3904409913e3SHong Zhang 
3905bcc1bcd5SHong Zhang   /* create symbolic parallel matrix B_mpi */
3906bcc1bcd5SHong Zhang   /*---------------------------------------*/
3907f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr);
390854b84b50SHong Zhang   if (n==PETSC_DECIDE) {
3909f69a0ea3SMatthew Knepley     ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr);
391054b84b50SHong Zhang   } else {
3911f69a0ea3SMatthew Knepley     ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
391254b84b50SHong Zhang   }
3913bcc1bcd5SHong Zhang   ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr);
3914bcc1bcd5SHong Zhang   ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr);
3915bcc1bcd5SHong Zhang   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
391658cb9c82SHong Zhang 
39176abd8857SHong Zhang   /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */
39186abd8857SHong Zhang   B_mpi->assembled     = PETSC_FALSE;
3919affca5deSHong Zhang   B_mpi->ops->destroy  = MatDestroy_MPIAIJ_SeqsToMPI;
3920affca5deSHong Zhang   merge->bi            = bi;
3921affca5deSHong Zhang   merge->bj            = bj;
392202c68681SHong Zhang   merge->buf_ri        = buf_ri;
392302c68681SHong Zhang   merge->buf_rj        = buf_rj;
3924de0260b3SHong Zhang   merge->coi           = PETSC_NULL;
3925de0260b3SHong Zhang   merge->coj           = PETSC_NULL;
3926de0260b3SHong Zhang   merge->owners_co     = PETSC_NULL;
3927affca5deSHong Zhang 
3928affca5deSHong Zhang   /* attach the supporting struct to B_mpi for reuse */
3929776b82aeSLisandro Dalcin   ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
3930776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
3931affca5deSHong Zhang   ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
3932affca5deSHong Zhang   *mpimat = B_mpi;
393338f152feSBarry Smith 
393438f152feSBarry Smith   ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);
39353c2c1871SHong Zhang   ierr = PetscLogEventEnd(logkey_seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
3936e5f2cdd8SHong Zhang   PetscFunctionReturn(0);
3937e5f2cdd8SHong Zhang }
393825616d81SHong Zhang 
3939e462e02cSHong Zhang static PetscEvent logkey_seqstompi = 0;
394038f152feSBarry Smith #undef __FUNCT__
394138f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPI"
3942be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
394355d1abb9SHong Zhang {
394455d1abb9SHong Zhang   PetscErrorCode   ierr;
394555d1abb9SHong Zhang 
394655d1abb9SHong Zhang   PetscFunctionBegin;
3947e462e02cSHong Zhang   if (!logkey_seqstompi) {
3948e462e02cSHong Zhang     ierr = PetscLogEventRegister(&logkey_seqstompi,"MatMerge_SeqsToMPI",MAT_COOKIE);
3949e462e02cSHong Zhang   }
3950e462e02cSHong Zhang   ierr = PetscLogEventBegin(logkey_seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
395155d1abb9SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
395255d1abb9SHong Zhang     ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
395355d1abb9SHong Zhang   }
395455d1abb9SHong Zhang   ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
3955e462e02cSHong Zhang   ierr = PetscLogEventEnd(logkey_seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
395655d1abb9SHong Zhang   PetscFunctionReturn(0);
395755d1abb9SHong Zhang }
3958a61c8c0fSHong Zhang static PetscEvent logkey_getlocalmat = 0;
395925616d81SHong Zhang #undef __FUNCT__
396025616d81SHong Zhang #define __FUNCT__ "MatGetLocalMat"
396125616d81SHong Zhang /*@C
396232fba14fSHong Zhang      MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows
396325616d81SHong Zhang 
396432fba14fSHong Zhang     Not Collective
396525616d81SHong Zhang 
396625616d81SHong Zhang    Input Parameters:
396725616d81SHong Zhang +    A - the matrix
396825616d81SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
396925616d81SHong Zhang 
397025616d81SHong Zhang    Output Parameter:
397125616d81SHong Zhang .    A_loc - the local sequential matrix generated
397225616d81SHong Zhang 
397325616d81SHong Zhang     Level: developer
397425616d81SHong Zhang 
397525616d81SHong Zhang @*/
3976be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
397725616d81SHong Zhang {
397825616d81SHong Zhang   PetscErrorCode  ierr;
397901b7ae99SHong Zhang   Mat_MPIAIJ      *mpimat=(Mat_MPIAIJ*)A->data;
398001b7ae99SHong Zhang   Mat_SeqAIJ      *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
398101b7ae99SHong Zhang   PetscInt        *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
3982dea91ad1SHong Zhang   PetscScalar     *aa=a->a,*ba=b->a,*ca;
3983899cda47SBarry Smith   PetscInt        am=A->rmap.n,i,j,k,cstart=A->cmap.rstart;
39845a7d977cSHong Zhang   PetscInt        *ci,*cj,col,ncols_d,ncols_o,jo;
398525616d81SHong Zhang 
398625616d81SHong Zhang   PetscFunctionBegin;
3987e462e02cSHong Zhang   if (!logkey_getlocalmat) {
3988e462e02cSHong Zhang     ierr = PetscLogEventRegister(&logkey_getlocalmat,"MatGetLocalMat",MAT_COOKIE);
3989e462e02cSHong Zhang   }
3990e462e02cSHong Zhang   ierr = PetscLogEventBegin(logkey_getlocalmat,A,0,0,0);CHKERRQ(ierr);
399101b7ae99SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
3992dea91ad1SHong Zhang     ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
3993dea91ad1SHong Zhang     ci[0] = 0;
399401b7ae99SHong Zhang     for (i=0; i<am; i++){
3995dea91ad1SHong Zhang       ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
399601b7ae99SHong Zhang     }
3997dea91ad1SHong Zhang     ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
3998dea91ad1SHong Zhang     ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
3999dea91ad1SHong Zhang     k = 0;
400001b7ae99SHong Zhang     for (i=0; i<am; i++) {
40015a7d977cSHong Zhang       ncols_o = bi[i+1] - bi[i];
40025a7d977cSHong Zhang       ncols_d = ai[i+1] - ai[i];
400301b7ae99SHong Zhang       /* off-diagonal portion of A */
40045a7d977cSHong Zhang       for (jo=0; jo<ncols_o; jo++) {
40055a7d977cSHong Zhang         col = cmap[*bj];
40065a7d977cSHong Zhang         if (col >= cstart) break;
40075a7d977cSHong Zhang         cj[k]   = col; bj++;
40085a7d977cSHong Zhang         ca[k++] = *ba++;
40095a7d977cSHong Zhang       }
40105a7d977cSHong Zhang       /* diagonal portion of A */
40115a7d977cSHong Zhang       for (j=0; j<ncols_d; j++) {
40125a7d977cSHong Zhang         cj[k]   = cstart + *aj++;
40135a7d977cSHong Zhang         ca[k++] = *aa++;
40145a7d977cSHong Zhang       }
40155a7d977cSHong Zhang       /* off-diagonal portion of A */
40165a7d977cSHong Zhang       for (j=jo; j<ncols_o; j++) {
40175a7d977cSHong Zhang         cj[k]   = cmap[*bj++];
40185a7d977cSHong Zhang         ca[k++] = *ba++;
40195a7d977cSHong Zhang       }
402025616d81SHong Zhang     }
4021dea91ad1SHong Zhang     /* put together the new matrix */
4022899cda47SBarry Smith     ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap.N,ci,cj,ca,A_loc);CHKERRQ(ierr);
4023dea91ad1SHong Zhang     /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
4024dea91ad1SHong Zhang     /* Since these are PETSc arrays, change flags to free them as necessary. */
4025dea91ad1SHong Zhang     mat          = (Mat_SeqAIJ*)(*A_loc)->data;
4026e6b907acSBarry Smith     mat->free_a  = PETSC_TRUE;
4027e6b907acSBarry Smith     mat->free_ij = PETSC_TRUE;
4028dea91ad1SHong Zhang     mat->nonew   = 0;
40295a7d977cSHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
40305a7d977cSHong Zhang     mat=(Mat_SeqAIJ*)(*A_loc)->data;
40315a7d977cSHong Zhang     ci = mat->i; cj = mat->j; ca = mat->a;
40325a7d977cSHong Zhang     for (i=0; i<am; i++) {
40335a7d977cSHong Zhang       /* off-diagonal portion of A */
40345a7d977cSHong Zhang       ncols_o = bi[i+1] - bi[i];
40355a7d977cSHong Zhang       for (jo=0; jo<ncols_o; jo++) {
40365a7d977cSHong Zhang         col = cmap[*bj];
40375a7d977cSHong Zhang         if (col >= cstart) break;
4038f33d1a9aSHong Zhang         *ca++ = *ba++; bj++;
40395a7d977cSHong Zhang       }
40405a7d977cSHong Zhang       /* diagonal portion of A */
4041ecc9b87dSHong Zhang       ncols_d = ai[i+1] - ai[i];
4042ecc9b87dSHong Zhang       for (j=0; j<ncols_d; j++) *ca++ = *aa++;
40435a7d977cSHong Zhang       /* off-diagonal portion of A */
4044f33d1a9aSHong Zhang       for (j=jo; j<ncols_o; j++) {
4045f33d1a9aSHong Zhang         *ca++ = *ba++; bj++;
4046f33d1a9aSHong Zhang       }
40475a7d977cSHong Zhang     }
40485a7d977cSHong Zhang   } else {
40495a7d977cSHong Zhang     SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
405025616d81SHong Zhang   }
405101b7ae99SHong Zhang 
4052e462e02cSHong Zhang   ierr = PetscLogEventEnd(logkey_getlocalmat,A,0,0,0);CHKERRQ(ierr);
405325616d81SHong Zhang   PetscFunctionReturn(0);
405425616d81SHong Zhang }
405525616d81SHong Zhang 
405632fba14fSHong Zhang static PetscEvent logkey_getlocalmatcondensed = 0;
405732fba14fSHong Zhang #undef __FUNCT__
405832fba14fSHong Zhang #define __FUNCT__ "MatGetLocalMatCondensed"
405932fba14fSHong Zhang /*@C
406032fba14fSHong Zhang      MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns
406132fba14fSHong Zhang 
406232fba14fSHong Zhang     Not Collective
406332fba14fSHong Zhang 
406432fba14fSHong Zhang    Input Parameters:
406532fba14fSHong Zhang +    A - the matrix
406632fba14fSHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
406732fba14fSHong Zhang -    row, col - index sets of rows and columns to extract (or PETSC_NULL)
406832fba14fSHong Zhang 
406932fba14fSHong Zhang    Output Parameter:
407032fba14fSHong Zhang .    A_loc - the local sequential matrix generated
407132fba14fSHong Zhang 
407232fba14fSHong Zhang     Level: developer
407332fba14fSHong Zhang 
407432fba14fSHong Zhang @*/
4075be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
407632fba14fSHong Zhang {
407732fba14fSHong Zhang   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
407832fba14fSHong Zhang   PetscErrorCode    ierr;
407932fba14fSHong Zhang   PetscInt          i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
408032fba14fSHong Zhang   IS                isrowa,iscola;
408132fba14fSHong Zhang   Mat               *aloc;
408232fba14fSHong Zhang 
408332fba14fSHong Zhang   PetscFunctionBegin;
408432fba14fSHong Zhang   if (!logkey_getlocalmatcondensed) {
408532fba14fSHong Zhang     ierr = PetscLogEventRegister(&logkey_getlocalmatcondensed,"MatGetLocalMatCondensed",MAT_COOKIE);
408632fba14fSHong Zhang   }
408732fba14fSHong Zhang   ierr = PetscLogEventBegin(logkey_getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
408832fba14fSHong Zhang   if (!row){
4089899cda47SBarry Smith     start = A->rmap.rstart; end = A->rmap.rend;
409032fba14fSHong Zhang     ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
409132fba14fSHong Zhang   } else {
409232fba14fSHong Zhang     isrowa = *row;
409332fba14fSHong Zhang   }
409432fba14fSHong Zhang   if (!col){
4095899cda47SBarry Smith     start = A->cmap.rstart;
409632fba14fSHong Zhang     cmap  = a->garray;
4097899cda47SBarry Smith     nzA   = a->A->cmap.n;
4098899cda47SBarry Smith     nzB   = a->B->cmap.n;
409932fba14fSHong Zhang     ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
410032fba14fSHong Zhang     ncols = 0;
410132fba14fSHong Zhang     for (i=0; i<nzB; i++) {
410232fba14fSHong Zhang       if (cmap[i] < start) idx[ncols++] = cmap[i];
410332fba14fSHong Zhang       else break;
410432fba14fSHong Zhang     }
410532fba14fSHong Zhang     imark = i;
410632fba14fSHong Zhang     for (i=0; i<nzA; i++) idx[ncols++] = start + i;
410732fba14fSHong Zhang     for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
410832fba14fSHong Zhang     ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&iscola);CHKERRQ(ierr);
410932fba14fSHong Zhang     ierr = PetscFree(idx);CHKERRQ(ierr);
411032fba14fSHong Zhang   } else {
411132fba14fSHong Zhang     iscola = *col;
411232fba14fSHong Zhang   }
411332fba14fSHong Zhang   if (scall != MAT_INITIAL_MATRIX){
411432fba14fSHong Zhang     ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
411532fba14fSHong Zhang     aloc[0] = *A_loc;
411632fba14fSHong Zhang   }
411732fba14fSHong Zhang   ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
411832fba14fSHong Zhang   *A_loc = aloc[0];
411932fba14fSHong Zhang   ierr = PetscFree(aloc);CHKERRQ(ierr);
412032fba14fSHong Zhang   if (!row){
412132fba14fSHong Zhang     ierr = ISDestroy(isrowa);CHKERRQ(ierr);
412232fba14fSHong Zhang   }
412332fba14fSHong Zhang   if (!col){
412432fba14fSHong Zhang     ierr = ISDestroy(iscola);CHKERRQ(ierr);
412532fba14fSHong Zhang   }
412632fba14fSHong Zhang   ierr = PetscLogEventEnd(logkey_getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
412732fba14fSHong Zhang   PetscFunctionReturn(0);
412832fba14fSHong Zhang }
412932fba14fSHong Zhang 
4130a61c8c0fSHong Zhang static PetscEvent logkey_GetBrowsOfAcols = 0;
413125616d81SHong Zhang #undef __FUNCT__
413225616d81SHong Zhang #define __FUNCT__ "MatGetBrowsOfAcols"
413325616d81SHong Zhang /*@C
413432fba14fSHong Zhang     MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A
413525616d81SHong Zhang 
413625616d81SHong Zhang     Collective on Mat
413725616d81SHong Zhang 
413825616d81SHong Zhang    Input Parameters:
4139e240928fSHong Zhang +    A,B - the matrices in mpiaij format
414025616d81SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
414125616d81SHong Zhang -    rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL)
414225616d81SHong Zhang 
414325616d81SHong Zhang    Output Parameter:
414425616d81SHong Zhang +    rowb, colb - index sets of rows and columns of B to extract
4145899cda47SBarry Smith .    brstart - row index of B_seq from which next B->rmap.n rows are taken from B's local rows
414625616d81SHong Zhang -    B_seq - the sequential matrix generated
414725616d81SHong Zhang 
414825616d81SHong Zhang     Level: developer
414925616d81SHong Zhang 
415025616d81SHong Zhang @*/
4151be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq)
415225616d81SHong Zhang {
4153899cda47SBarry Smith   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
415425616d81SHong Zhang   PetscErrorCode    ierr;
4155b1d57f15SBarry Smith   PetscInt          *idx,i,start,ncols,nzA,nzB,*cmap,imark;
415625616d81SHong Zhang   IS                isrowb,iscolb;
415725616d81SHong Zhang   Mat               *bseq;
415825616d81SHong Zhang 
415925616d81SHong Zhang   PetscFunctionBegin;
4160899cda47SBarry Smith   if (A->cmap.rstart != B->rmap.rstart || A->cmap.rend != B->rmap.rend){
4161899cda47SBarry Smith     SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap.rstart,A->cmap.rend,B->rmap.rstart,B->rmap.rend);
416225616d81SHong Zhang   }
4163e462e02cSHong Zhang   if (!logkey_GetBrowsOfAcols) {
4164e462e02cSHong Zhang     ierr = PetscLogEventRegister(&logkey_GetBrowsOfAcols,"MatGetBrowsOfAcols",MAT_COOKIE);
4165e462e02cSHong Zhang   }
4166e462e02cSHong Zhang   ierr = PetscLogEventBegin(logkey_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
416725616d81SHong Zhang 
416825616d81SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4169899cda47SBarry Smith     start = A->cmap.rstart;
417025616d81SHong Zhang     cmap  = a->garray;
4171899cda47SBarry Smith     nzA   = a->A->cmap.n;
4172899cda47SBarry Smith     nzB   = a->B->cmap.n;
4173b1d57f15SBarry Smith     ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
417425616d81SHong Zhang     ncols = 0;
41750390132cSHong Zhang     for (i=0; i<nzB; i++) {  /* row < local row index */
417625616d81SHong Zhang       if (cmap[i] < start) idx[ncols++] = cmap[i];
417725616d81SHong Zhang       else break;
417825616d81SHong Zhang     }
417925616d81SHong Zhang     imark = i;
41800390132cSHong Zhang     for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
41810390132cSHong Zhang     for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row index */
418225616d81SHong Zhang     ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&isrowb);CHKERRQ(ierr);
418325616d81SHong Zhang     ierr = PetscFree(idx);CHKERRQ(ierr);
418425616d81SHong Zhang     *brstart = imark;
4185899cda47SBarry Smith     ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap.N,0,1,&iscolb);CHKERRQ(ierr);
418625616d81SHong Zhang   } else {
418725616d81SHong Zhang     if (!rowb || !colb) SETERRQ(PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX");
418825616d81SHong Zhang     isrowb = *rowb; iscolb = *colb;
418925616d81SHong Zhang     ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr);
419025616d81SHong Zhang     bseq[0] = *B_seq;
419125616d81SHong Zhang   }
419225616d81SHong Zhang   ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr);
419325616d81SHong Zhang   *B_seq = bseq[0];
419425616d81SHong Zhang   ierr = PetscFree(bseq);CHKERRQ(ierr);
419525616d81SHong Zhang   if (!rowb){
419625616d81SHong Zhang     ierr = ISDestroy(isrowb);CHKERRQ(ierr);
419725616d81SHong Zhang   } else {
419825616d81SHong Zhang     *rowb = isrowb;
419925616d81SHong Zhang   }
420025616d81SHong Zhang   if (!colb){
420125616d81SHong Zhang     ierr = ISDestroy(iscolb);CHKERRQ(ierr);
420225616d81SHong Zhang   } else {
420325616d81SHong Zhang     *colb = iscolb;
420425616d81SHong Zhang   }
4205e462e02cSHong Zhang   ierr = PetscLogEventEnd(logkey_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
420625616d81SHong Zhang   PetscFunctionReturn(0);
420725616d81SHong Zhang }
4208429d309bSHong Zhang 
4209a61c8c0fSHong Zhang static PetscEvent logkey_GetBrowsOfAocols = 0;
4210a61c8c0fSHong Zhang #undef __FUNCT__
4211a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols"
4212429d309bSHong Zhang /*@C
4213429d309bSHong Zhang     MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns
421401b7ae99SHong Zhang     of the OFF-DIAGONAL portion of local A
4215429d309bSHong Zhang 
4216429d309bSHong Zhang     Collective on Mat
4217429d309bSHong Zhang 
4218429d309bSHong Zhang    Input Parameters:
4219429d309bSHong Zhang +    A,B - the matrices in mpiaij format
422087025532SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
422187025532SHong Zhang .    startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL)
422287025532SHong Zhang -    bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL)
4223429d309bSHong Zhang 
4224429d309bSHong Zhang    Output Parameter:
422587025532SHong Zhang +    B_oth - the sequential matrix generated
4226429d309bSHong Zhang 
4227429d309bSHong Zhang     Level: developer
4228429d309bSHong Zhang 
4229429d309bSHong Zhang @*/
4230be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,PetscScalar **bufa_ptr,Mat *B_oth)
4231429d309bSHong Zhang {
4232a6b2eed2SHong Zhang   VecScatter_MPI_General *gen_to,*gen_from;
4233429d309bSHong Zhang   PetscErrorCode         ierr;
4234899cda47SBarry Smith   Mat_MPIAIJ             *a=(Mat_MPIAIJ*)A->data;
423587025532SHong Zhang   Mat_SeqAIJ             *b_oth;
4236a6b2eed2SHong Zhang   VecScatter             ctx=a->Mvctx;
42377adad957SLisandro Dalcin   MPI_Comm               comm=((PetscObject)ctx)->comm;
42387adad957SLisandro Dalcin   PetscMPIInt            *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank;
4239899cda47SBarry Smith   PetscInt               *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap.n,row,*b_othi,*b_othj;
424087025532SHong Zhang   PetscScalar            *rvalues,*svalues,*b_otha,*bufa,*bufA;
4241e42f35eeSHong Zhang   PetscInt               i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len;
4242910ba992SMatthew Knepley   MPI_Request            *rwaits = PETSC_NULL,*swaits = PETSC_NULL;
424387025532SHong Zhang   MPI_Status             *sstatus,rstatus;
4244aa5bb8c0SSatish Balay   PetscMPIInt            jj;
4245e42f35eeSHong Zhang   PetscInt               *cols,sbs,rbs;
4246ba8c8a56SBarry Smith   PetscScalar            *vals;
4247429d309bSHong Zhang 
4248429d309bSHong Zhang   PetscFunctionBegin;
4249899cda47SBarry Smith   if (A->cmap.rstart != B->rmap.rstart || A->cmap.rend != B->rmap.rend){
4250899cda47SBarry Smith     SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap.rstart,A->cmap.rend,B->rmap.rstart,B->rmap.rend);
4251429d309bSHong Zhang   }
4252429d309bSHong Zhang   if (!logkey_GetBrowsOfAocols) {
42531677a5d7SHong Zhang     ierr = PetscLogEventRegister(&logkey_GetBrowsOfAocols,"MatGetBrAoCol",MAT_COOKIE);
4254429d309bSHong Zhang   }
4255429d309bSHong Zhang   ierr = PetscLogEventBegin(logkey_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
4256a6b2eed2SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
4257a6b2eed2SHong Zhang 
4258a6b2eed2SHong Zhang   gen_to   = (VecScatter_MPI_General*)ctx->todata;
4259a6b2eed2SHong Zhang   gen_from = (VecScatter_MPI_General*)ctx->fromdata;
4260e42f35eeSHong Zhang   rvalues  = gen_from->values; /* holds the length of receiving row */
4261e42f35eeSHong Zhang   svalues  = gen_to->values;   /* holds the length of sending row */
4262a6b2eed2SHong Zhang   nrecvs   = gen_from->n;
4263a6b2eed2SHong Zhang   nsends   = gen_to->n;
4264d7ee0231SBarry Smith 
4265d7ee0231SBarry Smith   ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr);
4266a6b2eed2SHong Zhang   srow     = gen_to->indices;   /* local row index to be sent */
4267a6b2eed2SHong Zhang   sstarts  = gen_to->starts;
4268a6b2eed2SHong Zhang   sprocs   = gen_to->procs;
4269a6b2eed2SHong Zhang   sstatus  = gen_to->sstatus;
4270e42f35eeSHong Zhang   sbs      = gen_to->bs;
4271e42f35eeSHong Zhang   rstarts  = gen_from->starts;
4272e42f35eeSHong Zhang   rprocs   = gen_from->procs;
4273e42f35eeSHong Zhang   rbs      = gen_from->bs;
4274429d309bSHong Zhang 
4275dea91ad1SHong Zhang   if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX;
4276429d309bSHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4277a6b2eed2SHong Zhang     /* i-array */
4278a6b2eed2SHong Zhang     /*---------*/
4279a6b2eed2SHong Zhang     /*  post receives */
4280a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
4281e42f35eeSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
4282e42f35eeSHong Zhang       nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */
428387025532SHong Zhang       ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
4284429d309bSHong Zhang     }
4285a6b2eed2SHong Zhang 
4286a6b2eed2SHong Zhang     /* pack the outgoing message */
428787025532SHong Zhang     ierr = PetscMalloc((nsends+nrecvs+3)*sizeof(PetscInt),&sstartsj);CHKERRQ(ierr);
4288a6b2eed2SHong Zhang     rstartsj = sstartsj + nsends +1;
4289a6b2eed2SHong Zhang     sstartsj[0] = 0;  rstartsj[0] = 0;
4290a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be sent */
4291a6b2eed2SHong Zhang     k = 0;
4292a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
4293e42f35eeSHong Zhang       rowlen = (PetscInt*)svalues + sstarts[i]*sbs;
4294e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
429587025532SHong Zhang       for (j=0; j<nrows; j++) {
4296899cda47SBarry Smith         row = srow[k] + B->rmap.range[rank]; /* global row idx */
4297e42f35eeSHong Zhang         for (l=0; l<sbs; l++){
4298e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */
4299e42f35eeSHong Zhang           rowlen[j*sbs+l] = ncols;
4300e42f35eeSHong Zhang           len += ncols;
4301e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
4302e42f35eeSHong Zhang         }
4303a6b2eed2SHong Zhang         k++;
4304429d309bSHong Zhang       }
4305e42f35eeSHong Zhang       ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
4306dea91ad1SHong Zhang       sstartsj[i+1] = len;  /* starting point of (i+1)-th outgoing msg in bufj and bufa */
4307429d309bSHong Zhang     }
430887025532SHong Zhang     /* recvs and sends of i-array are completed */
430987025532SHong Zhang     i = nrecvs;
431087025532SHong Zhang     while (i--) {
4311aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
431287025532SHong Zhang     }
43130c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
4314e42f35eeSHong Zhang 
4315a6b2eed2SHong Zhang     /* allocate buffers for sending j and a arrays */
4316a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr);
4317a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr);
4318a6b2eed2SHong Zhang 
431987025532SHong Zhang     /* create i-array of B_oth */
432087025532SHong Zhang     ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr);
432187025532SHong Zhang     b_othi[0] = 0;
4322a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be received */
4323a6b2eed2SHong Zhang     k = 0;
4324a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
4325fd0ff01cSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
4326e42f35eeSHong Zhang       nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */
432787025532SHong Zhang       for (j=0; j<nrows; j++) {
432887025532SHong Zhang         b_othi[k+1] = b_othi[k] + rowlen[j];
4329a6b2eed2SHong Zhang         len += rowlen[j]; k++;
4330a6b2eed2SHong Zhang       }
4331dea91ad1SHong Zhang       rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */
4332a6b2eed2SHong Zhang     }
4333a6b2eed2SHong Zhang 
433487025532SHong Zhang     /* allocate space for j and a arrrays of B_oth */
433587025532SHong Zhang     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr);
433687025532SHong Zhang     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscScalar),&b_otha);CHKERRQ(ierr);
4337a6b2eed2SHong Zhang 
433887025532SHong Zhang     /* j-array */
433987025532SHong Zhang     /*---------*/
4340a6b2eed2SHong Zhang     /*  post receives of j-array */
4341a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
434287025532SHong Zhang       nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
434387025532SHong Zhang       ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
4344a6b2eed2SHong Zhang     }
4345e42f35eeSHong Zhang 
4346e42f35eeSHong Zhang     /* pack the outgoing message j-array */
4347a6b2eed2SHong Zhang     k = 0;
4348a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
4349e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
4350a6b2eed2SHong Zhang       bufJ = bufj+sstartsj[i];
435187025532SHong Zhang       for (j=0; j<nrows; j++) {
4352899cda47SBarry Smith         row  = srow[k++] + B->rmap.range[rank]; /* global row idx */
4353e42f35eeSHong Zhang         for (ll=0; ll<sbs; ll++){
4354e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
4355a6b2eed2SHong Zhang           for (l=0; l<ncols; l++){
4356a6b2eed2SHong Zhang             *bufJ++ = cols[l];
435787025532SHong Zhang           }
4358e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
4359e42f35eeSHong Zhang         }
436087025532SHong Zhang       }
436187025532SHong Zhang       ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
436287025532SHong Zhang     }
436387025532SHong Zhang 
436487025532SHong Zhang     /* recvs and sends of j-array are completed */
436587025532SHong Zhang     i = nrecvs;
436687025532SHong Zhang     while (i--) {
4367aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
436887025532SHong Zhang     }
43690c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
437087025532SHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
437187025532SHong Zhang     sstartsj = *startsj;
437287025532SHong Zhang     rstartsj = sstartsj + nsends +1;
437387025532SHong Zhang     bufa     = *bufa_ptr;
437487025532SHong Zhang     b_oth    = (Mat_SeqAIJ*)(*B_oth)->data;
437587025532SHong Zhang     b_otha   = b_oth->a;
437687025532SHong Zhang   } else {
437787025532SHong Zhang     SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container");
437887025532SHong Zhang   }
437987025532SHong Zhang 
438087025532SHong Zhang   /* a-array */
438187025532SHong Zhang   /*---------*/
438287025532SHong Zhang   /*  post receives of a-array */
438387025532SHong Zhang   for (i=0; i<nrecvs; i++){
438487025532SHong Zhang     nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
438587025532SHong Zhang     ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
438687025532SHong Zhang   }
4387e42f35eeSHong Zhang 
4388e42f35eeSHong Zhang   /* pack the outgoing message a-array */
438987025532SHong Zhang   k = 0;
439087025532SHong Zhang   for (i=0; i<nsends; i++){
4391e42f35eeSHong Zhang     nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
439287025532SHong Zhang     bufA = bufa+sstartsj[i];
439387025532SHong Zhang     for (j=0; j<nrows; j++) {
4394899cda47SBarry Smith       row  = srow[k++] + B->rmap.range[rank]; /* global row idx */
4395e42f35eeSHong Zhang       for (ll=0; ll<sbs; ll++){
4396e42f35eeSHong Zhang         ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
439787025532SHong Zhang         for (l=0; l<ncols; l++){
4398a6b2eed2SHong Zhang           *bufA++ = vals[l];
4399a6b2eed2SHong Zhang         }
4400e42f35eeSHong Zhang         ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
4401e42f35eeSHong Zhang       }
4402a6b2eed2SHong Zhang     }
440387025532SHong Zhang     ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
4404a6b2eed2SHong Zhang   }
440587025532SHong Zhang   /* recvs and sends of a-array are completed */
440687025532SHong Zhang   i = nrecvs;
440787025532SHong Zhang   while (i--) {
4408aa5bb8c0SSatish Balay     ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
440987025532SHong Zhang   }
44100c468ba9SBarry Smith   if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
4411d7ee0231SBarry Smith   ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr);
4412a6b2eed2SHong Zhang 
441387025532SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4414a6b2eed2SHong Zhang     /* put together the new matrix */
4415899cda47SBarry Smith     ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap.N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr);
4416a6b2eed2SHong Zhang 
4417a6b2eed2SHong Zhang     /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
4418a6b2eed2SHong Zhang     /* Since these are PETSc arrays, change flags to free them as necessary. */
441987025532SHong Zhang     b_oth          = (Mat_SeqAIJ *)(*B_oth)->data;
4420e6b907acSBarry Smith     b_oth->free_a  = PETSC_TRUE;
4421e6b907acSBarry Smith     b_oth->free_ij = PETSC_TRUE;
442287025532SHong Zhang     b_oth->nonew   = 0;
4423a6b2eed2SHong Zhang 
4424a6b2eed2SHong Zhang     ierr = PetscFree(bufj);CHKERRQ(ierr);
4425dea91ad1SHong Zhang     if (!startsj || !bufa_ptr){
4426dea91ad1SHong Zhang       ierr = PetscFree(sstartsj);CHKERRQ(ierr);
4427dea91ad1SHong Zhang       ierr = PetscFree(bufa_ptr);CHKERRQ(ierr);
4428dea91ad1SHong Zhang     } else {
442987025532SHong Zhang       *startsj  = sstartsj;
443087025532SHong Zhang       *bufa_ptr = bufa;
443187025532SHong Zhang     }
4432dea91ad1SHong Zhang   }
4433429d309bSHong Zhang   ierr = PetscLogEventEnd(logkey_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
4434429d309bSHong Zhang   PetscFunctionReturn(0);
4435429d309bSHong Zhang }
4436ccd8e176SBarry Smith 
443743eb5e2fSMatthew Knepley #undef __FUNCT__
443843eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs"
443943eb5e2fSMatthew Knepley /*@C
444043eb5e2fSMatthew Knepley   MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication.
444143eb5e2fSMatthew Knepley 
444243eb5e2fSMatthew Knepley   Not Collective
444343eb5e2fSMatthew Knepley 
444443eb5e2fSMatthew Knepley   Input Parameters:
444543eb5e2fSMatthew Knepley . A - The matrix in mpiaij format
444643eb5e2fSMatthew Knepley 
444743eb5e2fSMatthew Knepley   Output Parameter:
444843eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product
444943eb5e2fSMatthew Knepley . colmap - A map from global column index to local index into lvec
445043eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec
445143eb5e2fSMatthew Knepley 
445243eb5e2fSMatthew Knepley   Level: developer
445343eb5e2fSMatthew Knepley 
445443eb5e2fSMatthew Knepley @*/
445543eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE)
445643eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter)
445743eb5e2fSMatthew Knepley #else
445843eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter)
445943eb5e2fSMatthew Knepley #endif
446043eb5e2fSMatthew Knepley {
446143eb5e2fSMatthew Knepley   Mat_MPIAIJ *a;
446243eb5e2fSMatthew Knepley 
446343eb5e2fSMatthew Knepley   PetscFunctionBegin;
446443eb5e2fSMatthew Knepley   PetscValidHeaderSpecific(A, MAT_COOKIE, 1);
446543eb5e2fSMatthew Knepley   PetscValidPointer(lvec, 2)
446643eb5e2fSMatthew Knepley   PetscValidPointer(colmap, 3)
446743eb5e2fSMatthew Knepley   PetscValidPointer(multScatter, 4)
446843eb5e2fSMatthew Knepley   a = (Mat_MPIAIJ *) A->data;
446943eb5e2fSMatthew Knepley   if (lvec) *lvec = a->lvec;
447043eb5e2fSMatthew Knepley   if (colmap) *colmap = a->colmap;
447143eb5e2fSMatthew Knepley   if (multScatter) *multScatter = a->Mvctx;
447243eb5e2fSMatthew Knepley   PetscFunctionReturn(0);
447343eb5e2fSMatthew Knepley }
447443eb5e2fSMatthew Knepley 
447517667f90SBarry Smith EXTERN_C_BEGIN
447617667f90SBarry Smith extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICRL(Mat,MatType,MatReuse,Mat*);
447717667f90SBarry Smith extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICSRPERM(Mat,MatType,MatReuse,Mat*);
447817667f90SBarry Smith EXTERN_C_END
447917667f90SBarry Smith 
4480ccd8e176SBarry Smith /*MC
4481ccd8e176SBarry Smith    MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices.
4482ccd8e176SBarry Smith 
4483ccd8e176SBarry Smith    Options Database Keys:
4484ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions()
4485ccd8e176SBarry Smith 
4486ccd8e176SBarry Smith   Level: beginner
4487ccd8e176SBarry Smith 
4488175b88e8SBarry Smith .seealso: MatCreateMPIAIJ()
4489ccd8e176SBarry Smith M*/
4490ccd8e176SBarry Smith 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAIJ"
/* Constructor for the MPIAIJ matrix type: allocates the Mat_MPIAIJ data
   structure, installs the function table, and registers the composed
   operations (preallocation, store/retrieve values, conversions, ...).
   Called by MatSetType()/MatCreate when the type is MATMPIAIJ. */
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B)
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);

  /* allocate and install the implementation-specific data and ops table */
  ierr            = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr);
  B->data         = (void*)b;
  ierr            = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor       = 0;
  B->rmap.bs      = 1;   /* AIJ is a point (block size 1) format */
  B->assembled    = PETSC_FALSE;
  B->mapping      = 0;

  B->insertmode      = NOT_SET_VALUES;
  b->size            = size;
  ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr);

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr);
  b->donotstash  = PETSC_FALSE;
  b->colmap      = 0;   /* created lazily by CreateColmap_MPIAIJ_Private() */
  b->garray      = 0;   /* global column indices of the off-diagonal part */
  b->roworiented = PETSC_TRUE;

  /* stuff used for matrix vector multiply */
  b->lvec      = PETSC_NULL;
  b->Mvctx     = PETSC_NULL;

  /* stuff for MatGetRow() */
  b->rowindices   = 0;
  b->rowvalues    = 0;
  b->getrowactive = PETSC_FALSE;


  /* compose type-specific operations so they can be dispatched by name */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
                                     "MatStoreValues_MPIAIJ",
                                     MatStoreValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
                                     "MatRetrieveValues_MPIAIJ",
                                     MatRetrieveValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
                                     "MatGetDiagonalBlock_MPIAIJ",
                                     MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C",
                                     "MatIsTranspose_MPIAIJ",
                                     MatIsTranspose_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C",
                                     "MatMPIAIJSetPreallocation_MPIAIJ",
                                     MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",
                                     "MatMPIAIJSetPreallocationCSR_MPIAIJ",
                                     MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C",
                                     "MatDiagonalScaleLocal_MPIAIJ",
                                     MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicsrperm_C",
                                     "MatConvert_MPIAIJ_MPICSRPERM",
                                      MatConvert_MPIAIJ_MPICSRPERM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicrl_C",
                                     "MatConvert_MPIAIJ_MPICRL",
                                      MatConvert_MPIAIJ_MPICRL);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
456381824310SBarry Smith 
456403bfb495SBarry Smith #undef __FUNCT__
456503bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays"
456603bfb495SBarry Smith /*@C
456703bfb495SBarry Smith      MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal"
456803bfb495SBarry Smith          and "off-diagonal" part of the matrix in CSR format.
456903bfb495SBarry Smith 
457003bfb495SBarry Smith    Collective on MPI_Comm
457103bfb495SBarry Smith 
457203bfb495SBarry Smith    Input Parameters:
457303bfb495SBarry Smith +  comm - MPI communicator
457403bfb495SBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
457503bfb495SBarry Smith .  n - This value should be the same as the local size used in creating the
457603bfb495SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
457703bfb495SBarry Smith        calculated if N is given) For square matrices n is almost always m.
457803bfb495SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
457903bfb495SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
458003bfb495SBarry Smith .   i - row indices for "diagonal" portion of matrix
458103bfb495SBarry Smith .   j - column indices
458203bfb495SBarry Smith .   a - matrix values
458303bfb495SBarry Smith .   oi - row indices for "off-diagonal" portion of matrix
458403bfb495SBarry Smith .   oj - column indices
458503bfb495SBarry Smith -   oa - matrix values
458603bfb495SBarry Smith 
458703bfb495SBarry Smith    Output Parameter:
458803bfb495SBarry Smith .   mat - the matrix
458903bfb495SBarry Smith 
459003bfb495SBarry Smith    Level: advanced
459103bfb495SBarry Smith 
459203bfb495SBarry Smith    Notes:
459303bfb495SBarry Smith        The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc.
459403bfb495SBarry Smith 
459503bfb495SBarry Smith        The i and j indices are 0 based
459603bfb495SBarry Smith 
459703bfb495SBarry Smith        See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix
459803bfb495SBarry Smith 
459903bfb495SBarry Smith 
460003bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
460103bfb495SBarry Smith 
460203bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
46038d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays()
460403bfb495SBarry Smith @*/
46058d7a6e47SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[],
460603bfb495SBarry Smith 								PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat)
460703bfb495SBarry Smith {
460803bfb495SBarry Smith   PetscErrorCode ierr;
460903bfb495SBarry Smith   Mat_MPIAIJ     *maij;
461003bfb495SBarry Smith 
461103bfb495SBarry Smith  PetscFunctionBegin;
461203bfb495SBarry Smith   if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
461303bfb495SBarry Smith   if (i[0]) {
461403bfb495SBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
461503bfb495SBarry Smith   }
461603bfb495SBarry Smith   if (oi[0]) {
461703bfb495SBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0");
461803bfb495SBarry Smith   }
461903bfb495SBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
462003bfb495SBarry Smith   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
462103bfb495SBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
462203bfb495SBarry Smith   maij = (Mat_MPIAIJ*) (*mat)->data;
46238d7a6e47SBarry Smith   maij->donotstash     = PETSC_TRUE;
46248d7a6e47SBarry Smith   (*mat)->preallocated = PETSC_TRUE;
462503bfb495SBarry Smith 
462603bfb495SBarry Smith   (*mat)->rmap.bs = (*mat)->cmap.bs = 1;
46276148ca0dSBarry Smith   ierr = PetscMapSetUp(&(*mat)->rmap);CHKERRQ(ierr);
46286148ca0dSBarry Smith   ierr = PetscMapSetUp(&(*mat)->cmap);CHKERRQ(ierr);
462903bfb495SBarry Smith 
463003bfb495SBarry Smith   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr);
463103bfb495SBarry Smith   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap.N,oi,oj,oa,&maij->B);CHKERRQ(ierr);
463203bfb495SBarry Smith 
46338d7a6e47SBarry Smith   ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
46348d7a6e47SBarry Smith   ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
46358d7a6e47SBarry Smith   ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
46368d7a6e47SBarry Smith   ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
46378d7a6e47SBarry Smith 
463803bfb495SBarry Smith   ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
463903bfb495SBarry Smith   ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
464003bfb495SBarry Smith   PetscFunctionReturn(0);
464103bfb495SBarry Smith }
464203bfb495SBarry Smith 
464381824310SBarry Smith /*
464481824310SBarry Smith     Special version for direct calls from Fortran
464581824310SBarry Smith */
464681824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS)
464781824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ
464881824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
464981824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij
465081824310SBarry Smith #endif
465181824310SBarry Smith 
/* Redefine these error-checking macros so they can be used inside a function returning void */
465381824310SBarry Smith #undef CHKERRQ
46547adad957SLisandro Dalcin #define CHKERRQ(ierr) CHKERRABORT(((PetscObject)mat)->comm,ierr)
465581824310SBarry Smith #undef SETERRQ2
46567adad957SLisandro Dalcin #define SETERRQ2(ierr,b,c,d) CHKERRABORT(((PetscObject)mat)->comm,ierr)
465781824310SBarry Smith #undef SETERRQ
46587adad957SLisandro Dalcin #define SETERRQ(ierr,b) CHKERRABORT(((PetscObject)mat)->comm,ierr)
465981824310SBarry Smith 
466081824310SBarry Smith EXTERN_C_BEGIN
466181824310SBarry Smith #undef __FUNCT__
466281824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_"
46631f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr)
466481824310SBarry Smith {
466581824310SBarry Smith   Mat            mat = *mmat;
466681824310SBarry Smith   PetscInt       m = *mm, n = *mn;
466781824310SBarry Smith   InsertMode     addv = *maddv;
466881824310SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
466981824310SBarry Smith   PetscScalar    value;
467081824310SBarry Smith   PetscErrorCode ierr;
4671899cda47SBarry Smith 
467281824310SBarry Smith   MatPreallocated(mat);
467381824310SBarry Smith   if (mat->insertmode == NOT_SET_VALUES) {
467481824310SBarry Smith     mat->insertmode = addv;
467581824310SBarry Smith   }
467681824310SBarry Smith #if defined(PETSC_USE_DEBUG)
467781824310SBarry Smith   else if (mat->insertmode != addv) {
467881824310SBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
467981824310SBarry Smith   }
468081824310SBarry Smith #endif
468181824310SBarry Smith   {
4682899cda47SBarry Smith   PetscInt       i,j,rstart = mat->rmap.rstart,rend = mat->rmap.rend;
4683899cda47SBarry Smith   PetscInt       cstart = mat->cmap.rstart,cend = mat->cmap.rend,row,col;
468481824310SBarry Smith   PetscTruth     roworiented = aij->roworiented;
468581824310SBarry Smith 
468681824310SBarry Smith   /* Some Variables required in the macro */
468781824310SBarry Smith   Mat            A = aij->A;
468881824310SBarry Smith   Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data;
468981824310SBarry Smith   PetscInt       *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
469081824310SBarry Smith   PetscScalar    *aa = a->a;
469181824310SBarry Smith   PetscTruth     ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE);
469281824310SBarry Smith   Mat            B = aij->B;
469381824310SBarry Smith   Mat_SeqAIJ     *b = (Mat_SeqAIJ*)B->data;
4694899cda47SBarry Smith   PetscInt       *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap.n,am = aij->A->rmap.n;
469581824310SBarry Smith   PetscScalar    *ba = b->a;
469681824310SBarry Smith 
469781824310SBarry Smith   PetscInt       *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
469881824310SBarry Smith   PetscInt       nonew = a->nonew;
469981824310SBarry Smith   PetscScalar    *ap1,*ap2;
470081824310SBarry Smith 
470181824310SBarry Smith   PetscFunctionBegin;
470281824310SBarry Smith   for (i=0; i<m; i++) {
470381824310SBarry Smith     if (im[i] < 0) continue;
470481824310SBarry Smith #if defined(PETSC_USE_DEBUG)
4705899cda47SBarry Smith     if (im[i] >= mat->rmap.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap.N-1);
470681824310SBarry Smith #endif
470781824310SBarry Smith     if (im[i] >= rstart && im[i] < rend) {
470881824310SBarry Smith       row      = im[i] - rstart;
470981824310SBarry Smith       lastcol1 = -1;
471081824310SBarry Smith       rp1      = aj + ai[row];
471181824310SBarry Smith       ap1      = aa + ai[row];
471281824310SBarry Smith       rmax1    = aimax[row];
471381824310SBarry Smith       nrow1    = ailen[row];
471481824310SBarry Smith       low1     = 0;
471581824310SBarry Smith       high1    = nrow1;
471681824310SBarry Smith       lastcol2 = -1;
471781824310SBarry Smith       rp2      = bj + bi[row];
471881824310SBarry Smith       ap2      = ba + bi[row];
471981824310SBarry Smith       rmax2    = bimax[row];
472081824310SBarry Smith       nrow2    = bilen[row];
472181824310SBarry Smith       low2     = 0;
472281824310SBarry Smith       high2    = nrow2;
472381824310SBarry Smith 
472481824310SBarry Smith       for (j=0; j<n; j++) {
472581824310SBarry Smith         if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
472681824310SBarry Smith         if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
472781824310SBarry Smith         if (in[j] >= cstart && in[j] < cend){
472881824310SBarry Smith           col = in[j] - cstart;
472981824310SBarry Smith           MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
473081824310SBarry Smith         } else if (in[j] < 0) continue;
473181824310SBarry Smith #if defined(PETSC_USE_DEBUG)
4732899cda47SBarry Smith         else if (in[j] >= mat->cmap.N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap.N-1);}
473381824310SBarry Smith #endif
473481824310SBarry Smith         else {
473581824310SBarry Smith           if (mat->was_assembled) {
473681824310SBarry Smith             if (!aij->colmap) {
473781824310SBarry Smith               ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
473881824310SBarry Smith             }
473981824310SBarry Smith #if defined (PETSC_USE_CTABLE)
474081824310SBarry Smith             ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
474181824310SBarry Smith 	    col--;
474281824310SBarry Smith #else
474381824310SBarry Smith             col = aij->colmap[in[j]] - 1;
474481824310SBarry Smith #endif
474581824310SBarry Smith             if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
474681824310SBarry Smith               ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
474781824310SBarry Smith               col =  in[j];
474881824310SBarry Smith               /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
474981824310SBarry Smith               B = aij->B;
475081824310SBarry Smith               b = (Mat_SeqAIJ*)B->data;
475181824310SBarry Smith               bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j;
475281824310SBarry Smith               rp2      = bj + bi[row];
475381824310SBarry Smith               ap2      = ba + bi[row];
475481824310SBarry Smith               rmax2    = bimax[row];
475581824310SBarry Smith               nrow2    = bilen[row];
475681824310SBarry Smith               low2     = 0;
475781824310SBarry Smith               high2    = nrow2;
4758899cda47SBarry Smith               bm       = aij->B->rmap.n;
475981824310SBarry Smith               ba = b->a;
476081824310SBarry Smith             }
476181824310SBarry Smith           } else col = in[j];
476281824310SBarry Smith           MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
476381824310SBarry Smith         }
476481824310SBarry Smith       }
476581824310SBarry Smith     } else {
476681824310SBarry Smith       if (!aij->donotstash) {
476781824310SBarry Smith         if (roworiented) {
476881824310SBarry Smith           if (ignorezeroentries && v[i*n] == 0.0) continue;
476981824310SBarry Smith           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
477081824310SBarry Smith         } else {
477181824310SBarry Smith           if (ignorezeroentries && v[i] == 0.0) continue;
477281824310SBarry Smith           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
477381824310SBarry Smith         }
477481824310SBarry Smith       }
477581824310SBarry Smith     }
477681824310SBarry Smith   }}
477781824310SBarry Smith   PetscFunctionReturnVoid();
477881824310SBarry Smith }
477981824310SBarry Smith EXTERN_C_END
478003bfb495SBarry Smith 
4781