xref: /petsc/src/mat/impls/aij/mpi/mpiaij.c (revision 5d0c19d75c660d4fec594a5399ec8d8ba29c54a8)
1be1d678aSKris Buschelman #define PETSCMAT_DLL
28a729477SBarry Smith 
3b47fd4b1SSatish Balay #include "src/mat/impls/aij/mpi/mpiaij.h"   /*I "petscmat.h" I*/
4d9942c19SSatish Balay #include "src/inline/spops.h"
58a729477SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDistribute_MPIAIJ"
/*
    Distributes a SeqAIJ matrix across a set of processes. Code stolen from
    MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type.

    comm   - communicator the distributed matrix lives on
    gmat   - the global sequential (SeqAIJ) matrix; only referenced on process 0
    m      - number of rows this process is to own
    reuse  - MAT_INITIAL_MATRIX builds *inmat from scratch; otherwise only the
             numerical values are moved into the existing *inmat (the nonzero
             pattern is assumed unchanged)
    inmat  - the resulting parallel (MPIAIJ) matrix

    Only for square matrices
*/
PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat)
{
  PetscMPIInt    rank,size;
  PetscInt       *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld;
  PetscErrorCode ierr;
  Mat            mat;
  Mat_SeqAIJ     *gmata;
  PetscMPIInt    tag;
  MPI_Status     status;
  PetscTruth     aij;
  MatScalar      *gmataa,*ao,*ad,*gmataarestore=0;

  PetscFunctionBegin;
  CHKMEMQ;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (!rank) {
    /* only process 0 holds the global matrix, so only it can check the type */
    ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr);
    if (!aij) SETERRQ1(PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name);
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(comm,&mat);CHKERRQ(ierr);
    ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr);
    ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
    ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr);
    /* gather everyone's local row count, then prefix-sum into the ownership
       ranges rowners[0..size] (rowners[rank] = first global row of rank) */
    ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
    rowners[0] = 0;
    for (i=2; i<=size; i++) {
      rowners[i] += rowners[i-1];
    }
    rstart = rowners[rank];
    rend   = rowners[rank+1];
    ierr   = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      gmata = (Mat_SeqAIJ*) gmat->data;
      /* send row lengths to all processors */
      for (i=0; i<m; i++) dlens[i] = gmata->ilen[i];
      for (i=1; i<size; i++) {
        ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }
      /* determine number diagonal and off-diagonal counts */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          /* ld[i] counts entries strictly left of the diagonal block; it is
             stashed in the Mat_MPIAIJ and used by the MAT_REUSE_MATRIX path
             below to split a row's values between A and B */
          if (gmata->j[jj] < rstart) ld[i]++;
          if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* send column indices to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
        ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }

      /* send numerical values to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* process 0's own rows start at offset 0 of the global arrays,
         so it can use them in place (no copy, nothing to free later) */
      gmataa = gmata->a;
      gmataj = gmata->j;

    } else {
      /* receive row lengths */
      ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* receive column indices */
      ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr);
      ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* determine number diagonal and off-diagonal counts */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmataj[jj] < rstart) ld[i]++;
          if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* receive numerical values */
      ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr);
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* set preallocation: dlens currently holds total row lengths, so subtract
       the off-diagonal counts to get the diagonal-block lengths */
    for (i=0; i<m; i++) {
      dlens[i] -= olens[i];
    }
    /* mat has type MATAIJ; only the preallocation matching the actual
       (Seq or MPI) instantiation takes effect */
    ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr);

    /* restore dlens to full row lengths for the insertion loop below */
    for (i=0; i<m; i++) {
      dlens[i] += olens[i];
    }
    cnt  = 0;
    for (i=0; i<m; i++) {
      row  = rstart + i;
      ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr);
      cnt += dlens[i];
    }
    if (rank) {
      /* only non-root ranks allocated receive buffers */
      ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr);
    }
    ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
    ierr = PetscFree(rowners);CHKERRQ(ierr);
    /* keep ld for later MAT_REUSE_MATRIX calls; ownership passes to mat */
    ((Mat_MPIAIJ*)(mat->data))->ld = ld;
    *inmat = mat;
  } else {   /* column indices are already set; only need to move over numerical values from process 0 */
    Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data;
    Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data;
    mat   = *inmat;
    ierr  = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      /* send numerical values to other processes */
      gmata = (Mat_SeqAIJ*) gmat->data;
      ierr   = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr);
      gmataa = gmata->a;
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      nz   = gmata->i[rowners[1]]-gmata->i[rowners[0]];
    } else {
      /* receive numerical values from process 0*/
      nz   = Ad->nz + Ao->nz;
      ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa;
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* transfer numerical values into the diagonal A and off diagonal B parts of mat;
       gmataa holds each row's values in global column order, so each row splits as
       [left part of B][all of A][right part of B], with ld[] giving the left count */
    ld = ((Mat_MPIAIJ*)(mat->data))->ld;
    ad = Ad->a;
    ao = Ao->a;
    if (mat->rmap->n) {
      i  = 0;
      nz = ld[i];                                   ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    for (i=1; i<mat->rmap->n; i++) {
      /* right part of row i-1 of B followed by left part of row i of B */
      nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    i--;
    if (mat->rmap->n) {
      /* right part of the final row of B */
      nz = Ao->i[i+1] - Ao->i[i] - ld[i];           ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
    }
    if (rank) {
      /* gmataa was advanced while copying; free via the saved base pointer */
      ierr = PetscFree(gmataarestore);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
175dd6ea824SBarry Smith 
/*
  Local utility routine that creates a mapping from the global column
  number to the local number in the off-diagonal part of the local
  storage of the matrix.  When PETSC_USE_CTABLE is defined this is scalable
  at a slightly higher hash-table lookup cost; without it, it is not scalable
  (each processor stores an integer array of order N), but access is fast.
*/
1834a2ae208SSatish Balay #undef __FUNCT__
1844a2ae208SSatish Balay #define __FUNCT__ "CreateColmap_MPIAIJ_Private"
185dfbe8321SBarry Smith PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat)
1869e25ed09SBarry Smith {
18744a69424SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
1886849ba73SBarry Smith   PetscErrorCode ierr;
189d0f46423SBarry Smith   PetscInt       n = aij->B->cmap->n,i;
190dbb450caSBarry Smith 
1913a40ed3dSBarry Smith   PetscFunctionBegin;
192aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
193273d9f13SBarry Smith   ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr);
194b1fc9764SSatish Balay   for (i=0; i<n; i++){
1950f5bd95cSBarry Smith     ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr);
196b1fc9764SSatish Balay   }
197b1fc9764SSatish Balay #else
198d0f46423SBarry Smith   ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr);
199d0f46423SBarry Smith   ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
200d0f46423SBarry Smith   ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
201905e6a2fSBarry Smith   for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1;
202b1fc9764SSatish Balay #endif
2033a40ed3dSBarry Smith   PetscFunctionReturn(0);
2049e25ed09SBarry Smith }
2059e25ed09SBarry Smith 
206085a36d4SBarry Smith 
#define CHUNKSIZE   15  /* presumably the row-growth increment used by MatSeqXAIJReallocateAIJ — TODO confirm */
/*
   MatSetValues_SeqAIJ_A_Private(row,col,value,addv)

   Inserts (or, with ADD_VALUES, accumulates) one value at (row,col) of the
   "diagonal" sequential block A.  Written as a macro rather than a function;
   it relies on many locals of the enclosing MatSetValues_MPIAIJ() scope:
   rp1/ap1 (column indices / values of the current row), nrow1, rmax1,
   low1/high1/lastcol1 (search window carried across consecutive calls on the
   same row), aimax/ai/aj/aa/ailen, nonew, ignorezeroentries, a, A, am, and
   the scratch variables t, _i, ii, N.
*/
#define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \
{ \
    /* columns arriving in increasing order reuse the previous window */ \
    if (col <= lastcol1) low1 = 0; else high1 = nrow1; \
    lastcol1 = col;\
    /* binary search narrows the window to at most 5 entries */ \
    while (high1-low1 > 5) { \
      t = (low1+high1)/2; \
      if (rp1[t] > col) high1 = t; \
      else             low1  = t; \
    } \
      /* linear scan of the remaining window; hit => update in place */ \
      for (_i=low1; _i<high1; _i++) { \
        if (rp1[_i] > col) break; \
        if (rp1[_i] == col) { \
          if (addv == ADD_VALUES) ap1[_i] += value;   \
          else                    ap1[_i] = value; \
          goto a_noinsert; \
        } \
      }  \
      /* miss: a new nonzero is dropped (ignorezeroentries / nonew==1), */ \
      /* rejected (nonew==-1), or inserted after possible reallocation */ \
      if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \
      if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;}		\
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \
      N = nrow1++ - 1; a->nz++; high1++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp1[ii+1] = rp1[ii]; \
        ap1[ii+1] = ap1[ii]; \
      } \
      rp1[_i] = col;  \
      ap1[_i] = value;  \
      a_noinsert: ; \
      ailen[row] = nrow1; \
}
2400a198c4cSBarry Smith 
241085a36d4SBarry Smith 
/*
   MatSetValues_SeqAIJ_B_Private(row,col,value,addv)

   Mirror of MatSetValues_SeqAIJ_A_Private() for the "off-diagonal" block B.
   Uses the *2-suffixed locals of the enclosing MatSetValues_MPIAIJ() scope:
   rp2/ap2, nrow2, rmax2, low2/high2/lastcol2, bimax/bi/bj/ba/bilen, bm,
   plus the shared nonew, ignorezeroentries, and scratch t, _i, ii, N.
*/
#define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \
{ \
    /* columns arriving in increasing order reuse the previous window */ \
    if (col <= lastcol2) low2 = 0; else high2 = nrow2; \
    lastcol2 = col;\
    /* binary search narrows the window to at most 5 entries */ \
    while (high2-low2 > 5) { \
      t = (low2+high2)/2; \
      if (rp2[t] > col) high2 = t; \
      else             low2  = t; \
    } \
    /* linear scan of the remaining window; hit => update in place */ \
    for (_i=low2; _i<high2; _i++) { \
      if (rp2[_i] > col) break; \
      if (rp2[_i] == col) { \
	if (addv == ADD_VALUES) ap2[_i] += value;     \
	else                    ap2[_i] = value;      \
	goto b_noinsert; \
      } \
    } \
    /* miss: drop, reject, or insert after possible reallocation */ \
    if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
    if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;}		\
    if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
    MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \
    N = nrow2++ - 1; b->nz++; high2++;					\
    /* shift up all the later entries in this row */			\
    for (ii=N; ii>=_i; ii--) {						\
      rp2[ii+1] = rp2[ii];						\
      ap2[ii+1] = ap2[ii];						\
    }									\
    rp2[_i] = col;							\
    ap2[_i] = value;							\
    b_noinsert: ;								\
    bilen[row] = nrow2;							\
}
27430770e4dSSatish Balay 
2754a2ae208SSatish Balay #undef __FUNCT__
2762fd7e33dSBarry Smith #define __FUNCT__ "MatSetValuesRow_MPIAIJ"
2772fd7e33dSBarry Smith PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[])
2782fd7e33dSBarry Smith {
2792fd7e33dSBarry Smith   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)A->data;
2802fd7e33dSBarry Smith   Mat_SeqAIJ     *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data;
2812fd7e33dSBarry Smith   PetscErrorCode ierr;
2822fd7e33dSBarry Smith   PetscInt       l,*garray = mat->garray,diag;
2832fd7e33dSBarry Smith 
2842fd7e33dSBarry Smith   PetscFunctionBegin;
2852fd7e33dSBarry Smith   /* code only works for square matrices A */
2862fd7e33dSBarry Smith 
2872fd7e33dSBarry Smith   /* find size of row to the left of the diagonal part */
2882fd7e33dSBarry Smith   ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr);
2892fd7e33dSBarry Smith   row  = row - diag;
2902fd7e33dSBarry Smith   for (l=0; l<b->i[row+1]-b->i[row]; l++) {
2912fd7e33dSBarry Smith     if (garray[b->j[b->i[row]+l]] > diag) break;
2922fd7e33dSBarry Smith   }
2932fd7e33dSBarry Smith   ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr);
2942fd7e33dSBarry Smith 
2952fd7e33dSBarry Smith   /* diagonal part */
2962fd7e33dSBarry Smith   ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr);
2972fd7e33dSBarry Smith 
2982fd7e33dSBarry Smith   /* right of diagonal part */
2992fd7e33dSBarry Smith   ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr);
3002fd7e33dSBarry Smith   PetscFunctionReturn(0);
3012fd7e33dSBarry Smith }
3022fd7e33dSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIAIJ"
/*
   Inserts/adds an m x n logically-dense array of values into the matrix.
   Locally owned rows go straight into the diagonal (A) or off-diagonal (B)
   sequential blocks via the MatSetValues_SeqAIJ_{A,B}_Private() macros;
   rows owned by other processes are stashed for communication at assembly
   time (unless donotstash is set).
*/
PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscScalar    value;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
  PetscTruth     roworiented = aij->roworiented;

  /* Some Variables required in the macro */
  Mat            A = aij->A;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data;
  PetscInt       *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
  MatScalar      *aa = a->a;
  PetscTruth     ignorezeroentries = a->ignorezeroentries;
  Mat            B = aij->B;
  Mat_SeqAIJ     *b = (Mat_SeqAIJ*)B->data;
  PetscInt       *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
  MatScalar      *ba = b->a;

  /* per-row search state consumed by the A/B insertion macros */
  PetscInt       *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
  PetscInt       nonew = a->nonew;
  MatScalar      *ap1,*ap2;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;  /* negative rows are silently skipped */
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* locally owned row: prime the macro search state for both blocks */
      row      = im[i] - rstart;
      lastcol1 = -1;
      rp1      = aj + ai[row];
      ap1      = aa + ai[row];
      rmax1    = aimax[row];
      nrow1    = ailen[row];
      low1     = 0;
      high1    = nrow1;
      lastcol2 = -1;
      rp2      = bj + bi[row];
      ap2      = ba + bi[row];
      rmax2    = bimax[row];
      nrow2    = bilen[row];
      low2     = 0;
      high2    = nrow2;

      for (j=0; j<n; j++) {
        /* v == NULL means "insert explicit zeros" */
        if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0;
        if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
        if (in[j] >= cstart && in[j] < cend){
          /* column in the diagonal block */
          col = in[j] - cstart;
          MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
#endif
        else {
          /* column in the off-diagonal block */
          if (mat->was_assembled) {
            /* after assembly B uses compacted local column numbers; map the
               global column through colmap (built lazily) */
            if (!aij->colmap) {
              ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined (PETSC_USE_CTABLE)
            ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
	    col--;
#else
            col = aij->colmap[in[j]] - 1;
#endif
            if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
              /* column not yet present in B: fall back to global numbering */
              ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
              col =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
              B = aij->B;
              b = (Mat_SeqAIJ*)B->data;
              bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a;
              rp2      = bj + bi[row];
              ap2      = ba + bi[row];
              rmax2    = bimax[row];
              nrow2    = bilen[row];
              low2     = 0;
              high2    = nrow2;
              bm       = aij->B->rmap->n;
              ba = b->a;
            }
          } else col = in[j];
          MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
        }
      }
    } else {
      /* off-process row: stash for the assembly-time scatter */
      if (!aij->donotstash) {
        if (roworiented) {
          if (ignorezeroentries && v[i*n] == 0.0) continue;
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
        } else {
          if (ignorezeroentries && v[i] == 0.0) continue;
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
4078a729477SBarry Smith 
4084a2ae208SSatish Balay #undef __FUNCT__
4094a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIAIJ"
410b1d57f15SBarry Smith PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
411b49de8d1SLois Curfman McInnes {
412b49de8d1SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
413dfbe8321SBarry Smith   PetscErrorCode ierr;
414d0f46423SBarry Smith   PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
415d0f46423SBarry Smith   PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
416b49de8d1SLois Curfman McInnes 
4173a40ed3dSBarry Smith   PetscFunctionBegin;
418b49de8d1SLois Curfman McInnes   for (i=0; i<m; i++) {
41997e567efSBarry Smith     if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
420d0f46423SBarry Smith     if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
421b49de8d1SLois Curfman McInnes     if (idxm[i] >= rstart && idxm[i] < rend) {
422b49de8d1SLois Curfman McInnes       row = idxm[i] - rstart;
423b49de8d1SLois Curfman McInnes       for (j=0; j<n; j++) {
42497e567efSBarry Smith         if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
425d0f46423SBarry Smith         if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
426b49de8d1SLois Curfman McInnes         if (idxn[j] >= cstart && idxn[j] < cend){
427b49de8d1SLois Curfman McInnes           col = idxn[j] - cstart;
428b49de8d1SLois Curfman McInnes           ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
429fa852ad4SSatish Balay         } else {
430905e6a2fSBarry Smith           if (!aij->colmap) {
431905e6a2fSBarry Smith             ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
432905e6a2fSBarry Smith           }
433aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
4340f5bd95cSBarry Smith           ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr);
435fa46199cSSatish Balay           col --;
436b1fc9764SSatish Balay #else
437905e6a2fSBarry Smith           col = aij->colmap[idxn[j]] - 1;
438b1fc9764SSatish Balay #endif
439e60e1c95SSatish Balay           if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0;
440d9d09a02SSatish Balay           else {
441b49de8d1SLois Curfman McInnes             ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
442b49de8d1SLois Curfman McInnes           }
443b49de8d1SLois Curfman McInnes         }
444b49de8d1SLois Curfman McInnes       }
445a8c6a408SBarry Smith     } else {
44629bbc08cSBarry Smith       SETERRQ(PETSC_ERR_SUP,"Only local values currently supported");
447b49de8d1SLois Curfman McInnes     }
448b49de8d1SLois Curfman McInnes   }
4493a40ed3dSBarry Smith   PetscFunctionReturn(0);
450b49de8d1SLois Curfman McInnes }
451bc5ccf88SSatish Balay 
4524a2ae208SSatish Balay #undef __FUNCT__
4534a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ"
454dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode)
455bc5ccf88SSatish Balay {
456bc5ccf88SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
457dfbe8321SBarry Smith   PetscErrorCode ierr;
458b1d57f15SBarry Smith   PetscInt       nstash,reallocs;
459bc5ccf88SSatish Balay   InsertMode     addv;
460bc5ccf88SSatish Balay 
461bc5ccf88SSatish Balay   PetscFunctionBegin;
462bc5ccf88SSatish Balay   if (aij->donotstash) {
463bc5ccf88SSatish Balay     PetscFunctionReturn(0);
464bc5ccf88SSatish Balay   }
465bc5ccf88SSatish Balay 
466bc5ccf88SSatish Balay   /* make sure all processors are either in INSERTMODE or ADDMODE */
4677adad957SLisandro Dalcin   ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr);
468bc5ccf88SSatish Balay   if (addv == (ADD_VALUES|INSERT_VALUES)) {
46929bbc08cSBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
470bc5ccf88SSatish Balay   }
471bc5ccf88SSatish Balay   mat->insertmode = addv; /* in case this processor had no cache */
472bc5ccf88SSatish Balay 
473d0f46423SBarry Smith   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
4748798bf22SSatish Balay   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
475ae15b995SBarry Smith   ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
476bc5ccf88SSatish Balay   PetscFunctionReturn(0);
477bc5ccf88SSatish Balay }
478bc5ccf88SSatish Balay 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIAIJ"
/*
    Completes assembly of a parallel AIJ matrix: receives and inserts any
  values that were stashed for off-process rows, then assembles the local
  diagonal (aij->A) and off-diagonal (aij->B) sequential blocks, handling
  possible disassembly/reassembly of B collectively across all processes.
*/
PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *a=(Mat_SeqAIJ *)aij->A->data;
  PetscErrorCode ierr;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscTruth     other_disassembled;
  PetscScalar    *val;
  InsertMode     addv = mat->insertmode;

  /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!aij->donotstash) {
    /* drain the stash: each message is a batch of (row,col,val) triples destined for this process */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
  }
  a->compressedrow.use     = PETSC_FALSE;
  ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselfs, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqAIJ*)aij->B->data)->nonew)  {
    /* MPI_PROD of was_assembled flags: zero iff at least one process disassembled */
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
    }
  }
  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    /* first final assembly: set up the machinery MatMult() needs (see MatSetUpMultiply_MPIAIJ) */
    ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr);
  ((Mat_SeqAIJ *)aij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */
  ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr);

  /* row-value workspace (used by MatGetRow) is invalidated by assembly */
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  aij->rowvalues = 0;

  /* used by MatAXPY() */
  a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0;  /* b->xtoy = 0 */
  a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0;  /* b->XtoY = 0 */

  PetscFunctionReturn(0);
}
545bc5ccf88SSatish Balay 
5464a2ae208SSatish Balay #undef __FUNCT__
5474a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ"
548dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
5491eb62cbbSBarry Smith {
55044a69424SLois Curfman McInnes   Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
551dfbe8321SBarry Smith   PetscErrorCode ierr;
5523a40ed3dSBarry Smith 
5533a40ed3dSBarry Smith   PetscFunctionBegin;
55478b31e54SBarry Smith   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
55578b31e54SBarry Smith   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
5563a40ed3dSBarry Smith   PetscFunctionReturn(0);
5571eb62cbbSBarry Smith }
5581eb62cbbSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_MPIAIJ"
/*
    Zeros the given (globally numbered) rows of the matrix, optionally
  placing 'diag' on the diagonal of each zeroed row.

    Each process may name rows owned by any process, so the row indices are
  first routed to their owners with a hand-rolled MPI rendezvous
  (counts via PetscMaxSum, then nonblocking sends/receives), after which
  each process zeros its own local rows.
*/
PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscMPIInt    size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt       i,*owners = A->rmap->range;
  PetscInt       *nprocs,j,idx,nsends,row;
  PetscInt       nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt       *rvalues,count,base,slen,*source;
  PetscInt       *lens,*lrows,*values,rstart=A->rmap->rstart;
  MPI_Comm       comm = ((PetscObject)A)->comm;
  MPI_Request    *send_waits,*recv_waits;
  MPI_Status     recv_status,*send_status;
#if defined(PETSC_DEBUG)
  PetscTruth     found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  /*  first count number of contributors to each processor */
  /* nprocs[2*j] = #rows going to process j; nprocs[2*j+1] = 1 iff anything goes to j */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  j = 0;
  for (i=0; i<N; i++) {
    /* linear ownership search resumes from the previous hit; restart only
       when the input indices are not ascending */
    if (lastidx > (idx = rows[i])) j = 0;
    lastidx = idx;
    for (; j<size; j++) {
      if (idx >= owners[j] && idx < owners[j+1]) {
        nprocs[2*j]++;
        nprocs[2*j+1] = 1;
        owner[i] = j;
#if defined(PETSC_DEBUG)
        found = PETSC_TRUE;
#endif
        break;
      }
    }
#if defined(PETSC_DEBUG)
    if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
    found = PETSC_FALSE;
#endif
  }
  nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);

  /* post receives:   */
  ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
      1) starts[i] gives the starting index in svalues for stuff going to
         the ith processor
  */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  /* pack row indices grouped by destination process */
  for (i=0; i<N; i++) {
    svalues[starts[owner[i]]++] = rows[i];
  }

  /* rebuild starts[] (the packing loop above advanced it) and launch the sends */
  starts[0] = 0;
  for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  count = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i+1]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  base = owners[rank];

  /*  wait on receives */
  ierr   = PetscMalloc(2*(nrecvs+1)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
  source = lens + nrecvs;
  count  = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
    source[imdex]  = recv_status.MPI_SOURCE;
    lens[imdex]    = n;
    slen          += n;
    count--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* move the data into the send scatter */
  /* lrows[] collects all rows this process must zero, in LOCAL numbering */
  ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
  count = 0;
  for (i=0; i<nrecvs; i++) {
    values = rvalues + i*nmax;
    for (j=0; j<lens[i]; j++) {
      lrows[count++] = values[j] - base;
    }
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree(lens);CHKERRQ(ierr);
  ierr = PetscFree(owner);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* actually zap the local rows */
  /*
        Zero the required rows. If the "diagonal block" of the matrix
     is square and the user wishes to set the diagonal we use separate
     code so that MatSetValues() is not called for each diagonal allocating
     new memory, thus calling lots of mallocs and slowing things down.

       Contributed by: Matthew Knepley
  */
  /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
  ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr);
  if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
    /* fast path: the diagonal entry lives in the local diagonal block */
    ierr      = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr);
  } else if (diag != 0.0) {
    /* rectangular diagonal block: insert diagonal entries one by one via MatSetValues() */
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
    if (((Mat_SeqAIJ*)l->A->data)->nonew) {
      SETERRQ(PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\
MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
    }
    for (i = 0; i < slen; i++) {
      row  = lrows[i] + rstart;
      ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
  }
  ierr = PetscFree(lrows);CHKERRQ(ierr);

  /* wait on sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(svalues);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
7101eb62cbbSBarry Smith 
7114a2ae208SSatish Balay #undef __FUNCT__
7124a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ"
713dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy)
7141eb62cbbSBarry Smith {
715416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
716dfbe8321SBarry Smith   PetscErrorCode ierr;
717b1d57f15SBarry Smith   PetscInt       nt;
718416022c9SBarry Smith 
7193a40ed3dSBarry Smith   PetscFunctionBegin;
720a2ce50c7SBarry Smith   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
721d0f46423SBarry Smith   if (nt != A->cmap->n) {
722d0f46423SBarry Smith     SETERRQ2(PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt);
723fbd6ef76SBarry Smith   }
724ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
725f830108cSBarry Smith   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
726ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
727f830108cSBarry Smith   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
7283a40ed3dSBarry Smith   PetscFunctionReturn(0);
7291eb62cbbSBarry Smith }
7301eb62cbbSBarry Smith 
7314a2ae208SSatish Balay #undef __FUNCT__
7324a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIAIJ"
733dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
734da3a660dSBarry Smith {
735416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
736dfbe8321SBarry Smith   PetscErrorCode ierr;
7373a40ed3dSBarry Smith 
7383a40ed3dSBarry Smith   PetscFunctionBegin;
739ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
740f830108cSBarry Smith   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
741ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
742f830108cSBarry Smith   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
7433a40ed3dSBarry Smith   PetscFunctionReturn(0);
744da3a660dSBarry Smith }
745da3a660dSBarry Smith 
7464a2ae208SSatish Balay #undef __FUNCT__
7474a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ"
748dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy)
749da3a660dSBarry Smith {
750416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
751dfbe8321SBarry Smith   PetscErrorCode ierr;
752a5ff213dSBarry Smith   PetscTruth     merged;
753da3a660dSBarry Smith 
7543a40ed3dSBarry Smith   PetscFunctionBegin;
755a5ff213dSBarry Smith   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
756da3a660dSBarry Smith   /* do nondiagonal part */
7577c922b88SBarry Smith   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
758a5ff213dSBarry Smith   if (!merged) {
759da3a660dSBarry Smith     /* send it on its way */
760ca9f406cSSatish Balay     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
761da3a660dSBarry Smith     /* do local part */
7627c922b88SBarry Smith     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
763da3a660dSBarry Smith     /* receive remote parts: note this assumes the values are not actually */
764a5ff213dSBarry Smith     /* added in yy until the next line, */
765ca9f406cSSatish Balay     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
766a5ff213dSBarry Smith   } else {
767a5ff213dSBarry Smith     /* do local part */
768a5ff213dSBarry Smith     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
769a5ff213dSBarry Smith     /* send it on its way */
770ca9f406cSSatish Balay     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
771a5ff213dSBarry Smith     /* values actually were received in the Begin() but we need to call this nop */
772ca9f406cSSatish Balay     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
773a5ff213dSBarry Smith   }
7743a40ed3dSBarry Smith   PetscFunctionReturn(0);
775da3a660dSBarry Smith }
776da3a660dSBarry Smith 
777cd0d46ebSvictorle EXTERN_C_BEGIN
778cd0d46ebSvictorle #undef __FUNCT__
7795fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ"
78013c77408SMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscTruth *f)
781cd0d46ebSvictorle {
7824f423910Svictorle   MPI_Comm       comm;
783cd0d46ebSvictorle   Mat_MPIAIJ     *Aij = (Mat_MPIAIJ *) Amat->data, *Bij;
78466501d38Svictorle   Mat            Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs;
785cd0d46ebSvictorle   IS             Me,Notme;
7866849ba73SBarry Smith   PetscErrorCode ierr;
787b1d57f15SBarry Smith   PetscInt       M,N,first,last,*notme,i;
788b1d57f15SBarry Smith   PetscMPIInt    size;
789cd0d46ebSvictorle 
790cd0d46ebSvictorle   PetscFunctionBegin;
79142e5f5b4Svictorle 
79242e5f5b4Svictorle   /* Easy test: symmetric diagonal block */
79366501d38Svictorle   Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A;
7945485867bSBarry Smith   ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr);
795cd0d46ebSvictorle   if (!*f) PetscFunctionReturn(0);
7964f423910Svictorle   ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
797b1d57f15SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
798b1d57f15SBarry Smith   if (size == 1) PetscFunctionReturn(0);
79942e5f5b4Svictorle 
80042e5f5b4Svictorle   /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */
801cd0d46ebSvictorle   ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr);
802cd0d46ebSvictorle   ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr);
803b1d57f15SBarry Smith   ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),&notme);CHKERRQ(ierr);
804cd0d46ebSvictorle   for (i=0; i<first; i++) notme[i] = i;
805cd0d46ebSvictorle   for (i=last; i<M; i++) notme[i-last+first] = i;
806268466fbSBarry Smith   ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,&Notme);CHKERRQ(ierr);
807268466fbSBarry Smith   ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr);
808268466fbSBarry Smith   ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr);
80966501d38Svictorle   Aoff = Aoffs[0];
810268466fbSBarry Smith   ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr);
81166501d38Svictorle   Boff = Boffs[0];
8125485867bSBarry Smith   ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr);
81366501d38Svictorle   ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr);
81466501d38Svictorle   ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr);
81542e5f5b4Svictorle   ierr = ISDestroy(Me);CHKERRQ(ierr);
81642e5f5b4Svictorle   ierr = ISDestroy(Notme);CHKERRQ(ierr);
81742e5f5b4Svictorle 
818cd0d46ebSvictorle   PetscFunctionReturn(0);
819cd0d46ebSvictorle }
820cd0d46ebSvictorle EXTERN_C_END
821cd0d46ebSvictorle 
8224a2ae208SSatish Balay #undef __FUNCT__
8234a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ"
824dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
825da3a660dSBarry Smith {
826416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
827dfbe8321SBarry Smith   PetscErrorCode ierr;
828da3a660dSBarry Smith 
8293a40ed3dSBarry Smith   PetscFunctionBegin;
830da3a660dSBarry Smith   /* do nondiagonal part */
8317c922b88SBarry Smith   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
832da3a660dSBarry Smith   /* send it on its way */
833ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
834da3a660dSBarry Smith   /* do local part */
8357c922b88SBarry Smith   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
836a5ff213dSBarry Smith   /* receive remote parts */
837ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
8383a40ed3dSBarry Smith   PetscFunctionReturn(0);
839da3a660dSBarry Smith }
840da3a660dSBarry Smith 
8411eb62cbbSBarry Smith /*
8421eb62cbbSBarry Smith   This only works correctly for square matrices where the subblock A->A is the
8431eb62cbbSBarry Smith    diagonal block
8441eb62cbbSBarry Smith */
8454a2ae208SSatish Balay #undef __FUNCT__
8464a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ"
847dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v)
8481eb62cbbSBarry Smith {
849dfbe8321SBarry Smith   PetscErrorCode ierr;
850416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
8513a40ed3dSBarry Smith 
8523a40ed3dSBarry Smith   PetscFunctionBegin;
853d0f46423SBarry Smith   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
854d0f46423SBarry Smith   if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) {
85529bbc08cSBarry Smith     SETERRQ(PETSC_ERR_ARG_SIZ,"row partition must equal col partition");
8563a40ed3dSBarry Smith   }
8573a40ed3dSBarry Smith   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
8583a40ed3dSBarry Smith   PetscFunctionReturn(0);
8591eb62cbbSBarry Smith }
8601eb62cbbSBarry Smith 
8614a2ae208SSatish Balay #undef __FUNCT__
8624a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ"
863f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa)
864052efed2SBarry Smith {
865052efed2SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
866dfbe8321SBarry Smith   PetscErrorCode ierr;
8673a40ed3dSBarry Smith 
8683a40ed3dSBarry Smith   PetscFunctionBegin;
869f4df32b1SMatthew Knepley   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
870f4df32b1SMatthew Knepley   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
8713a40ed3dSBarry Smith   PetscFunctionReturn(0);
872052efed2SBarry Smith }
873052efed2SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ"
/*
    Frees everything owned by the MPIAIJ implementation: the stash, the two
  sequential blocks, the column map, garray, the ghost work vector and its
  scatter, and the Mat_MPIAIJ struct itself; then removes the type name and
  all composed type-specific functions from the Mat object.
*/
PetscErrorCode MatDestroy_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatDestroy(aij->A);CHKERRQ(ierr);
  ierr = MatDestroy(aij->B);CHKERRQ(ierr);
  /* colmap is a PetscTable or a plain array depending on the build */
#if defined (PETSC_USE_CTABLE)
  if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);}
#else
  ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(aij->garray);CHKERRQ(ierr);
  /* lvec/Mvctx may never have been created if the matrix was not assembled */
  if (aij->lvec)   {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);}
  if (aij->Mvctx)  {ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);}
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(aij->ld);CHKERRQ(ierr);
  ierr = PetscFree(aij);CHKERRQ(ierr);

  /* detach the type-specific pieces so the Mat shell can be retyped */
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
910ee50ffe9SBarry Smith 
9114a2ae208SSatish Balay #undef __FUNCT__
9128e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary"
913dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer)
9148e2fed03SBarry Smith {
9158e2fed03SBarry Smith   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
9168e2fed03SBarry Smith   Mat_SeqAIJ*       A = (Mat_SeqAIJ*)aij->A->data;
9178e2fed03SBarry Smith   Mat_SeqAIJ*       B = (Mat_SeqAIJ*)aij->B->data;
9186849ba73SBarry Smith   PetscErrorCode    ierr;
91932dcc486SBarry Smith   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
9206f69ff64SBarry Smith   int               fd;
921a788621eSSatish Balay   PetscInt          nz,header[4],*row_lengths,*range=0,rlen,i;
922d0f46423SBarry Smith   PetscInt          nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz;
9238e2fed03SBarry Smith   PetscScalar       *column_values;
9248e2fed03SBarry Smith 
9258e2fed03SBarry Smith   PetscFunctionBegin;
9267adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
9277adad957SLisandro Dalcin   ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr);
9288e2fed03SBarry Smith   nz   = A->nz + B->nz;
929958c9bccSBarry Smith   if (!rank) {
9308e2fed03SBarry Smith     header[0] = MAT_FILE_COOKIE;
931d0f46423SBarry Smith     header[1] = mat->rmap->N;
932d0f46423SBarry Smith     header[2] = mat->cmap->N;
9337adad957SLisandro Dalcin     ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
9348e2fed03SBarry Smith     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
9356f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9368e2fed03SBarry Smith     /* get largest number of rows any processor has */
937d0f46423SBarry Smith     rlen = mat->rmap->n;
938d0f46423SBarry Smith     range = mat->rmap->range;
9398e2fed03SBarry Smith     for (i=1; i<size; i++) {
9408e2fed03SBarry Smith       rlen = PetscMax(rlen,range[i+1] - range[i]);
9418e2fed03SBarry Smith     }
9428e2fed03SBarry Smith   } else {
9437adad957SLisandro Dalcin     ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
944d0f46423SBarry Smith     rlen = mat->rmap->n;
9458e2fed03SBarry Smith   }
9468e2fed03SBarry Smith 
9478e2fed03SBarry Smith   /* load up the local row counts */
948b1d57f15SBarry Smith   ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr);
949d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
9508e2fed03SBarry Smith     row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
9518e2fed03SBarry Smith   }
9528e2fed03SBarry Smith 
9538e2fed03SBarry Smith   /* store the row lengths to the file */
954958c9bccSBarry Smith   if (!rank) {
9558e2fed03SBarry Smith     MPI_Status status;
956d0f46423SBarry Smith     ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9578e2fed03SBarry Smith     for (i=1; i<size; i++) {
9588e2fed03SBarry Smith       rlen = range[i+1] - range[i];
9597adad957SLisandro Dalcin       ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
9606f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9618e2fed03SBarry Smith     }
9628e2fed03SBarry Smith   } else {
963d0f46423SBarry Smith     ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
9648e2fed03SBarry Smith   }
9658e2fed03SBarry Smith   ierr = PetscFree(row_lengths);CHKERRQ(ierr);
9668e2fed03SBarry Smith 
9678e2fed03SBarry Smith   /* load up the local column indices */
9688e2fed03SBarry Smith   nzmax = nz; /* )th processor needs space a largest processor needs */
9697adad957SLisandro Dalcin   ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
970b1d57f15SBarry Smith   ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
9718e2fed03SBarry Smith   cnt  = 0;
972d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
9738e2fed03SBarry Smith     for (j=B->i[i]; j<B->i[i+1]; j++) {
9748e2fed03SBarry Smith       if ( (col = garray[B->j[j]]) > cstart) break;
9758e2fed03SBarry Smith       column_indices[cnt++] = col;
9768e2fed03SBarry Smith     }
9778e2fed03SBarry Smith     for (k=A->i[i]; k<A->i[i+1]; k++) {
9788e2fed03SBarry Smith       column_indices[cnt++] = A->j[k] + cstart;
9798e2fed03SBarry Smith     }
9808e2fed03SBarry Smith     for (; j<B->i[i+1]; j++) {
9818e2fed03SBarry Smith       column_indices[cnt++] = garray[B->j[j]];
9828e2fed03SBarry Smith     }
9838e2fed03SBarry Smith   }
98477431f27SBarry Smith   if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);
9858e2fed03SBarry Smith 
9868e2fed03SBarry Smith   /* store the column indices to the file */
987958c9bccSBarry Smith   if (!rank) {
9888e2fed03SBarry Smith     MPI_Status status;
9896f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9908e2fed03SBarry Smith     for (i=1; i<size; i++) {
9917adad957SLisandro Dalcin       ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
99277431f27SBarry Smith       if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
9937adad957SLisandro Dalcin       ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
9946f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9958e2fed03SBarry Smith     }
9968e2fed03SBarry Smith   } else {
9977adad957SLisandro Dalcin     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
9987adad957SLisandro Dalcin     ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
9998e2fed03SBarry Smith   }
10008e2fed03SBarry Smith   ierr = PetscFree(column_indices);CHKERRQ(ierr);
10018e2fed03SBarry Smith 
10028e2fed03SBarry Smith   /* load up the local column values */
10038e2fed03SBarry Smith   ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
10048e2fed03SBarry Smith   cnt  = 0;
1005d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
10068e2fed03SBarry Smith     for (j=B->i[i]; j<B->i[i+1]; j++) {
10078e2fed03SBarry Smith       if ( garray[B->j[j]] > cstart) break;
10088e2fed03SBarry Smith       column_values[cnt++] = B->a[j];
10098e2fed03SBarry Smith     }
10108e2fed03SBarry Smith     for (k=A->i[i]; k<A->i[i+1]; k++) {
10118e2fed03SBarry Smith       column_values[cnt++] = A->a[k];
10128e2fed03SBarry Smith     }
10138e2fed03SBarry Smith     for (; j<B->i[i+1]; j++) {
10148e2fed03SBarry Smith       column_values[cnt++] = B->a[j];
10158e2fed03SBarry Smith     }
10168e2fed03SBarry Smith   }
101777431f27SBarry Smith   if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);
10188e2fed03SBarry Smith 
10198e2fed03SBarry Smith   /* store the column values to the file */
1020958c9bccSBarry Smith   if (!rank) {
10218e2fed03SBarry Smith     MPI_Status status;
10226f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
10238e2fed03SBarry Smith     for (i=1; i<size; i++) {
10247adad957SLisandro Dalcin       ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
102577431f27SBarry Smith       if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
10267adad957SLisandro Dalcin       ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
10276f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
10288e2fed03SBarry Smith     }
10298e2fed03SBarry Smith   } else {
10307adad957SLisandro Dalcin     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
10317adad957SLisandro Dalcin     ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
10328e2fed03SBarry Smith   }
10338e2fed03SBarry Smith   ierr = PetscFree(column_values);CHKERRQ(ierr);
10348e2fed03SBarry Smith   PetscFunctionReturn(0);
10358e2fed03SBarry Smith }
10368e2fed03SBarry Smith 
10378e2fed03SBarry Smith #undef __FUNCT__
10384a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket"
1039dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
1040416022c9SBarry Smith {
104144a69424SLois Curfman McInnes   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
1042dfbe8321SBarry Smith   PetscErrorCode    ierr;
104332dcc486SBarry Smith   PetscMPIInt       rank = aij->rank,size = aij->size;
1044d38fa0fbSBarry Smith   PetscTruth        isdraw,iascii,isbinary;
1045b0a32e0cSBarry Smith   PetscViewer       sviewer;
1046f3ef73ceSBarry Smith   PetscViewerFormat format;
1047416022c9SBarry Smith 
10483a40ed3dSBarry Smith   PetscFunctionBegin;
1049fb9695e5SSatish Balay   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
105032077d6dSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
10518e2fed03SBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
105232077d6dSBarry Smith   if (iascii) {
1053b0a32e0cSBarry Smith     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1054456192e2SBarry Smith     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
10554e220ebcSLois Curfman McInnes       MatInfo    info;
1056923f20ffSKris Buschelman       PetscTruth inodes;
1057923f20ffSKris Buschelman 
10587adad957SLisandro Dalcin       ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
1059888f2ed8SSatish Balay       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
1060923f20ffSKris Buschelman       ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr);
1061923f20ffSKris Buschelman       if (!inodes) {
106277431f27SBarry Smith         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n",
1063d0f46423SBarry Smith 					      rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
10646831982aSBarry Smith       } else {
106577431f27SBarry Smith         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n",
1066d0f46423SBarry Smith 		    rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
10676831982aSBarry Smith       }
1068888f2ed8SSatish Balay       ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
106977431f27SBarry Smith       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
1070888f2ed8SSatish Balay       ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
107177431f27SBarry Smith       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
1072b0a32e0cSBarry Smith       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
107307d81ca4SBarry Smith       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
1074a40aa06bSLois Curfman McInnes       ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr);
10753a40ed3dSBarry Smith       PetscFunctionReturn(0);
1076fb9695e5SSatish Balay     } else if (format == PETSC_VIEWER_ASCII_INFO) {
1077923f20ffSKris Buschelman       PetscInt   inodecount,inodelimit,*inodes;
1078923f20ffSKris Buschelman       ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr);
1079923f20ffSKris Buschelman       if (inodes) {
1080923f20ffSKris Buschelman         ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr);
1081d38fa0fbSBarry Smith       } else {
1082d38fa0fbSBarry Smith         ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr);
1083d38fa0fbSBarry Smith       }
10843a40ed3dSBarry Smith       PetscFunctionReturn(0);
10854aedb280SBarry Smith     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
10864aedb280SBarry Smith       PetscFunctionReturn(0);
108708480c60SBarry Smith     }
10888e2fed03SBarry Smith   } else if (isbinary) {
10898e2fed03SBarry Smith     if (size == 1) {
10907adad957SLisandro Dalcin       ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
10918e2fed03SBarry Smith       ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
10928e2fed03SBarry Smith     } else {
10938e2fed03SBarry Smith       ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr);
10948e2fed03SBarry Smith     }
10958e2fed03SBarry Smith     PetscFunctionReturn(0);
10960f5bd95cSBarry Smith   } else if (isdraw) {
1097b0a32e0cSBarry Smith     PetscDraw  draw;
109819bcc07fSBarry Smith     PetscTruth isnull;
1099b0a32e0cSBarry Smith     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
1100b0a32e0cSBarry Smith     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
110119bcc07fSBarry Smith   }
110219bcc07fSBarry Smith 
110317699dbbSLois Curfman McInnes   if (size == 1) {
11047adad957SLisandro Dalcin     ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
110578b31e54SBarry Smith     ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
11063a40ed3dSBarry Smith   } else {
110795373324SBarry Smith     /* assemble the entire matrix onto first processor. */
110895373324SBarry Smith     Mat         A;
1109ec8511deSBarry Smith     Mat_SeqAIJ  *Aloc;
1110d0f46423SBarry Smith     PetscInt    M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct;
1111dd6ea824SBarry Smith     MatScalar   *a;
11122ee70a88SLois Curfman McInnes 
1113d0f46423SBarry Smith     if (mat->rmap->N > 1024) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 512 rows, use binary format instead");
11140805154bSBarry Smith 
11157adad957SLisandro Dalcin     ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr);
111617699dbbSLois Curfman McInnes     if (!rank) {
1117f69a0ea3SMatthew Knepley       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
11183a40ed3dSBarry Smith     } else {
1119f69a0ea3SMatthew Knepley       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
112095373324SBarry Smith     }
1121f204ca49SKris Buschelman     /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */
1122f204ca49SKris Buschelman     ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
1123f204ca49SKris Buschelman     ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
112452e6d16bSBarry Smith     ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr);
1125416022c9SBarry Smith 
112695373324SBarry Smith     /* copy over the A part */
1127ec8511deSBarry Smith     Aloc = (Mat_SeqAIJ*)aij->A->data;
1128d0f46423SBarry Smith     m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1129d0f46423SBarry Smith     row = mat->rmap->rstart;
1130d0f46423SBarry Smith     for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;}
113195373324SBarry Smith     for (i=0; i<m; i++) {
1132416022c9SBarry Smith       ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr);
113395373324SBarry Smith       row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i];
113495373324SBarry Smith     }
11352ee70a88SLois Curfman McInnes     aj = Aloc->j;
1136d0f46423SBarry Smith     for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;}
113795373324SBarry Smith 
113895373324SBarry Smith     /* copy over the B part */
1139ec8511deSBarry Smith     Aloc = (Mat_SeqAIJ*)aij->B->data;
1140d0f46423SBarry Smith     m    = aij->B->rmap->n;  ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1141d0f46423SBarry Smith     row  = mat->rmap->rstart;
1142b1d57f15SBarry Smith     ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
1143b0a32e0cSBarry Smith     ct   = cols;
1144bfec09a0SHong Zhang     for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];}
114595373324SBarry Smith     for (i=0; i<m; i++) {
1146416022c9SBarry Smith       ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr);
114795373324SBarry Smith       row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i];
114895373324SBarry Smith     }
1149606d414cSSatish Balay     ierr = PetscFree(ct);CHKERRQ(ierr);
11506d4a8577SBarry Smith     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
11516d4a8577SBarry Smith     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
115255843e3eSBarry Smith     /*
115355843e3eSBarry Smith        Everyone has to call to draw the matrix since the graphics waits are
1154b0a32e0cSBarry Smith        synchronized across all processors that share the PetscDraw object
115555843e3eSBarry Smith     */
1156b0a32e0cSBarry Smith     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
1157e03a110bSBarry Smith     if (!rank) {
11587adad957SLisandro Dalcin       ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
11596831982aSBarry Smith       ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
116095373324SBarry Smith     }
1161b0a32e0cSBarry Smith     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
116278b31e54SBarry Smith     ierr = MatDestroy(A);CHKERRQ(ierr);
116395373324SBarry Smith   }
11643a40ed3dSBarry Smith   PetscFunctionReturn(0);
11651eb62cbbSBarry Smith }
11661eb62cbbSBarry Smith 
11674a2ae208SSatish Balay #undef __FUNCT__
11684a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ"
1169dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer)
1170416022c9SBarry Smith {
1171dfbe8321SBarry Smith   PetscErrorCode ierr;
117232077d6dSBarry Smith   PetscTruth     iascii,isdraw,issocket,isbinary;
1173416022c9SBarry Smith 
11743a40ed3dSBarry Smith   PetscFunctionBegin;
117532077d6dSBarry Smith   ierr  = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
1176fb9695e5SSatish Balay   ierr  = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
1177fb9695e5SSatish Balay   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
1178b0a32e0cSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr);
117932077d6dSBarry Smith   if (iascii || isdraw || isbinary || issocket) {
11807b2a1423SBarry Smith     ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
11815cd90555SBarry Smith   } else {
118279a5c55eSBarry Smith     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name);
1183416022c9SBarry Smith   }
11843a40ed3dSBarry Smith   PetscFunctionReturn(0);
1185416022c9SBarry Smith }
1186416022c9SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatRelax_MPIAIJ"
/*
    Block Jacobi / local SOR relaxation for MPIAIJ matrices.

    Each outer iteration scatters the current solution xx into the ghost
    vector lvec, forms the locally-updated right-hand side
    bb1 = bb - B*lvec (B is the off-diagonal block), and then runs the
    requested local sweep on the diagonal block A.  Truly parallel
    (non-local) SOR is not supported.

    NOTE(review): the SOR_ZERO_INITIAL_GUESS call in the symmetric branch
    passes (lits,lits) for the (its,lits) arguments of A's relax routine,
    while the forward/backward branches pass (lits,PETSC_NULL); PETSC_NULL
    used as a PetscInt argument evaluates to 0 -- confirm this is the
    intended iteration count against the SeqAIJ relax implementation.
*/
PetscErrorCode MatRelax_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  PetscErrorCode ierr;
  Vec            bb1;   /* work vector holding bb - B*x */

  PetscFunctionBegin;
  ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);

  if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){
    /* with a zero initial guess the off-diagonal contribution vanishes,
       so the first sweep can skip the scatter and rhs update */
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
      its--;
    }

    while (its--) {
      ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* update rhs: bb1 = bb - B*x */
      ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
      ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);

      /* local sweep */
      ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
    }
  } else if (flag & SOR_LOCAL_FORWARD_SWEEP){
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
      its--;
    }
    while (its--) {
      ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* update rhs: bb1 = bb - B*x */
      ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
      ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);

      /* local sweep */
      ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
    }
  } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
      its--;
    }
    while (its--) {
      ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* update rhs: bb1 = bb - B*x */
      ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
      ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);

      /* local sweep */
      ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
    }
  } else {
    SETERRQ(PETSC_ERR_SUP,"Parallel SOR not supported");
  }

  ierr = VecDestroy(bb1);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1254a66be287SLois Curfman McInnes 
#undef __FUNCT__
#define __FUNCT__ "MatPermute_MPIAIJ"
/*
    Permutes the rows and columns of a parallel AIJ matrix.

    The row permutation is made collective (gathered across the matrix's
    communicator), inverted, and its locally-owned slice extracted; the
    column permutation is handled purely locally.  The permuted matrix is
    then produced with MatGetSubMatrix().

    NOTE(review): 'growp' (the gathered row permutation) does not appear to
    be destroyed after 'irowp' is built from it -- possible IS leak; verify
    whether ISInvertPermutation takes ownership.
    NOTE(review): ISSetPermutation(lcolp) is called *after*
    ISInvertPermutation(lcolp,...) already used it -- confirm the ordering
    is intentional.
*/
PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B)
{
  MPI_Comm       comm,pcomm;
  PetscInt       first,local_size,nrows;
  const PetscInt *rows;
  int            ntids;
  IS             crowp,growp,irowp,lrowp,lcolp,icolp;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm); CHKERRQ(ierr);
  /* make a collective version of 'rowp' */
  ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm); CHKERRQ(ierr);
  if (pcomm==comm) {
    /* already on the matrix communicator; use it directly */
    crowp = rowp;
  } else {
    ierr = ISGetSize(rowp,&nrows); CHKERRQ(ierr);
    ierr = ISGetIndices(rowp,&rows); CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm,nrows,rows,&crowp); CHKERRQ(ierr);
    ierr = ISRestoreIndices(rowp,&rows); CHKERRQ(ierr);
  }
  /* collect the global row permutation and invert it */
  ierr = ISAllGather(crowp,&growp); CHKERRQ(ierr);
  ierr = ISSetPermutation(growp); CHKERRQ(ierr);
  if (pcomm!=comm) {
    /* only destroy the copy we created above */
    ierr = ISDestroy(crowp); CHKERRQ(ierr);
  }
  ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr);
  /* get the local target indices */
  ierr = MatGetOwnershipRange(A,&first,PETSC_NULL); CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&local_size,PETSC_NULL); CHKERRQ(ierr);
  ierr = ISGetIndices(irowp,&rows); CHKERRQ(ierr);
  ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,&lrowp); CHKERRQ(ierr);
  ierr = ISRestoreIndices(irowp,&rows); CHKERRQ(ierr);
  ierr = ISDestroy(irowp); CHKERRQ(ierr);
  /* the column permutation is so much easier;
     make a local version of 'colp' and invert it */
  ierr = PetscObjectGetComm((PetscObject)colp,&pcomm); CHKERRQ(ierr);
  ierr = MPI_Comm_size(pcomm,&ntids); CHKERRQ(ierr);
  if (ntids==1) {
    /* colp is already sequential; use it in place */
    lcolp = colp;
  } else {
    ierr = ISGetSize(colp,&nrows); CHKERRQ(ierr);
    ierr = ISGetIndices(colp,&rows); CHKERRQ(ierr);
    ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,&lcolp); CHKERRQ(ierr);
  }
  ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp); CHKERRQ(ierr);
  ierr = ISSetPermutation(lcolp); CHKERRQ(ierr);
  if (ntids>1) {
    /* release the sequential copy (and the indices borrowed from colp) */
    ierr = ISRestoreIndices(colp,&rows); CHKERRQ(ierr);
    ierr = ISDestroy(lcolp); CHKERRQ(ierr);
  }
  /* now we just get the submatrix */
  ierr = MatGetSubMatrix(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B); CHKERRQ(ierr);
  /* clean up */
  ierr = ISDestroy(lrowp); CHKERRQ(ierr);
  ierr = ISDestroy(icolp); CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
131642e855d1Svictor 
131742e855d1Svictor #undef __FUNCT__
13184a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ"
1319dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1320a66be287SLois Curfman McInnes {
1321a66be287SLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
1322a66be287SLois Curfman McInnes   Mat            A = mat->A,B = mat->B;
1323dfbe8321SBarry Smith   PetscErrorCode ierr;
1324329f5518SBarry Smith   PetscReal      isend[5],irecv[5];
1325a66be287SLois Curfman McInnes 
13263a40ed3dSBarry Smith   PetscFunctionBegin;
13274e220ebcSLois Curfman McInnes   info->block_size     = 1.0;
13284e220ebcSLois Curfman McInnes   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
13294e220ebcSLois Curfman McInnes   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
13304e220ebcSLois Curfman McInnes   isend[3] = info->memory;  isend[4] = info->mallocs;
13314e220ebcSLois Curfman McInnes   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
13324e220ebcSLois Curfman McInnes   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
13334e220ebcSLois Curfman McInnes   isend[3] += info->memory;  isend[4] += info->mallocs;
1334a66be287SLois Curfman McInnes   if (flag == MAT_LOCAL) {
13354e220ebcSLois Curfman McInnes     info->nz_used      = isend[0];
13364e220ebcSLois Curfman McInnes     info->nz_allocated = isend[1];
13374e220ebcSLois Curfman McInnes     info->nz_unneeded  = isend[2];
13384e220ebcSLois Curfman McInnes     info->memory       = isend[3];
13394e220ebcSLois Curfman McInnes     info->mallocs      = isend[4];
1340a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_MAX) {
13417adad957SLisandro Dalcin     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr);
13424e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
13434e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
13444e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
13454e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
13464e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1347a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_SUM) {
13487adad957SLisandro Dalcin     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr);
13494e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
13504e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
13514e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
13524e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
13534e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1354a66be287SLois Curfman McInnes   }
13554e220ebcSLois Curfman McInnes   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
13564e220ebcSLois Curfman McInnes   info->fill_ratio_needed = 0;
13574e220ebcSLois Curfman McInnes   info->factor_mallocs    = 0;
1358d0f46423SBarry Smith   info->rows_global       = (double)matin->rmap->N;
1359d0f46423SBarry Smith   info->columns_global    = (double)matin->cmap->N;
1360d0f46423SBarry Smith   info->rows_local        = (double)matin->rmap->n;
1361d0f46423SBarry Smith   info->columns_local     = (double)matin->cmap->N;
13624e220ebcSLois Curfman McInnes 
13633a40ed3dSBarry Smith   PetscFunctionReturn(0);
1364a66be287SLois Curfman McInnes }
1365a66be287SLois Curfman McInnes 
13664a2ae208SSatish Balay #undef __FUNCT__
13674a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ"
13684e0d8c25SBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscTruth flg)
1369c74985f6SBarry Smith {
1370c0bbcb79SLois Curfman McInnes   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1371dfbe8321SBarry Smith   PetscErrorCode ierr;
1372c74985f6SBarry Smith 
13733a40ed3dSBarry Smith   PetscFunctionBegin;
137412c028f9SKris Buschelman   switch (op) {
1375512a5fc5SBarry Smith   case MAT_NEW_NONZERO_LOCATIONS:
137612c028f9SKris Buschelman   case MAT_NEW_NONZERO_ALLOCATION_ERR:
137712c028f9SKris Buschelman   case MAT_KEEP_ZEROED_ROWS:
137812c028f9SKris Buschelman   case MAT_NEW_NONZERO_LOCATION_ERR:
137912c028f9SKris Buschelman   case MAT_USE_INODES:
138012c028f9SKris Buschelman   case MAT_IGNORE_ZERO_ENTRIES:
13814e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
13824e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
138312c028f9SKris Buschelman     break;
138412c028f9SKris Buschelman   case MAT_ROW_ORIENTED:
13854e0d8c25SBarry Smith     a->roworiented = flg;
13864e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
13874e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
138812c028f9SKris Buschelman     break;
13894e0d8c25SBarry Smith   case MAT_NEW_DIAGONALS:
1390290bbb0aSBarry Smith     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
139112c028f9SKris Buschelman     break;
139212c028f9SKris Buschelman   case MAT_IGNORE_OFF_PROC_ENTRIES:
13937c922b88SBarry Smith     a->donotstash = PETSC_TRUE;
139412c028f9SKris Buschelman     break;
139577e54ba9SKris Buschelman   case MAT_SYMMETRIC:
13964e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
139725f421beSHong Zhang     break;
139877e54ba9SKris Buschelman   case MAT_STRUCTURALLY_SYMMETRIC:
1399bf108f30SBarry Smith   case MAT_HERMITIAN:
1400bf108f30SBarry Smith   case MAT_SYMMETRY_ETERNAL:
14014e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
140277e54ba9SKris Buschelman     break;
140312c028f9SKris Buschelman   default:
1404ad86a440SBarry Smith     SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op);
14053a40ed3dSBarry Smith   }
14063a40ed3dSBarry Smith   PetscFunctionReturn(0);
1407c74985f6SBarry Smith }
1408c74985f6SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAIJ"
/*
    Returns one locally-owned row of an MPIAIJ matrix as a single sorted
    (column, value) list.

    The row is fetched separately from the diagonal (A) and off-diagonal
    (B) blocks and the two pieces are merged: B entries whose global column
    (via the garray map) lies before the diagonal block come first, then
    all A entries (shifted by cstart to global numbering), then the
    remaining B entries.  The merged data lives in per-matrix scratch
    arrays (rowvalues/rowindices) sized for the longest local row; only one
    row may be active at a time (see getrowactive / MatRestoreRow_MPIAIJ).
*/
PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
  PetscErrorCode ierr;
  PetscInt       i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart;
  PetscInt       nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend;
  PetscInt       *cmap,*idx_p;

  PetscFunctionBegin;
  if (mat->getrowactive) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active");
  mat->getrowactive = PETSC_TRUE;

  /* lazily allocate the merge scratch space on first use */
  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data;
    PetscInt     max = 1,tmp;
    for (i=0; i<matin->rmap->n; i++) {
      tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
      if (max < tmp) { max = tmp; }
    }
    /* single allocation: scalars first, then the index array aliased after them */
    ierr = PetscMalloc(max*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr);
    mat->rowindices = (PetscInt*)(mat->rowvalues + max);
  }

  /* NOTE(review): missing ';' after SETERRQ relies on the macro's own
     statement structure -- confirm against the SETERRQ definition */
  if (row < rstart || row >= rend) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Only local rows")
  lrow = row - rstart;

  /* only request from A/B the pieces the caller actually asked for */
  pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
  if (!v)   {pvA = 0; pvB = 0;}
  if (!idx) {pcA = 0; if (!v) pcB = 0;}
  ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  nztot = nzA + nzB;

  cmap  = mat->garray;  /* compressed-to-global column map for the B block */
  if (v  || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      PetscInt imark = -1;  /* number of B entries left of the diagonal block */
      if (v) {
        *v = v_p = mat->rowvalues;
        /* B entries whose global column precedes the diagonal block */
        for (i=0; i<nzB; i++) {
          if (cmap[cworkB[i]] < cstart)   v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
        for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* imark already computed by the value pass above */
          for (i=0; i<imark; i++) {
            idx_p[i] = cmap[cworkB[i]];
          }
        } else {
          /* values were not requested; find the split point now */
          for (i=0; i<nzB; i++) {
            if (cmap[cworkB[i]] < cstart)   idx_p[i] = cmap[cworkB[i]];
            else break;
          }
          imark = i;
        }
        /* A columns are local; shift to global numbering with cstart */
        for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart + cworkA[i];
        for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]];
      }
    } else {
      /* empty row */
      if (idx) *idx = 0;
      if (v)   *v   = 0;
    }
  }
  *nz = nztot;
  ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
148939e00950SLois Curfman McInnes 
14904a2ae208SSatish Balay #undef __FUNCT__
14914a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ"
1492b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
149339e00950SLois Curfman McInnes {
14947a0afa10SBarry Smith   Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data;
14953a40ed3dSBarry Smith 
14963a40ed3dSBarry Smith   PetscFunctionBegin;
1497abc0a331SBarry Smith   if (!aij->getrowactive) {
1498abc0a331SBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
14997a0afa10SBarry Smith   }
15007a0afa10SBarry Smith   aij->getrowactive = PETSC_FALSE;
15013a40ed3dSBarry Smith   PetscFunctionReturn(0);
150239e00950SLois Curfman McInnes }
150339e00950SLois Curfman McInnes 
#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIAIJ"
/*
   Computes a norm of a parallel AIJ matrix.  Supports NORM_FROBENIUS,
   NORM_1 (largest column sum of absolute values) and NORM_INFINITY
   (largest row sum); the two-norm is not implemented.  Local contributions
   from the diagonal block aij->A and the off-diagonal block aij->B are
   combined across processes with MPI reductions.
*/
PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,cstart = mat->cmap->rstart;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (aij->size == 1) {
    /* uniprocessor matrix: defer entirely to the sequential routine */
    ierr =  MatNorm(aij->A,type,norm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      /* sum of squared moduli of all locally stored entries: A part then B part */
      v = amat->a;
      for (i=0; i<amat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      v = bmat->a;
      for (i=0; i<bmat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      /* global sum of squares, then one square root */
      ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      *norm = sqrt(*norm);
    } else if (type == NORM_1) { /* max column norm */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray = aij->garray;
      /* tmp holds per-global-column partial sums; tmp2 receives the reduced sums */
      ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr);
      ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      *norm = 0.0;
      /* A stores local column indices; offset by cstart to get global columns */
      v = amat->a; jj = amat->j;
      for (j=0; j<amat->nz; j++) {
        tmp[cstart + *jj++ ] += PetscAbsScalar(*v);  v++;
      }
      /* B stores compressed column indices; garray maps them back to global */
      v = bmat->a; jj = bmat->j;
      for (j=0; j<bmat->nz; j++) {
        tmp[garray[*jj++]] += PetscAbsScalar(*v); v++;
      }
      ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *norm) *norm = tmp2[j];
      }
      ierr = PetscFree(tmp);CHKERRQ(ierr);
      ierr = PetscFree(tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row norm */
      PetscReal ntemp = 0.0;
      /* each local row's absolute sum draws from both the A and B blocks */
      for (j=0; j<aij->A->rmap->n; j++) {
        v = amat->a + amat->i[j];
        sum = 0.0;
        for (i=0; i<amat->i[j+1]-amat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        v = bmat->a + bmat->i[j];
        for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        if (sum > ntemp) ntemp = sum;
      }
      /* the infinity norm is the maximum row sum over all processes */
      ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
    } else {
      SETERRQ(PETSC_ERR_SUP,"No support for two norm");
    }
  }
  PetscFunctionReturn(0);
}
1580855ac2c5SLois Curfman McInnes 
#undef __FUNCT__
#define __FUNCT__ "MatTranspose_MPIAIJ"
/*
   Forms the transpose of a parallel AIJ matrix.  Each local row of A is
   inserted as a column of the result B via MatSetValues(); assembly then
   moves the entries to the owning processes.  To build global column
   indices cheaply, Aloc->j is temporarily shifted by cstart in place and
   restored afterwards.  In-place transposition (A == *matout) requires a
   square matrix.
*/
PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz;
  PetscInt       cstart=A->cmap->rstart,ncol;
  Mat            B;
  MatScalar      *array;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");

  ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n;
  ai = Aloc->i; aj = Aloc->j;
  bi = Bloc->i; bj = Bloc->j;
  if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
    /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
    ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
    ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0; i<ai[ma]; i++){
      d_nnz[aj[i]] ++;
      aj[i] += cstart; /* global col index to be used by MatSetValues() */
    }

    ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
    ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
    ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);
    ierr = PetscFree(d_nnz);CHKERRQ(ierr);
  } else {
    /* NOTE(review): on this path (reuse with *matout != A) the aj shift above
       is skipped, yet the copy loop below still treats aj as global indices
       and the restore loop subtracts cstart -- looks suspect for cstart > 0;
       confirm against later PETSc revisions */
    B = *matout;
  }

  /* copy over the A part */
  array = Aloc->a;
  row = A->rmap->rstart;
  for (i=0; i<ma; i++) {
    ncol = ai[i+1]-ai[i];
    /* insert local row `row` of A as one column of B */
    ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; aj += ncol;
  }
  aj = Aloc->j;
  for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */

  /* copy over the B part */
  ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr);
  ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr);
  array = Bloc->a;
  row = A->rmap->rstart;
  /* translate B's compressed column indices to global through garray */
  for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
  cols_tmp = cols;
  for (i=0; i<mb; i++) {
    ncol = bi[i+1]-bi[i];
    ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; cols_tmp += ncol;
  }
  ierr = PetscFree(cols);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
    *matout = B;
  } else {
    /* in-place: replace A's guts with B's while preserving A's header */
    ierr = MatHeaderCopy(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1651b7c46309SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDiagonalScale_MPIAIJ"
/*
   Computes mat = diag(ll) * mat * diag(rr).  ll conforms to the rows of
   mat, rr to its columns.  The scatter of rr into the ghosted vector
   aij->lvec is started first and completed last so communication overlaps
   the left scaling and the diagonal-block scaling; the statement order
   here is therefore significant.
*/
PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat            a = aij->A,b = aij->B;
  PetscErrorCode ierr;
  PetscInt       s1,s2,s3;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
  if (rr) {
    ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
    if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
    /* Overlap communication with computation. */
    ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  if (ll) {
    ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
    if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
    /* left-scale the off-diagonal block while the scatter is in flight */
    ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr);
  }
  /* scale  the diagonal block */
  ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);

  if (rr) {
    /* Do a scatter end and then right scale the off-diagonal block */
    ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
1685a008b906SSatish Balay 
16864a2ae208SSatish Balay #undef __FUNCT__
1687521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ"
1688521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs)
16895a838052SSatish Balay {
1690521d7252SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
1691521d7252SBarry Smith   PetscErrorCode ierr;
1692521d7252SBarry Smith 
16933a40ed3dSBarry Smith   PetscFunctionBegin;
1694521d7252SBarry Smith   ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr);
1695521d7252SBarry Smith   ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr);
16963a40ed3dSBarry Smith   PetscFunctionReturn(0);
16975a838052SSatish Balay }
#undef __FUNCT__
#define __FUNCT__ "MatSetUnfactored_MPIAIJ"
/*
   Clears the factored state of the parallel matrix by resetting its
   diagonal block.  Only a->A is touched; presumably the off-diagonal
   block a->B is never marked factored -- NOTE(review): confirm.
*/
PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1709bb5a7306SBarry Smith 
17104a2ae208SSatish Balay #undef __FUNCT__
17114a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ"
1712dfbe8321SBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscTruth *flag)
1713d4bb536fSBarry Smith {
1714d4bb536fSBarry Smith   Mat_MPIAIJ     *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data;
1715d4bb536fSBarry Smith   Mat            a,b,c,d;
1716d4bb536fSBarry Smith   PetscTruth     flg;
1717dfbe8321SBarry Smith   PetscErrorCode ierr;
1718d4bb536fSBarry Smith 
17193a40ed3dSBarry Smith   PetscFunctionBegin;
1720d4bb536fSBarry Smith   a = matA->A; b = matA->B;
1721d4bb536fSBarry Smith   c = matB->A; d = matB->B;
1722d4bb536fSBarry Smith 
1723d4bb536fSBarry Smith   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1724abc0a331SBarry Smith   if (flg) {
1725d4bb536fSBarry Smith     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1726d4bb536fSBarry Smith   }
17277adad957SLisandro Dalcin   ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
17283a40ed3dSBarry Smith   PetscFunctionReturn(0);
1729d4bb536fSBarry Smith }
1730d4bb536fSBarry Smith 
17314a2ae208SSatish Balay #undef __FUNCT__
17324a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ"
1733dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str)
1734cb5b572fSBarry Smith {
1735dfbe8321SBarry Smith   PetscErrorCode ierr;
1736cb5b572fSBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ *)A->data;
1737cb5b572fSBarry Smith   Mat_MPIAIJ     *b = (Mat_MPIAIJ *)B->data;
1738cb5b572fSBarry Smith 
1739cb5b572fSBarry Smith   PetscFunctionBegin;
174033f4a19fSKris Buschelman   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
174133f4a19fSKris Buschelman   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1742cb5b572fSBarry Smith     /* because of the column compression in the off-processor part of the matrix a->B,
1743cb5b572fSBarry Smith        the number of columns in a->B and b->B may be different, hence we cannot call
1744cb5b572fSBarry Smith        the MatCopy() directly on the two parts. If need be, we can provide a more
1745cb5b572fSBarry Smith        efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices
1746cb5b572fSBarry Smith        then copying the submatrices */
1747cb5b572fSBarry Smith     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1748cb5b572fSBarry Smith   } else {
1749cb5b572fSBarry Smith     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1750cb5b572fSBarry Smith     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1751cb5b572fSBarry Smith   }
1752cb5b572fSBarry Smith   PetscFunctionReturn(0);
1753cb5b572fSBarry Smith }
1754cb5b572fSBarry Smith 
17554a2ae208SSatish Balay #undef __FUNCT__
17564a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ"
1757dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A)
1758273d9f13SBarry Smith {
1759dfbe8321SBarry Smith   PetscErrorCode ierr;
1760273d9f13SBarry Smith 
1761273d9f13SBarry Smith   PetscFunctionBegin;
1762273d9f13SBarry Smith   ierr =  MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1763273d9f13SBarry Smith   PetscFunctionReturn(0);
1764273d9f13SBarry Smith }
1765273d9f13SBarry Smith 
1766ac90fabeSBarry Smith #include "petscblaslapack.h"
#undef __FUNCT__
#define __FUNCT__ "MatAXPY_MPIAIJ"
/*
   Computes Y = a*X + Y.  With SAME_NONZERO_PATTERN the nonzero arrays of
   X and Y line up one-to-one, so a BLAS axpy on the raw value arrays of
   both sequential blocks suffices.  With SUBSET_NONZERO_PATTERN a cached
   index map (y->xtoy) from X's nonzeros into Y's is built (or reused) for
   the off-diagonal block.  Anything else falls back to MatAXPY_Basic().
*/
PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data;
  PetscBLASInt   bnz,one=1;
  Mat_SeqAIJ     *x,*y;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    PetscScalar alpha = a;
    /* diagonal blocks: y->a += alpha * x->a over all nz entries */
    x = (Mat_SeqAIJ *)xx->A->data;
    y = (Mat_SeqAIJ *)yy->A->data;
    bnz = PetscBLASIntCast(x->nz);
    BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
    /* off-diagonal blocks, same treatment */
    x = (Mat_SeqAIJ *)xx->B->data;
    y = (Mat_SeqAIJ *)yy->B->data;
    bnz = PetscBLASIntCast(x->nz);
    BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
  } else if (str == SUBSET_NONZERO_PATTERN) {
    /* diagonal block handled by the sequential subset-pattern routine */
    ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr);

    x = (Mat_SeqAIJ *)xx->B->data;
    y = (Mat_SeqAIJ *)yy->B->data;
    /* discard a cached x-to-y map that was built against a different X */
    if (y->xtoy && y->XtoY != xx->B) {
      ierr = PetscFree(y->xtoy);CHKERRQ(ierr);
      ierr = MatDestroy(y->XtoY);CHKERRQ(ierr);
    }
    if (!y->xtoy) { /* get xtoy */
      ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr);
      y->XtoY = xx->B;
      /* keep xx->B alive while the cached map refers to it */
      ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr);
    }
    /* scatter-add X's off-diagonal entries into Y via the index map */
    for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]);
  } else {
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1808ac90fabeSBarry Smith 
1809354c94deSBarry Smith EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat);
1810354c94deSBarry Smith 
1811354c94deSBarry Smith #undef __FUNCT__
1812354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ"
1813354c94deSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat)
1814354c94deSBarry Smith {
1815354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX)
1816354c94deSBarry Smith   PetscErrorCode ierr;
1817354c94deSBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
1818354c94deSBarry Smith 
1819354c94deSBarry Smith   PetscFunctionBegin;
1820354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr);
1821354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr);
1822354c94deSBarry Smith #else
1823354c94deSBarry Smith   PetscFunctionBegin;
1824354c94deSBarry Smith #endif
1825354c94deSBarry Smith   PetscFunctionReturn(0);
1826354c94deSBarry Smith }
1827354c94deSBarry Smith 
182899cafbc1SBarry Smith #undef __FUNCT__
182999cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ"
183099cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A)
183199cafbc1SBarry Smith {
183299cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
183399cafbc1SBarry Smith   PetscErrorCode ierr;
183499cafbc1SBarry Smith 
183599cafbc1SBarry Smith   PetscFunctionBegin;
183699cafbc1SBarry Smith   ierr = MatRealPart(a->A);CHKERRQ(ierr);
183799cafbc1SBarry Smith   ierr = MatRealPart(a->B);CHKERRQ(ierr);
183899cafbc1SBarry Smith   PetscFunctionReturn(0);
183999cafbc1SBarry Smith }
184099cafbc1SBarry Smith 
184199cafbc1SBarry Smith #undef __FUNCT__
184299cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ"
184399cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A)
184499cafbc1SBarry Smith {
184599cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
184699cafbc1SBarry Smith   PetscErrorCode ierr;
184799cafbc1SBarry Smith 
184899cafbc1SBarry Smith   PetscFunctionBegin;
184999cafbc1SBarry Smith   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
185099cafbc1SBarry Smith   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
185199cafbc1SBarry Smith   PetscFunctionReturn(0);
185299cafbc1SBarry Smith }
185399cafbc1SBarry Smith 
1854103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
1855103bf8bdSMatthew Knepley 
1856103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp>
1857a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp>
1858a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp>
1859a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp>
1860103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp>
1861a2c909beSMatthew Knepley #include <boost/multi_array.hpp>
#include <boost/parallel/distributed_property_map.hpp>
1863103bf8bdSMatthew Knepley 
1864103bf8bdSMatthew Knepley #undef __FUNCT__
1865103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ"
1866103bf8bdSMatthew Knepley /*
1867103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
1868103bf8bdSMatthew Knepley */
1869719d5645SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, MatFactorInfo *info)
1870103bf8bdSMatthew Knepley {
1871a2c909beSMatthew Knepley   namespace petsc = boost::distributed::petsc;
1872a2c909beSMatthew Knepley 
1873a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
1874a2c909beSMatthew Knepley   using boost::graph::distributed::ilu_default::process_group_type;
1875a2c909beSMatthew Knepley   using boost::graph::ilu_permuted;
1876a2c909beSMatthew Knepley 
1877103bf8bdSMatthew Knepley   PetscTruth      row_identity, col_identity;
1878776b82aeSLisandro Dalcin   PetscContainer  c;
1879103bf8bdSMatthew Knepley   PetscInt        m, n, M, N;
1880103bf8bdSMatthew Knepley   PetscErrorCode  ierr;
1881103bf8bdSMatthew Knepley 
1882103bf8bdSMatthew Knepley   PetscFunctionBegin;
1883103bf8bdSMatthew Knepley   if (info->levels != 0) SETERRQ(PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu");
1884103bf8bdSMatthew Knepley   ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr);
1885103bf8bdSMatthew Knepley   ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr);
1886103bf8bdSMatthew Knepley   if (!row_identity || !col_identity) {
1887103bf8bdSMatthew Knepley     SETERRQ(PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU");
1888103bf8bdSMatthew Knepley   }
1889103bf8bdSMatthew Knepley 
1890103bf8bdSMatthew Knepley   process_group_type pg;
1891a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
1892a2c909beSMatthew Knepley   lgraph_type*   lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg));
1893a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
1894a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
1895a2c909beSMatthew Knepley 
1896103bf8bdSMatthew Knepley   petsc::read_matrix(A, graph, get(boost::edge_weight, graph));
1897a2c909beSMatthew Knepley   ilu_permuted(level_graph);
1898103bf8bdSMatthew Knepley 
1899103bf8bdSMatthew Knepley   /* put together the new matrix */
19007adad957SLisandro Dalcin   ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr);
1901103bf8bdSMatthew Knepley   ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr);
1902103bf8bdSMatthew Knepley   ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr);
1903719d5645SBarry Smith   ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr);
1904719d5645SBarry Smith   ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr);
1905719d5645SBarry Smith   ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1906719d5645SBarry Smith   ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1907103bf8bdSMatthew Knepley 
19087adad957SLisandro Dalcin   ierr = PetscContainerCreate(((PetscObject)A)->comm, &c);
1909776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(c, lgraph_p);
1910719d5645SBarry Smith   ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c);
1911103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1912103bf8bdSMatthew Knepley }
1913103bf8bdSMatthew Knepley 
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_MPIAIJ"
/*
  Intentionally a no-op: in the PBGL path the numeric work appears to be
  done by ilu_permuted() inside MatILUFactorSymbolic_MPIAIJ(), leaving
  nothing to compute here.  NOTE(review): assumption inferred from the
  symbolic routine above -- confirm.
*/
PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1921103bf8bdSMatthew Knepley 
1922103bf8bdSMatthew Knepley #undef __FUNCT__
1923103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ"
1924103bf8bdSMatthew Knepley /*
1925103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
1926103bf8bdSMatthew Knepley */
1927103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x)
1928103bf8bdSMatthew Knepley {
1929a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
1930a2c909beSMatthew Knepley 
1931a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
1932a2c909beSMatthew Knepley   lgraph_type*   lgraph_p;
1933776b82aeSLisandro Dalcin   PetscContainer c;
1934103bf8bdSMatthew Knepley   PetscErrorCode ierr;
1935103bf8bdSMatthew Knepley 
1936103bf8bdSMatthew Knepley   PetscFunctionBegin;
1937103bf8bdSMatthew Knepley   ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr);
1938776b82aeSLisandro Dalcin   ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr);
1939103bf8bdSMatthew Knepley   ierr = VecCopy(b, x); CHKERRQ(ierr);
1940a2c909beSMatthew Knepley 
1941a2c909beSMatthew Knepley   PetscScalar* array_x;
1942a2c909beSMatthew Knepley   ierr = VecGetArray(x, &array_x);CHKERRQ(ierr);
1943a2c909beSMatthew Knepley   PetscInt sx;
1944a2c909beSMatthew Knepley   ierr = VecGetSize(x, &sx);CHKERRQ(ierr);
1945a2c909beSMatthew Knepley 
1946a2c909beSMatthew Knepley   PetscScalar* array_b;
1947a2c909beSMatthew Knepley   ierr = VecGetArray(b, &array_b);CHKERRQ(ierr);
1948a2c909beSMatthew Knepley   PetscInt sb;
1949a2c909beSMatthew Knepley   ierr = VecGetSize(b, &sb);CHKERRQ(ierr);
1950a2c909beSMatthew Knepley 
1951a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
1952a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
1953a2c909beSMatthew Knepley 
1954a2c909beSMatthew Knepley   typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type;
1955a2c909beSMatthew Knepley   array_ref_type                                 ref_b(array_b, boost::extents[num_vertices(graph)]),
1956a2c909beSMatthew Knepley                                                  ref_x(array_x, boost::extents[num_vertices(graph)]);
1957a2c909beSMatthew Knepley 
1958a2c909beSMatthew Knepley   typedef boost::iterator_property_map<array_ref_type::iterator,
1959a2c909beSMatthew Knepley                                 boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type>  gvector_type;
1960a2c909beSMatthew Knepley   gvector_type                                   vector_b(ref_b.begin(), get(boost::vertex_index, graph)),
1961a2c909beSMatthew Knepley                                                  vector_x(ref_x.begin(), get(boost::vertex_index, graph));
1962a2c909beSMatthew Knepley 
1963a2c909beSMatthew Knepley   ilu_set_solve(*lgraph_p, vector_b, vector_x);
1964a2c909beSMatthew Knepley 
1965103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1966103bf8bdSMatthew Knepley }
1967103bf8bdSMatthew Knepley #endif
1968103bf8bdSMatthew Knepley 
typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */
  PetscInt       nzlocal,nsends,nrecvs;     /* local nonzero count; number of sends/receives (per the destroy loop below) */
  PetscMPIInt    *send_rank;                /* presumably the ranks this process sends to -- confirm against MatGetRedundantMatrix() */
  PetscInt       *sbuf_nz,*sbuf_j,**rbuf_j; /* send sizes and column-index send/receive buffers; rbuf_j has nrecvs entries */
  PetscScalar    *sbuf_a,**rbuf_a;          /* value send buffer and per-receive value buffers */
  PetscErrorCode (*MatDestroy)(Mat);        /* the matrix's original destroy routine, reinstalled by MatDestroy_MatRedundant() */
} Mat_Redundant;
197669db28dcSHong Zhang 
197769db28dcSHong Zhang #undef __FUNCT__
197869db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant"
197969db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr)
198069db28dcSHong Zhang {
198169db28dcSHong Zhang   PetscErrorCode       ierr;
198269db28dcSHong Zhang   Mat_Redundant        *redund=(Mat_Redundant*)ptr;
198369db28dcSHong Zhang   PetscInt             i;
198469db28dcSHong Zhang 
198569db28dcSHong Zhang   PetscFunctionBegin;
198669db28dcSHong Zhang   ierr = PetscFree(redund->send_rank);CHKERRQ(ierr);
198769db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr);
198869db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr);
198969db28dcSHong Zhang   for (i=0; i<redund->nrecvs; i++){
199069db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr);
199169db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr);
199269db28dcSHong Zhang   }
199369db28dcSHong Zhang   ierr = PetscFree3(redund->sbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr);
199469db28dcSHong Zhang   ierr = PetscFree(redund);CHKERRQ(ierr);
199569db28dcSHong Zhang   PetscFunctionReturn(0);
199669db28dcSHong Zhang }
199769db28dcSHong Zhang 
199869db28dcSHong Zhang #undef __FUNCT__
199969db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant"
200069db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A)
200169db28dcSHong Zhang {
200269db28dcSHong Zhang   PetscErrorCode  ierr;
200369db28dcSHong Zhang   PetscContainer  container;
200469db28dcSHong Zhang   Mat_Redundant   *redund=PETSC_NULL;
200569db28dcSHong Zhang 
200669db28dcSHong Zhang   PetscFunctionBegin;
200769db28dcSHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
200869db28dcSHong Zhang   if (container) {
200969db28dcSHong Zhang     ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
201069db28dcSHong Zhang   } else {
201169db28dcSHong Zhang     SETERRQ(PETSC_ERR_PLIB,"Container does not exit");
201269db28dcSHong Zhang   }
201369db28dcSHong Zhang   A->ops->destroy = redund->MatDestroy;
201469db28dcSHong Zhang   ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr);
201569db28dcSHong Zhang   ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
201669db28dcSHong Zhang   ierr = PetscContainerDestroy(container);CHKERRQ(ierr);
201769db28dcSHong Zhang   PetscFunctionReturn(0);
201869db28dcSHong Zhang }
201969db28dcSHong Zhang 
202069db28dcSHong Zhang #undef __FUNCT__
202169db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ"
202269db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant)
202369db28dcSHong Zhang {
202469db28dcSHong Zhang   PetscMPIInt    rank,size;
20257adad957SLisandro Dalcin   MPI_Comm       comm=((PetscObject)mat)->comm;
202669db28dcSHong Zhang   PetscErrorCode ierr;
202769db28dcSHong Zhang   PetscInt       nsends=0,nrecvs=0,i,rownz_max=0;
202869db28dcSHong Zhang   PetscMPIInt    *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL;
2029d0f46423SBarry Smith   PetscInt       *rowrange=mat->rmap->range;
203069db28dcSHong Zhang   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
203169db28dcSHong Zhang   Mat            A=aij->A,B=aij->B,C=*matredundant;
203269db28dcSHong Zhang   Mat_SeqAIJ     *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data;
203369db28dcSHong Zhang   PetscScalar    *sbuf_a;
203469db28dcSHong Zhang   PetscInt       nzlocal=a->nz+b->nz;
2035d0f46423SBarry Smith   PetscInt       j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB;
2036d0f46423SBarry Smith   PetscInt       rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N;
203769db28dcSHong Zhang   PetscInt       *cols,ctmp,lwrite,*rptr,l,*sbuf_j;
2038a77337e4SBarry Smith   MatScalar      *aworkA,*aworkB;
2039a77337e4SBarry Smith   PetscScalar    *vals;
204069db28dcSHong Zhang   PetscMPIInt    tag1,tag2,tag3,imdex;
204169db28dcSHong Zhang   MPI_Request    *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL,
204269db28dcSHong Zhang                  *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL;
204369db28dcSHong Zhang   MPI_Status     recv_status,*send_status;
204469db28dcSHong Zhang   PetscInt       *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count;
204569db28dcSHong Zhang   PetscInt       **rbuf_j=PETSC_NULL;
204669db28dcSHong Zhang   PetscScalar    **rbuf_a=PETSC_NULL;
204769db28dcSHong Zhang   Mat_Redundant  *redund=PETSC_NULL;
204869db28dcSHong Zhang   PetscContainer container;
204969db28dcSHong Zhang 
205069db28dcSHong Zhang   PetscFunctionBegin;
205169db28dcSHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
205269db28dcSHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
205369db28dcSHong Zhang 
205469db28dcSHong Zhang   if (reuse == MAT_REUSE_MATRIX) {
205569db28dcSHong Zhang     ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
2056d0f46423SBarry Smith     if (M != N || M != mat->rmap->N) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size");
205769db28dcSHong Zhang     ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr);
205869db28dcSHong Zhang     if (M != N || M != mlocal_sub) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size");
205969db28dcSHong Zhang     ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
206069db28dcSHong Zhang     if (container) {
206169db28dcSHong Zhang       ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
206269db28dcSHong Zhang     } else {
206369db28dcSHong Zhang       SETERRQ(PETSC_ERR_PLIB,"Container does not exit");
206469db28dcSHong Zhang     }
206569db28dcSHong Zhang     if (nzlocal != redund->nzlocal) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong nzlocal");
206669db28dcSHong Zhang 
206769db28dcSHong Zhang     nsends    = redund->nsends;
206869db28dcSHong Zhang     nrecvs    = redund->nrecvs;
206969db28dcSHong Zhang     send_rank = redund->send_rank; recv_rank = send_rank + size;
207069db28dcSHong Zhang     sbuf_nz   = redund->sbuf_nz;     rbuf_nz = sbuf_nz + nsends;
207169db28dcSHong Zhang     sbuf_j    = redund->sbuf_j;
207269db28dcSHong Zhang     sbuf_a    = redund->sbuf_a;
207369db28dcSHong Zhang     rbuf_j    = redund->rbuf_j;
207469db28dcSHong Zhang     rbuf_a    = redund->rbuf_a;
207569db28dcSHong Zhang   }
207669db28dcSHong Zhang 
207769db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
207869db28dcSHong Zhang     PetscMPIInt  subrank,subsize;
207969db28dcSHong Zhang     PetscInt     nleftover,np_subcomm;
208069db28dcSHong Zhang     /* get the destination processors' id send_rank, nsends and nrecvs */
208169db28dcSHong Zhang     ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
208269db28dcSHong Zhang     ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
208369db28dcSHong Zhang     ierr = PetscMalloc((2*size+1)*sizeof(PetscMPIInt),&send_rank);
208469db28dcSHong Zhang     recv_rank = send_rank + size;
208569db28dcSHong Zhang     np_subcomm = size/nsubcomm;
208669db28dcSHong Zhang     nleftover  = size - nsubcomm*np_subcomm;
208769db28dcSHong Zhang     nsends = 0; nrecvs = 0;
208869db28dcSHong Zhang     for (i=0; i<size; i++){ /* i=rank*/
208969db28dcSHong Zhang       if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */
209069db28dcSHong Zhang         send_rank[nsends] = i; nsends++;
209169db28dcSHong Zhang         recv_rank[nrecvs++] = i;
209269db28dcSHong Zhang       }
209369db28dcSHong Zhang     }
209469db28dcSHong Zhang     if (rank >= size - nleftover){/* this proc is a leftover processor */
209569db28dcSHong Zhang       i = size-nleftover-1;
209669db28dcSHong Zhang       j = 0;
209769db28dcSHong Zhang       while (j < nsubcomm - nleftover){
209869db28dcSHong Zhang         send_rank[nsends++] = i;
209969db28dcSHong Zhang         i--; j++;
210069db28dcSHong Zhang       }
210169db28dcSHong Zhang     }
210269db28dcSHong Zhang 
210369db28dcSHong Zhang     if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */
210469db28dcSHong Zhang       for (i=0; i<nleftover; i++){
210569db28dcSHong Zhang         recv_rank[nrecvs++] = size-nleftover+i;
210669db28dcSHong Zhang       }
210769db28dcSHong Zhang     }
210869db28dcSHong Zhang 
210969db28dcSHong Zhang     /* allocate sbuf_j, sbuf_a */
211069db28dcSHong Zhang     i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2;
211169db28dcSHong Zhang     ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr);
211269db28dcSHong Zhang     ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr);
211369db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
211469db28dcSHong Zhang 
211569db28dcSHong Zhang   /* copy mat's local entries into the buffers */
211669db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
211769db28dcSHong Zhang     rownz_max = 0;
211869db28dcSHong Zhang     rptr = sbuf_j;
211969db28dcSHong Zhang     cols = sbuf_j + rend-rstart + 1;
212069db28dcSHong Zhang     vals = sbuf_a;
212169db28dcSHong Zhang     rptr[0] = 0;
212269db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
212369db28dcSHong Zhang       row = i + rstart;
212469db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
212569db28dcSHong Zhang       ncols  = nzA + nzB;
212669db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
212769db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
212869db28dcSHong Zhang       /* load the column indices for this row into cols */
212969db28dcSHong Zhang       lwrite = 0;
213069db28dcSHong Zhang       for (l=0; l<nzB; l++) {
213169db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart){
213269db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
213369db28dcSHong Zhang           cols[lwrite++] = ctmp;
213469db28dcSHong Zhang         }
213569db28dcSHong Zhang       }
213669db28dcSHong Zhang       for (l=0; l<nzA; l++){
213769db28dcSHong Zhang         vals[lwrite]   = aworkA[l];
213869db28dcSHong Zhang         cols[lwrite++] = cstart + cworkA[l];
213969db28dcSHong Zhang       }
214069db28dcSHong Zhang       for (l=0; l<nzB; l++) {
214169db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend){
214269db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
214369db28dcSHong Zhang           cols[lwrite++] = ctmp;
214469db28dcSHong Zhang         }
214569db28dcSHong Zhang       }
214669db28dcSHong Zhang       vals += ncols;
214769db28dcSHong Zhang       cols += ncols;
214869db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
214969db28dcSHong Zhang       if (rownz_max < ncols) rownz_max = ncols;
215069db28dcSHong Zhang     }
215169db28dcSHong Zhang     if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(1, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz);
215269db28dcSHong Zhang   } else { /* only copy matrix values into sbuf_a */
215369db28dcSHong Zhang     rptr = sbuf_j;
215469db28dcSHong Zhang     vals = sbuf_a;
215569db28dcSHong Zhang     rptr[0] = 0;
215669db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
215769db28dcSHong Zhang       row = i + rstart;
215869db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
215969db28dcSHong Zhang       ncols  = nzA + nzB;
216069db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
216169db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
216269db28dcSHong Zhang       lwrite = 0;
216369db28dcSHong Zhang       for (l=0; l<nzB; l++) {
216469db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l];
216569db28dcSHong Zhang       }
216669db28dcSHong Zhang       for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l];
216769db28dcSHong Zhang       for (l=0; l<nzB; l++) {
216869db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l];
216969db28dcSHong Zhang       }
217069db28dcSHong Zhang       vals += ncols;
217169db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
217269db28dcSHong Zhang     }
217369db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
217469db28dcSHong Zhang 
217569db28dcSHong Zhang   /* send nzlocal to others, and recv other's nzlocal */
217669db28dcSHong Zhang   /*--------------------------------------------------*/
217769db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
217869db28dcSHong Zhang     ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
217969db28dcSHong Zhang     s_waits2 = s_waits3 + nsends;
218069db28dcSHong Zhang     s_waits1 = s_waits2 + nsends;
218169db28dcSHong Zhang     r_waits1 = s_waits1 + nsends;
218269db28dcSHong Zhang     r_waits2 = r_waits1 + nrecvs;
218369db28dcSHong Zhang     r_waits3 = r_waits2 + nrecvs;
218469db28dcSHong Zhang   } else {
218569db28dcSHong Zhang     ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
218669db28dcSHong Zhang     r_waits3 = s_waits3 + nsends;
218769db28dcSHong Zhang   }
218869db28dcSHong Zhang 
218969db28dcSHong Zhang   ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr);
219069db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
219169db28dcSHong Zhang     /* get new tags to keep the communication clean */
219269db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr);
219369db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr);
219469db28dcSHong Zhang     ierr = PetscMalloc3(nsends+nrecvs+1,PetscInt,&sbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr);
219569db28dcSHong Zhang     rbuf_nz = sbuf_nz + nsends;
219669db28dcSHong Zhang 
219769db28dcSHong Zhang     /* post receives of other's nzlocal */
219869db28dcSHong Zhang     for (i=0; i<nrecvs; i++){
219969db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr);
220069db28dcSHong Zhang     }
220169db28dcSHong Zhang     /* send nzlocal to others */
220269db28dcSHong Zhang     for (i=0; i<nsends; i++){
220369db28dcSHong Zhang       sbuf_nz[i] = nzlocal;
220469db28dcSHong Zhang       ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr);
220569db28dcSHong Zhang     }
220669db28dcSHong Zhang     /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */
220769db28dcSHong Zhang     count = nrecvs;
220869db28dcSHong Zhang     while (count) {
220969db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr);
221069db28dcSHong Zhang       recv_rank[imdex] = recv_status.MPI_SOURCE;
221169db28dcSHong Zhang       /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */
221269db28dcSHong Zhang       ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr);
221369db28dcSHong Zhang 
221469db28dcSHong Zhang       i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */
221569db28dcSHong Zhang       rbuf_nz[imdex] += i + 2;
221669db28dcSHong Zhang       ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr);
221769db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr);
221869db28dcSHong Zhang       count--;
221969db28dcSHong Zhang     }
222069db28dcSHong Zhang     /* wait on sends of nzlocal */
222169db28dcSHong Zhang     if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);}
222269db28dcSHong Zhang     /* send mat->i,j to others, and recv from other's */
222369db28dcSHong Zhang     /*------------------------------------------------*/
222469db28dcSHong Zhang     for (i=0; i<nsends; i++){
222569db28dcSHong Zhang       j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1;
222669db28dcSHong Zhang       ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr);
222769db28dcSHong Zhang     }
222869db28dcSHong Zhang     /* wait on receives of mat->i,j */
222969db28dcSHong Zhang     /*------------------------------*/
223069db28dcSHong Zhang     count = nrecvs;
223169db28dcSHong Zhang     while (count) {
223269db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr);
223369db28dcSHong Zhang       if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
223469db28dcSHong Zhang       count--;
223569db28dcSHong Zhang     }
223669db28dcSHong Zhang     /* wait on sends of mat->i,j */
223769db28dcSHong Zhang     /*---------------------------*/
223869db28dcSHong Zhang     if (nsends) {
223969db28dcSHong Zhang       ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr);
224069db28dcSHong Zhang     }
224169db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
224269db28dcSHong Zhang 
224369db28dcSHong Zhang   /* post receives, send and receive mat->a */
224469db28dcSHong Zhang   /*----------------------------------------*/
224569db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++) {
224669db28dcSHong Zhang     ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr);
224769db28dcSHong Zhang   }
224869db28dcSHong Zhang   for (i=0; i<nsends; i++){
224969db28dcSHong Zhang     ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr);
225069db28dcSHong Zhang   }
225169db28dcSHong Zhang   count = nrecvs;
225269db28dcSHong Zhang   while (count) {
225369db28dcSHong Zhang     ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr);
225469db28dcSHong Zhang     if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
225569db28dcSHong Zhang     count--;
225669db28dcSHong Zhang   }
225769db28dcSHong Zhang   if (nsends) {
225869db28dcSHong Zhang     ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr);
225969db28dcSHong Zhang   }
226069db28dcSHong Zhang 
226169db28dcSHong Zhang   ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr);
226269db28dcSHong Zhang 
226369db28dcSHong Zhang   /* create redundant matrix */
226469db28dcSHong Zhang   /*-------------------------*/
226569db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
226669db28dcSHong Zhang     /* compute rownz_max for preallocation */
226769db28dcSHong Zhang     for (imdex=0; imdex<nrecvs; imdex++){
226869db28dcSHong Zhang       j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]];
226969db28dcSHong Zhang       rptr = rbuf_j[imdex];
227069db28dcSHong Zhang       for (i=0; i<j; i++){
227169db28dcSHong Zhang         ncols = rptr[i+1] - rptr[i];
227269db28dcSHong Zhang         if (rownz_max < ncols) rownz_max = ncols;
227369db28dcSHong Zhang       }
227469db28dcSHong Zhang     }
227569db28dcSHong Zhang 
227669db28dcSHong Zhang     ierr = MatCreate(subcomm,&C);CHKERRQ(ierr);
227769db28dcSHong Zhang     ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
227869db28dcSHong Zhang     ierr = MatSetFromOptions(C);CHKERRQ(ierr);
227969db28dcSHong Zhang     ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr);
228069db28dcSHong Zhang     ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr);
228169db28dcSHong Zhang   } else {
228269db28dcSHong Zhang     C = *matredundant;
228369db28dcSHong Zhang   }
228469db28dcSHong Zhang 
228569db28dcSHong Zhang   /* insert local matrix entries */
228669db28dcSHong Zhang   rptr = sbuf_j;
228769db28dcSHong Zhang   cols = sbuf_j + rend-rstart + 1;
228869db28dcSHong Zhang   vals = sbuf_a;
228969db28dcSHong Zhang   for (i=0; i<rend-rstart; i++){
229069db28dcSHong Zhang     row   = i + rstart;
229169db28dcSHong Zhang     ncols = rptr[i+1] - rptr[i];
229269db28dcSHong Zhang     ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
229369db28dcSHong Zhang     vals += ncols;
229469db28dcSHong Zhang     cols += ncols;
229569db28dcSHong Zhang   }
229669db28dcSHong Zhang   /* insert received matrix entries */
229769db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++){
229869db28dcSHong Zhang     rstart = rowrange[recv_rank[imdex]];
229969db28dcSHong Zhang     rend   = rowrange[recv_rank[imdex]+1];
230069db28dcSHong Zhang     rptr = rbuf_j[imdex];
230169db28dcSHong Zhang     cols = rbuf_j[imdex] + rend-rstart + 1;
230269db28dcSHong Zhang     vals = rbuf_a[imdex];
230369db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
230469db28dcSHong Zhang       row   = i + rstart;
230569db28dcSHong Zhang       ncols = rptr[i+1] - rptr[i];
230669db28dcSHong Zhang       ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
230769db28dcSHong Zhang       vals += ncols;
230869db28dcSHong Zhang       cols += ncols;
230969db28dcSHong Zhang     }
231069db28dcSHong Zhang   }
231169db28dcSHong Zhang   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
231269db28dcSHong Zhang   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
231369db28dcSHong Zhang   ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
2314d0f46423SBarry Smith   if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N);
231569db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
231669db28dcSHong Zhang     PetscContainer container;
231769db28dcSHong Zhang     *matredundant = C;
231869db28dcSHong Zhang     /* create a supporting struct and attach it to C for reuse */
231938f2d2fdSLisandro Dalcin     ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr);
232069db28dcSHong Zhang     ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
232169db28dcSHong Zhang     ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr);
232269db28dcSHong Zhang     ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr);
232369db28dcSHong Zhang     ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr);
232469db28dcSHong Zhang 
232569db28dcSHong Zhang     redund->nzlocal = nzlocal;
232669db28dcSHong Zhang     redund->nsends  = nsends;
232769db28dcSHong Zhang     redund->nrecvs  = nrecvs;
232869db28dcSHong Zhang     redund->send_rank = send_rank;
232969db28dcSHong Zhang     redund->sbuf_nz = sbuf_nz;
233069db28dcSHong Zhang     redund->sbuf_j  = sbuf_j;
233169db28dcSHong Zhang     redund->sbuf_a  = sbuf_a;
233269db28dcSHong Zhang     redund->rbuf_j  = rbuf_j;
233369db28dcSHong Zhang     redund->rbuf_a  = rbuf_a;
233469db28dcSHong Zhang 
233569db28dcSHong Zhang     redund->MatDestroy = C->ops->destroy;
233669db28dcSHong Zhang     C->ops->destroy    = MatDestroy_MatRedundant;
233769db28dcSHong Zhang   }
233869db28dcSHong Zhang   PetscFunctionReturn(0);
233969db28dcSHong Zhang }
234069db28dcSHong Zhang 
234103bc72f1SMatthew Knepley #undef __FUNCT__
2342c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ"
2343c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2344c91732d9SHong Zhang {
2345c91732d9SHong Zhang   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
2346c91732d9SHong Zhang   PetscErrorCode ierr;
2347c91732d9SHong Zhang   PetscInt       i,*idxb = 0;
2348c91732d9SHong Zhang   PetscScalar    *va,*vb;
2349c91732d9SHong Zhang   Vec            vtmp;
2350c91732d9SHong Zhang 
2351c91732d9SHong Zhang   PetscFunctionBegin;
2352c91732d9SHong Zhang   ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
2353c91732d9SHong Zhang   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2354c91732d9SHong Zhang   if (idx) {
2355d0f46423SBarry Smith     for (i=0; i<A->cmap->n; i++) {
2356d0f46423SBarry Smith       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
2357c91732d9SHong Zhang     }
2358c91732d9SHong Zhang   }
2359c91732d9SHong Zhang 
2360d0f46423SBarry Smith   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
2361c91732d9SHong Zhang   if (idx) {
2362d0f46423SBarry Smith     ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);
2363c91732d9SHong Zhang   }
2364c91732d9SHong Zhang   ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
2365c91732d9SHong Zhang   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
2366c91732d9SHong Zhang 
2367d0f46423SBarry Smith   for (i=0; i<A->rmap->n; i++){
2368c91732d9SHong Zhang     if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
2369c91732d9SHong Zhang       va[i] = vb[i];
2370c91732d9SHong Zhang       if (idx) idx[i] = a->garray[idxb[i]];
2371c91732d9SHong Zhang     }
2372c91732d9SHong Zhang   }
2373c91732d9SHong Zhang 
2374c91732d9SHong Zhang   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2375c91732d9SHong Zhang   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
2376c91732d9SHong Zhang   if (idxb) {
2377c91732d9SHong Zhang     ierr = PetscFree(idxb);CHKERRQ(ierr);
2378c91732d9SHong Zhang   }
2379c91732d9SHong Zhang   ierr = VecDestroy(vtmp);CHKERRQ(ierr);
2380c91732d9SHong Zhang   PetscFunctionReturn(0);
2381c91732d9SHong Zhang }
2382c91732d9SHong Zhang 
2383c91732d9SHong Zhang #undef __FUNCT__
238403bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ"
238503bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[])
238603bc72f1SMatthew Knepley {
238703bc72f1SMatthew Knepley   Mat_MPIAIJ    *mat    = (Mat_MPIAIJ *) A->data;
2388d0f46423SBarry Smith   PetscInt       n      = A->rmap->n;
2389d0f46423SBarry Smith   PetscInt       cstart = A->cmap->rstart;
239003bc72f1SMatthew Knepley   PetscInt      *cmap   = mat->garray;
239103bc72f1SMatthew Knepley   PetscInt      *diagIdx, *offdiagIdx;
239203bc72f1SMatthew Knepley   Vec            diagV, offdiagV;
239303bc72f1SMatthew Knepley   PetscScalar   *a, *diagA, *offdiagA;
239403bc72f1SMatthew Knepley   PetscInt       r;
239503bc72f1SMatthew Knepley   PetscErrorCode ierr;
239603bc72f1SMatthew Knepley 
239703bc72f1SMatthew Knepley   PetscFunctionBegin;
239803bc72f1SMatthew Knepley   ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr);
2399e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr);
2400e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr);
240103bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->A, diagV,    diagIdx);CHKERRQ(ierr);
240203bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr);
240303bc72f1SMatthew Knepley   ierr = VecGetArray(v,        &a);CHKERRQ(ierr);
240403bc72f1SMatthew Knepley   ierr = VecGetArray(diagV,    &diagA);CHKERRQ(ierr);
240503bc72f1SMatthew Knepley   ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr);
240603bc72f1SMatthew Knepley   for(r = 0; r < n; ++r) {
2407028cd4eaSSatish Balay     if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) {
240803bc72f1SMatthew Knepley       a[r]   = diagA[r];
240903bc72f1SMatthew Knepley       idx[r] = cstart + diagIdx[r];
241003bc72f1SMatthew Knepley     } else {
241103bc72f1SMatthew Knepley       a[r]   = offdiagA[r];
241203bc72f1SMatthew Knepley       idx[r] = cmap[offdiagIdx[r]];
241303bc72f1SMatthew Knepley     }
241403bc72f1SMatthew Knepley   }
241503bc72f1SMatthew Knepley   ierr = VecRestoreArray(v,        &a);CHKERRQ(ierr);
241603bc72f1SMatthew Knepley   ierr = VecRestoreArray(diagV,    &diagA);CHKERRQ(ierr);
241703bc72f1SMatthew Knepley   ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr);
241803bc72f1SMatthew Knepley   ierr = VecDestroy(diagV);CHKERRQ(ierr);
241903bc72f1SMatthew Knepley   ierr = VecDestroy(offdiagV);CHKERRQ(ierr);
242003bc72f1SMatthew Knepley   ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr);
242103bc72f1SMatthew Knepley   PetscFunctionReturn(0);
242203bc72f1SMatthew Knepley }
242303bc72f1SMatthew Knepley 
24245494a064SHong Zhang #undef __FUNCT__
2425829201f2SHong Zhang #define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ"
2426829201f2SHong Zhang PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat[])
24275494a064SHong Zhang {
24285494a064SHong Zhang   PetscErrorCode ierr;
24295494a064SHong Zhang 
24305494a064SHong Zhang   PetscFunctionBegin;
24315494a064SHong Zhang   ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,newmat);CHKERRQ(ierr);
24325494a064SHong Zhang   PetscFunctionReturn(0);
24335494a064SHong Zhang }
24345494a064SHong Zhang 
24358a729477SBarry Smith /* -------------------------------------------------------------------*/
2436cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ,
2437cda55fadSBarry Smith        MatGetRow_MPIAIJ,
2438cda55fadSBarry Smith        MatRestoreRow_MPIAIJ,
2439cda55fadSBarry Smith        MatMult_MPIAIJ,
244097304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ,
24417c922b88SBarry Smith        MatMultTranspose_MPIAIJ,
24427c922b88SBarry Smith        MatMultTransposeAdd_MPIAIJ,
2443103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2444103bf8bdSMatthew Knepley        MatSolve_MPIAIJ,
2445103bf8bdSMatthew Knepley #else
2446cda55fadSBarry Smith        0,
2447103bf8bdSMatthew Knepley #endif
2448cda55fadSBarry Smith        0,
2449cda55fadSBarry Smith        0,
245097304618SKris Buschelman /*10*/ 0,
2451cda55fadSBarry Smith        0,
2452cda55fadSBarry Smith        0,
245344a69424SLois Curfman McInnes        MatRelax_MPIAIJ,
2454b7c46309SBarry Smith        MatTranspose_MPIAIJ,
245597304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ,
2456cda55fadSBarry Smith        MatEqual_MPIAIJ,
2457cda55fadSBarry Smith        MatGetDiagonal_MPIAIJ,
2458cda55fadSBarry Smith        MatDiagonalScale_MPIAIJ,
2459cda55fadSBarry Smith        MatNorm_MPIAIJ,
246097304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ,
2461cda55fadSBarry Smith        MatAssemblyEnd_MPIAIJ,
24621eb62cbbSBarry Smith        0,
2463cda55fadSBarry Smith        MatSetOption_MPIAIJ,
2464cda55fadSBarry Smith        MatZeroEntries_MPIAIJ,
246597304618SKris Buschelman /*25*/ MatZeroRows_MPIAIJ,
2466cda55fadSBarry Smith        0,
2467103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2468719d5645SBarry Smith        0,
2469103bf8bdSMatthew Knepley #else
2470cda55fadSBarry Smith        0,
2471103bf8bdSMatthew Knepley #endif
2472cda55fadSBarry Smith        0,
2473cda55fadSBarry Smith        0,
247497304618SKris Buschelman /*30*/ MatSetUpPreallocation_MPIAIJ,
2475103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2476719d5645SBarry Smith        0,
2477103bf8bdSMatthew Knepley #else
2478cda55fadSBarry Smith        0,
2479103bf8bdSMatthew Knepley #endif
2480cda55fadSBarry Smith        0,
2481cda55fadSBarry Smith        0,
2482cda55fadSBarry Smith        0,
248397304618SKris Buschelman /*35*/ MatDuplicate_MPIAIJ,
2484cda55fadSBarry Smith        0,
2485cda55fadSBarry Smith        0,
2486cda55fadSBarry Smith        0,
2487cda55fadSBarry Smith        0,
248897304618SKris Buschelman /*40*/ MatAXPY_MPIAIJ,
2489cda55fadSBarry Smith        MatGetSubMatrices_MPIAIJ,
2490cda55fadSBarry Smith        MatIncreaseOverlap_MPIAIJ,
2491cda55fadSBarry Smith        MatGetValues_MPIAIJ,
2492cb5b572fSBarry Smith        MatCopy_MPIAIJ,
24938c07d4e3SBarry Smith /*45*/ 0,
2494cda55fadSBarry Smith        MatScale_MPIAIJ,
2495cda55fadSBarry Smith        0,
2496cda55fadSBarry Smith        0,
2497cda55fadSBarry Smith        0,
2498521d7252SBarry Smith /*50*/ MatSetBlockSize_MPIAIJ,
2499cda55fadSBarry Smith        0,
2500cda55fadSBarry Smith        0,
2501cda55fadSBarry Smith        0,
2502cda55fadSBarry Smith        0,
250397304618SKris Buschelman /*55*/ MatFDColoringCreate_MPIAIJ,
2504cda55fadSBarry Smith        0,
2505cda55fadSBarry Smith        MatSetUnfactored_MPIAIJ,
250642e855d1Svictor        MatPermute_MPIAIJ,
2507cda55fadSBarry Smith        0,
250897304618SKris Buschelman /*60*/ MatGetSubMatrix_MPIAIJ,
2509e03a110bSBarry Smith        MatDestroy_MPIAIJ,
2510e03a110bSBarry Smith        MatView_MPIAIJ,
2511357abbc8SBarry Smith        0,
2512a2243be0SBarry Smith        0,
251397304618SKris Buschelman /*65*/ 0,
2514a2243be0SBarry Smith        0,
2515a2243be0SBarry Smith        0,
2516a2243be0SBarry Smith        0,
2517a2243be0SBarry Smith        0,
2518c91732d9SHong Zhang /*70*/ MatGetRowMaxAbs_MPIAIJ,
2519a2243be0SBarry Smith        0,
2520a2243be0SBarry Smith        MatSetColoring_MPIAIJ,
2521dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC)
2522779c1a83SBarry Smith        MatSetValuesAdic_MPIAIJ,
2523dcf5cc72SBarry Smith #else
2524dcf5cc72SBarry Smith        0,
2525dcf5cc72SBarry Smith #endif
252697304618SKris Buschelman        MatSetValuesAdifor_MPIAIJ,
252797304618SKris Buschelman /*75*/ 0,
252897304618SKris Buschelman        0,
252997304618SKris Buschelman        0,
253097304618SKris Buschelman        0,
253197304618SKris Buschelman        0,
253297304618SKris Buschelman /*80*/ 0,
253397304618SKris Buschelman        0,
253497304618SKris Buschelman        0,
253597304618SKris Buschelman        0,
253641acf15aSKris Buschelman /*84*/ MatLoad_MPIAIJ,
25376284ec50SHong Zhang        0,
25386284ec50SHong Zhang        0,
25396284ec50SHong Zhang        0,
25406284ec50SHong Zhang        0,
2541865e5f61SKris Buschelman        0,
2542865e5f61SKris Buschelman /*90*/ MatMatMult_MPIAIJ_MPIAIJ,
254326be0446SHong Zhang        MatMatMultSymbolic_MPIAIJ_MPIAIJ,
254426be0446SHong Zhang        MatMatMultNumeric_MPIAIJ_MPIAIJ,
25457a7894deSKris Buschelman        MatPtAP_Basic,
25467a7894deSKris Buschelman        MatPtAPSymbolic_MPIAIJ,
25477a7894deSKris Buschelman /*95*/ MatPtAPNumeric_MPIAIJ,
25487a7894deSKris Buschelman        0,
25497a7894deSKris Buschelman        0,
25507a7894deSKris Buschelman        0,
25517a7894deSKris Buschelman        0,
25527a7894deSKris Buschelman /*100*/0,
2553865e5f61SKris Buschelman        MatPtAPSymbolic_MPIAIJ_MPIAIJ,
25547a7894deSKris Buschelman        MatPtAPNumeric_MPIAIJ_MPIAIJ,
25552fd7e33dSBarry Smith        MatConjugate_MPIAIJ,
25562fd7e33dSBarry Smith        0,
255799cafbc1SBarry Smith /*105*/MatSetValuesRow_MPIAIJ,
255899cafbc1SBarry Smith        MatRealPart_MPIAIJ,
255969db28dcSHong Zhang        MatImaginaryPart_MPIAIJ,
256069db28dcSHong Zhang        0,
256169db28dcSHong Zhang        0,
256269db28dcSHong Zhang /*110*/0,
256303bc72f1SMatthew Knepley        MatGetRedundantMatrix_MPIAIJ,
25645494a064SHong Zhang        MatGetRowMin_MPIAIJ,
25655494a064SHong Zhang        0,
25665494a064SHong Zhang        0,
2567829201f2SHong Zhang /*115*/MatGetSeqNonzerostructure_MPIAIJ};
256836ce4990SBarry Smith 
25692e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/
25702e8a6d31SBarry Smith 
2571fb2e594dSBarry Smith EXTERN_C_BEGIN
25724a2ae208SSatish Balay #undef __FUNCT__
25734a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ"
2574be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat)
25752e8a6d31SBarry Smith {
25762e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2577dfbe8321SBarry Smith   PetscErrorCode ierr;
25782e8a6d31SBarry Smith 
25792e8a6d31SBarry Smith   PetscFunctionBegin;
25802e8a6d31SBarry Smith   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
25812e8a6d31SBarry Smith   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
25822e8a6d31SBarry Smith   PetscFunctionReturn(0);
25832e8a6d31SBarry Smith }
2584fb2e594dSBarry Smith EXTERN_C_END
25852e8a6d31SBarry Smith 
2586fb2e594dSBarry Smith EXTERN_C_BEGIN
25874a2ae208SSatish Balay #undef __FUNCT__
25884a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ"
2589be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat)
25902e8a6d31SBarry Smith {
25912e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2592dfbe8321SBarry Smith   PetscErrorCode ierr;
25932e8a6d31SBarry Smith 
25942e8a6d31SBarry Smith   PetscFunctionBegin;
25952e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
25962e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
25972e8a6d31SBarry Smith   PetscFunctionReturn(0);
25982e8a6d31SBarry Smith }
2599fb2e594dSBarry Smith EXTERN_C_END
26008a729477SBarry Smith 
2601e090d566SSatish Balay #include "petscpc.h"
260227508adbSBarry Smith EXTERN_C_BEGIN
26034a2ae208SSatish Balay #undef __FUNCT__
2604a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ"
2605be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
2606a23d5eceSKris Buschelman {
2607a23d5eceSKris Buschelman   Mat_MPIAIJ     *b;
2608dfbe8321SBarry Smith   PetscErrorCode ierr;
2609b1d57f15SBarry Smith   PetscInt       i;
2610a23d5eceSKris Buschelman 
2611a23d5eceSKris Buschelman   PetscFunctionBegin;
2612a23d5eceSKris Buschelman   B->preallocated = PETSC_TRUE;
2613a23d5eceSKris Buschelman   if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
2614a23d5eceSKris Buschelman   if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
261577431f27SBarry Smith   if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
261677431f27SBarry Smith   if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);
2617899cda47SBarry Smith 
2618d0f46423SBarry Smith   B->rmap->bs = B->cmap->bs = 1;
2619d0f46423SBarry Smith   ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
2620d0f46423SBarry Smith   ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);
2621a23d5eceSKris Buschelman   if (d_nnz) {
2622d0f46423SBarry Smith     for (i=0; i<B->rmap->n; i++) {
262377431f27SBarry Smith       if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
2624a23d5eceSKris Buschelman     }
2625a23d5eceSKris Buschelman   }
2626a23d5eceSKris Buschelman   if (o_nnz) {
2627d0f46423SBarry Smith     for (i=0; i<B->rmap->n; i++) {
262877431f27SBarry Smith       if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
2629a23d5eceSKris Buschelman     }
2630a23d5eceSKris Buschelman   }
2631a23d5eceSKris Buschelman   b = (Mat_MPIAIJ*)B->data;
2632899cda47SBarry Smith 
2633899cda47SBarry Smith   /* Explicitly create 2 MATSEQAIJ matrices. */
2634899cda47SBarry Smith   ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
2635d0f46423SBarry Smith   ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
2636899cda47SBarry Smith   ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr);
2637899cda47SBarry Smith   ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
2638899cda47SBarry Smith   ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
2639d0f46423SBarry Smith   ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
2640899cda47SBarry Smith   ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr);
2641899cda47SBarry Smith   ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);
2642899cda47SBarry Smith 
2643c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr);
2644c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr);
2645a23d5eceSKris Buschelman 
2646a23d5eceSKris Buschelman   PetscFunctionReturn(0);
2647a23d5eceSKris Buschelman }
2648a23d5eceSKris Buschelman EXTERN_C_END
2649a23d5eceSKris Buschelman 
#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MPIAIJ"
/*
    Duplicates a parallel AIJ matrix: creates a new Mat on matin's communicator
  with the same sizes/type, copies the parallel bookkeeping (row/column maps,
  colmap, garray, local vector and scatter context), and duplicates the two
  sequential blocks A and B, copying their numerical values or not according
  to cpvalues (forwarded to MatDuplicate on each block).
*/
PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
{
  Mat            mat;
  Mat_MPIAIJ     *a,*oldmat = (Mat_MPIAIJ*)matin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  *newmat       = 0;
  ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr);
  ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
  /* copy the full function-pointer table so any type-specific overrides carry over */
  ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  a    = (Mat_MPIAIJ*)mat->data;

  mat->factor       = matin->factor;
  mat->rmap->bs      = matin->rmap->bs;
  /* the duplicate is born assembled; no stashed values pending */
  mat->assembled    = PETSC_TRUE;
  mat->insertmode   = NOT_SET_VALUES;
  mat->preallocated = PETSC_TRUE;

  a->size           = oldmat->size;
  a->rank           = oldmat->rank;
  a->donotstash     = oldmat->donotstash;
  a->roworiented    = oldmat->roworiented;
  /* per-call MatGetRow() scratch state starts empty in the copy */
  a->rowindices     = 0;
  a->rowvalues      = 0;
  a->getrowactive   = PETSC_FALSE;

  ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->rmap,mat->rmap);CHKERRQ(ierr);
  ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->cmap,mat->cmap);CHKERRQ(ierr);

  ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr);
  /* colmap (global column -> local column of B) is either a PetscTable or a
     plain array of length cmap->N depending on configuration */
  if (oldmat->colmap) {
#if defined (PETSC_USE_CTABLE)
    ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
#else
    ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  } else a->colmap = 0;
  /* garray maps local columns of B back to global column numbers */
  if (oldmat->garray) {
    PetscInt len;
    len  = oldmat->B->cmap->n;
    ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
    if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); }
  } else a->garray = 0;

  /* duplicate the communication machinery used by MatMult: lvec holds the
     gathered off-process vector entries, Mvctx the scatter that fills it */
  ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
  ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
  *newmat = mat;
  PetscFunctionReturn(0);
}
2713416022c9SBarry Smith 
2714e090d566SSatish Balay #include "petscsys.h"
2715416022c9SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatLoad_MPIAIJ"
/*
    Loads a parallel AIJ matrix from a PETSc binary viewer.  Process 0 reads
  the whole file (header, row lengths, column indices, values) and ships each
  process its slice with point-to-point MPI messages; every process then
  inserts its rows locally and the matrix is assembled.

    Binary format (all on process 0's file descriptor):
      header[4] = {MAT_FILE_COOKIE, M, N, total-nnz}, then the M row lengths,
      then all column indices, then all numerical values.
*/
PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, const MatType type,Mat *newmat)
{
  Mat            A;
  PetscScalar    *vals,*svals;
  MPI_Comm       comm = ((PetscObject)viewer)->comm;
  MPI_Status     status;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,maxnz;
  PetscInt       i,nz,j,rstart,rend,mmax;
  PetscInt       header[4],*rowlengths = 0,M,N,m,*cols;
  PetscInt       *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols;
  PetscInt       cend,cstart,n,*rowners;
  int            fd;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* only process 0 touches the file; everything else arrives by message */
  if (!rank) {
    ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
    if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
  }

  ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
  M = header[1]; N = header[2];
  /* determine ownership of all rows: remainder rows go to the lowest ranks,
     so the local row count m is non-increasing with rank */
  m    = M/size + ((M % size) > rank);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
  ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);

  /* First process needs enough room for process with most rows */
  if (!rank) {
    mmax       = rowners[1];
    /* NOTE(review): loop bound i<size skips rowners[size]; harmless here only
       because the partition above gives rank 0 (rowners[1]) the largest count */
    for (i=2; i<size; i++) {
      mmax = PetscMax(mmax,rowners[i]);
    }
  } else mmax = m;

  /* turn the per-process counts into a prefix sum of row ownership ranges */
  rowners[0] = 0;
  for (i=2; i<=size; i++) {
    rowners[i] += rowners[i-1];
  }
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  ierr    = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr);
    ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
    ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
    ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
    for (j=0; j<m; j++) {
      procsnz[0] += ourlens[j];
    }
    for (i=1; i<size; i++) {
      ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr);
      /* calculate the number of nonzeros on each processor */
      for (j=0; j<rowners[i+1]-rowners[i]; j++) {
        procsnz[i] += rowlengths[j];
      }
      ierr = MPI_Send(rowlengths,rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(rowlengths);CHKERRQ(ierr);
  } else {
    ierr = MPI_Recv(ourlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
  }

  if (!rank) {
    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);

    /* read in my part of the matrix column indices  */
    nz   = procsnz[0];
    ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);

    /* read in every one elses and ship off */
    for (i=1; i<size; i++) {
      nz   = procsnz[i];
      ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
      ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(cols);CHKERRQ(ierr);
  } else {
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) {
      nz += ourlens[i];
    }
    ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);

    /* receive message of column indices*/
    ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
    ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
  }

  /* determine column ownership if matrix is not square */
  if (N != M) {
    n      = N/size + ((N % size) > rank);
    ierr   = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    cstart = cend - n;
  } else {
    cstart = rstart;
    cend   = rend;
    n      = cend - cstart;
  }

  /* loop over local rows, determining number of off diagonal entries */
  ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr);
  jj = 0;
  for (i=0; i<m; i++) {
    for (j=0; j<ourlens[i]; j++) {
      if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
      jj++;
    }
  }

  /* create our matrix: temporarily shrink ourlens to diagonal-block lengths
     for preallocation, then restore total lengths for the insertion loops */
  for (i=0; i<m; i++) {
    ourlens[i] -= offlens[i];
  }
  ierr = MatCreate(comm,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(A,type);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(A,0,ourlens,0,offlens);CHKERRQ(ierr);

  for (i=0; i<m; i++) {
    ourlens[i] += offlens[i];
  }

  if (!rank) {
    ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* read in my part of the matrix numerical values  */
    nz   = procsnz[0];
    ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }

    /* read in other processors and ship out */
    for (i=1; i<size; i++) {
      nz   = procsnz[i];
      ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(procsnz);CHKERRQ(ierr);
  } else {
    /* receive numeric values */
    ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* receive message of values*/
    ierr = MPI_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr);
    ierr = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr     = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }
  }
  ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr);
  ierr = PetscFree(vals);CHKERRQ(ierr);
  ierr = PetscFree(mycols);CHKERRQ(ierr);
  ierr = PetscFree(rowners);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = A;
  PetscFunctionReturn(0);
}
2909a0ff6018SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAIJ"
/*
    Extracts the parallel submatrix defined by (isrow, iscol).  Not great
  since it makes two copies of the submatrix: first each process builds its
  piece as a SeqAIJ matrix via MatGetSubMatrices(), then those local pieces
  are concatenated into the final parallel matrix.  Writing it directly would
  be much like MatGetSubMatrices_MPIAIJ().

    csize is the requested local column count of the result (PETSC_DECIDE to
  let this routine choose).  With MAT_REUSE_MATRIX the SeqAIJ piece saved on
  *newmat under the key "SubMatrix" is refilled instead of recreated.
*/
PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j;
  PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal;
  Mat            *local,M,Mreuse;
  MatScalar      *vwork,*aa;
  MPI_Comm       comm = ((PetscObject)mat)->comm;
  Mat_SeqAIJ     *aij;


  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* obtain the local sequential piece: reuse the one stashed on *newmat, or
     have MatGetSubMatrices() create a fresh one */
  if (call ==  MAT_REUSE_MATRIX) {
    ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr);
    if (!Mreuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
    local = &Mreuse;
    ierr  = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr);
  } else {
    ierr   = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr);
    Mreuse = *local;
    ierr   = PetscFree(local);CHKERRQ(ierr);
  }

  /*
      m - number of local rows
      n - number of columns (same on all processors)
      rstart - first row in new global matrix generated
  */
  ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
  if (call == MAT_INITIAL_MATRIX) {
    aij = (Mat_SeqAIJ*)(Mreuse)->data;
    ii  = aij->i;
    jj  = aij->j;

    /*
        Determine the number of non-zeros in the diagonal and off-diagonal
        portions of the matrix in order to do correct preallocation
    */

    /* first get start and end of "diagonal" columns */
    if (csize == PETSC_DECIDE) {
      ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
      if (mglobal == n) { /* square matrix */
	nlocal = m;
      } else {
        nlocal = n/size + ((n % size) > rank);
      }
    } else {
      nlocal = csize;
    }
    /* prefix-sum the local column counts to get this process's column range */
    ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart = rend - nlocal;
    if (rank == size - 1 && rend != n) {
      SETERRQ2(PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
    }

    /* next, compute all the lengths */
    ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
    olens = dlens + m;
    for (i=0; i<m; i++) {
      jend = ii[i+1] - ii[i];
      olen = 0;
      dlen = 0;
      for (j=0; j<jend; j++) {
        if (*jj < rstart || *jj >= rend) olen++;
        else dlen++;
        jj++;
      }
      olens[i] = olen;
      dlens[i] = dlen;
    }
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr);
    ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr);
    ierr = PetscFree(dlens);CHKERRQ(ierr);
  } else {
    PetscInt ml,nl;

    M = *newmat;
    ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
    if (ml != m) SETERRQ(PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
    ierr = MatZeroEntries(M);CHKERRQ(ierr);
    /*
         The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
       rather than the slower MatSetValues().
    */
    M->was_assembled = PETSC_TRUE;
    M->assembled     = PETSC_FALSE;
  }
  /* copy the local SeqAIJ piece row by row into the parallel matrix */
  ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
  aij = (Mat_SeqAIJ*)(Mreuse)->data;
  ii  = aij->i;
  jj  = aij->j;
  aa  = aij->a;
  for (i=0; i<m; i++) {
    row   = rstart + i;
    nz    = ii[i+1] - ii[i];
    cwork = jj;     jj += nz;
    vwork = aa;     aa += nz;
    ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
  }

  ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = M;

  /* save submatrix used in processor for next request */
  if (call ==  MAT_INITIAL_MATRIX) {
    ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
3036273d9f13SBarry Smith 
3037e2e86b8fSSatish Balay EXTERN_C_BEGIN
30384a2ae208SSatish Balay #undef __FUNCT__
3039ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ"
3040b7940d39SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[])
3041ccd8e176SBarry Smith {
3042899cda47SBarry Smith   PetscInt       m,cstart, cend,j,nnz,i,d;
3043899cda47SBarry Smith   PetscInt       *d_nnz,*o_nnz,nnz_max = 0,rstart,ii;
3044ccd8e176SBarry Smith   const PetscInt *JJ;
3045ccd8e176SBarry Smith   PetscScalar    *values;
3046ccd8e176SBarry Smith   PetscErrorCode ierr;
3047ccd8e176SBarry Smith 
3048ccd8e176SBarry Smith   PetscFunctionBegin;
3049b7940d39SSatish Balay   if (Ii[0]) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]);
3050899cda47SBarry Smith 
3051d0f46423SBarry Smith   B->rmap->bs = B->cmap->bs = 1;
3052d0f46423SBarry Smith   ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
3053d0f46423SBarry Smith   ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);
3054d0f46423SBarry Smith   m      = B->rmap->n;
3055d0f46423SBarry Smith   cstart = B->cmap->rstart;
3056d0f46423SBarry Smith   cend   = B->cmap->rend;
3057d0f46423SBarry Smith   rstart = B->rmap->rstart;
3058899cda47SBarry Smith 
3059ccd8e176SBarry Smith   ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
3060ccd8e176SBarry Smith   o_nnz = d_nnz + m;
3061ccd8e176SBarry Smith 
3062ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING)
3063ecc77c7aSBarry Smith   for (i=0; i<m; i++) {
3064ecc77c7aSBarry Smith     nnz     = Ii[i+1]- Ii[i];
3065ecc77c7aSBarry Smith     JJ      = J + Ii[i];
3066ecc77c7aSBarry Smith     if (nnz < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz);
3067ecc77c7aSBarry Smith     if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j);
3068d0f46423SBarry Smith     if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N);
3069ecc77c7aSBarry Smith     for (j=1; j<nnz; j++) {
3070ecc77c7aSBarry Smith       if (JJ[i] <= JJ[i-1]) SETERRRQ(PETSC_ERR_ARG_WRONGSTATE,"Row %D has unsorted column index at %D location in column indices",i,j);
3071ecc77c7aSBarry Smith     }
3072ecc77c7aSBarry Smith   }
3073ecc77c7aSBarry Smith #endif
3074ecc77c7aSBarry Smith 
3075ccd8e176SBarry Smith   for (i=0; i<m; i++) {
3076b7940d39SSatish Balay     nnz     = Ii[i+1]- Ii[i];
3077b7940d39SSatish Balay     JJ      = J + Ii[i];
3078ccd8e176SBarry Smith     nnz_max = PetscMax(nnz_max,nnz);
3079ccd8e176SBarry Smith     for (j=0; j<nnz; j++) {
3080ccd8e176SBarry Smith       if (*JJ >= cstart) break;
3081ccd8e176SBarry Smith       JJ++;
3082ccd8e176SBarry Smith     }
3083ccd8e176SBarry Smith     d = 0;
3084ccd8e176SBarry Smith     for (; j<nnz; j++) {
3085ccd8e176SBarry Smith       if (*JJ++ >= cend) break;
3086ccd8e176SBarry Smith       d++;
3087ccd8e176SBarry Smith     }
3088ccd8e176SBarry Smith     d_nnz[i] = d;
3089ccd8e176SBarry Smith     o_nnz[i] = nnz - d;
3090ccd8e176SBarry Smith   }
3091ccd8e176SBarry Smith   ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
3092ccd8e176SBarry Smith   ierr = PetscFree(d_nnz);CHKERRQ(ierr);
3093ccd8e176SBarry Smith 
3094ccd8e176SBarry Smith   if (v) values = (PetscScalar*)v;
3095ccd8e176SBarry Smith   else {
3096ccd8e176SBarry Smith     ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr);
3097ccd8e176SBarry Smith     ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr);
3098ccd8e176SBarry Smith   }
3099ccd8e176SBarry Smith 
3100ccd8e176SBarry Smith   for (i=0; i<m; i++) {
3101ccd8e176SBarry Smith     ii   = i + rstart;
3102b7940d39SSatish Balay     nnz  = Ii[i+1]- Ii[i];
3103b7940d39SSatish Balay     ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr);
3104ccd8e176SBarry Smith   }
3105ccd8e176SBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3106ccd8e176SBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3107ccd8e176SBarry Smith 
3108ccd8e176SBarry Smith   if (!v) {
3109ccd8e176SBarry Smith     ierr = PetscFree(values);CHKERRQ(ierr);
3110ccd8e176SBarry Smith   }
3111ccd8e176SBarry Smith   PetscFunctionReturn(0);
3112ccd8e176SBarry Smith }
3113e2e86b8fSSatish Balay EXTERN_C_END
3114ccd8e176SBarry Smith 
3115ccd8e176SBarry Smith #undef __FUNCT__
3116ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR"
31171eea217eSSatish Balay /*@
3118ccd8e176SBarry Smith    MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format
3119ccd8e176SBarry Smith    (the default parallel PETSc format).
3120ccd8e176SBarry Smith 
3121ccd8e176SBarry Smith    Collective on MPI_Comm
3122ccd8e176SBarry Smith 
3123ccd8e176SBarry Smith    Input Parameters:
3124a1661176SMatthew Knepley +  B - the matrix
3125ccd8e176SBarry Smith .  i - the indices into j for the start of each local row (starts with zero)
3126ccd8e176SBarry Smith .  j - the column indices for each local row (starts with zero) these must be sorted for each row
3127ccd8e176SBarry Smith -  v - optional values in the matrix
3128ccd8e176SBarry Smith 
3129ccd8e176SBarry Smith    Level: developer
3130ccd8e176SBarry Smith 
313112251496SSatish Balay    Notes:
313212251496SSatish Balay        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
313312251496SSatish Balay      thus you CANNOT change the matrix entries by changing the values of a[] after you have
313412251496SSatish Balay      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
313512251496SSatish Balay 
313612251496SSatish Balay        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
313712251496SSatish Balay 
313812251496SSatish Balay        The format which is used for the sparse matrix input, is equivalent to a
313912251496SSatish Balay     row-major ordering.. i.e for the following matrix, the input data expected is
314012251496SSatish Balay     as shown:
314112251496SSatish Balay 
314212251496SSatish Balay         1 0 0
314312251496SSatish Balay         2 0 3     P0
314412251496SSatish Balay        -------
314512251496SSatish Balay         4 5 6     P1
314612251496SSatish Balay 
314712251496SSatish Balay      Process0 [P0]: rows_owned=[0,1]
314812251496SSatish Balay         i =  {0,1,3}  [size = nrow+1  = 2+1]
314912251496SSatish Balay         j =  {0,0,2}  [size = nz = 6]
315012251496SSatish Balay         v =  {1,2,3}  [size = nz = 6]
315112251496SSatish Balay 
315212251496SSatish Balay      Process1 [P1]: rows_owned=[2]
315312251496SSatish Balay         i =  {0,3}    [size = nrow+1  = 1+1]
315412251496SSatish Balay         j =  {0,1,2}  [size = nz = 6]
315512251496SSatish Balay         v =  {4,5,6}  [size = nz = 6]
315612251496SSatish Balay 
3157ecc77c7aSBarry Smith       The column indices for each row MUST be sorted.
31582fb0ec9aSBarry Smith 
3159ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3160ccd8e176SBarry Smith 
31612fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ,
31628d7a6e47SBarry Smith           MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays()
3163ccd8e176SBarry Smith @*/
3164be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
3165ccd8e176SBarry Smith {
3166ccd8e176SBarry Smith   PetscErrorCode ierr,(*f)(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
3167ccd8e176SBarry Smith 
3168ccd8e176SBarry Smith   PetscFunctionBegin;
3169ccd8e176SBarry Smith   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",(void (**)(void))&f);CHKERRQ(ierr);
3170ccd8e176SBarry Smith   if (f) {
3171ccd8e176SBarry Smith     ierr = (*f)(B,i,j,v);CHKERRQ(ierr);
3172ccd8e176SBarry Smith   }
3173ccd8e176SBarry Smith   PetscFunctionReturn(0);
3174ccd8e176SBarry Smith }
3175ccd8e176SBarry Smith 
3176ccd8e176SBarry Smith #undef __FUNCT__
31774a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation"
3178273d9f13SBarry Smith /*@C
3179ccd8e176SBarry Smith    MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format
3180273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3181273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3182273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3183273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3184273d9f13SBarry Smith 
3185273d9f13SBarry Smith    Collective on MPI_Comm
3186273d9f13SBarry Smith 
3187273d9f13SBarry Smith    Input Parameters:
3188273d9f13SBarry Smith +  A - the matrix
3189273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3190273d9f13SBarry Smith            (same value is used for all local rows)
3191273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3192273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3193273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3194273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3195273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3196273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
3197273d9f13SBarry Smith            submatrix (same value is used for all local rows).
3198273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
3199273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
3200273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
3201273d9f13SBarry Smith            structure. The size of this array is equal to the number
3202273d9f13SBarry Smith            of local rows, i.e 'm'.
3203273d9f13SBarry Smith 
320449a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
320549a6f317SBarry Smith 
3206273d9f13SBarry Smith    The AIJ format (also called the Yale sparse matrix format or
3207ccd8e176SBarry Smith    compressed row storage (CSR)), is fully compatible with standard Fortran 77
3208ccd8e176SBarry Smith    storage.  The stored row and column indices begin with zero.  See the users manual for details.
3209273d9f13SBarry Smith 
3210273d9f13SBarry Smith    The parallel matrix is partitioned such that the first m0 rows belong to
3211273d9f13SBarry Smith    process 0, the next m1 rows belong to process 1, the next m2 rows belong
3212273d9f13SBarry Smith    to process 2 etc.. where m0,m1,m2... are the input parameter 'm'.
3213273d9f13SBarry Smith 
3214273d9f13SBarry Smith    The DIAGONAL portion of the local submatrix of a processor can be defined
3215273d9f13SBarry Smith    as the submatrix which is obtained by extraction the part corresponding
3216273d9f13SBarry Smith    to the rows r1-r2 and columns r1-r2 of the global matrix, where r1 is the
3217273d9f13SBarry Smith    first row that belongs to the processor, and r2 is the last row belonging
3218273d9f13SBarry Smith    to the this processor. This is a square mxm matrix. The remaining portion
3219273d9f13SBarry Smith    of the local submatrix (mxN) constitute the OFF-DIAGONAL portion.
3220273d9f13SBarry Smith 
3221273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
3222273d9f13SBarry Smith 
3223aa95bbe8SBarry Smith    You can call MatGetInfo() to get information on how effective the preallocation was;
3224aa95bbe8SBarry Smith    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3225aa95bbe8SBarry Smith    You can also run with the option -info and look for messages with the string
3226aa95bbe8SBarry Smith    malloc in them to see if additional memory allocation was needed.
3227aa95bbe8SBarry Smith 
3228273d9f13SBarry Smith    Example usage:
3229273d9f13SBarry Smith 
3230273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
3231273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
3232273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
3233273d9f13SBarry Smith    as follows:
3234273d9f13SBarry Smith 
3235273d9f13SBarry Smith .vb
3236273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
3237273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
3238273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
3239273d9f13SBarry Smith     -------------------------------------
3240273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
3241273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
3242273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
3243273d9f13SBarry Smith     -------------------------------------
3244273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
3245273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
3246273d9f13SBarry Smith .ve
3247273d9f13SBarry Smith 
3248273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
3249273d9f13SBarry Smith 
3250273d9f13SBarry Smith .vb
3251273d9f13SBarry Smith       A B C
3252273d9f13SBarry Smith       D E F
3253273d9f13SBarry Smith       G H I
3254273d9f13SBarry Smith .ve
3255273d9f13SBarry Smith 
3256273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
3257273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
3258273d9f13SBarry Smith 
3259273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3260273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3261273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
3262273d9f13SBarry Smith 
3263273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
3264273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
3265273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
3266273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
3267273d9f13SBarry Smith    part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
3268273d9f13SBarry Smith    matrix, ans [DF] as another SeqAIJ matrix.
3269273d9f13SBarry Smith 
3270273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
3271273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
3272273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
3273273d9f13SBarry Smith    One way to choose d_nz and o_nz is to use the max nonzerors per local
3274273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
3275273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
3276273d9f13SBarry Smith .vb
3277273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
3278273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
3279273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3280273d9f13SBarry Smith .ve
3281273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
3282273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
3283273d9f13SBarry Smith    for proc3. i.e we are using 12+15+10=37 storage locations to store
3284273d9f13SBarry Smith    34 values.
3285273d9f13SBarry Smith 
3286273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
3287273d9f13SBarry Smith    for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3288273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3289273d9f13SBarry Smith .vb
3290273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3291273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3292273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3293273d9f13SBarry Smith .ve
3294273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3295273d9f13SBarry Smith    hence pre-allocation is perfect.
3296273d9f13SBarry Smith 
3297273d9f13SBarry Smith    Level: intermediate
3298273d9f13SBarry Smith 
3299273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3300273d9f13SBarry Smith 
3301ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(),
3302aa95bbe8SBarry Smith           MPIAIJ, MatGetInfo()
3303273d9f13SBarry Smith @*/
3304be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3305273d9f13SBarry Smith {
3306b1d57f15SBarry Smith   PetscErrorCode ierr,(*f)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
3307273d9f13SBarry Smith 
3308273d9f13SBarry Smith   PetscFunctionBegin;
3309a23d5eceSKris Buschelman   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
3310a23d5eceSKris Buschelman   if (f) {
3311a23d5eceSKris Buschelman     ierr = (*f)(B,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3312273d9f13SBarry Smith   }
3313273d9f13SBarry Smith   PetscFunctionReturn(0);
3314273d9f13SBarry Smith }
3315273d9f13SBarry Smith 
33164a2ae208SSatish Balay #undef __FUNCT__
33172fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays"
331858d36128SBarry Smith /*@
33192fb0ec9aSBarry Smith      MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard
33202fb0ec9aSBarry Smith          CSR format the local rows.
33212fb0ec9aSBarry Smith 
33222fb0ec9aSBarry Smith    Collective on MPI_Comm
33232fb0ec9aSBarry Smith 
33242fb0ec9aSBarry Smith    Input Parameters:
33252fb0ec9aSBarry Smith +  comm - MPI communicator
33262fb0ec9aSBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
33272fb0ec9aSBarry Smith .  n - This value should be the same as the local size used in creating the
33282fb0ec9aSBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
33292fb0ec9aSBarry Smith        calculated if N is given) For square matrices n is almost always m.
33302fb0ec9aSBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
33312fb0ec9aSBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
33322fb0ec9aSBarry Smith .   i - row indices
33332fb0ec9aSBarry Smith .   j - column indices
33342fb0ec9aSBarry Smith -   a - matrix values
33352fb0ec9aSBarry Smith 
33362fb0ec9aSBarry Smith    Output Parameter:
33372fb0ec9aSBarry Smith .   mat - the matrix
333803bfb495SBarry Smith 
33392fb0ec9aSBarry Smith    Level: intermediate
33402fb0ec9aSBarry Smith 
33412fb0ec9aSBarry Smith    Notes:
33422fb0ec9aSBarry Smith        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
33432fb0ec9aSBarry Smith      thus you CANNOT change the matrix entries by changing the values of a[] after you have
33448d7a6e47SBarry Smith      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
33452fb0ec9aSBarry Smith 
334612251496SSatish Balay        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
334712251496SSatish Balay 
334812251496SSatish Balay        The format which is used for the sparse matrix input, is equivalent to a
334912251496SSatish Balay     row-major ordering.. i.e for the following matrix, the input data expected is
335012251496SSatish Balay     as shown:
335112251496SSatish Balay 
335212251496SSatish Balay         1 0 0
335312251496SSatish Balay         2 0 3     P0
335412251496SSatish Balay        -------
335512251496SSatish Balay         4 5 6     P1
335612251496SSatish Balay 
335712251496SSatish Balay      Process0 [P0]: rows_owned=[0,1]
335812251496SSatish Balay         i =  {0,1,3}  [size = nrow+1  = 2+1]
335912251496SSatish Balay         j =  {0,0,2}  [size = nz = 6]
336012251496SSatish Balay         v =  {1,2,3}  [size = nz = 6]
336112251496SSatish Balay 
336212251496SSatish Balay      Process1 [P1]: rows_owned=[2]
336312251496SSatish Balay         i =  {0,3}    [size = nrow+1  = 1+1]
336412251496SSatish Balay         j =  {0,1,2}  [size = nz = 6]
336512251496SSatish Balay         v =  {4,5,6}  [size = nz = 6]
33662fb0ec9aSBarry Smith 
33672fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
33682fb0ec9aSBarry Smith 
33692fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
33708d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays()
33712fb0ec9aSBarry Smith @*/
337282b90586SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
33732fb0ec9aSBarry Smith {
33742fb0ec9aSBarry Smith   PetscErrorCode ierr;
33752fb0ec9aSBarry Smith 
33762fb0ec9aSBarry Smith  PetscFunctionBegin;
33772fb0ec9aSBarry Smith   if (i[0]) {
33782fb0ec9aSBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
33792fb0ec9aSBarry Smith   }
33802fb0ec9aSBarry Smith   if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
33812fb0ec9aSBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
3382d4146a68SBarry Smith   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
33832fb0ec9aSBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
33842fb0ec9aSBarry Smith   ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr);
33852fb0ec9aSBarry Smith   PetscFunctionReturn(0);
33862fb0ec9aSBarry Smith }
33872fb0ec9aSBarry Smith 
33882fb0ec9aSBarry Smith #undef __FUNCT__
33894a2ae208SSatish Balay #define __FUNCT__ "MatCreateMPIAIJ"
3390273d9f13SBarry Smith /*@C
3391273d9f13SBarry Smith    MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format
3392273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3393273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3394273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3395273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3396273d9f13SBarry Smith 
3397273d9f13SBarry Smith    Collective on MPI_Comm
3398273d9f13SBarry Smith 
3399273d9f13SBarry Smith    Input Parameters:
3400273d9f13SBarry Smith +  comm - MPI communicator
3401273d9f13SBarry Smith .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3402273d9f13SBarry Smith            This value should be the same as the local size used in creating the
3403273d9f13SBarry Smith            y vector for the matrix-vector product y = Ax.
3404273d9f13SBarry Smith .  n - This value should be the same as the local size used in creating the
3405273d9f13SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3406273d9f13SBarry Smith        calculated if N is given) For square matrices n is almost always m.
3407273d9f13SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3408273d9f13SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3409273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3410273d9f13SBarry Smith            (same value is used for all local rows)
3411273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3412273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3413273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3414273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3415273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3416273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
3417273d9f13SBarry Smith            submatrix (same value is used for all local rows).
3418273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
3419273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
3420273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
3421273d9f13SBarry Smith            structure. The size of this array is equal to the number
3422273d9f13SBarry Smith            of local rows, i.e 'm'.
3423273d9f13SBarry Smith 
3424273d9f13SBarry Smith    Output Parameter:
3425273d9f13SBarry Smith .  A - the matrix
3426273d9f13SBarry Smith 
3427175b88e8SBarry Smith    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3428175b88e8SBarry Smith    MatXXXXSetPreallocation() paradgm instead of this routine directly. This is definitely
3429175b88e8SBarry Smith    true if you plan to use the external direct solvers such as SuperLU, MUMPS or Spooles.
3430175b88e8SBarry Smith    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3431175b88e8SBarry Smith 
3432273d9f13SBarry Smith    Notes:
343349a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
343449a6f317SBarry Smith 
3435273d9f13SBarry Smith    m,n,M,N parameters specify the size of the matrix, and its partitioning across
3436273d9f13SBarry Smith    processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate
3437273d9f13SBarry Smith    storage requirements for this matrix.
3438273d9f13SBarry Smith 
3439273d9f13SBarry Smith    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one
3440273d9f13SBarry Smith    processor than it must be used on all processors that share the object for
3441273d9f13SBarry Smith    that argument.
3442273d9f13SBarry Smith 
3443273d9f13SBarry Smith    The user MUST specify either the local or global matrix dimensions
3444273d9f13SBarry Smith    (possibly both).
3445273d9f13SBarry Smith 
344633a7c187SSatish Balay    The parallel matrix is partitioned across processors such that the
344733a7c187SSatish Balay    first m0 rows belong to process 0, the next m1 rows belong to
344833a7c187SSatish Balay    process 1, the next m2 rows belong to process 2 etc.. where
344933a7c187SSatish Balay    m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores
345033a7c187SSatish Balay    values corresponding to [m x N] submatrix.
3451273d9f13SBarry Smith 
345233a7c187SSatish Balay    The columns are logically partitioned with the n0 columns belonging
345333a7c187SSatish Balay    to 0th partition, the next n1 columns belonging to the next
345433a7c187SSatish Balay    partition etc.. where n0,n1,n2... are the the input parameter 'n'.
345533a7c187SSatish Balay 
345633a7c187SSatish Balay    The DIAGONAL portion of the local submatrix on any given processor
345733a7c187SSatish Balay    is the submatrix corresponding to the rows and columns m,n
345833a7c187SSatish Balay    corresponding to the given processor. i.e diagonal matrix on
345933a7c187SSatish Balay    process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1]
346033a7c187SSatish Balay    etc. The remaining portion of the local submatrix [m x (N-n)]
346133a7c187SSatish Balay    constitute the OFF-DIAGONAL portion. The example below better
346233a7c187SSatish Balay    illustrates this concept.
346333a7c187SSatish Balay 
346433a7c187SSatish Balay    For a square global matrix we define each processor's diagonal portion
346533a7c187SSatish Balay    to be its local rows and the corresponding columns (a square submatrix);
346633a7c187SSatish Balay    each processor's off-diagonal portion encompasses the remainder of the
346733a7c187SSatish Balay    local matrix (a rectangular submatrix).
3468273d9f13SBarry Smith 
3469273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
3470273d9f13SBarry Smith 
347197d05335SKris Buschelman    When calling this routine with a single process communicator, a matrix of
347297d05335SKris Buschelman    type SEQAIJ is returned.  If a matrix of type MPIAIJ is desired for this
347397d05335SKris Buschelman    type of communicator, use the construction mechanism:
347497d05335SKris Buschelman      MatCreate(...,&A); MatSetType(A,MPIAIJ); MatMPIAIJSetPreallocation(A,...);
347597d05335SKris Buschelman 
3476273d9f13SBarry Smith    By default, this format uses inodes (identical nodes) when possible.
3477273d9f13SBarry Smith    We search for consecutive rows with the same nonzero structure, thereby
3478273d9f13SBarry Smith    reusing matrix information to achieve increased efficiency.
3479273d9f13SBarry Smith 
3480273d9f13SBarry Smith    Options Database Keys:
3481923f20ffSKris Buschelman +  -mat_no_inode  - Do not use inodes
3482923f20ffSKris Buschelman .  -mat_inode_limit <limit> - Sets inode limit (max limit=5)
3483273d9f13SBarry Smith -  -mat_aij_oneindex - Internally use indexing starting at 1
3484273d9f13SBarry Smith         rather than 0.  Note that when calling MatSetValues(),
3485273d9f13SBarry Smith         the user still MUST index entries starting at 0!
3486273d9f13SBarry Smith 
3487273d9f13SBarry Smith 
3488273d9f13SBarry Smith    Example usage:
3489273d9f13SBarry Smith 
3490273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
3491273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
3492273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
3493273d9f13SBarry Smith    as follows:
3494273d9f13SBarry Smith 
3495273d9f13SBarry Smith .vb
3496273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
3497273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
3498273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
3499273d9f13SBarry Smith     -------------------------------------
3500273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
3501273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
3502273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
3503273d9f13SBarry Smith     -------------------------------------
3504273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
3505273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
3506273d9f13SBarry Smith .ve
3507273d9f13SBarry Smith 
3508273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
3509273d9f13SBarry Smith 
3510273d9f13SBarry Smith .vb
3511273d9f13SBarry Smith       A B C
3512273d9f13SBarry Smith       D E F
3513273d9f13SBarry Smith       G H I
3514273d9f13SBarry Smith .ve
3515273d9f13SBarry Smith 
3516273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
3517273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
3518273d9f13SBarry Smith 
3519273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3520273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3521273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
3522273d9f13SBarry Smith 
3523273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
3524273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
3525273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
3526273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
3527273d9f13SBarry Smith    part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
3528273d9f13SBarry Smith    matrix, ans [DF] as another SeqAIJ matrix.
3529273d9f13SBarry Smith 
3530273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
3531273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
3532273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
3533273d9f13SBarry Smith    One way to choose d_nz and o_nz is to use the max nonzerors per local
3534273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
3535273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
3536273d9f13SBarry Smith .vb
3537273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
3538273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
3539273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3540273d9f13SBarry Smith .ve
3541273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
3542273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
3543273d9f13SBarry Smith    for proc3. i.e we are using 12+15+10=37 storage locations to store
3544273d9f13SBarry Smith    34 values.
3545273d9f13SBarry Smith 
3546273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
3547273d9f13SBarry Smith    for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3548273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3549273d9f13SBarry Smith .vb
3550273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3551273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3552273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3553273d9f13SBarry Smith .ve
3554273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3555273d9f13SBarry Smith    hence pre-allocation is perfect.
3556273d9f13SBarry Smith 
3557273d9f13SBarry Smith    Level: intermediate
3558273d9f13SBarry Smith 
3559273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3560273d9f13SBarry Smith 
3561ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
35622fb0ec9aSBarry Smith           MPIAIJ, MatCreateMPIAIJWithArrays()
3563273d9f13SBarry Smith @*/
3564be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3565273d9f13SBarry Smith {
35666849ba73SBarry Smith   PetscErrorCode ierr;
3567b1d57f15SBarry Smith   PetscMPIInt    size;
3568273d9f13SBarry Smith 
3569273d9f13SBarry Smith   PetscFunctionBegin;
3570f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3571f69a0ea3SMatthew Knepley   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3572273d9f13SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3573273d9f13SBarry Smith   if (size > 1) {
3574273d9f13SBarry Smith     ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr);
3575273d9f13SBarry Smith     ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3576273d9f13SBarry Smith   } else {
3577273d9f13SBarry Smith     ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr);
3578273d9f13SBarry Smith     ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr);
3579273d9f13SBarry Smith   }
3580273d9f13SBarry Smith   PetscFunctionReturn(0);
3581273d9f13SBarry Smith }
3582195d93cdSBarry Smith 
35834a2ae208SSatish Balay #undef __FUNCT__
35844a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ"
3585be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[])
3586195d93cdSBarry Smith {
3587195d93cdSBarry Smith   Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;
3588b1d57f15SBarry Smith 
3589195d93cdSBarry Smith   PetscFunctionBegin;
3590195d93cdSBarry Smith   *Ad     = a->A;
3591195d93cdSBarry Smith   *Ao     = a->B;
3592195d93cdSBarry Smith   *colmap = a->garray;
3593195d93cdSBarry Smith   PetscFunctionReturn(0);
3594195d93cdSBarry Smith }
3595a2243be0SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatSetColoring_MPIAIJ"
/*
   MatSetColoring_MPIAIJ - Attaches a coloring to an MPIAIJ matrix by
   building per-block colorings and handing them to MatSetColoring_SeqAIJ()
   for the diagonal (a->A) and off-diagonal (a->B) sequential blocks.

   IS_COLORING_GLOBAL: coloring->colors is indexed by global column; the
   colors are all-gathered so each process can look up the colors of its
   off-diagonal (ghost) columns via a->garray.

   IS_COLORING_GHOSTED: coloring->colors is indexed by LOCAL (ghosted)
   column; global indices are translated to local ones through A->mapping
   before the lookup.
*/
PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;

  PetscFunctionBegin;
  if (coloring->ctype == IS_COLORING_GLOBAL) {
    ISColoringValue *allcolors,*colors;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion */
    /* gather the full global color array so ghost columns can be colored locally */
    ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      colors[i] = allcolors[a->garray[i]];  /* garray[i] = global column of B's local column i */
    }
    ierr = PetscFree(allcolors);CHKERRQ(ierr);
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
  } else if (coloring->ctype == IS_COLORING_GHOSTED) {
    ISColoringValue *colors;
    PetscInt        *larray;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    /* translate this process's global column range into local (ghosted) indices */
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      larray[i] = i + A->cmap->rstart;  /* global index of local column i */
    }
    ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion */
    /* a->garray already holds the global column indices of B's local columns */
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype);
  }

  PetscFunctionReturn(0);
}
3659a2243be0SBarry Smith 
3660dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC)
3661a2243be0SBarry Smith #undef __FUNCT__
3662779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdic_MPIAIJ"
3663dfbe8321SBarry Smith PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues)
3664a2243be0SBarry Smith {
3665a2243be0SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
3666dfbe8321SBarry Smith   PetscErrorCode ierr;
3667a2243be0SBarry Smith 
3668a2243be0SBarry Smith   PetscFunctionBegin;
3669779c1a83SBarry Smith   ierr = MatSetValuesAdic_SeqAIJ(a->A,advalues);CHKERRQ(ierr);
3670779c1a83SBarry Smith   ierr = MatSetValuesAdic_SeqAIJ(a->B,advalues);CHKERRQ(ierr);
3671779c1a83SBarry Smith   PetscFunctionReturn(0);
3672779c1a83SBarry Smith }
3673dcf5cc72SBarry Smith #endif
3674779c1a83SBarry Smith 
3675779c1a83SBarry Smith #undef __FUNCT__
3676779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ"
3677b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues)
3678779c1a83SBarry Smith {
3679779c1a83SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
3680dfbe8321SBarry Smith   PetscErrorCode ierr;
3681779c1a83SBarry Smith 
3682779c1a83SBarry Smith   PetscFunctionBegin;
3683779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr);
3684779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr);
3685a2243be0SBarry Smith   PetscFunctionReturn(0);
3686a2243be0SBarry Smith }
3687c5d6d63eSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMerge"
/*@
      MatMerge - Creates a single large PETSc matrix by concatenating sequential
                 matrices from each processor

    Collective on MPI_Comm

   Input Parameters:
+    comm - the communicators the parallel matrix will live on
.    inmat - the input sequential matrices
.    n - number of local columns (or PETSC_DECIDE)
-    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    outmat - the parallel matrix generated

    Level: advanced

   Notes: The number of columns of the matrix in EACH processor MUST be the same.
          The input matrix inmat is destroyed by this routine.

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
{
  PetscErrorCode ierr;
  PetscInt       m,N,i,rstart,nnz,Ii,*dnz,*onz;
  PetscInt       *indx;
  PetscScalar    *values;

  PetscFunctionBegin;
  ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */
    if (n == PETSC_DECIDE){
      ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
    }
    /* prefix sum of local row counts gives this process's global row offset */
    ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart -= m;

    ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
    for (i=0;i<m;i++) {
      ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr);
      ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
    }
    /* This routine will ONLY return MPIAIJ type matrix */
    ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
    ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr);
    ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  } else if (scall == MAT_REUSE_MATRIX){
    ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  /* copy the local rows into the parallel matrix at their global positions */
  for (i=0;i<m;i++) {
    ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
    Ii    = i + rstart;
    ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
  }
  /* the input matrix is consumed: destroyed once its rows have been copied */
  ierr = MatDestroy(inmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
3758c5d6d63eSBarry Smith 
3759c5d6d63eSBarry Smith #undef __FUNCT__
3760c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit"
3761dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile)
3762c5d6d63eSBarry Smith {
3763dfbe8321SBarry Smith   PetscErrorCode    ierr;
376432dcc486SBarry Smith   PetscMPIInt       rank;
3765b1d57f15SBarry Smith   PetscInt          m,N,i,rstart,nnz;
3766de4209c5SBarry Smith   size_t            len;
3767b1d57f15SBarry Smith   const PetscInt    *indx;
3768c5d6d63eSBarry Smith   PetscViewer       out;
3769c5d6d63eSBarry Smith   char              *name;
3770c5d6d63eSBarry Smith   Mat               B;
3771b3cc6726SBarry Smith   const PetscScalar *values;
3772c5d6d63eSBarry Smith 
3773c5d6d63eSBarry Smith   PetscFunctionBegin;
3774c5d6d63eSBarry Smith   ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr);
3775c5d6d63eSBarry Smith   ierr = MatGetSize(A,0,&N);CHKERRQ(ierr);
3776f204ca49SKris Buschelman   /* Should this be the type of the diagonal block of A? */
3777f69a0ea3SMatthew Knepley   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
3778f69a0ea3SMatthew Knepley   ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr);
3779f204ca49SKris Buschelman   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
3780f204ca49SKris Buschelman   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
3781c5d6d63eSBarry Smith   ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr);
3782c5d6d63eSBarry Smith   for (i=0;i<m;i++) {
3783c5d6d63eSBarry Smith     ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
3784c5d6d63eSBarry Smith     ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
3785c5d6d63eSBarry Smith     ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
3786c5d6d63eSBarry Smith   }
3787c5d6d63eSBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3788c5d6d63eSBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3789c5d6d63eSBarry Smith 
37907adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);
3791c5d6d63eSBarry Smith   ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr);
3792c5d6d63eSBarry Smith   ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr);
3793c5d6d63eSBarry Smith   sprintf(name,"%s.%d",outfile,rank);
3794852598b0SBarry Smith   ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr);
3795c5d6d63eSBarry Smith   ierr = PetscFree(name);
3796c5d6d63eSBarry Smith   ierr = MatView(B,out);CHKERRQ(ierr);
3797c5d6d63eSBarry Smith   ierr = PetscViewerDestroy(out);CHKERRQ(ierr);
3798c5d6d63eSBarry Smith   ierr = MatDestroy(B);CHKERRQ(ierr);
3799c5d6d63eSBarry Smith   PetscFunctionReturn(0);
3800c5d6d63eSBarry Smith }
3801e5f2cdd8SHong Zhang 
380251a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
380351a7d1a8SHong Zhang #undef __FUNCT__
380451a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI"
3805be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A)
380651a7d1a8SHong Zhang {
380751a7d1a8SHong Zhang   PetscErrorCode       ierr;
3808671beff6SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
3809776b82aeSLisandro Dalcin   PetscContainer       container;
381051a7d1a8SHong Zhang 
381151a7d1a8SHong Zhang   PetscFunctionBegin;
3812671beff6SHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
3813671beff6SHong Zhang   if (container) {
3814776b82aeSLisandro Dalcin     ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
381551a7d1a8SHong Zhang     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
38163e06a4e6SHong Zhang     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
38173e06a4e6SHong Zhang     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
381851a7d1a8SHong Zhang     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
381951a7d1a8SHong Zhang     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
382002c68681SHong Zhang     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
382102c68681SHong Zhang     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
382205b42c5fSBarry Smith     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
382305b42c5fSBarry Smith     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
382405b42c5fSBarry Smith     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
38252c72b5baSSatish Balay     ierr = PetscFree(merge->rowmap.range);CHKERRQ(ierr);
3826671beff6SHong Zhang 
3827776b82aeSLisandro Dalcin     ierr = PetscContainerDestroy(container);CHKERRQ(ierr);
3828671beff6SHong Zhang     ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr);
3829671beff6SHong Zhang   }
383051a7d1a8SHong Zhang   ierr = PetscFree(merge);CHKERRQ(ierr);
383151a7d1a8SHong Zhang 
383251a7d1a8SHong Zhang   ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
383351a7d1a8SHong Zhang   PetscFunctionReturn(0);
383451a7d1a8SHong Zhang }
383551a7d1a8SHong Zhang 
383658cb9c82SHong Zhang #include "src/mat/utils/freespace.h"
3837be0fcf8dSHong Zhang #include "petscbt.h"
38384ebed01fSBarry Smith 
3839e5f2cdd8SHong Zhang #undef __FUNCT__
384038f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric"
3841e5f2cdd8SHong Zhang /*@C
3842f08fae4eSHong Zhang       MatMerge_SeqsToMPI - Creates a MPIAIJ matrix by adding sequential
3843e5f2cdd8SHong Zhang                  matrices from each processor
3844e5f2cdd8SHong Zhang 
3845e5f2cdd8SHong Zhang     Collective on MPI_Comm
3846e5f2cdd8SHong Zhang 
3847e5f2cdd8SHong Zhang    Input Parameters:
3848e5f2cdd8SHong Zhang +    comm - the communicators the parallel matrix will live on
3849f08fae4eSHong Zhang .    seqmat - the input sequential matrices
38500e36024fSHong Zhang .    m - number of local rows (or PETSC_DECIDE)
38510e36024fSHong Zhang .    n - number of local columns (or PETSC_DECIDE)
3852e5f2cdd8SHong Zhang -    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
3853e5f2cdd8SHong Zhang 
3854e5f2cdd8SHong Zhang    Output Parameter:
3855f08fae4eSHong Zhang .    mpimat - the parallel matrix generated
3856e5f2cdd8SHong Zhang 
3857e5f2cdd8SHong Zhang     Level: advanced
3858e5f2cdd8SHong Zhang 
3859affca5deSHong Zhang    Notes:
3860affca5deSHong Zhang      The dimensions of the sequential matrix in each processor MUST be the same.
3861affca5deSHong Zhang      The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be
3862affca5deSHong Zhang      destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat.
3863e5f2cdd8SHong Zhang @*/
3864be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat)
386555d1abb9SHong Zhang {
386655d1abb9SHong Zhang   PetscErrorCode       ierr;
38677adad957SLisandro Dalcin   MPI_Comm             comm=((PetscObject)mpimat)->comm;
386855d1abb9SHong Zhang   Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
3869b1d57f15SBarry Smith   PetscMPIInt          size,rank,taga,*len_s;
3870d0f46423SBarry Smith   PetscInt             N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j;
3871b1d57f15SBarry Smith   PetscInt             proc,m;
3872b1d57f15SBarry Smith   PetscInt             **buf_ri,**buf_rj;
3873b1d57f15SBarry Smith   PetscInt             k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj;
3874b1d57f15SBarry Smith   PetscInt             nrows,**buf_ri_k,**nextrow,**nextai;
387555d1abb9SHong Zhang   MPI_Request          *s_waits,*r_waits;
387655d1abb9SHong Zhang   MPI_Status           *status;
3877a77337e4SBarry Smith   MatScalar            *aa=a->a;
3878dd6ea824SBarry Smith   MatScalar            **abuf_r,*ba_i;
387955d1abb9SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
3880776b82aeSLisandro Dalcin   PetscContainer       container;
388155d1abb9SHong Zhang 
388255d1abb9SHong Zhang   PetscFunctionBegin;
38834ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
38843c2c1871SHong Zhang 
388555d1abb9SHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
388655d1abb9SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
388755d1abb9SHong Zhang 
388855d1abb9SHong Zhang   ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
388955d1abb9SHong Zhang   if (container) {
3890776b82aeSLisandro Dalcin     ierr  = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
389155d1abb9SHong Zhang   }
389255d1abb9SHong Zhang   bi     = merge->bi;
389355d1abb9SHong Zhang   bj     = merge->bj;
389455d1abb9SHong Zhang   buf_ri = merge->buf_ri;
389555d1abb9SHong Zhang   buf_rj = merge->buf_rj;
389655d1abb9SHong Zhang 
389755d1abb9SHong Zhang   ierr   = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
3898357abbc8SBarry Smith   owners = merge->rowmap.range;
389955d1abb9SHong Zhang   len_s  = merge->len_s;
390055d1abb9SHong Zhang 
390155d1abb9SHong Zhang   /* send and recv matrix values */
390255d1abb9SHong Zhang   /*-----------------------------*/
3903357abbc8SBarry Smith   ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr);
390455d1abb9SHong Zhang   ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr);
390555d1abb9SHong Zhang 
390655d1abb9SHong Zhang   ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr);
390755d1abb9SHong Zhang   for (proc=0,k=0; proc<size; proc++){
390855d1abb9SHong Zhang     if (!len_s[proc]) continue;
390955d1abb9SHong Zhang     i = owners[proc];
391055d1abb9SHong Zhang     ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr);
391155d1abb9SHong Zhang     k++;
391255d1abb9SHong Zhang   }
391355d1abb9SHong Zhang 
39140c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);}
39150c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);}
391655d1abb9SHong Zhang   ierr = PetscFree(status);CHKERRQ(ierr);
391755d1abb9SHong Zhang 
391855d1abb9SHong Zhang   ierr = PetscFree(s_waits);CHKERRQ(ierr);
391955d1abb9SHong Zhang   ierr = PetscFree(r_waits);CHKERRQ(ierr);
392055d1abb9SHong Zhang 
392155d1abb9SHong Zhang   /* insert mat values of mpimat */
392255d1abb9SHong Zhang   /*----------------------------*/
3923a77337e4SBarry Smith   ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr);
3924b1d57f15SBarry Smith   ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
392555d1abb9SHong Zhang   nextrow = buf_ri_k + merge->nrecv;
392655d1abb9SHong Zhang   nextai  = nextrow + merge->nrecv;
392755d1abb9SHong Zhang 
392855d1abb9SHong Zhang   for (k=0; k<merge->nrecv; k++){
392955d1abb9SHong Zhang     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
393055d1abb9SHong Zhang     nrows = *(buf_ri_k[k]);
393155d1abb9SHong Zhang     nextrow[k]  = buf_ri_k[k]+1;  /* next row number of k-th recved i-structure */
393255d1abb9SHong Zhang     nextai[k]   = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure  */
393355d1abb9SHong Zhang   }
393455d1abb9SHong Zhang 
393555d1abb9SHong Zhang   /* set values of ba */
3936357abbc8SBarry Smith   m = merge->rowmap.n;
393755d1abb9SHong Zhang   for (i=0; i<m; i++) {
393855d1abb9SHong Zhang     arow = owners[rank] + i;
393955d1abb9SHong Zhang     bj_i = bj+bi[i];  /* col indices of the i-th row of mpimat */
394055d1abb9SHong Zhang     bnzi = bi[i+1] - bi[i];
3941a77337e4SBarry Smith     ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr);
394255d1abb9SHong Zhang 
394355d1abb9SHong Zhang     /* add local non-zero vals of this proc's seqmat into ba */
394455d1abb9SHong Zhang     anzi = ai[arow+1] - ai[arow];
394555d1abb9SHong Zhang     aj   = a->j + ai[arow];
394655d1abb9SHong Zhang     aa   = a->a + ai[arow];
394755d1abb9SHong Zhang     nextaj = 0;
394855d1abb9SHong Zhang     for (j=0; nextaj<anzi; j++){
394955d1abb9SHong Zhang       if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
395055d1abb9SHong Zhang         ba_i[j] += aa[nextaj++];
395155d1abb9SHong Zhang       }
395255d1abb9SHong Zhang     }
395355d1abb9SHong Zhang 
395455d1abb9SHong Zhang     /* add received vals into ba */
395555d1abb9SHong Zhang     for (k=0; k<merge->nrecv; k++){ /* k-th received message */
395655d1abb9SHong Zhang       /* i-th row */
395755d1abb9SHong Zhang       if (i == *nextrow[k]) {
395855d1abb9SHong Zhang         anzi = *(nextai[k]+1) - *nextai[k];
395955d1abb9SHong Zhang         aj   = buf_rj[k] + *(nextai[k]);
396055d1abb9SHong Zhang         aa   = abuf_r[k] + *(nextai[k]);
396155d1abb9SHong Zhang         nextaj = 0;
396255d1abb9SHong Zhang         for (j=0; nextaj<anzi; j++){
396355d1abb9SHong Zhang           if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
396455d1abb9SHong Zhang             ba_i[j] += aa[nextaj++];
396555d1abb9SHong Zhang           }
396655d1abb9SHong Zhang         }
396755d1abb9SHong Zhang         nextrow[k]++; nextai[k]++;
396855d1abb9SHong Zhang       }
396955d1abb9SHong Zhang     }
397055d1abb9SHong Zhang     ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr);
397155d1abb9SHong Zhang   }
397255d1abb9SHong Zhang   ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
397355d1abb9SHong Zhang   ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
397455d1abb9SHong Zhang 
397555d1abb9SHong Zhang   ierr = PetscFree(abuf_r);CHKERRQ(ierr);
397655d1abb9SHong Zhang   ierr = PetscFree(ba_i);CHKERRQ(ierr);
397755d1abb9SHong Zhang   ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);
39784ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
397955d1abb9SHong Zhang   PetscFunctionReturn(0);
398055d1abb9SHong Zhang }
398138f152feSBarry Smith 
398238f152feSBarry Smith #undef __FUNCT__
398338f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic"
3984be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat)
3985e5f2cdd8SHong Zhang {
3986f08fae4eSHong Zhang   PetscErrorCode       ierr;
398755a3bba9SHong Zhang   Mat                  B_mpi;
3988c2234fe3SHong Zhang   Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
3989b1d57f15SBarry Smith   PetscMPIInt          size,rank,tagi,tagj,*len_s,*len_si,*len_ri;
3990b1d57f15SBarry Smith   PetscInt             **buf_rj,**buf_ri,**buf_ri_k;
3991d0f46423SBarry Smith   PetscInt             M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j;
3992b1d57f15SBarry Smith   PetscInt             len,proc,*dnz,*onz;
3993b1d57f15SBarry Smith   PetscInt             k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0;
3994b1d57f15SBarry Smith   PetscInt             nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai;
399555d1abb9SHong Zhang   MPI_Request          *si_waits,*sj_waits,*ri_waits,*rj_waits;
399658cb9c82SHong Zhang   MPI_Status           *status;
3997a1a86e44SBarry Smith   PetscFreeSpaceList   free_space=PETSC_NULL,current_space=PETSC_NULL;
3998be0fcf8dSHong Zhang   PetscBT              lnkbt;
399951a7d1a8SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
4000776b82aeSLisandro Dalcin   PetscContainer       container;
400102c68681SHong Zhang 
4002e5f2cdd8SHong Zhang   PetscFunctionBegin;
40034ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
40043c2c1871SHong Zhang 
400538f152feSBarry Smith   /* make sure it is a PETSc comm */
400638f152feSBarry Smith   ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr);
4007e5f2cdd8SHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
4008e5f2cdd8SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
400955d1abb9SHong Zhang 
401051a7d1a8SHong Zhang   ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr);
4011c2234fe3SHong Zhang   ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
4012e5f2cdd8SHong Zhang 
40136abd8857SHong Zhang   /* determine row ownership */
4014f08fae4eSHong Zhang   /*---------------------------------------------------------*/
4015b167c4dbSHong Zhang   ierr = PetscMapInitialize(comm,&merge->rowmap);CHKERRQ(ierr);
4016899cda47SBarry Smith   merge->rowmap.n = m;
4017899cda47SBarry Smith   merge->rowmap.N = M;
4018fc42d0c8SSatish Balay   merge->rowmap.bs = 1;
40196148ca0dSBarry Smith   ierr = PetscMapSetUp(&merge->rowmap);CHKERRQ(ierr);
4020b1d57f15SBarry Smith   ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr);
4021b1d57f15SBarry Smith   ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr);
402255d1abb9SHong Zhang 
4023357abbc8SBarry Smith   m      = merge->rowmap.n;
4024357abbc8SBarry Smith   M      = merge->rowmap.N;
4025357abbc8SBarry Smith   owners = merge->rowmap.range;
40266abd8857SHong Zhang 
40276abd8857SHong Zhang   /* determine the number of messages to send, their lengths */
40286abd8857SHong Zhang   /*---------------------------------------------------------*/
40293e06a4e6SHong Zhang   len_s  = merge->len_s;
403051a7d1a8SHong Zhang 
40312257cef7SHong Zhang   len = 0;  /* length of buf_si[] */
4032c2234fe3SHong Zhang   merge->nsend = 0;
4033409913e3SHong Zhang   for (proc=0; proc<size; proc++){
40342257cef7SHong Zhang     len_si[proc] = 0;
40353e06a4e6SHong Zhang     if (proc == rank){
40366abd8857SHong Zhang       len_s[proc] = 0;
40373e06a4e6SHong Zhang     } else {
403802c68681SHong Zhang       len_si[proc] = owners[proc+1] - owners[proc] + 1;
40393e06a4e6SHong Zhang       len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */
40403e06a4e6SHong Zhang     }
40413e06a4e6SHong Zhang     if (len_s[proc]) {
4042c2234fe3SHong Zhang       merge->nsend++;
40432257cef7SHong Zhang       nrows = 0;
40442257cef7SHong Zhang       for (i=owners[proc]; i<owners[proc+1]; i++){
40452257cef7SHong Zhang         if (ai[i+1] > ai[i]) nrows++;
40462257cef7SHong Zhang       }
40472257cef7SHong Zhang       len_si[proc] = 2*(nrows+1);
40482257cef7SHong Zhang       len += len_si[proc];
4049409913e3SHong Zhang     }
405058cb9c82SHong Zhang   }
4051409913e3SHong Zhang 
40522257cef7SHong Zhang   /* determine the number and length of messages to receive for ij-structure */
40532257cef7SHong Zhang   /*-------------------------------------------------------------------------*/
405451a7d1a8SHong Zhang   ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr);
405555d1abb9SHong Zhang   ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr);
4056671beff6SHong Zhang 
40573e06a4e6SHong Zhang   /* post the Irecv of j-structure */
40583e06a4e6SHong Zhang   /*-------------------------------*/
40592c72b5baSSatish Balay   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
40603e06a4e6SHong Zhang   ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr);
406102c68681SHong Zhang 
40623e06a4e6SHong Zhang   /* post the Isend of j-structure */
4063affca5deSHong Zhang   /*--------------------------------*/
40642257cef7SHong Zhang   ierr = PetscMalloc((2*merge->nsend+1)*sizeof(MPI_Request),&si_waits);CHKERRQ(ierr);
406502c68681SHong Zhang   sj_waits = si_waits + merge->nsend;
40663e06a4e6SHong Zhang 
40672257cef7SHong Zhang   for (proc=0, k=0; proc<size; proc++){
4068409913e3SHong Zhang     if (!len_s[proc]) continue;
406902c68681SHong Zhang     i = owners[proc];
4070b1d57f15SBarry Smith     ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr);
407151a7d1a8SHong Zhang     k++;
407251a7d1a8SHong Zhang   }
407351a7d1a8SHong Zhang 
40743e06a4e6SHong Zhang   /* receives and sends of j-structure are complete */
40753e06a4e6SHong Zhang   /*------------------------------------------------*/
40760c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);}
40770c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);}
407802c68681SHong Zhang 
407902c68681SHong Zhang   /* send and recv i-structure */
408002c68681SHong Zhang   /*---------------------------*/
40812c72b5baSSatish Balay   ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
408202c68681SHong Zhang   ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr);
408302c68681SHong Zhang 
4084b1d57f15SBarry Smith   ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr);
40853e06a4e6SHong Zhang   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
40862257cef7SHong Zhang   for (proc=0,k=0; proc<size; proc++){
408702c68681SHong Zhang     if (!len_s[proc]) continue;
40883e06a4e6SHong Zhang     /* form outgoing message for i-structure:
40893e06a4e6SHong Zhang          buf_si[0]:                 nrows to be sent
40903e06a4e6SHong Zhang                [1:nrows]:           row index (global)
40913e06a4e6SHong Zhang                [nrows+1:2*nrows+1]: i-structure index
40923e06a4e6SHong Zhang     */
40933e06a4e6SHong Zhang     /*-------------------------------------------*/
40942257cef7SHong Zhang     nrows = len_si[proc]/2 - 1;
40953e06a4e6SHong Zhang     buf_si_i    = buf_si + nrows+1;
40963e06a4e6SHong Zhang     buf_si[0]   = nrows;
40973e06a4e6SHong Zhang     buf_si_i[0] = 0;
40983e06a4e6SHong Zhang     nrows = 0;
40993e06a4e6SHong Zhang     for (i=owners[proc]; i<owners[proc+1]; i++){
41003e06a4e6SHong Zhang       anzi = ai[i+1] - ai[i];
41013e06a4e6SHong Zhang       if (anzi) {
41023e06a4e6SHong Zhang         buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */
41033e06a4e6SHong Zhang         buf_si[nrows+1] = i-owners[proc]; /* local row index */
41043e06a4e6SHong Zhang         nrows++;
41053e06a4e6SHong Zhang       }
41063e06a4e6SHong Zhang     }
4107b1d57f15SBarry Smith     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr);
410802c68681SHong Zhang     k++;
41092257cef7SHong Zhang     buf_si += len_si[proc];
411002c68681SHong Zhang   }
41112257cef7SHong Zhang 
41120c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);}
41130c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);}
411402c68681SHong Zhang 
4115ae15b995SBarry Smith   ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr);
41163e06a4e6SHong Zhang   for (i=0; i<merge->nrecv; i++){
4117ae15b995SBarry Smith     ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr);
41183e06a4e6SHong Zhang   }
41193e06a4e6SHong Zhang 
41203e06a4e6SHong Zhang   ierr = PetscFree(len_si);CHKERRQ(ierr);
412102c68681SHong Zhang   ierr = PetscFree(len_ri);CHKERRQ(ierr);
412202c68681SHong Zhang   ierr = PetscFree(rj_waits);CHKERRQ(ierr);
41233e06a4e6SHong Zhang   ierr = PetscFree(si_waits);CHKERRQ(ierr);
41242257cef7SHong Zhang   ierr = PetscFree(ri_waits);CHKERRQ(ierr);
41253e06a4e6SHong Zhang   ierr = PetscFree(buf_s);CHKERRQ(ierr);
4126bcc1bcd5SHong Zhang   ierr = PetscFree(status);CHKERRQ(ierr);
412758cb9c82SHong Zhang 
4128bcc1bcd5SHong Zhang   /* compute a local seq matrix in each processor */
4129bcc1bcd5SHong Zhang   /*----------------------------------------------*/
413058cb9c82SHong Zhang   /* allocate bi array and free space for accumulating nonzero column info */
4131b1d57f15SBarry Smith   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr);
413258cb9c82SHong Zhang   bi[0] = 0;
413358cb9c82SHong Zhang 
4134be0fcf8dSHong Zhang   /* create and initialize a linked list */
4135be0fcf8dSHong Zhang   nlnk = N+1;
4136be0fcf8dSHong Zhang   ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
413758cb9c82SHong Zhang 
4138bcc1bcd5SHong Zhang   /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */
413958cb9c82SHong Zhang   len = 0;
4140bcc1bcd5SHong Zhang   len  = ai[owners[rank+1]] - ai[owners[rank]];
4141a1a86e44SBarry Smith   ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr);
414258cb9c82SHong Zhang   current_space = free_space;
414358cb9c82SHong Zhang 
4144bcc1bcd5SHong Zhang   /* determine symbolic info for each local row */
4145b1d57f15SBarry Smith   ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
41463e06a4e6SHong Zhang   nextrow = buf_ri_k + merge->nrecv;
41473e06a4e6SHong Zhang   nextai  = nextrow + merge->nrecv;
41483e06a4e6SHong Zhang   for (k=0; k<merge->nrecv; k++){
41492257cef7SHong Zhang     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
41503e06a4e6SHong Zhang     nrows = *buf_ri_k[k];
41513e06a4e6SHong Zhang     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
    nextai[k]   = buf_ri_k[k] + (nrows + 1);/* points to the next i-structure of k-th received i-structure  */
41533e06a4e6SHong Zhang   }
41542257cef7SHong Zhang 
4155bcc1bcd5SHong Zhang   ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
4156bcc1bcd5SHong Zhang   len = 0;
415758cb9c82SHong Zhang   for (i=0;i<m;i++) {
415858cb9c82SHong Zhang     bnzi   = 0;
415958cb9c82SHong Zhang     /* add local non-zero cols of this proc's seqmat into lnk */
416058cb9c82SHong Zhang     arow   = owners[rank] + i;
416158cb9c82SHong Zhang     anzi   = ai[arow+1] - ai[arow];
416258cb9c82SHong Zhang     aj     = a->j + ai[arow];
4163be0fcf8dSHong Zhang     ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
416458cb9c82SHong Zhang     bnzi += nlnk;
416558cb9c82SHong Zhang     /* add received col data into lnk */
416651a7d1a8SHong Zhang     for (k=0; k<merge->nrecv; k++){ /* k-th received message */
416755d1abb9SHong Zhang       if (i == *nextrow[k]) { /* i-th row */
41683e06a4e6SHong Zhang         anzi = *(nextai[k]+1) - *nextai[k];
41693e06a4e6SHong Zhang         aj   = buf_rj[k] + *nextai[k];
41703e06a4e6SHong Zhang         ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
41713e06a4e6SHong Zhang         bnzi += nlnk;
41723e06a4e6SHong Zhang         nextrow[k]++; nextai[k]++;
41733e06a4e6SHong Zhang       }
417458cb9c82SHong Zhang     }
4175bcc1bcd5SHong Zhang     if (len < bnzi) len = bnzi;  /* =max(bnzi) */
417658cb9c82SHong Zhang 
417758cb9c82SHong Zhang     /* if free space is not available, make more free space */
417858cb9c82SHong Zhang     if (current_space->local_remaining<bnzi) {
41794238b7adSHong Zhang       ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,&current_space);CHKERRQ(ierr);
418058cb9c82SHong Zhang       nspacedouble++;
418158cb9c82SHong Zhang     }
418258cb9c82SHong Zhang     /* copy data into free space, then initialize lnk */
4183be0fcf8dSHong Zhang     ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr);
4184bcc1bcd5SHong Zhang     ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr);
4185bcc1bcd5SHong Zhang 
418658cb9c82SHong Zhang     current_space->array           += bnzi;
418758cb9c82SHong Zhang     current_space->local_used      += bnzi;
418858cb9c82SHong Zhang     current_space->local_remaining -= bnzi;
418958cb9c82SHong Zhang 
419058cb9c82SHong Zhang     bi[i+1] = bi[i] + bnzi;
419158cb9c82SHong Zhang   }
4192bcc1bcd5SHong Zhang 
4193bcc1bcd5SHong Zhang   ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);
4194bcc1bcd5SHong Zhang 
4195b1d57f15SBarry Smith   ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr);
4196a1a86e44SBarry Smith   ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
4197be0fcf8dSHong Zhang   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
4198409913e3SHong Zhang 
4199bcc1bcd5SHong Zhang   /* create symbolic parallel matrix B_mpi */
4200bcc1bcd5SHong Zhang   /*---------------------------------------*/
4201f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr);
420254b84b50SHong Zhang   if (n==PETSC_DECIDE) {
4203f69a0ea3SMatthew Knepley     ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr);
420454b84b50SHong Zhang   } else {
4205f69a0ea3SMatthew Knepley     ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
420654b84b50SHong Zhang   }
4207bcc1bcd5SHong Zhang   ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr);
4208bcc1bcd5SHong Zhang   ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr);
4209bcc1bcd5SHong Zhang   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
421058cb9c82SHong Zhang 
42116abd8857SHong Zhang   /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */
42126abd8857SHong Zhang   B_mpi->assembled     = PETSC_FALSE;
4213affca5deSHong Zhang   B_mpi->ops->destroy  = MatDestroy_MPIAIJ_SeqsToMPI;
4214affca5deSHong Zhang   merge->bi            = bi;
4215affca5deSHong Zhang   merge->bj            = bj;
421602c68681SHong Zhang   merge->buf_ri        = buf_ri;
421702c68681SHong Zhang   merge->buf_rj        = buf_rj;
4218de0260b3SHong Zhang   merge->coi           = PETSC_NULL;
4219de0260b3SHong Zhang   merge->coj           = PETSC_NULL;
4220de0260b3SHong Zhang   merge->owners_co     = PETSC_NULL;
4221affca5deSHong Zhang 
4222affca5deSHong Zhang   /* attach the supporting struct to B_mpi for reuse */
4223776b82aeSLisandro Dalcin   ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
4224776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
4225affca5deSHong Zhang   ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
4226affca5deSHong Zhang   *mpimat = B_mpi;
422738f152feSBarry Smith 
422838f152feSBarry Smith   ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);
42294ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
4230e5f2cdd8SHong Zhang   PetscFunctionReturn(0);
4231e5f2cdd8SHong Zhang }
423225616d81SHong Zhang 
#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPI"
/*
    MatMerge_SeqsToMPI - Merges per-process sequential matrices into a single
    parallel MPIAIJ matrix with m local rows and n local columns.

    scall == MAT_INITIAL_MATRIX: the symbolic phase runs first, creating
    *mpimat and determining its nonzero structure; the numeric phase then
    fills in the values.
    scall == MAT_REUSE_MATRIX: *mpimat must come from a previous
    MAT_INITIAL_MATRIX call; only the numerical values are recomputed.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
{
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* determine the parallel nonzero structure and create *mpimat */
    ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
  }
  /* fill in (or refresh) the numerical values of *mpimat from seqmat */
  ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
42484ebed01fSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalMat"
/*@
     MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows;
     the diagonal and off-diagonal blocks of the MPIAIJ matrix are merged
     row-by-row into one sequential matrix with global column indices.

    Not Collective

   Input Parameters:
+    A - the matrix
-    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    A_loc - the local sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
{
  PetscErrorCode  ierr;
  Mat_MPIAIJ      *mpimat=(Mat_MPIAIJ*)A->data;
  /* a = diagonal block of A, b = off-diagonal block of A (both SeqAIJ) */
  Mat_SeqAIJ      *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
  /* cmap (garray) maps the off-diagonal block's local columns to global columns */
  PetscInt        *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
  MatScalar       *aa=a->a,*ba=b->a,*cam;
  PetscScalar     *ca;
  PetscInt        am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
  PetscInt        *ci,*cj,col,ncols_d,ncols_o,jo;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* row pointers of the merged matrix: each row holds all diagonal plus
       all off-diagonal entries of the corresponding row of A */
    ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
    ci[0] = 0;
    for (i=0; i<am; i++){
      ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
    }
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
    k = 0;
    for (i=0; i<am; i++) {
      ncols_o = bi[i+1] - bi[i];
      ncols_d = ai[i+1] - ai[i];
      /* off-diagonal columns left of the diagonal block; the break relies on
         cmap[] being sorted ascending so cj[] ends up in ascending order */
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        cj[k]   = col; bj++;
        ca[k++] = *ba++;
      }
      /* diagonal portion of A: shift local indices to global with cstart */
      for (j=0; j<ncols_d; j++) {
        cj[k]   = cstart + *aj++;
        ca[k++] = *aa++;
      }
      /* remaining off-diagonal columns (right of the diagonal block) */
      for (j=jo; j<ncols_o; j++) {
        cj[k]   = cmap[*bj++];
        ca[k++] = *ba++;
      }
    }
    /* put together the new matrix */
    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
    /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
    /* Since these are PETSc arrays, change flags to free them as necessary. */
    mat          = (Mat_SeqAIJ*)(*A_loc)->data;
    mat->free_a  = PETSC_TRUE;
    mat->free_ij = PETSC_TRUE;
    mat->nonew   = 0;
  } else if (scall == MAT_REUSE_MATRIX){
    /* nonzero pattern unchanged: refresh only the values, walking the three
       column groups in the same order as the MAT_INITIAL_MATRIX branch */
    mat=(Mat_SeqAIJ*)(*A_loc)->data;
    ci = mat->i; cj = mat->j; cam = mat->a;
    for (i=0; i<am; i++) {
      /* off-diagonal portion of A (columns left of the diagonal block) */
      ncols_o = bi[i+1] - bi[i];
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        *cam++ = *ba++; bj++;
      }
      /* diagonal portion of A */
      ncols_d = ai[i+1] - ai[i];
      for (j=0; j<ncols_d; j++) *cam++ = *aa++;
      /* off-diagonal portion of A (columns right of the diagonal block) */
      for (j=jo; j<ncols_o; j++) {
        *cam++ = *ba++; bj++;
      }
    }
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
434325616d81SHong Zhang 
434432fba14fSHong Zhang #undef __FUNCT__
434532fba14fSHong Zhang #define __FUNCT__ "MatGetLocalMatCondensed"
434632fba14fSHong Zhang /*@C
434732fba14fSHong Zhang      MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns
434832fba14fSHong Zhang 
434932fba14fSHong Zhang     Not Collective
435032fba14fSHong Zhang 
435132fba14fSHong Zhang    Input Parameters:
435232fba14fSHong Zhang +    A - the matrix
435332fba14fSHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
435432fba14fSHong Zhang -    row, col - index sets of rows and columns to extract (or PETSC_NULL)
435532fba14fSHong Zhang 
435632fba14fSHong Zhang    Output Parameter:
435732fba14fSHong Zhang .    A_loc - the local sequential matrix generated
435832fba14fSHong Zhang 
435932fba14fSHong Zhang     Level: developer
436032fba14fSHong Zhang 
436132fba14fSHong Zhang @*/
4362be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
436332fba14fSHong Zhang {
436432fba14fSHong Zhang   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
436532fba14fSHong Zhang   PetscErrorCode    ierr;
436632fba14fSHong Zhang   PetscInt          i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
436732fba14fSHong Zhang   IS                isrowa,iscola;
436832fba14fSHong Zhang   Mat               *aloc;
436932fba14fSHong Zhang 
437032fba14fSHong Zhang   PetscFunctionBegin;
43714ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
437232fba14fSHong Zhang   if (!row){
4373d0f46423SBarry Smith     start = A->rmap->rstart; end = A->rmap->rend;
437432fba14fSHong Zhang     ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
437532fba14fSHong Zhang   } else {
437632fba14fSHong Zhang     isrowa = *row;
437732fba14fSHong Zhang   }
437832fba14fSHong Zhang   if (!col){
4379d0f46423SBarry Smith     start = A->cmap->rstart;
438032fba14fSHong Zhang     cmap  = a->garray;
4381d0f46423SBarry Smith     nzA   = a->A->cmap->n;
4382d0f46423SBarry Smith     nzB   = a->B->cmap->n;
438332fba14fSHong Zhang     ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
438432fba14fSHong Zhang     ncols = 0;
438532fba14fSHong Zhang     for (i=0; i<nzB; i++) {
438632fba14fSHong Zhang       if (cmap[i] < start) idx[ncols++] = cmap[i];
438732fba14fSHong Zhang       else break;
438832fba14fSHong Zhang     }
438932fba14fSHong Zhang     imark = i;
439032fba14fSHong Zhang     for (i=0; i<nzA; i++) idx[ncols++] = start + i;
439132fba14fSHong Zhang     for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
439232fba14fSHong Zhang     ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&iscola);CHKERRQ(ierr);
439332fba14fSHong Zhang     ierr = PetscFree(idx);CHKERRQ(ierr);
439432fba14fSHong Zhang   } else {
439532fba14fSHong Zhang     iscola = *col;
439632fba14fSHong Zhang   }
439732fba14fSHong Zhang   if (scall != MAT_INITIAL_MATRIX){
439832fba14fSHong Zhang     ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
439932fba14fSHong Zhang     aloc[0] = *A_loc;
440032fba14fSHong Zhang   }
440132fba14fSHong Zhang   ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
440232fba14fSHong Zhang   *A_loc = aloc[0];
440332fba14fSHong Zhang   ierr = PetscFree(aloc);CHKERRQ(ierr);
440432fba14fSHong Zhang   if (!row){
440532fba14fSHong Zhang     ierr = ISDestroy(isrowa);CHKERRQ(ierr);
440632fba14fSHong Zhang   }
440732fba14fSHong Zhang   if (!col){
440832fba14fSHong Zhang     ierr = ISDestroy(iscola);CHKERRQ(ierr);
440932fba14fSHong Zhang   }
44104ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
441132fba14fSHong Zhang   PetscFunctionReturn(0);
441232fba14fSHong Zhang }
441332fba14fSHong Zhang 
#undef __FUNCT__
#define __FUNCT__ "MatGetBrowsOfAcols"
/*@C
    MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking the rows of B that
    correspond to the nonzero columns of local A

    Collective on Mat

   Input Parameters:
+    A,B - the matrices in mpiaij format
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL)

   Output Parameter:
+    rowb, colb - index sets of rows and columns of B to extract
.    brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows
-    B_seq - the sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq)
{
  Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode    ierr;
  PetscInt          *idx,i,start,ncols,nzA,nzB,*cmap,imark;
  IS                isrowb,iscolb;
  Mat               *bseq;

  PetscFunctionBegin;
  /* A's column ownership ranges must match B's row ownership ranges */
  if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
    SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
  }
  ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);

  if (scall == MAT_INITIAL_MATRIX){
    /* build the (global) nonzero-column list of local A in ascending order:
       off-diagonal columns left of the diagonal block, the diagonal block,
       then the remaining off-diagonal columns */
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {  /* row < local row index */
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row index */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&isrowb);CHKERRQ(ierr);
    ierr = PetscFree(idx);CHKERRQ(ierr);
    /* B's locally owned rows begin at position imark within B_seq */
    *brstart = imark;
    /* take every column of B */
    ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr);
  } else {
    if (!rowb || !colb) SETERRQ(PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX");
    isrowb = *rowb; iscolb = *colb;
    /* MatGetSubMatrices() expects an array holding the matrix to reuse */
    ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr);
    bseq[0] = *B_seq;
  }
  ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr);
  *B_seq = bseq[0];
  ierr = PetscFree(bseq);CHKERRQ(ierr);
  /* return the index sets to the caller if requested, otherwise clean up */
  if (!rowb){
    ierr = ISDestroy(isrowb);CHKERRQ(ierr);
  } else {
    *rowb = isrowb;
  }
  if (!colb){
    ierr = ISDestroy(iscolb);CHKERRQ(ierr);
  } else {
    *colb = iscolb;
  }
  ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
4488429d309bSHong Zhang 
4489a61c8c0fSHong Zhang #undef __FUNCT__
4490a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols"
4491429d309bSHong Zhang /*@C
4492429d309bSHong Zhang     MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns
449301b7ae99SHong Zhang     of the OFF-DIAGONAL portion of local A
4494429d309bSHong Zhang 
4495429d309bSHong Zhang     Collective on Mat
4496429d309bSHong Zhang 
4497429d309bSHong Zhang    Input Parameters:
4498429d309bSHong Zhang +    A,B - the matrices in mpiaij format
449987025532SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
450087025532SHong Zhang .    startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL)
450187025532SHong Zhang -    bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL)
4502429d309bSHong Zhang 
4503429d309bSHong Zhang    Output Parameter:
450487025532SHong Zhang +    B_oth - the sequential matrix generated
4505429d309bSHong Zhang 
4506429d309bSHong Zhang     Level: developer
4507429d309bSHong Zhang 
4508429d309bSHong Zhang @*/
4509dd6ea824SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,MatScalar **bufa_ptr,Mat *B_oth)
4510429d309bSHong Zhang {
4511a6b2eed2SHong Zhang   VecScatter_MPI_General *gen_to,*gen_from;
4512429d309bSHong Zhang   PetscErrorCode         ierr;
4513899cda47SBarry Smith   Mat_MPIAIJ             *a=(Mat_MPIAIJ*)A->data;
451487025532SHong Zhang   Mat_SeqAIJ             *b_oth;
4515a6b2eed2SHong Zhang   VecScatter             ctx=a->Mvctx;
45167adad957SLisandro Dalcin   MPI_Comm               comm=((PetscObject)ctx)->comm;
45177adad957SLisandro Dalcin   PetscMPIInt            *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank;
4518d0f46423SBarry Smith   PetscInt               *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj;
4519dd6ea824SBarry Smith   PetscScalar            *rvalues,*svalues;
4520dd6ea824SBarry Smith   MatScalar              *b_otha,*bufa,*bufA;
4521e42f35eeSHong Zhang   PetscInt               i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len;
4522910ba992SMatthew Knepley   MPI_Request            *rwaits = PETSC_NULL,*swaits = PETSC_NULL;
452387025532SHong Zhang   MPI_Status             *sstatus,rstatus;
4524aa5bb8c0SSatish Balay   PetscMPIInt            jj;
4525e42f35eeSHong Zhang   PetscInt               *cols,sbs,rbs;
4526ba8c8a56SBarry Smith   PetscScalar            *vals;
4527429d309bSHong Zhang 
4528429d309bSHong Zhang   PetscFunctionBegin;
4529d0f46423SBarry Smith   if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
4530d0f46423SBarry Smith     SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
4531429d309bSHong Zhang   }
45324ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
4533a6b2eed2SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
4534a6b2eed2SHong Zhang 
4535a6b2eed2SHong Zhang   gen_to   = (VecScatter_MPI_General*)ctx->todata;
4536a6b2eed2SHong Zhang   gen_from = (VecScatter_MPI_General*)ctx->fromdata;
4537e42f35eeSHong Zhang   rvalues  = gen_from->values; /* holds the length of receiving row */
4538e42f35eeSHong Zhang   svalues  = gen_to->values;   /* holds the length of sending row */
4539a6b2eed2SHong Zhang   nrecvs   = gen_from->n;
4540a6b2eed2SHong Zhang   nsends   = gen_to->n;
4541d7ee0231SBarry Smith 
4542d7ee0231SBarry Smith   ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr);
4543a6b2eed2SHong Zhang   srow     = gen_to->indices;   /* local row index to be sent */
4544a6b2eed2SHong Zhang   sstarts  = gen_to->starts;
4545a6b2eed2SHong Zhang   sprocs   = gen_to->procs;
4546a6b2eed2SHong Zhang   sstatus  = gen_to->sstatus;
4547e42f35eeSHong Zhang   sbs      = gen_to->bs;
4548e42f35eeSHong Zhang   rstarts  = gen_from->starts;
4549e42f35eeSHong Zhang   rprocs   = gen_from->procs;
4550e42f35eeSHong Zhang   rbs      = gen_from->bs;
4551429d309bSHong Zhang 
4552dea91ad1SHong Zhang   if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX;
4553429d309bSHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4554a6b2eed2SHong Zhang     /* i-array */
4555a6b2eed2SHong Zhang     /*---------*/
4556a6b2eed2SHong Zhang     /*  post receives */
4557a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
4558e42f35eeSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
4559e42f35eeSHong Zhang       nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */
456087025532SHong Zhang       ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
4561429d309bSHong Zhang     }
4562a6b2eed2SHong Zhang 
4563a6b2eed2SHong Zhang     /* pack the outgoing message */
456487025532SHong Zhang     ierr = PetscMalloc((nsends+nrecvs+3)*sizeof(PetscInt),&sstartsj);CHKERRQ(ierr);
4565a6b2eed2SHong Zhang     rstartsj = sstartsj + nsends +1;
4566a6b2eed2SHong Zhang     sstartsj[0] = 0;  rstartsj[0] = 0;
4567a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be sent */
4568a6b2eed2SHong Zhang     k = 0;
4569a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
4570e42f35eeSHong Zhang       rowlen = (PetscInt*)svalues + sstarts[i]*sbs;
4571e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
457287025532SHong Zhang       for (j=0; j<nrows; j++) {
4573d0f46423SBarry Smith         row = srow[k] + B->rmap->range[rank]; /* global row idx */
4574e42f35eeSHong Zhang         for (l=0; l<sbs; l++){
4575e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */
4576e42f35eeSHong Zhang           rowlen[j*sbs+l] = ncols;
4577e42f35eeSHong Zhang           len += ncols;
4578e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
4579e42f35eeSHong Zhang         }
4580a6b2eed2SHong Zhang         k++;
4581429d309bSHong Zhang       }
4582e42f35eeSHong Zhang       ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
4583dea91ad1SHong Zhang       sstartsj[i+1] = len;  /* starting point of (i+1)-th outgoing msg in bufj and bufa */
4584429d309bSHong Zhang     }
458587025532SHong Zhang     /* recvs and sends of i-array are completed */
458687025532SHong Zhang     i = nrecvs;
458787025532SHong Zhang     while (i--) {
4588aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
458987025532SHong Zhang     }
45900c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
4591e42f35eeSHong Zhang 
4592a6b2eed2SHong Zhang     /* allocate buffers for sending j and a arrays */
4593a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr);
4594a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr);
4595a6b2eed2SHong Zhang 
459687025532SHong Zhang     /* create i-array of B_oth */
459787025532SHong Zhang     ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr);
459887025532SHong Zhang     b_othi[0] = 0;
4599a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be received */
4600a6b2eed2SHong Zhang     k = 0;
4601a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
4602fd0ff01cSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
      nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be received */
460487025532SHong Zhang       for (j=0; j<nrows; j++) {
460587025532SHong Zhang         b_othi[k+1] = b_othi[k] + rowlen[j];
4606a6b2eed2SHong Zhang         len += rowlen[j]; k++;
4607a6b2eed2SHong Zhang       }
4608dea91ad1SHong Zhang       rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */
4609a6b2eed2SHong Zhang     }
4610a6b2eed2SHong Zhang 
461187025532SHong Zhang     /* allocate space for j and a arrays of B_oth */
461287025532SHong Zhang     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr);
4613dd6ea824SBarry Smith     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr);
4614a6b2eed2SHong Zhang 
461587025532SHong Zhang     /* j-array */
461687025532SHong Zhang     /*---------*/
4617a6b2eed2SHong Zhang     /*  post receives of j-array */
4618a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
461987025532SHong Zhang       nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
462087025532SHong Zhang       ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
4621a6b2eed2SHong Zhang     }
4622e42f35eeSHong Zhang 
4623e42f35eeSHong Zhang     /* pack the outgoing message j-array */
4624a6b2eed2SHong Zhang     k = 0;
4625a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
4626e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
4627a6b2eed2SHong Zhang       bufJ = bufj+sstartsj[i];
462887025532SHong Zhang       for (j=0; j<nrows; j++) {
4629d0f46423SBarry Smith         row  = srow[k++] + B->rmap->range[rank]; /* global row idx */
4630e42f35eeSHong Zhang         for (ll=0; ll<sbs; ll++){
4631e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
4632a6b2eed2SHong Zhang           for (l=0; l<ncols; l++){
4633a6b2eed2SHong Zhang             *bufJ++ = cols[l];
463487025532SHong Zhang           }
4635e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
4636e42f35eeSHong Zhang         }
463787025532SHong Zhang       }
463887025532SHong Zhang       ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
463987025532SHong Zhang     }
464087025532SHong Zhang 
464187025532SHong Zhang     /* recvs and sends of j-array are completed */
464287025532SHong Zhang     i = nrecvs;
464387025532SHong Zhang     while (i--) {
4644aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
464587025532SHong Zhang     }
46460c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
464787025532SHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
464887025532SHong Zhang     sstartsj = *startsj;
464987025532SHong Zhang     rstartsj = sstartsj + nsends +1;
465087025532SHong Zhang     bufa     = *bufa_ptr;
465187025532SHong Zhang     b_oth    = (Mat_SeqAIJ*)(*B_oth)->data;
465287025532SHong Zhang     b_otha   = b_oth->a;
465387025532SHong Zhang   } else {
465487025532SHong Zhang     SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container");
465587025532SHong Zhang   }
465687025532SHong Zhang 
465787025532SHong Zhang   /* a-array */
465887025532SHong Zhang   /*---------*/
465987025532SHong Zhang   /*  post receives of a-array */
466087025532SHong Zhang   for (i=0; i<nrecvs; i++){
466187025532SHong Zhang     nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
466287025532SHong Zhang     ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
466387025532SHong Zhang   }
4664e42f35eeSHong Zhang 
4665e42f35eeSHong Zhang   /* pack the outgoing message a-array */
466687025532SHong Zhang   k = 0;
466787025532SHong Zhang   for (i=0; i<nsends; i++){
4668e42f35eeSHong Zhang     nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
466987025532SHong Zhang     bufA = bufa+sstartsj[i];
467087025532SHong Zhang     for (j=0; j<nrows; j++) {
4671d0f46423SBarry Smith       row  = srow[k++] + B->rmap->range[rank]; /* global row idx */
4672e42f35eeSHong Zhang       for (ll=0; ll<sbs; ll++){
4673e42f35eeSHong Zhang         ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
467487025532SHong Zhang         for (l=0; l<ncols; l++){
4675a6b2eed2SHong Zhang           *bufA++ = vals[l];
4676a6b2eed2SHong Zhang         }
4677e42f35eeSHong Zhang         ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
4678e42f35eeSHong Zhang       }
4679a6b2eed2SHong Zhang     }
468087025532SHong Zhang     ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
4681a6b2eed2SHong Zhang   }
468287025532SHong Zhang   /* recvs and sends of a-array are completed */
468387025532SHong Zhang   i = nrecvs;
468487025532SHong Zhang   while (i--) {
4685aa5bb8c0SSatish Balay     ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
468687025532SHong Zhang   }
46870c468ba9SBarry Smith   if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
4688d7ee0231SBarry Smith   ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr);
4689a6b2eed2SHong Zhang 
469087025532SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4691a6b2eed2SHong Zhang     /* put together the new matrix */
4692d0f46423SBarry Smith     ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr);
4693a6b2eed2SHong Zhang 
4694a6b2eed2SHong Zhang     /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
4695a6b2eed2SHong Zhang     /* Since these are PETSc arrays, change flags to free them as necessary. */
469687025532SHong Zhang     b_oth          = (Mat_SeqAIJ *)(*B_oth)->data;
4697e6b907acSBarry Smith     b_oth->free_a  = PETSC_TRUE;
4698e6b907acSBarry Smith     b_oth->free_ij = PETSC_TRUE;
469987025532SHong Zhang     b_oth->nonew   = 0;
4700a6b2eed2SHong Zhang 
4701a6b2eed2SHong Zhang     ierr = PetscFree(bufj);CHKERRQ(ierr);
4702dea91ad1SHong Zhang     if (!startsj || !bufa_ptr){
4703dea91ad1SHong Zhang       ierr = PetscFree(sstartsj);CHKERRQ(ierr);
4704dea91ad1SHong Zhang       ierr = PetscFree(bufa_ptr);CHKERRQ(ierr);
4705dea91ad1SHong Zhang     } else {
470687025532SHong Zhang       *startsj  = sstartsj;
470787025532SHong Zhang       *bufa_ptr = bufa;
470887025532SHong Zhang     }
4709dea91ad1SHong Zhang   }
47104ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
4711429d309bSHong Zhang   PetscFunctionReturn(0);
4712429d309bSHong Zhang }
4713ccd8e176SBarry Smith 
471443eb5e2fSMatthew Knepley #undef __FUNCT__
471543eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs"
471643eb5e2fSMatthew Knepley /*@C
471743eb5e2fSMatthew Knepley   MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication.
471843eb5e2fSMatthew Knepley 
471943eb5e2fSMatthew Knepley   Not Collective
472043eb5e2fSMatthew Knepley 
472143eb5e2fSMatthew Knepley   Input Parameters:
472243eb5e2fSMatthew Knepley . A - The matrix in mpiaij format
472343eb5e2fSMatthew Knepley 
472443eb5e2fSMatthew Knepley   Output Parameter:
472543eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product
472643eb5e2fSMatthew Knepley . colmap - A map from global column index to local index into lvec
472743eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec
472843eb5e2fSMatthew Knepley 
472943eb5e2fSMatthew Knepley   Level: developer
473043eb5e2fSMatthew Knepley 
473143eb5e2fSMatthew Knepley @*/
473243eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE)
473343eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter)
473443eb5e2fSMatthew Knepley #else
473543eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter)
473643eb5e2fSMatthew Knepley #endif
473743eb5e2fSMatthew Knepley {
473843eb5e2fSMatthew Knepley   Mat_MPIAIJ *a;
473943eb5e2fSMatthew Knepley 
474043eb5e2fSMatthew Knepley   PetscFunctionBegin;
474143eb5e2fSMatthew Knepley   PetscValidHeaderSpecific(A, MAT_COOKIE, 1);
474243eb5e2fSMatthew Knepley   PetscValidPointer(lvec, 2)
474343eb5e2fSMatthew Knepley   PetscValidPointer(colmap, 3)
474443eb5e2fSMatthew Knepley   PetscValidPointer(multScatter, 4)
474543eb5e2fSMatthew Knepley   a = (Mat_MPIAIJ *) A->data;
474643eb5e2fSMatthew Knepley   if (lvec) *lvec = a->lvec;
474743eb5e2fSMatthew Knepley   if (colmap) *colmap = a->colmap;
474843eb5e2fSMatthew Knepley   if (multScatter) *multScatter = a->Mvctx;
474943eb5e2fSMatthew Knepley   PetscFunctionReturn(0);
475043eb5e2fSMatthew Knepley }
475143eb5e2fSMatthew Knepley 
475217667f90SBarry Smith EXTERN_C_BEGIN
47538cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICRL(Mat,const MatType,MatReuse,Mat*);
47548cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICSRPERM(Mat,const MatType,MatReuse,Mat*);
475517667f90SBarry Smith EXTERN_C_END
475617667f90SBarry Smith 
4757fc4dec0aSBarry Smith #include "src/mat/impls/dense/mpi/mpidense.h"
4758fc4dec0aSBarry Smith 
4759fc4dec0aSBarry Smith #undef __FUNCT__
4760fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ"
4761fc4dec0aSBarry Smith /*
4762fc4dec0aSBarry Smith     Computes (B'*A')' since computing B*A directly is untenable
4763fc4dec0aSBarry Smith 
4764fc4dec0aSBarry Smith                n                       p                          p
4765fc4dec0aSBarry Smith         (              )       (              )         (                  )
4766fc4dec0aSBarry Smith       m (      A       )  *  n (       B      )   =   m (         C        )
4767fc4dec0aSBarry Smith         (              )       (              )         (                  )
4768fc4dec0aSBarry Smith 
4769fc4dec0aSBarry Smith */
4770fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C)
4771fc4dec0aSBarry Smith {
4772fc4dec0aSBarry Smith   PetscErrorCode     ierr;
4773fc4dec0aSBarry Smith   Mat                At,Bt,Ct;
4774fc4dec0aSBarry Smith 
4775fc4dec0aSBarry Smith   PetscFunctionBegin;
4776fc4dec0aSBarry Smith   ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
4777fc4dec0aSBarry Smith   ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr);
4778fc4dec0aSBarry Smith   ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr);
4779fc4dec0aSBarry Smith   ierr = MatDestroy(At);CHKERRQ(ierr);
4780fc4dec0aSBarry Smith   ierr = MatDestroy(Bt);CHKERRQ(ierr);
4781fc4dec0aSBarry Smith   ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr);
4782e5e4356aSBarry Smith   ierr = MatDestroy(Ct);CHKERRQ(ierr);
4783fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4784fc4dec0aSBarry Smith }
4785fc4dec0aSBarry Smith 
4786fc4dec0aSBarry Smith #undef __FUNCT__
4787fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ"
4788fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C)
4789fc4dec0aSBarry Smith {
4790fc4dec0aSBarry Smith   PetscErrorCode ierr;
4791d0f46423SBarry Smith   PetscInt       m=A->rmap->n,n=B->cmap->n;
4792fc4dec0aSBarry Smith   Mat            Cmat;
4793fc4dec0aSBarry Smith 
4794fc4dec0aSBarry Smith   PetscFunctionBegin;
4795d0f46423SBarry Smith   if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n);
479639804f7cSBarry Smith   ierr = MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr);
4797fc4dec0aSBarry Smith   ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4798fc4dec0aSBarry Smith   ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr);
4799fc4dec0aSBarry Smith   ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr);
480038556019SBarry Smith   ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
480138556019SBarry Smith   ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4802fc4dec0aSBarry Smith   *C   = Cmat;
4803fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4804fc4dec0aSBarry Smith }
4805fc4dec0aSBarry Smith 
4806fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/
4807fc4dec0aSBarry Smith #undef __FUNCT__
4808fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ"
4809fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
4810fc4dec0aSBarry Smith {
4811fc4dec0aSBarry Smith   PetscErrorCode ierr;
4812fc4dec0aSBarry Smith 
4813fc4dec0aSBarry Smith   PetscFunctionBegin;
4814fc4dec0aSBarry Smith   if (scall == MAT_INITIAL_MATRIX){
4815fc4dec0aSBarry Smith     ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr);
4816fc4dec0aSBarry Smith   }
4817fc4dec0aSBarry Smith   ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr);
4818fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4819fc4dec0aSBarry Smith }
4820fc4dec0aSBarry Smith 
48215c9eb25fSBarry Smith EXTERN_C_BEGIN
4822611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS)
48235c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_mumps(Mat,MatFactorType,Mat*);
4824611f576cSBarry Smith #endif
4825611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST)
48265c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*);
4827611f576cSBarry Smith #endif
4828611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES)
48295c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*);
4830611f576cSBarry Smith #endif
48315c9eb25fSBarry Smith EXTERN_C_END
48325c9eb25fSBarry Smith 
4833ccd8e176SBarry Smith /*MC
4834ccd8e176SBarry Smith    MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices.
4835ccd8e176SBarry Smith 
4836ccd8e176SBarry Smith    Options Database Keys:
4837ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions()
4838ccd8e176SBarry Smith 
4839ccd8e176SBarry Smith   Level: beginner
4840ccd8e176SBarry Smith 
4841175b88e8SBarry Smith .seealso: MatCreateMPIAIJ()
4842ccd8e176SBarry Smith M*/
4843ccd8e176SBarry Smith 
4844ccd8e176SBarry Smith EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAIJ"
/*
    MatCreate_MPIAIJ - Registered constructor for the MATMPIAIJ type,
    invoked via MatSetType().  Allocates the Mat_MPIAIJ implementation
    struct, installs the operation table, initializes all cached state to
    "empty", and composes the by-name operations (external factorizations,
    value store/retrieve, preallocation, conversions, dense*AIJ products).
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B)
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);

  /* allocate the implementation-specific data and install the MPIAIJ op table */
  ierr            = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr);
  B->data         = (void*)b;
  ierr            = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->rmap->bs      = 1;
  B->assembled    = PETSC_FALSE;
  B->mapping      = 0;

  B->insertmode      = NOT_SET_VALUES;
  b->size            = size;
  ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr);

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr);
  b->donotstash  = PETSC_FALSE;
  b->colmap      = 0;
  b->garray      = 0;
  b->roworiented = PETSC_TRUE;

  /* stuff used for matrix vector multiply */
  b->lvec      = PETSC_NULL;
  b->Mvctx     = PETSC_NULL;

  /* stuff for MatGetRow() */
  b->rowindices   = 0;
  b->rowvalues    = 0;
  b->getrowactive = PETSC_FALSE;

  /* register external direct solvers, when configured, for MatGetFactor() lookup */
#if defined(PETSC_HAVE_SPOOLES)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_spooles_C",
                                     "MatGetFactor_mpiaij_spooles",
                                     MatGetFactor_mpiaij_spooles);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_MUMPS)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_mumps_C",
                                     "MatGetFactor_mpiaij_mumps",
                                     MatGetFactor_mpiaij_mumps);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_superlu_dist_C",
                                     "MatGetFactor_mpiaij_superlu_dist",
                                     MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr);
#endif
  /* compose the type-specific operations dispatched by string name */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
                                     "MatStoreValues_MPIAIJ",
                                     MatStoreValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
                                     "MatRetrieveValues_MPIAIJ",
                                     MatRetrieveValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
				     "MatGetDiagonalBlock_MPIAIJ",
                                     MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C",
				     "MatIsTranspose_MPIAIJ",
				     MatIsTranspose_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C",
				     "MatMPIAIJSetPreallocation_MPIAIJ",
				     MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",
				     "MatMPIAIJSetPreallocationCSR_MPIAIJ",
				     MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C",
				     "MatDiagonalScaleLocal_MPIAIJ",
				     MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicsrperm_C",
                                     "MatConvert_MPIAIJ_MPICSRPERM",
                                      MatConvert_MPIAIJ_MPICSRPERM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicrl_C",
                                     "MatConvert_MPIAIJ_MPICRL",
                                      MatConvert_MPIAIJ_MPICRL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C",
                                     "MatMatMult_MPIDense_MPIAIJ",
                                      MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C",
                                     "MatMatMultSymbolic_MPIDense_MPIAIJ",
                                      MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C",
                                     "MatMatMultNumeric_MPIDense_MPIAIJ",
                                      MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr);
  /* finally stamp the type name so PetscTypeCompare() and viewers report mpiaij */
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
4937ccd8e176SBarry Smith EXTERN_C_END
493881824310SBarry Smith 
493903bfb495SBarry Smith #undef __FUNCT__
494003bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays"
494158d36128SBarry Smith /*@
494203bfb495SBarry Smith      MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal"
494303bfb495SBarry Smith          and "off-diagonal" part of the matrix in CSR format.
494403bfb495SBarry Smith 
494503bfb495SBarry Smith    Collective on MPI_Comm
494603bfb495SBarry Smith 
494703bfb495SBarry Smith    Input Parameters:
494803bfb495SBarry Smith +  comm - MPI communicator
494903bfb495SBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
495003bfb495SBarry Smith .  n - This value should be the same as the local size used in creating the
495103bfb495SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
495203bfb495SBarry Smith        calculated if N is given) For square matrices n is almost always m.
495303bfb495SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
495403bfb495SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
495503bfb495SBarry Smith .   i - row indices for "diagonal" portion of matrix
495603bfb495SBarry Smith .   j - column indices
495703bfb495SBarry Smith .   a - matrix values
495803bfb495SBarry Smith .   oi - row indices for "off-diagonal" portion of matrix
495903bfb495SBarry Smith .   oj - column indices
496003bfb495SBarry Smith -   oa - matrix values
496103bfb495SBarry Smith 
496203bfb495SBarry Smith    Output Parameter:
496303bfb495SBarry Smith .   mat - the matrix
496403bfb495SBarry Smith 
496503bfb495SBarry Smith    Level: advanced
496603bfb495SBarry Smith 
496703bfb495SBarry Smith    Notes:
496803bfb495SBarry Smith        The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc.
496903bfb495SBarry Smith 
497003bfb495SBarry Smith        The i and j indices are 0 based
497103bfb495SBarry Smith 
497203bfb495SBarry Smith        See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix
497303bfb495SBarry Smith 
497403bfb495SBarry Smith 
497503bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
497603bfb495SBarry Smith 
497703bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
49788d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays()
497903bfb495SBarry Smith @*/
49808d7a6e47SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[],
498103bfb495SBarry Smith 								PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat)
498203bfb495SBarry Smith {
498303bfb495SBarry Smith   PetscErrorCode ierr;
498403bfb495SBarry Smith   Mat_MPIAIJ     *maij;
498503bfb495SBarry Smith 
498603bfb495SBarry Smith  PetscFunctionBegin;
498703bfb495SBarry Smith   if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
498803bfb495SBarry Smith   if (i[0]) {
498903bfb495SBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
499003bfb495SBarry Smith   }
499103bfb495SBarry Smith   if (oi[0]) {
499203bfb495SBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0");
499303bfb495SBarry Smith   }
499403bfb495SBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
499503bfb495SBarry Smith   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
499603bfb495SBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
499703bfb495SBarry Smith   maij = (Mat_MPIAIJ*) (*mat)->data;
49988d7a6e47SBarry Smith   maij->donotstash     = PETSC_TRUE;
49998d7a6e47SBarry Smith   (*mat)->preallocated = PETSC_TRUE;
500003bfb495SBarry Smith 
5001d0f46423SBarry Smith   (*mat)->rmap->bs = (*mat)->cmap->bs = 1;
5002d0f46423SBarry Smith   ierr = PetscMapSetUp((*mat)->rmap);CHKERRQ(ierr);
5003d0f46423SBarry Smith   ierr = PetscMapSetUp((*mat)->cmap);CHKERRQ(ierr);
500403bfb495SBarry Smith 
500503bfb495SBarry Smith   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr);
5006d0f46423SBarry Smith   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr);
500703bfb495SBarry Smith 
50088d7a6e47SBarry Smith   ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
50098d7a6e47SBarry Smith   ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
50108d7a6e47SBarry Smith   ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
50118d7a6e47SBarry Smith   ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
50128d7a6e47SBarry Smith 
501303bfb495SBarry Smith   ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
501403bfb495SBarry Smith   ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
501503bfb495SBarry Smith   PetscFunctionReturn(0);
501603bfb495SBarry Smith }
501703bfb495SBarry Smith 
501881824310SBarry Smith /*
501981824310SBarry Smith     Special version for direct calls from Fortran
502081824310SBarry Smith */
502181824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS)
502281824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ
502381824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
502481824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij
502581824310SBarry Smith #endif
502681824310SBarry Smith 
502781824310SBarry Smith /* Change these macros so can be used in void function */
502881824310SBarry Smith #undef CHKERRQ
50297adad957SLisandro Dalcin #define CHKERRQ(ierr) CHKERRABORT(((PetscObject)mat)->comm,ierr)
503081824310SBarry Smith #undef SETERRQ2
50317adad957SLisandro Dalcin #define SETERRQ2(ierr,b,c,d) CHKERRABORT(((PetscObject)mat)->comm,ierr)
503281824310SBarry Smith #undef SETERRQ
50337adad957SLisandro Dalcin #define SETERRQ(ierr,b) CHKERRABORT(((PetscObject)mat)->comm,ierr)
503481824310SBarry Smith 
503581824310SBarry Smith EXTERN_C_BEGIN
503681824310SBarry Smith #undef __FUNCT__
503781824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_"
50381f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr)
503981824310SBarry Smith {
504081824310SBarry Smith   Mat             mat = *mmat;
504181824310SBarry Smith   PetscInt        m = *mm, n = *mn;
504281824310SBarry Smith   InsertMode      addv = *maddv;
504381824310SBarry Smith   Mat_MPIAIJ      *aij = (Mat_MPIAIJ*)mat->data;
504481824310SBarry Smith   PetscScalar     value;
504581824310SBarry Smith   PetscErrorCode  ierr;
5046899cda47SBarry Smith 
504781824310SBarry Smith   MatPreallocated(mat);
504881824310SBarry Smith   if (mat->insertmode == NOT_SET_VALUES) {
504981824310SBarry Smith     mat->insertmode = addv;
505081824310SBarry Smith   }
505181824310SBarry Smith #if defined(PETSC_USE_DEBUG)
505281824310SBarry Smith   else if (mat->insertmode != addv) {
505381824310SBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
505481824310SBarry Smith   }
505581824310SBarry Smith #endif
505681824310SBarry Smith   {
5057d0f46423SBarry Smith   PetscInt        i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
5058d0f46423SBarry Smith   PetscInt        cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
505981824310SBarry Smith   PetscTruth      roworiented = aij->roworiented;
506081824310SBarry Smith 
506181824310SBarry Smith   /* Some Variables required in the macro */
506281824310SBarry Smith   Mat             A = aij->A;
506381824310SBarry Smith   Mat_SeqAIJ      *a = (Mat_SeqAIJ*)A->data;
506481824310SBarry Smith   PetscInt        *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
5065dd6ea824SBarry Smith   MatScalar       *aa = a->a;
506681824310SBarry Smith   PetscTruth      ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE);
506781824310SBarry Smith   Mat             B = aij->B;
506881824310SBarry Smith   Mat_SeqAIJ      *b = (Mat_SeqAIJ*)B->data;
5069d0f46423SBarry Smith   PetscInt        *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
5070dd6ea824SBarry Smith   MatScalar       *ba = b->a;
507181824310SBarry Smith 
507281824310SBarry Smith   PetscInt        *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
507381824310SBarry Smith   PetscInt        nonew = a->nonew;
5074dd6ea824SBarry Smith   MatScalar       *ap1,*ap2;
507581824310SBarry Smith 
507681824310SBarry Smith   PetscFunctionBegin;
507781824310SBarry Smith   for (i=0; i<m; i++) {
507881824310SBarry Smith     if (im[i] < 0) continue;
507981824310SBarry Smith #if defined(PETSC_USE_DEBUG)
5080d0f46423SBarry Smith     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
508181824310SBarry Smith #endif
508281824310SBarry Smith     if (im[i] >= rstart && im[i] < rend) {
508381824310SBarry Smith       row      = im[i] - rstart;
508481824310SBarry Smith       lastcol1 = -1;
508581824310SBarry Smith       rp1      = aj + ai[row];
508681824310SBarry Smith       ap1      = aa + ai[row];
508781824310SBarry Smith       rmax1    = aimax[row];
508881824310SBarry Smith       nrow1    = ailen[row];
508981824310SBarry Smith       low1     = 0;
509081824310SBarry Smith       high1    = nrow1;
509181824310SBarry Smith       lastcol2 = -1;
509281824310SBarry Smith       rp2      = bj + bi[row];
509381824310SBarry Smith       ap2      = ba + bi[row];
509481824310SBarry Smith       rmax2    = bimax[row];
509581824310SBarry Smith       nrow2    = bilen[row];
509681824310SBarry Smith       low2     = 0;
509781824310SBarry Smith       high2    = nrow2;
509881824310SBarry Smith 
509981824310SBarry Smith       for (j=0; j<n; j++) {
510081824310SBarry Smith         if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
510181824310SBarry Smith         if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
510281824310SBarry Smith         if (in[j] >= cstart && in[j] < cend){
510381824310SBarry Smith           col = in[j] - cstart;
510481824310SBarry Smith           MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
510581824310SBarry Smith         } else if (in[j] < 0) continue;
510681824310SBarry Smith #if defined(PETSC_USE_DEBUG)
5107d0f46423SBarry Smith         else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
510881824310SBarry Smith #endif
510981824310SBarry Smith         else {
511081824310SBarry Smith           if (mat->was_assembled) {
511181824310SBarry Smith             if (!aij->colmap) {
511281824310SBarry Smith               ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
511381824310SBarry Smith             }
511481824310SBarry Smith #if defined (PETSC_USE_CTABLE)
511581824310SBarry Smith             ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
511681824310SBarry Smith 	    col--;
511781824310SBarry Smith #else
511881824310SBarry Smith             col = aij->colmap[in[j]] - 1;
511981824310SBarry Smith #endif
512081824310SBarry Smith             if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
512181824310SBarry Smith               ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
512281824310SBarry Smith               col =  in[j];
512381824310SBarry Smith               /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
512481824310SBarry Smith               B = aij->B;
512581824310SBarry Smith               b = (Mat_SeqAIJ*)B->data;
512681824310SBarry Smith               bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j;
512781824310SBarry Smith               rp2      = bj + bi[row];
512881824310SBarry Smith               ap2      = ba + bi[row];
512981824310SBarry Smith               rmax2    = bimax[row];
513081824310SBarry Smith               nrow2    = bilen[row];
513181824310SBarry Smith               low2     = 0;
513281824310SBarry Smith               high2    = nrow2;
5133d0f46423SBarry Smith               bm       = aij->B->rmap->n;
513481824310SBarry Smith               ba = b->a;
513581824310SBarry Smith             }
513681824310SBarry Smith           } else col = in[j];
513781824310SBarry Smith           MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
513881824310SBarry Smith         }
513981824310SBarry Smith       }
514081824310SBarry Smith     } else {
514181824310SBarry Smith       if (!aij->donotstash) {
514281824310SBarry Smith         if (roworiented) {
514381824310SBarry Smith           if (ignorezeroentries && v[i*n] == 0.0) continue;
514481824310SBarry Smith           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
514581824310SBarry Smith         } else {
514681824310SBarry Smith           if (ignorezeroentries && v[i] == 0.0) continue;
514781824310SBarry Smith           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
514881824310SBarry Smith         }
514981824310SBarry Smith       }
515081824310SBarry Smith     }
515181824310SBarry Smith   }}
515281824310SBarry Smith   PetscFunctionReturnVoid();
515381824310SBarry Smith }
515481824310SBarry Smith EXTERN_C_END
515503bfb495SBarry Smith 
5156