xref: /petsc/src/mat/impls/aij/mpi/mpiaij.c (revision 78102f6c55fb0c40f4da559a448bd6b14eec00b1)
1be1d678aSKris Buschelman #define PETSCMAT_DLL
28a729477SBarry Smith 
37c4f633dSBarry Smith #include "../src/mat/impls/aij/mpi/mpiaij.h"   /*I "petscmat.h" I*/
47c4f633dSBarry Smith #include "../src/inline/spops.h"
58a729477SBarry Smith 
6dd6ea824SBarry Smith #undef __FUNCT__
7dd6ea824SBarry Smith #define __FUNCT__ "MatDistribute_MPIAIJ"
8dd6ea824SBarry Smith /*
9dd6ea824SBarry Smith     Distributes a SeqAIJ matrix across a set of processes. Code stolen from
10dd6ea824SBarry Smith     MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type.
11dd6ea824SBarry Smith 
12dd6ea824SBarry Smith     Only for square matrices
13dd6ea824SBarry Smith */
/*
   MatDistribute_MPIAIJ - Distributes a SeqAIJ matrix held on process 0 of comm
   across all processes, creating (or refilling the values of) a parallel AIJ matrix.

   Input:
     comm  - communicator over which the matrix is distributed
     gmat  - the global SeqAIJ matrix; significant only on process 0
     m     - number of local rows this process is to own (matrix must be square)
     reuse - MAT_INITIAL_MATRIX to create *inmat, MAT_REUSE_MATRIX to move only
             the numerical values into an already-structured *inmat
   Output:
     inmat - the distributed matrix
*/
PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat)
{
  PetscMPIInt    rank,size;
  PetscInt       *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld;
  PetscErrorCode ierr;
  Mat            mat;
  Mat_SeqAIJ     *gmata;
  PetscMPIInt    tag;
  MPI_Status     status;
  PetscTruth     aij;
  MatScalar      *gmataa,*ao,*ad,*gmataarestore=0;

  PetscFunctionBegin;
  CHKMEMQ;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  /* gmat is only meaningful on process 0, so only it can check the type */
  if (!rank) {
    ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr);
    if (!aij) SETERRQ1(PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name);
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(comm,&mat);CHKERRQ(ierr);
    ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr);
    ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
    ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr);
    /* gather every process' local row count, then prefix-sum so that
       rowners[p]..rowners[p+1]-1 are the global rows owned by process p */
    ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
    rowners[0] = 0;
    for (i=2; i<=size; i++) {
      rowners[i] += rowners[i-1];
    }
    rstart = rowners[rank];
    rend   = rowners[rank+1];
    ierr   = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      gmata = (Mat_SeqAIJ*) gmat->data;
      /* send row lengths to all processors */
      for (i=0; i<m; i++) dlens[i] = gmata->ilen[i];
      for (i=1; i<size; i++) {
        ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }
      /* determine number diagonal and off-diagonal counts:
         olens[i] counts entries outside [rstart,rend) (off-diagonal block B),
         ld[i] counts entries strictly left of the diagonal block; ld is kept
         on the Mat so MAT_REUSE_MATRIX can later split incoming values */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmata->j[jj] < rstart) ld[i]++;
          if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* send column indices to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
        ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }

      /* send numerical values to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* process 0's own rows start at offset 0 of the global arrays */
      gmataa = gmata->a;
      gmataj = gmata->j;

    } else {
      /* receive row lengths */
      ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* receive column indices */
      ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr);
      ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* determine number diagonal and off-diagonal counts (same ld/olens
         bookkeeping as on process 0, but from the received indices) */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmataj[jj] < rstart) ld[i]++;
          if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* receive numerical values */
      ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr);
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* set preallocation: dlens temporarily becomes the diagonal-block count */
    for (i=0; i<m; i++) {
      dlens[i] -= olens[i];
    }
    ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr);

    /* restore dlens to total row lengths for the insertion loop below */
    for (i=0; i<m; i++) {
      dlens[i] += olens[i];
    }
    cnt  = 0;
    for (i=0; i<m; i++) {
      row  = rstart + i;
      ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr);
      cnt += dlens[i];
    }
    /* on process 0 gmataa/gmataj alias gmat's own arrays and must not be freed */
    if (rank) {
      ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr);
    }
    ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
    ierr = PetscFree(rowners);CHKERRQ(ierr);
    /* stash the left-of-diagonal counts for later MAT_REUSE_MATRIX calls */
    ((Mat_MPIAIJ*)(mat->data))->ld = ld;
    *inmat = mat;
  } else {   /* column indices are already set; only need to move over numerical values from process 0 */
    Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data;
    Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data;
    mat   = *inmat;
    ierr  = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      /* send numerical values to other processes */
      gmata = (Mat_SeqAIJ*) gmat->data;
      ierr   = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr);
      gmataa = gmata->a;
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      nz   = gmata->i[rowners[1]]-gmata->i[rowners[0]];
    } else {
      /* receive numerical values from process 0*/
      nz   = Ad->nz + Ao->nz;
      ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa;
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* transfer numerical values into the diagonal A and off diagonal B parts of mat;
       each incoming row is laid out [left-of-diagonal | diagonal | right-of-diagonal],
       so ld[] tells how to split the off-diagonal values around the A part */
    ld = ((Mat_MPIAIJ*)(mat->data))->ld;
    ad = Ad->a;
    ao = Ao->a;
    if (mat->rmap->n) {
      i  = 0;
      nz = ld[i];                                   ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    for (i=1; i<mat->rmap->n; i++) {
      /* right-of-diagonal tail of row i-1 plus left-of-diagonal head of row i */
      nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    i--;
    /* right-of-diagonal tail of the last row */
    if (mat->rmap->n) {
      nz = Ao->i[i+1] - Ao->i[i] - ld[i];           ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
    }
    if (rank) {
      ierr = PetscFree(gmataarestore);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
175dd6ea824SBarry Smith 
/*
  Local utility routine that creates a mapping from the global column
  number to the local number in the off-diagonal part of the local
  storage of the matrix.  When PETSC_USE_CTABLE is used this is scalable at
  a slightly higher hash-table cost; without it it is not scalable (each process
  has an order-N integer array, but access is fast).
*/
1834a2ae208SSatish Balay #undef __FUNCT__
1844a2ae208SSatish Balay #define __FUNCT__ "CreateColmap_MPIAIJ_Private"
/*
   CreateColmap_MPIAIJ_Private - Builds aij->colmap, mapping a global column
   number to local-column-in-B + 1 (so that 0 can mean "column not present").
   With PETSC_USE_CTABLE the map is a hash table; otherwise it is a dense
   integer array of length cmap->N.
*/
PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       n = aij->B->cmap->n,i;  /* number of off-diagonal (B) columns on this process */

  PetscFunctionBegin;
#if defined (PETSC_USE_CTABLE)
  /* keys and values are shifted by one because 0 cannot be stored in a PetscTable */
  ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr);
  for (i=0; i<n; i++){
    ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr);
  }
#else
  /* dense map over all global columns: colmap[global] = local+1, 0 = absent */
  ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1;
#endif
  PetscFunctionReturn(0);
}
2059e25ed09SBarry Smith 
206085a36d4SBarry Smith 
#define CHUNKSIZE   15
/*
   MatSetValues_SeqAIJ_A_Private - Inserts or adds one (row,col,value) entry into
   the diagonal (A) block.  Expects the caller (MatSetValues_MPIAIJ) to have set
   up rp1/ap1 (column/value pointers for the row), nrow1, rmax1, low1/high1
   (binary-search window), lastcol1, and the A-block bookkeeping variables
   (aimax, ailen, nonew, ignorezeroentries, ...).  Does a binary search for the
   column; on a miss it may reallocate the row (MatSeqXAIJReallocateAIJ) and
   shifts later entries up to make room.  NOTE(review): multi-statement macro
   wrapped in bare braces and exited via the a_noinsert label — must be expanded
   exactly once per loop body.
*/
#define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \
{ \
    if (col <= lastcol1) low1 = 0; else high1 = nrow1; \
    lastcol1 = col;\
    while (high1-low1 > 5) { \
      t = (low1+high1)/2; \
      if (rp1[t] > col) high1 = t; \
      else             low1  = t; \
    } \
      for (_i=low1; _i<high1; _i++) { \
        if (rp1[_i] > col) break; \
        if (rp1[_i] == col) { \
          if (addv == ADD_VALUES) ap1[_i] += value;   \
          else                    ap1[_i] = value; \
          goto a_noinsert; \
        } \
      }  \
      if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \
      if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;}		\
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \
      N = nrow1++ - 1; a->nz++; high1++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp1[ii+1] = rp1[ii]; \
        ap1[ii+1] = ap1[ii]; \
      } \
      rp1[_i] = col;  \
      ap1[_i] = value;  \
      a_noinsert: ; \
      ailen[row] = nrow1; \
}
2400a198c4cSBarry Smith 
241085a36d4SBarry Smith 
/*
   MatSetValues_SeqAIJ_B_Private - Counterpart of MatSetValues_SeqAIJ_A_Private
   for the off-diagonal (B) block: same binary-search/insert-with-shift logic,
   but operating on the rp2/ap2/nrow2/rmax2/low2/high2/lastcol2 state and the
   B-block bookkeeping (bimax, bilen, ...).  Exits via the b_noinsert label, so
   it too must be expanded exactly once per loop body.
*/
#define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \
{ \
    if (col <= lastcol2) low2 = 0; else high2 = nrow2; \
    lastcol2 = col;\
    while (high2-low2 > 5) { \
      t = (low2+high2)/2; \
      if (rp2[t] > col) high2 = t; \
      else             low2  = t; \
    } \
    for (_i=low2; _i<high2; _i++) {		\
      if (rp2[_i] > col) break;			\
      if (rp2[_i] == col) {			      \
	if (addv == ADD_VALUES) ap2[_i] += value;     \
	else                    ap2[_i] = value;      \
	goto b_noinsert;			      \
      }						      \
    }							      \
    if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
    if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;}		\
    if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
    MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \
    N = nrow2++ - 1; b->nz++; high2++;					\
    /* shift up all the later entries in this row */			\
    for (ii=N; ii>=_i; ii--) {						\
      rp2[ii+1] = rp2[ii];						\
      ap2[ii+1] = ap2[ii];						\
    }									\
    rp2[_i] = col;							\
    ap2[_i] = value;							\
    b_noinsert: ;								\
    bilen[row] = nrow2;							\
}
27430770e4dSSatish Balay 
2754a2ae208SSatish Balay #undef __FUNCT__
2762fd7e33dSBarry Smith #define __FUNCT__ "MatSetValuesRow_MPIAIJ"
2772fd7e33dSBarry Smith PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[])
2782fd7e33dSBarry Smith {
2792fd7e33dSBarry Smith   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)A->data;
2802fd7e33dSBarry Smith   Mat_SeqAIJ     *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data;
2812fd7e33dSBarry Smith   PetscErrorCode ierr;
2822fd7e33dSBarry Smith   PetscInt       l,*garray = mat->garray,diag;
2832fd7e33dSBarry Smith 
2842fd7e33dSBarry Smith   PetscFunctionBegin;
2852fd7e33dSBarry Smith   /* code only works for square matrices A */
2862fd7e33dSBarry Smith 
2872fd7e33dSBarry Smith   /* find size of row to the left of the diagonal part */
2882fd7e33dSBarry Smith   ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr);
2892fd7e33dSBarry Smith   row  = row - diag;
2902fd7e33dSBarry Smith   for (l=0; l<b->i[row+1]-b->i[row]; l++) {
2912fd7e33dSBarry Smith     if (garray[b->j[b->i[row]+l]] > diag) break;
2922fd7e33dSBarry Smith   }
2932fd7e33dSBarry Smith   ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr);
2942fd7e33dSBarry Smith 
2952fd7e33dSBarry Smith   /* diagonal part */
2962fd7e33dSBarry Smith   ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr);
2972fd7e33dSBarry Smith 
2982fd7e33dSBarry Smith   /* right of diagonal part */
2992fd7e33dSBarry Smith   ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr);
3002fd7e33dSBarry Smith   PetscFunctionReturn(0);
3012fd7e33dSBarry Smith }
3022fd7e33dSBarry Smith 
3032fd7e33dSBarry Smith #undef __FUNCT__
3044a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIAIJ"
/*
   MatSetValues_MPIAIJ - Inserts/adds an m-by-n logically dense block of values.
   Locally-owned rows are routed directly into the diagonal (A) or off-diagonal
   (B) sequential blocks via the MatSetValues_SeqAIJ_{A,B}_Private macros;
   off-process rows are stashed for communication at assembly time (unless
   donotstash is set).  Negative row/column indices are silently skipped.
*/
PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscScalar    value;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
  PetscTruth     roworiented = aij->roworiented;

  /* Some Variables required in the macro */
  Mat            A = aij->A;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data;
  PetscInt       *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
  MatScalar      *aa = a->a;
  PetscTruth     ignorezeroentries = a->ignorezeroentries;
  Mat            B = aij->B;
  Mat_SeqAIJ     *b = (Mat_SeqAIJ*)B->data;
  PetscInt       *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
  MatScalar      *ba = b->a;

  PetscInt       *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
  PetscInt       nonew = a->nonew;
  MatScalar      *ap1,*ap2;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;  /* negative rows are ignored by convention */
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* locally-owned row: prime the per-row search state used by the macros */
      row      = im[i] - rstart;
      lastcol1 = -1;
      rp1      = aj + ai[row];
      ap1      = aa + ai[row];
      rmax1    = aimax[row];
      nrow1    = ailen[row];
      low1     = 0;
      high1    = nrow1;
      lastcol2 = -1;
      rp2      = bj + bi[row];
      ap2      = ba + bi[row];
      rmax2    = bimax[row];
      nrow2    = bilen[row];
      low2     = 0;
      high2    = nrow2;

      for (j=0; j<n; j++) {
        /* v == NULL means insert (structural) zeros; roworiented selects row- vs column-major v */
        if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0;
        if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
        if (in[j] >= cstart && in[j] < cend){
          /* column falls in the diagonal block */
          col = in[j] - cstart;
          MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
#endif
        else {
          /* off-diagonal block column */
          if (mat->was_assembled) {
            /* after assembly B uses compacted local column numbering; translate via colmap */
            if (!aij->colmap) {
              ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined (PETSC_USE_CTABLE)
            ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
	    col--;
#else
            col = aij->colmap[in[j]] - 1;
#endif
            if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
              /* new off-diagonal column: disassemble B back to global numbering */
              ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
              col =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
              B = aij->B;
              b = (Mat_SeqAIJ*)B->data;
              bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a;
              rp2      = bj + bi[row];
              ap2      = ba + bi[row];
              rmax2    = bimax[row];
              nrow2    = bilen[row];
              low2     = 0;
              high2    = nrow2;
              bm       = aij->B->rmap->n;
              ba = b->a;
            }
          } else col = in[j];  /* before first assembly B is indexed by global columns */
          MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
        }
      }
    } else {
      /* off-process row: stash for communication during assembly */
      if (!aij->donotstash) {
        if (roworiented) {
          if (ignorezeroentries && v[i*n] == 0.0) continue;
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
        } else {
          if (ignorezeroentries && v[i] == 0.0) continue;
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
4078a729477SBarry Smith 
4084a2ae208SSatish Balay #undef __FUNCT__
4094a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIAIJ"
/*
   MatGetValues_MPIAIJ - Retrieves an m-by-n block of values into v (row-major).
   Only locally-owned rows are supported; off-process rows raise an error.
   Columns outside the diagonal block are looked up through colmap; columns not
   stored in B yield 0.0.
*/
PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= rstart && idxm[i] < rend) {
      row = idxm[i] - rstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= cstart && idxn[j] < cend){
          /* column lives in the diagonal block */
          col = idxn[j] - cstart;
          ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          /* off-diagonal block: translate the global column to B's local numbering */
          if (!aij->colmap) {
            ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr);
          col --;
#else
          col = aij->colmap[idxn[j]] - 1;
#endif
          /* column not stored in B => value is zero; garray check guards hash collisions / stale map */
          if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0;
          else {
            ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else {
      SETERRQ(PETSC_ERR_SUP,"Only local values currently supported");
    }
  }
  PetscFunctionReturn(0);
}
451bc5ccf88SSatish Balay 
4524a2ae208SSatish Balay #undef __FUNCT__
4534a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ"
454dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode)
455bc5ccf88SSatish Balay {
456bc5ccf88SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
457dfbe8321SBarry Smith   PetscErrorCode ierr;
458b1d57f15SBarry Smith   PetscInt       nstash,reallocs;
459bc5ccf88SSatish Balay   InsertMode     addv;
460bc5ccf88SSatish Balay 
461bc5ccf88SSatish Balay   PetscFunctionBegin;
462bc5ccf88SSatish Balay   if (aij->donotstash) {
463bc5ccf88SSatish Balay     PetscFunctionReturn(0);
464bc5ccf88SSatish Balay   }
465bc5ccf88SSatish Balay 
466bc5ccf88SSatish Balay   /* make sure all processors are either in INSERTMODE or ADDMODE */
4677adad957SLisandro Dalcin   ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr);
468bc5ccf88SSatish Balay   if (addv == (ADD_VALUES|INSERT_VALUES)) {
46929bbc08cSBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
470bc5ccf88SSatish Balay   }
471bc5ccf88SSatish Balay   mat->insertmode = addv; /* in case this processor had no cache */
472bc5ccf88SSatish Balay 
473d0f46423SBarry Smith   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
4748798bf22SSatish Balay   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
475ae15b995SBarry Smith   ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
476bc5ccf88SSatish Balay   PetscFunctionReturn(0);
477bc5ccf88SSatish Balay }
478bc5ccf88SSatish Balay 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIAIJ"
/*
    MatAssemblyEnd_MPIAIJ - Completes assembly of a parallel AIJ matrix.

    Drains the stash of values destined for rows owned by this process but
    set by other processes, assembles the local diagonal (A) and off-diagonal
    (B) sequential blocks, and rebuilds the parallel multiply structures when
    this is the first final assembly or when any process disassembled.
*/
PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *a=(Mat_SeqAIJ *)aij->A->data;
  PetscErrorCode ierr;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscTruth     other_disassembled;
  PetscScalar    *val;
  InsertMode     addv = mat->insertmode;

  /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!aij->donotstash) {
    /* receive each stashed message and insert its values locally */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
  }
  a->compressedrow.use     = PETSC_FALSE;
  ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselfs, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqAIJ*)aij->B->data)->nonew)  {
    /* MPI_PROD of was_assembled flags: zero iff some process disassembled */
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
    }
  }
  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    /* first final assembly: build lvec/Mvctx scatter for MatMult() */
    ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr);
  ((Mat_SeqAIJ *)aij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */
  ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr);

  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  aij->rowvalues = 0;

  /* used by MatAXPY() */
  a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0;  /* b->xtoy = 0 */
  a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0;  /* b->XtoY = 0 */

  PetscFunctionReturn(0);
}
545bc5ccf88SSatish Balay 
5464a2ae208SSatish Balay #undef __FUNCT__
5474a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ"
548dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
5491eb62cbbSBarry Smith {
55044a69424SLois Curfman McInnes   Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
551dfbe8321SBarry Smith   PetscErrorCode ierr;
5523a40ed3dSBarry Smith 
5533a40ed3dSBarry Smith   PetscFunctionBegin;
55478b31e54SBarry Smith   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
55578b31e54SBarry Smith   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
5563a40ed3dSBarry Smith   PetscFunctionReturn(0);
5571eb62cbbSBarry Smith }
5581eb62cbbSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_MPIAIJ"
/*
    MatZeroRows_MPIAIJ - Zeros the listed global rows, optionally placing
    diag on the diagonal of each zeroed row.

    rows[] may contain rows owned by any process; a hand-rolled nonblocking
    MPI exchange forwards each row index to its owner, which then zeroes it
    locally.  Only for the square-diagonal-block case can diag be inserted
    without per-entry MatSetValues() calls.
*/
PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscMPIInt    size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt       i,*owners = A->rmap->range;
  PetscInt       *nprocs,j,idx,nsends,row;
  PetscInt       nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt       *rvalues,count,base,slen,*source;
  PetscInt       *lens,*lrows,*values,rstart=A->rmap->rstart;
  MPI_Comm       comm = ((PetscObject)A)->comm;
  MPI_Request    *send_waits,*recv_waits;
  MPI_Status     recv_status,*send_status;
#if defined(PETSC_DEBUG)
  PetscTruth     found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  /*  first count number of contributors to each processor */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  j = 0;
  for (i=0; i<N; i++) {
    /* restart the owner scan only when indices go backwards; for (nearly)
       sorted input this makes the whole loop O(N+size) */
    if (lastidx > (idx = rows[i])) j = 0;
    lastidx = idx;
    for (; j<size; j++) {
      if (idx >= owners[j] && idx < owners[j+1]) {
        nprocs[2*j]++;      /* number of rows going to process j */
        nprocs[2*j+1] = 1;  /* flag: at least one message goes to j */
        owner[i] = j;
#if defined(PETSC_DEBUG)
        found = PETSC_TRUE;
#endif
        break;
      }
    }
#if defined(PETSC_DEBUG)
    if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
    found = PETSC_FALSE;
#endif
  }
  nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);

  /* post receives:   */
  ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
      1) starts[i] gives the starting index in svalues for stuff going to
         the ith processor
  */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  for (i=0; i<N; i++) {
    svalues[starts[owner[i]]++] = rows[i];
  }

  /* starts[] was bumped while packing svalues; rebuild it for the sends */
  starts[0] = 0;
  for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  count = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i+1]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  base = owners[rank];

  /*  wait on receives */
  ierr   = PetscMalloc(2*(nrecvs+1)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
  source = lens + nrecvs;
  count  = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
    source[imdex]  = recv_status.MPI_SOURCE;
    lens[imdex]    = n;
    slen          += n;
    count--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* move the data into the send scatter */
  ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
  count = 0;
  for (i=0; i<nrecvs; i++) {
    values = rvalues + i*nmax;
    for (j=0; j<lens[i]; j++) {
      lrows[count++] = values[j] - base; /* convert global to local row index */
    }
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree(lens);CHKERRQ(ierr);
  ierr = PetscFree(owner);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* actually zap the local rows */
  /*
        Zero the required rows. If the "diagonal block" of the matrix
     is square and the user wishes to set the diagonal we use separate
     code so that MatSetValues() is not called for each diagonal allocating
     new memory, thus calling lots of mallocs and slowing things down.

       Contributed by: Matthew Knepley
  */
  /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
  ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr);
  if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
    ierr      = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr);
  } else if (diag != 0.0) {
    /* rectangular diagonal block: diagonal entries may fall in l->B and
       must be inserted individually with MatSetValues() */
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
    if (((Mat_SeqAIJ*)l->A->data)->nonew) {
      SETERRQ(PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\
MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
    }
    for (i = 0; i < slen; i++) {
      row  = lrows[i] + rstart;
      ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
  }
  ierr = PetscFree(lrows);CHKERRQ(ierr);

  /* wait on sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(svalues);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
7101eb62cbbSBarry Smith 
7114a2ae208SSatish Balay #undef __FUNCT__
7124a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ"
713dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy)
7141eb62cbbSBarry Smith {
715416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
716dfbe8321SBarry Smith   PetscErrorCode ierr;
717b1d57f15SBarry Smith   PetscInt       nt;
718416022c9SBarry Smith 
7193a40ed3dSBarry Smith   PetscFunctionBegin;
720a2ce50c7SBarry Smith   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
721d0f46423SBarry Smith   if (nt != A->cmap->n) {
722d0f46423SBarry Smith     SETERRQ2(PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt);
723fbd6ef76SBarry Smith   }
724ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
725f830108cSBarry Smith   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
726ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
727f830108cSBarry Smith   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
7283a40ed3dSBarry Smith   PetscFunctionReturn(0);
7291eb62cbbSBarry Smith }
7301eb62cbbSBarry Smith 
7314a2ae208SSatish Balay #undef __FUNCT__
7324a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIAIJ"
733dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
734da3a660dSBarry Smith {
735416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
736dfbe8321SBarry Smith   PetscErrorCode ierr;
7373a40ed3dSBarry Smith 
7383a40ed3dSBarry Smith   PetscFunctionBegin;
739ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
740f830108cSBarry Smith   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
741ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
742f830108cSBarry Smith   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
7433a40ed3dSBarry Smith   PetscFunctionReturn(0);
744da3a660dSBarry Smith }
745da3a660dSBarry Smith 
7464a2ae208SSatish Balay #undef __FUNCT__
7474a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ"
748dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy)
749da3a660dSBarry Smith {
750416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
751dfbe8321SBarry Smith   PetscErrorCode ierr;
752a5ff213dSBarry Smith   PetscTruth     merged;
753da3a660dSBarry Smith 
7543a40ed3dSBarry Smith   PetscFunctionBegin;
755a5ff213dSBarry Smith   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
756da3a660dSBarry Smith   /* do nondiagonal part */
7577c922b88SBarry Smith   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
758a5ff213dSBarry Smith   if (!merged) {
759da3a660dSBarry Smith     /* send it on its way */
760ca9f406cSSatish Balay     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
761da3a660dSBarry Smith     /* do local part */
7627c922b88SBarry Smith     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
763da3a660dSBarry Smith     /* receive remote parts: note this assumes the values are not actually */
764a5ff213dSBarry Smith     /* added in yy until the next line, */
765ca9f406cSSatish Balay     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
766a5ff213dSBarry Smith   } else {
767a5ff213dSBarry Smith     /* do local part */
768a5ff213dSBarry Smith     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
769a5ff213dSBarry Smith     /* send it on its way */
770ca9f406cSSatish Balay     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
771a5ff213dSBarry Smith     /* values actually were received in the Begin() but we need to call this nop */
772ca9f406cSSatish Balay     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
773a5ff213dSBarry Smith   }
7743a40ed3dSBarry Smith   PetscFunctionReturn(0);
775da3a660dSBarry Smith }
776da3a660dSBarry Smith 
777cd0d46ebSvictorle EXTERN_C_BEGIN
778cd0d46ebSvictorle #undef __FUNCT__
7795fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ"
78013c77408SMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscTruth *f)
781cd0d46ebSvictorle {
7824f423910Svictorle   MPI_Comm       comm;
783cd0d46ebSvictorle   Mat_MPIAIJ     *Aij = (Mat_MPIAIJ *) Amat->data, *Bij;
78466501d38Svictorle   Mat            Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs;
785cd0d46ebSvictorle   IS             Me,Notme;
7866849ba73SBarry Smith   PetscErrorCode ierr;
787b1d57f15SBarry Smith   PetscInt       M,N,first,last,*notme,i;
788b1d57f15SBarry Smith   PetscMPIInt    size;
789cd0d46ebSvictorle 
790cd0d46ebSvictorle   PetscFunctionBegin;
79142e5f5b4Svictorle 
79242e5f5b4Svictorle   /* Easy test: symmetric diagonal block */
79366501d38Svictorle   Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A;
7945485867bSBarry Smith   ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr);
795cd0d46ebSvictorle   if (!*f) PetscFunctionReturn(0);
7964f423910Svictorle   ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
797b1d57f15SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
798b1d57f15SBarry Smith   if (size == 1) PetscFunctionReturn(0);
79942e5f5b4Svictorle 
80042e5f5b4Svictorle   /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */
801cd0d46ebSvictorle   ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr);
802cd0d46ebSvictorle   ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr);
803b1d57f15SBarry Smith   ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),&notme);CHKERRQ(ierr);
804cd0d46ebSvictorle   for (i=0; i<first; i++) notme[i] = i;
805cd0d46ebSvictorle   for (i=last; i<M; i++) notme[i-last+first] = i;
806268466fbSBarry Smith   ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,&Notme);CHKERRQ(ierr);
807268466fbSBarry Smith   ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr);
808268466fbSBarry Smith   ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr);
80966501d38Svictorle   Aoff = Aoffs[0];
810268466fbSBarry Smith   ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr);
81166501d38Svictorle   Boff = Boffs[0];
8125485867bSBarry Smith   ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr);
81366501d38Svictorle   ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr);
81466501d38Svictorle   ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr);
81542e5f5b4Svictorle   ierr = ISDestroy(Me);CHKERRQ(ierr);
81642e5f5b4Svictorle   ierr = ISDestroy(Notme);CHKERRQ(ierr);
81742e5f5b4Svictorle 
818cd0d46ebSvictorle   PetscFunctionReturn(0);
819cd0d46ebSvictorle }
820cd0d46ebSvictorle EXTERN_C_END
821cd0d46ebSvictorle 
8224a2ae208SSatish Balay #undef __FUNCT__
8234a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ"
824dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
825da3a660dSBarry Smith {
826416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
827dfbe8321SBarry Smith   PetscErrorCode ierr;
828da3a660dSBarry Smith 
8293a40ed3dSBarry Smith   PetscFunctionBegin;
830da3a660dSBarry Smith   /* do nondiagonal part */
8317c922b88SBarry Smith   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
832da3a660dSBarry Smith   /* send it on its way */
833ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
834da3a660dSBarry Smith   /* do local part */
8357c922b88SBarry Smith   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
836a5ff213dSBarry Smith   /* receive remote parts */
837ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
8383a40ed3dSBarry Smith   PetscFunctionReturn(0);
839da3a660dSBarry Smith }
840da3a660dSBarry Smith 
8411eb62cbbSBarry Smith /*
8421eb62cbbSBarry Smith   This only works correctly for square matrices where the subblock A->A is the
8431eb62cbbSBarry Smith    diagonal block
8441eb62cbbSBarry Smith */
8454a2ae208SSatish Balay #undef __FUNCT__
8464a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ"
847dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v)
8481eb62cbbSBarry Smith {
849dfbe8321SBarry Smith   PetscErrorCode ierr;
850416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
8513a40ed3dSBarry Smith 
8523a40ed3dSBarry Smith   PetscFunctionBegin;
853d0f46423SBarry Smith   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
854d0f46423SBarry Smith   if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) {
85529bbc08cSBarry Smith     SETERRQ(PETSC_ERR_ARG_SIZ,"row partition must equal col partition");
8563a40ed3dSBarry Smith   }
8573a40ed3dSBarry Smith   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
8583a40ed3dSBarry Smith   PetscFunctionReturn(0);
8591eb62cbbSBarry Smith }
8601eb62cbbSBarry Smith 
8614a2ae208SSatish Balay #undef __FUNCT__
8624a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ"
863f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa)
864052efed2SBarry Smith {
865052efed2SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
866dfbe8321SBarry Smith   PetscErrorCode ierr;
8673a40ed3dSBarry Smith 
8683a40ed3dSBarry Smith   PetscFunctionBegin;
869f4df32b1SMatthew Knepley   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
870f4df32b1SMatthew Knepley   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
8713a40ed3dSBarry Smith   PetscFunctionReturn(0);
872052efed2SBarry Smith }
873052efed2SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ"
/*
    MatDestroy_MPIAIJ - Frees all storage owned by an MPIAIJ matrix:
    the stash, both sequential blocks, the column map, ghost-column array,
    scatter context, and work arrays; then unregisters the type name and
    the dynamically composed functions.
*/
PetscErrorCode MatDestroy_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatDestroy(aij->A);CHKERRQ(ierr);
  ierr = MatDestroy(aij->B);CHKERRQ(ierr);
  /* colmap is either a hash table or a dense array depending on build option */
#if defined (PETSC_USE_CTABLE)
  if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);}
#else
  ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(aij->garray);CHKERRQ(ierr);
  /* lvec/Mvctx only exist after the first final assembly */
  if (aij->lvec)   {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);}
  if (aij->Mvctx)  {ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);}
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(aij->ld);CHKERRQ(ierr);
  ierr = PetscFree(aij);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  /* remove the functions composed onto the object at creation time */
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
910ee50ffe9SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAIJ_Binary"
/*
    MatView_MPIAIJ_Binary - Writes the matrix in PETSc binary format.

    Process 0 writes the header, then funnels the data: each stage
    (row lengths, column indices, values) is sent by every other process
    to process 0, which writes it to the file.  Per row, the column
    indices/values are emitted in global column order: the B (off-diagonal)
    entries left of the diagonal block, then the A (diagonal-block) entries,
    then the remaining B entries.
*/
PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ*       A = (Mat_SeqAIJ*)aij->A->data;
  Mat_SeqAIJ*       B = (Mat_SeqAIJ*)aij->B->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
  int               fd;
  PetscInt          nz,header[4],*row_lengths,*range=0,rlen,i;
  PetscInt          nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz;
  PetscScalar       *column_values;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr);
  nz   = A->nz + B->nz;
  if (!rank) {
    /* header: cookie, global rows, global cols, global nonzeros */
    header[0] = MAT_FILE_COOKIE;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;
    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
    ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get largest number of rows any processor has */
    rlen = mat->rmap->n;
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
    rlen = mat->rmap->n;
  }

  /* load up the local row counts */
  ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr);
  for (i=0; i<mat->rmap->n; i++) {
    row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }

  /* store the row lengths to the file */
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = range[i+1] - range[i];
      ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
  } else {
    ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_lengths);CHKERRQ(ierr);

  /* load up the local column indices */
  nzmax = nz; /* 0th processor needs as much space as the largest processor needs */
  ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
  ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<mat->rmap->n; i++) {
    /* B entries whose global column precedes the diagonal block */
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ( (col = garray[B->j[j]]) > cstart) break;
      column_indices[cnt++] = col;
    }
    /* diagonal-block entries, shifted to global column numbering */
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      column_indices[cnt++] = A->j[k] + cstart;
    }
    /* remaining B entries, right of the diagonal block */
    for (; j<B->i[i+1]; j++) {
      column_indices[cnt++] = garray[B->j[j]];
    }
  }
  if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);

  /* store the column indices to the file */
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      /* each process first announces its count, then sends its indices */
      ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
      if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
      ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
  } else {
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the local column values (same traversal order as the indices) */
  ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<mat->rmap->n; i++) {
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ( garray[B->j[j]] > cstart) break;
      column_values[cnt++] = B->a[j];
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      column_values[cnt++] = A->a[k];
    }
    for (; j<B->i[i+1]; j++) {
      column_values[cnt++] = B->a[j];
    }
  }
  if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);

  /* store the column values to the file */
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
      if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
      ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
  } else {
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
10368e2fed03SBarry Smith 
10378e2fed03SBarry Smith #undef __FUNCT__
10384a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket"
1039dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
1040416022c9SBarry Smith {
104144a69424SLois Curfman McInnes   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
1042dfbe8321SBarry Smith   PetscErrorCode    ierr;
104332dcc486SBarry Smith   PetscMPIInt       rank = aij->rank,size = aij->size;
1044d38fa0fbSBarry Smith   PetscTruth        isdraw,iascii,isbinary;
1045b0a32e0cSBarry Smith   PetscViewer       sviewer;
1046f3ef73ceSBarry Smith   PetscViewerFormat format;
1047416022c9SBarry Smith 
10483a40ed3dSBarry Smith   PetscFunctionBegin;
1049fb9695e5SSatish Balay   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
105032077d6dSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
10518e2fed03SBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
105232077d6dSBarry Smith   if (iascii) {
1053b0a32e0cSBarry Smith     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1054456192e2SBarry Smith     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
10554e220ebcSLois Curfman McInnes       MatInfo    info;
1056923f20ffSKris Buschelman       PetscTruth inodes;
1057923f20ffSKris Buschelman 
10587adad957SLisandro Dalcin       ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
1059888f2ed8SSatish Balay       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
1060923f20ffSKris Buschelman       ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr);
1061923f20ffSKris Buschelman       if (!inodes) {
106277431f27SBarry Smith         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n",
1063d0f46423SBarry Smith 					      rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
10646831982aSBarry Smith       } else {
106577431f27SBarry Smith         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n",
1066d0f46423SBarry Smith 		    rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
10676831982aSBarry Smith       }
1068888f2ed8SSatish Balay       ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
106977431f27SBarry Smith       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
1070888f2ed8SSatish Balay       ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
107177431f27SBarry Smith       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
1072b0a32e0cSBarry Smith       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
107307d81ca4SBarry Smith       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
1074a40aa06bSLois Curfman McInnes       ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr);
10753a40ed3dSBarry Smith       PetscFunctionReturn(0);
1076fb9695e5SSatish Balay     } else if (format == PETSC_VIEWER_ASCII_INFO) {
1077923f20ffSKris Buschelman       PetscInt   inodecount,inodelimit,*inodes;
1078923f20ffSKris Buschelman       ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr);
1079923f20ffSKris Buschelman       if (inodes) {
1080923f20ffSKris Buschelman         ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr);
1081d38fa0fbSBarry Smith       } else {
1082d38fa0fbSBarry Smith         ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr);
1083d38fa0fbSBarry Smith       }
10843a40ed3dSBarry Smith       PetscFunctionReturn(0);
10854aedb280SBarry Smith     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
10864aedb280SBarry Smith       PetscFunctionReturn(0);
108708480c60SBarry Smith     }
10888e2fed03SBarry Smith   } else if (isbinary) {
10898e2fed03SBarry Smith     if (size == 1) {
10907adad957SLisandro Dalcin       ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
10918e2fed03SBarry Smith       ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
10928e2fed03SBarry Smith     } else {
10938e2fed03SBarry Smith       ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr);
10948e2fed03SBarry Smith     }
10958e2fed03SBarry Smith     PetscFunctionReturn(0);
10960f5bd95cSBarry Smith   } else if (isdraw) {
1097b0a32e0cSBarry Smith     PetscDraw  draw;
109819bcc07fSBarry Smith     PetscTruth isnull;
1099b0a32e0cSBarry Smith     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
1100b0a32e0cSBarry Smith     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
110119bcc07fSBarry Smith   }
110219bcc07fSBarry Smith 
110317699dbbSLois Curfman McInnes   if (size == 1) {
11047adad957SLisandro Dalcin     ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
110578b31e54SBarry Smith     ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
11063a40ed3dSBarry Smith   } else {
110795373324SBarry Smith     /* assemble the entire matrix onto first processor. */
110895373324SBarry Smith     Mat         A;
1109ec8511deSBarry Smith     Mat_SeqAIJ  *Aloc;
1110d0f46423SBarry Smith     PetscInt    M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct;
1111dd6ea824SBarry Smith     MatScalar   *a;
11122ee70a88SLois Curfman McInnes 
1113d0f46423SBarry Smith     if (mat->rmap->N > 1024) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 512 rows, use binary format instead");
11140805154bSBarry Smith 
11157adad957SLisandro Dalcin     ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr);
111617699dbbSLois Curfman McInnes     if (!rank) {
1117f69a0ea3SMatthew Knepley       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
11183a40ed3dSBarry Smith     } else {
1119f69a0ea3SMatthew Knepley       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
112095373324SBarry Smith     }
1121f204ca49SKris Buschelman     /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */
1122f204ca49SKris Buschelman     ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
1123f204ca49SKris Buschelman     ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
112452e6d16bSBarry Smith     ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr);
1125416022c9SBarry Smith 
112695373324SBarry Smith     /* copy over the A part */
1127ec8511deSBarry Smith     Aloc = (Mat_SeqAIJ*)aij->A->data;
1128d0f46423SBarry Smith     m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1129d0f46423SBarry Smith     row = mat->rmap->rstart;
1130d0f46423SBarry Smith     for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;}
113195373324SBarry Smith     for (i=0; i<m; i++) {
1132416022c9SBarry Smith       ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr);
113395373324SBarry Smith       row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i];
113495373324SBarry Smith     }
11352ee70a88SLois Curfman McInnes     aj = Aloc->j;
1136d0f46423SBarry Smith     for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;}
113795373324SBarry Smith 
113895373324SBarry Smith     /* copy over the B part */
1139ec8511deSBarry Smith     Aloc = (Mat_SeqAIJ*)aij->B->data;
1140d0f46423SBarry Smith     m    = aij->B->rmap->n;  ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1141d0f46423SBarry Smith     row  = mat->rmap->rstart;
1142b1d57f15SBarry Smith     ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
1143b0a32e0cSBarry Smith     ct   = cols;
1144bfec09a0SHong Zhang     for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];}
114595373324SBarry Smith     for (i=0; i<m; i++) {
1146416022c9SBarry Smith       ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr);
114795373324SBarry Smith       row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i];
114895373324SBarry Smith     }
1149606d414cSSatish Balay     ierr = PetscFree(ct);CHKERRQ(ierr);
11506d4a8577SBarry Smith     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
11516d4a8577SBarry Smith     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
115255843e3eSBarry Smith     /*
115355843e3eSBarry Smith        Everyone has to call to draw the matrix since the graphics waits are
1154b0a32e0cSBarry Smith        synchronized across all processors that share the PetscDraw object
115555843e3eSBarry Smith     */
1156b0a32e0cSBarry Smith     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
1157e03a110bSBarry Smith     if (!rank) {
11587adad957SLisandro Dalcin       ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
11596831982aSBarry Smith       ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
116095373324SBarry Smith     }
1161b0a32e0cSBarry Smith     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
116278b31e54SBarry Smith     ierr = MatDestroy(A);CHKERRQ(ierr);
116395373324SBarry Smith   }
11643a40ed3dSBarry Smith   PetscFunctionReturn(0);
11651eb62cbbSBarry Smith }
11661eb62cbbSBarry Smith 
11674a2ae208SSatish Balay #undef __FUNCT__
11684a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ"
1169dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer)
1170416022c9SBarry Smith {
1171dfbe8321SBarry Smith   PetscErrorCode ierr;
117232077d6dSBarry Smith   PetscTruth     iascii,isdraw,issocket,isbinary;
1173416022c9SBarry Smith 
11743a40ed3dSBarry Smith   PetscFunctionBegin;
117532077d6dSBarry Smith   ierr  = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
1176fb9695e5SSatish Balay   ierr  = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
1177fb9695e5SSatish Balay   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
1178b0a32e0cSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr);
117932077d6dSBarry Smith   if (iascii || isdraw || isbinary || issocket) {
11807b2a1423SBarry Smith     ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
11815cd90555SBarry Smith   } else {
118279a5c55eSBarry Smith     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name);
1183416022c9SBarry Smith   }
11843a40ed3dSBarry Smith   PetscFunctionReturn(0);
1185416022c9SBarry Smith }
1186416022c9SBarry Smith 
11874a2ae208SSatish Balay #undef __FUNCT__
11884a2ae208SSatish Balay #define __FUNCT__ "MatRelax_MPIAIJ"
1189b1d57f15SBarry Smith PetscErrorCode MatRelax_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
11908a729477SBarry Smith {
119144a69424SLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
1192dfbe8321SBarry Smith   PetscErrorCode ierr;
1193c14dc6b6SHong Zhang   Vec            bb1;
11948a729477SBarry Smith 
11953a40ed3dSBarry Smith   PetscFunctionBegin;
1196c14dc6b6SHong Zhang   ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
11972798e883SHong Zhang 
1198c16cb8f2SBarry Smith   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){
1199da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
1200bd3bf7d3SHong Zhang       ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
12012798e883SHong Zhang       its--;
1202da3a660dSBarry Smith     }
12032798e883SHong Zhang 
12042798e883SHong Zhang     while (its--) {
1205ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1206ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
12072798e883SHong Zhang 
1208c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1209efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1210c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
12112798e883SHong Zhang 
1212c14dc6b6SHong Zhang       /* local sweep */
121371f1c65dSBarry Smith       ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
12142798e883SHong Zhang     }
12153a40ed3dSBarry Smith   } else if (flag & SOR_LOCAL_FORWARD_SWEEP){
1216da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
1217c14dc6b6SHong Zhang       ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
12182798e883SHong Zhang       its--;
1219da3a660dSBarry Smith     }
12202798e883SHong Zhang     while (its--) {
1221ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1222ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
12232798e883SHong Zhang 
1224c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1225efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1226c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
1227c14dc6b6SHong Zhang 
1228c14dc6b6SHong Zhang       /* local sweep */
122971f1c65dSBarry Smith       ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
12302798e883SHong Zhang     }
12313a40ed3dSBarry Smith   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){
1232da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
1233c14dc6b6SHong Zhang       ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
12342798e883SHong Zhang       its--;
1235da3a660dSBarry Smith     }
12362798e883SHong Zhang     while (its--) {
1237ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1238ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
12392798e883SHong Zhang 
1240c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1241efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1242c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
12432798e883SHong Zhang 
1244c14dc6b6SHong Zhang       /* local sweep */
124571f1c65dSBarry Smith       ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
12462798e883SHong Zhang     }
12473a40ed3dSBarry Smith   } else {
124829bbc08cSBarry Smith     SETERRQ(PETSC_ERR_SUP,"Parallel SOR not supported");
1249c16cb8f2SBarry Smith   }
1250c14dc6b6SHong Zhang 
1251c14dc6b6SHong Zhang   ierr = VecDestroy(bb1);CHKERRQ(ierr);
12523a40ed3dSBarry Smith   PetscFunctionReturn(0);
12538a729477SBarry Smith }
1254a66be287SLois Curfman McInnes 
12554a2ae208SSatish Balay #undef __FUNCT__
125642e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ"
125742e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B)
125842e855d1Svictor {
125942e855d1Svictor   MPI_Comm       comm,pcomm;
12605d0c19d7SBarry Smith   PetscInt       first,local_size,nrows;
12615d0c19d7SBarry Smith   const PetscInt *rows;
126242e855d1Svictor   int            ntids;
126342e855d1Svictor   IS             crowp,growp,irowp,lrowp,lcolp,icolp;
126442e855d1Svictor   PetscErrorCode ierr;
126542e855d1Svictor 
126642e855d1Svictor   PetscFunctionBegin;
126742e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)A,&comm); CHKERRQ(ierr);
126842e855d1Svictor   /* make a collective version of 'rowp' */
126942e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm); CHKERRQ(ierr);
127042e855d1Svictor   if (pcomm==comm) {
127142e855d1Svictor     crowp = rowp;
127242e855d1Svictor   } else {
127342e855d1Svictor     ierr = ISGetSize(rowp,&nrows); CHKERRQ(ierr);
127442e855d1Svictor     ierr = ISGetIndices(rowp,&rows); CHKERRQ(ierr);
127542e855d1Svictor     ierr = ISCreateGeneral(comm,nrows,rows,&crowp); CHKERRQ(ierr);
127642e855d1Svictor     ierr = ISRestoreIndices(rowp,&rows); CHKERRQ(ierr);
127742e855d1Svictor   }
127842e855d1Svictor   /* collect the global row permutation and invert it */
127942e855d1Svictor   ierr = ISAllGather(crowp,&growp); CHKERRQ(ierr);
128042e855d1Svictor   ierr = ISSetPermutation(growp); CHKERRQ(ierr);
128142e855d1Svictor   if (pcomm!=comm) {
128242e855d1Svictor     ierr = ISDestroy(crowp); CHKERRQ(ierr);
128342e855d1Svictor   }
128442e855d1Svictor   ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr);
128542e855d1Svictor   /* get the local target indices */
128642e855d1Svictor   ierr = MatGetOwnershipRange(A,&first,PETSC_NULL); CHKERRQ(ierr);
128742e855d1Svictor   ierr = MatGetLocalSize(A,&local_size,PETSC_NULL); CHKERRQ(ierr);
128842e855d1Svictor   ierr = ISGetIndices(irowp,&rows); CHKERRQ(ierr);
128942e855d1Svictor   ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,&lrowp); CHKERRQ(ierr);
129042e855d1Svictor   ierr = ISRestoreIndices(irowp,&rows); CHKERRQ(ierr);
129142e855d1Svictor   ierr = ISDestroy(irowp); CHKERRQ(ierr);
129242e855d1Svictor   /* the column permutation is so much easier;
129342e855d1Svictor      make a local version of 'colp' and invert it */
129442e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm); CHKERRQ(ierr);
129542e855d1Svictor   ierr = MPI_Comm_size(pcomm,&ntids); CHKERRQ(ierr);
129642e855d1Svictor   if (ntids==1) {
129742e855d1Svictor     lcolp = colp;
129842e855d1Svictor   } else {
129942e855d1Svictor     ierr = ISGetSize(colp,&nrows); CHKERRQ(ierr);
130042e855d1Svictor     ierr = ISGetIndices(colp,&rows); CHKERRQ(ierr);
130142e855d1Svictor     ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,&lcolp); CHKERRQ(ierr);
130242e855d1Svictor   }
130342e855d1Svictor   ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp); CHKERRQ(ierr);
130442e855d1Svictor   ierr = ISSetPermutation(lcolp); CHKERRQ(ierr);
130542e855d1Svictor   if (ntids>1) {
130642e855d1Svictor     ierr = ISRestoreIndices(colp,&rows); CHKERRQ(ierr);
130742e855d1Svictor     ierr = ISDestroy(lcolp); CHKERRQ(ierr);
130842e855d1Svictor   }
130942e855d1Svictor   /* now we just get the submatrix */
131042e855d1Svictor   ierr = MatGetSubMatrix(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B); CHKERRQ(ierr);
131142e855d1Svictor   /* clean up */
131242e855d1Svictor   ierr = ISDestroy(lrowp); CHKERRQ(ierr);
131342e855d1Svictor   ierr = ISDestroy(icolp); CHKERRQ(ierr);
131442e855d1Svictor   PetscFunctionReturn(0);
131542e855d1Svictor }
131642e855d1Svictor 
131742e855d1Svictor #undef __FUNCT__
13184a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ"
1319dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1320a66be287SLois Curfman McInnes {
1321a66be287SLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
1322a66be287SLois Curfman McInnes   Mat            A = mat->A,B = mat->B;
1323dfbe8321SBarry Smith   PetscErrorCode ierr;
1324329f5518SBarry Smith   PetscReal      isend[5],irecv[5];
1325a66be287SLois Curfman McInnes 
13263a40ed3dSBarry Smith   PetscFunctionBegin;
13274e220ebcSLois Curfman McInnes   info->block_size     = 1.0;
13284e220ebcSLois Curfman McInnes   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
13294e220ebcSLois Curfman McInnes   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
13304e220ebcSLois Curfman McInnes   isend[3] = info->memory;  isend[4] = info->mallocs;
13314e220ebcSLois Curfman McInnes   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
13324e220ebcSLois Curfman McInnes   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
13334e220ebcSLois Curfman McInnes   isend[3] += info->memory;  isend[4] += info->mallocs;
1334a66be287SLois Curfman McInnes   if (flag == MAT_LOCAL) {
13354e220ebcSLois Curfman McInnes     info->nz_used      = isend[0];
13364e220ebcSLois Curfman McInnes     info->nz_allocated = isend[1];
13374e220ebcSLois Curfman McInnes     info->nz_unneeded  = isend[2];
13384e220ebcSLois Curfman McInnes     info->memory       = isend[3];
13394e220ebcSLois Curfman McInnes     info->mallocs      = isend[4];
1340a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_MAX) {
13417adad957SLisandro Dalcin     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr);
13424e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
13434e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
13444e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
13454e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
13464e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1347a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_SUM) {
13487adad957SLisandro Dalcin     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr);
13494e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
13504e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
13514e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
13524e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
13534e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1354a66be287SLois Curfman McInnes   }
13554e220ebcSLois Curfman McInnes   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
13564e220ebcSLois Curfman McInnes   info->fill_ratio_needed = 0;
13574e220ebcSLois Curfman McInnes   info->factor_mallocs    = 0;
13584e220ebcSLois Curfman McInnes 
13593a40ed3dSBarry Smith   PetscFunctionReturn(0);
1360a66be287SLois Curfman McInnes }
1361a66be287SLois Curfman McInnes 
13624a2ae208SSatish Balay #undef __FUNCT__
13634a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ"
13644e0d8c25SBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscTruth flg)
1365c74985f6SBarry Smith {
1366c0bbcb79SLois Curfman McInnes   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1367dfbe8321SBarry Smith   PetscErrorCode ierr;
1368c74985f6SBarry Smith 
13693a40ed3dSBarry Smith   PetscFunctionBegin;
137012c028f9SKris Buschelman   switch (op) {
1371512a5fc5SBarry Smith   case MAT_NEW_NONZERO_LOCATIONS:
137212c028f9SKris Buschelman   case MAT_NEW_NONZERO_ALLOCATION_ERR:
137328b2fa4aSMatthew Knepley   case MAT_UNUSED_NONZERO_LOCATION_ERR:
137412c028f9SKris Buschelman   case MAT_KEEP_ZEROED_ROWS:
137512c028f9SKris Buschelman   case MAT_NEW_NONZERO_LOCATION_ERR:
137612c028f9SKris Buschelman   case MAT_USE_INODES:
137712c028f9SKris Buschelman   case MAT_IGNORE_ZERO_ENTRIES:
13784e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
13794e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
138012c028f9SKris Buschelman     break;
138112c028f9SKris Buschelman   case MAT_ROW_ORIENTED:
13824e0d8c25SBarry Smith     a->roworiented = flg;
13834e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
13844e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
138512c028f9SKris Buschelman     break;
13864e0d8c25SBarry Smith   case MAT_NEW_DIAGONALS:
1387290bbb0aSBarry Smith     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
138812c028f9SKris Buschelman     break;
138912c028f9SKris Buschelman   case MAT_IGNORE_OFF_PROC_ENTRIES:
13907c922b88SBarry Smith     a->donotstash = PETSC_TRUE;
139112c028f9SKris Buschelman     break;
139277e54ba9SKris Buschelman   case MAT_SYMMETRIC:
13934e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
139425f421beSHong Zhang     break;
139577e54ba9SKris Buschelman   case MAT_STRUCTURALLY_SYMMETRIC:
1396bf108f30SBarry Smith   case MAT_HERMITIAN:
1397bf108f30SBarry Smith   case MAT_SYMMETRY_ETERNAL:
13984e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
139977e54ba9SKris Buschelman     break;
140012c028f9SKris Buschelman   default:
1401ad86a440SBarry Smith     SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op);
14023a40ed3dSBarry Smith   }
14033a40ed3dSBarry Smith   PetscFunctionReturn(0);
1404c74985f6SBarry Smith }
1405c74985f6SBarry Smith 
14064a2ae208SSatish Balay #undef __FUNCT__
14074a2ae208SSatish Balay #define __FUNCT__ "MatGetRow_MPIAIJ"
1408b1d57f15SBarry Smith PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140939e00950SLois Curfman McInnes {
1410154123eaSLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
141187828ca2SBarry Smith   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
14126849ba73SBarry Smith   PetscErrorCode ierr;
1413d0f46423SBarry Smith   PetscInt       i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart;
1414d0f46423SBarry Smith   PetscInt       nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend;
1415b1d57f15SBarry Smith   PetscInt       *cmap,*idx_p;
141639e00950SLois Curfman McInnes 
14173a40ed3dSBarry Smith   PetscFunctionBegin;
1418abc0a331SBarry Smith   if (mat->getrowactive) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active");
14197a0afa10SBarry Smith   mat->getrowactive = PETSC_TRUE;
14207a0afa10SBarry Smith 
142170f0671dSBarry Smith   if (!mat->rowvalues && (idx || v)) {
14227a0afa10SBarry Smith     /*
14237a0afa10SBarry Smith         allocate enough space to hold information from the longest row.
14247a0afa10SBarry Smith     */
14257a0afa10SBarry Smith     Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data;
1426b1d57f15SBarry Smith     PetscInt     max = 1,tmp;
1427d0f46423SBarry Smith     for (i=0; i<matin->rmap->n; i++) {
14287a0afa10SBarry Smith       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
14297a0afa10SBarry Smith       if (max < tmp) { max = tmp; }
14307a0afa10SBarry Smith     }
1431b1d57f15SBarry Smith     ierr = PetscMalloc(max*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr);
1432b1d57f15SBarry Smith     mat->rowindices = (PetscInt*)(mat->rowvalues + max);
14337a0afa10SBarry Smith   }
14347a0afa10SBarry Smith 
143529bbc08cSBarry Smith   if (row < rstart || row >= rend) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Only local rows")
1436abc0e9e4SLois Curfman McInnes   lrow = row - rstart;
143739e00950SLois Curfman McInnes 
1438154123eaSLois Curfman McInnes   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1439154123eaSLois Curfman McInnes   if (!v)   {pvA = 0; pvB = 0;}
1440154123eaSLois Curfman McInnes   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1441f830108cSBarry Smith   ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1442f830108cSBarry Smith   ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1443154123eaSLois Curfman McInnes   nztot = nzA + nzB;
1444154123eaSLois Curfman McInnes 
144570f0671dSBarry Smith   cmap  = mat->garray;
1446154123eaSLois Curfman McInnes   if (v  || idx) {
1447154123eaSLois Curfman McInnes     if (nztot) {
1448154123eaSLois Curfman McInnes       /* Sort by increasing column numbers, assuming A and B already sorted */
1449b1d57f15SBarry Smith       PetscInt imark = -1;
1450154123eaSLois Curfman McInnes       if (v) {
145170f0671dSBarry Smith         *v = v_p = mat->rowvalues;
145239e00950SLois Curfman McInnes         for (i=0; i<nzB; i++) {
145370f0671dSBarry Smith           if (cmap[cworkB[i]] < cstart)   v_p[i] = vworkB[i];
1454154123eaSLois Curfman McInnes           else break;
1455154123eaSLois Curfman McInnes         }
1456154123eaSLois Curfman McInnes         imark = i;
145770f0671dSBarry Smith         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
145870f0671dSBarry Smith         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1459154123eaSLois Curfman McInnes       }
1460154123eaSLois Curfman McInnes       if (idx) {
146170f0671dSBarry Smith         *idx = idx_p = mat->rowindices;
146270f0671dSBarry Smith         if (imark > -1) {
146370f0671dSBarry Smith           for (i=0; i<imark; i++) {
146470f0671dSBarry Smith             idx_p[i] = cmap[cworkB[i]];
146570f0671dSBarry Smith           }
146670f0671dSBarry Smith         } else {
1467154123eaSLois Curfman McInnes           for (i=0; i<nzB; i++) {
146870f0671dSBarry Smith             if (cmap[cworkB[i]] < cstart)   idx_p[i] = cmap[cworkB[i]];
1469154123eaSLois Curfman McInnes             else break;
1470154123eaSLois Curfman McInnes           }
1471154123eaSLois Curfman McInnes           imark = i;
147270f0671dSBarry Smith         }
147370f0671dSBarry Smith         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart + cworkA[i];
147470f0671dSBarry Smith         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]];
147539e00950SLois Curfman McInnes       }
14763f97c4b0SBarry Smith     } else {
14771ca473b0SSatish Balay       if (idx) *idx = 0;
14781ca473b0SSatish Balay       if (v)   *v   = 0;
14791ca473b0SSatish Balay     }
1480154123eaSLois Curfman McInnes   }
148139e00950SLois Curfman McInnes   *nz = nztot;
1482f830108cSBarry Smith   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1483f830108cSBarry Smith   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
14843a40ed3dSBarry Smith   PetscFunctionReturn(0);
148539e00950SLois Curfman McInnes }
148639e00950SLois Curfman McInnes 
14874a2ae208SSatish Balay #undef __FUNCT__
14884a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ"
1489b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
149039e00950SLois Curfman McInnes {
14917a0afa10SBarry Smith   Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data;
14923a40ed3dSBarry Smith 
14933a40ed3dSBarry Smith   PetscFunctionBegin;
1494abc0a331SBarry Smith   if (!aij->getrowactive) {
1495abc0a331SBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
14967a0afa10SBarry Smith   }
14977a0afa10SBarry Smith   aij->getrowactive = PETSC_FALSE;
14983a40ed3dSBarry Smith   PetscFunctionReturn(0);
149939e00950SLois Curfman McInnes }
150039e00950SLois Curfman McInnes 
15014a2ae208SSatish Balay #undef __FUNCT__
15024a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIAIJ"
1503dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm)
1504855ac2c5SLois Curfman McInnes {
1505855ac2c5SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
1506ec8511deSBarry Smith   Mat_SeqAIJ     *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data;
1507dfbe8321SBarry Smith   PetscErrorCode ierr;
1508d0f46423SBarry Smith   PetscInt       i,j,cstart = mat->cmap->rstart;
1509329f5518SBarry Smith   PetscReal      sum = 0.0;
1510a77337e4SBarry Smith   MatScalar      *v;
151104ca555eSLois Curfman McInnes 
15123a40ed3dSBarry Smith   PetscFunctionBegin;
151317699dbbSLois Curfman McInnes   if (aij->size == 1) {
151414183eadSLois Curfman McInnes     ierr =  MatNorm(aij->A,type,norm);CHKERRQ(ierr);
151537fa93a5SLois Curfman McInnes   } else {
151604ca555eSLois Curfman McInnes     if (type == NORM_FROBENIUS) {
151704ca555eSLois Curfman McInnes       v = amat->a;
151804ca555eSLois Curfman McInnes       for (i=0; i<amat->nz; i++) {
1519aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX)
1520329f5518SBarry Smith         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
152104ca555eSLois Curfman McInnes #else
152204ca555eSLois Curfman McInnes         sum += (*v)*(*v); v++;
152304ca555eSLois Curfman McInnes #endif
152404ca555eSLois Curfman McInnes       }
152504ca555eSLois Curfman McInnes       v = bmat->a;
152604ca555eSLois Curfman McInnes       for (i=0; i<bmat->nz; i++) {
1527aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX)
1528329f5518SBarry Smith         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
152904ca555eSLois Curfman McInnes #else
153004ca555eSLois Curfman McInnes         sum += (*v)*(*v); v++;
153104ca555eSLois Curfman McInnes #endif
153204ca555eSLois Curfman McInnes       }
15337adad957SLisandro Dalcin       ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
153404ca555eSLois Curfman McInnes       *norm = sqrt(*norm);
15353a40ed3dSBarry Smith     } else if (type == NORM_1) { /* max column norm */
1536329f5518SBarry Smith       PetscReal *tmp,*tmp2;
1537b1d57f15SBarry Smith       PetscInt  *jj,*garray = aij->garray;
1538d0f46423SBarry Smith       ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr);
1539d0f46423SBarry Smith       ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr);
1540d0f46423SBarry Smith       ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
154104ca555eSLois Curfman McInnes       *norm = 0.0;
154204ca555eSLois Curfman McInnes       v = amat->a; jj = amat->j;
154304ca555eSLois Curfman McInnes       for (j=0; j<amat->nz; j++) {
1544bfec09a0SHong Zhang         tmp[cstart + *jj++ ] += PetscAbsScalar(*v);  v++;
154504ca555eSLois Curfman McInnes       }
154604ca555eSLois Curfman McInnes       v = bmat->a; jj = bmat->j;
154704ca555eSLois Curfman McInnes       for (j=0; j<bmat->nz; j++) {
1548bfec09a0SHong Zhang         tmp[garray[*jj++]] += PetscAbsScalar(*v); v++;
154904ca555eSLois Curfman McInnes       }
1550d0f46423SBarry Smith       ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
1551d0f46423SBarry Smith       for (j=0; j<mat->cmap->N; j++) {
155204ca555eSLois Curfman McInnes         if (tmp2[j] > *norm) *norm = tmp2[j];
155304ca555eSLois Curfman McInnes       }
1554606d414cSSatish Balay       ierr = PetscFree(tmp);CHKERRQ(ierr);
1555606d414cSSatish Balay       ierr = PetscFree(tmp2);CHKERRQ(ierr);
15563a40ed3dSBarry Smith     } else if (type == NORM_INFINITY) { /* max row norm */
1557329f5518SBarry Smith       PetscReal ntemp = 0.0;
1558d0f46423SBarry Smith       for (j=0; j<aij->A->rmap->n; j++) {
1559bfec09a0SHong Zhang         v = amat->a + amat->i[j];
156004ca555eSLois Curfman McInnes         sum = 0.0;
156104ca555eSLois Curfman McInnes         for (i=0; i<amat->i[j+1]-amat->i[j]; i++) {
1562cddf8d76SBarry Smith           sum += PetscAbsScalar(*v); v++;
156304ca555eSLois Curfman McInnes         }
1564bfec09a0SHong Zhang         v = bmat->a + bmat->i[j];
156504ca555eSLois Curfman McInnes         for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) {
1566cddf8d76SBarry Smith           sum += PetscAbsScalar(*v); v++;
156704ca555eSLois Curfman McInnes         }
1568515d9167SLois Curfman McInnes         if (sum > ntemp) ntemp = sum;
156904ca555eSLois Curfman McInnes       }
15707adad957SLisandro Dalcin       ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
1571ca161407SBarry Smith     } else {
157229bbc08cSBarry Smith       SETERRQ(PETSC_ERR_SUP,"No support for two norm");
157304ca555eSLois Curfman McInnes     }
157437fa93a5SLois Curfman McInnes   }
15753a40ed3dSBarry Smith   PetscFunctionReturn(0);
1576855ac2c5SLois Curfman McInnes }
1577855ac2c5SLois Curfman McInnes 
15784a2ae208SSatish Balay #undef __FUNCT__
15794a2ae208SSatish Balay #define __FUNCT__ "MatTranspose_MPIAIJ"
1580fc4dec0aSBarry Smith PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout)
1581b7c46309SBarry Smith {
1582b7c46309SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1583da668accSHong Zhang   Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
1584dfbe8321SBarry Smith   PetscErrorCode ierr;
1585d0f46423SBarry Smith   PetscInt       M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz;
1586d0f46423SBarry Smith   PetscInt       cstart=A->cmap->rstart,ncol;
15873a40ed3dSBarry Smith   Mat            B;
1588a77337e4SBarry Smith   MatScalar      *array;
1589b7c46309SBarry Smith 
15903a40ed3dSBarry Smith   PetscFunctionBegin;
1591e9695a30SBarry Smith   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1592da668accSHong Zhang 
1593d0f46423SBarry Smith   ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n;
1594da668accSHong Zhang   ai = Aloc->i; aj = Aloc->j;
1595da668accSHong Zhang   bi = Bloc->i; bj = Bloc->j;
1596fc73b1b3SBarry Smith   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1597fc73b1b3SBarry Smith     /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
1598fc73b1b3SBarry Smith     ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
1599da668accSHong Zhang     ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
1600da668accSHong Zhang     for (i=0; i<ai[ma]; i++){
1601da668accSHong Zhang       d_nnz[aj[i]] ++;
1602da668accSHong Zhang       aj[i] += cstart; /* global col index to be used by MatSetValues() */
1603d4bb536fSBarry Smith     }
1604d4bb536fSBarry Smith 
16057adad957SLisandro Dalcin     ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
1606d0f46423SBarry Smith     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
16077adad957SLisandro Dalcin     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1608da668accSHong Zhang     ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);
1609fc73b1b3SBarry Smith     ierr = PetscFree(d_nnz);CHKERRQ(ierr);
1610fc4dec0aSBarry Smith   } else {
1611fc4dec0aSBarry Smith     B = *matout;
1612fc4dec0aSBarry Smith   }
1613b7c46309SBarry Smith 
1614b7c46309SBarry Smith   /* copy over the A part */
1615da668accSHong Zhang   array = Aloc->a;
1616d0f46423SBarry Smith   row = A->rmap->rstart;
1617da668accSHong Zhang   for (i=0; i<ma; i++) {
1618da668accSHong Zhang     ncol = ai[i+1]-ai[i];
1619da668accSHong Zhang     ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
1620da668accSHong Zhang     row++; array += ncol; aj += ncol;
1621b7c46309SBarry Smith   }
1622b7c46309SBarry Smith   aj = Aloc->j;
1623da668accSHong Zhang   for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */
1624b7c46309SBarry Smith 
1625b7c46309SBarry Smith   /* copy over the B part */
1626fc73b1b3SBarry Smith   ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr);
1627fc73b1b3SBarry Smith   ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr);
1628da668accSHong Zhang   array = Bloc->a;
1629d0f46423SBarry Smith   row = A->rmap->rstart;
1630da668accSHong Zhang   for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
163161a2fbbaSHong Zhang   cols_tmp = cols;
1632da668accSHong Zhang   for (i=0; i<mb; i++) {
1633da668accSHong Zhang     ncol = bi[i+1]-bi[i];
163461a2fbbaSHong Zhang     ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
163561a2fbbaSHong Zhang     row++; array += ncol; cols_tmp += ncol;
1636b7c46309SBarry Smith   }
1637fc73b1b3SBarry Smith   ierr = PetscFree(cols);CHKERRQ(ierr);
1638fc73b1b3SBarry Smith 
16396d4a8577SBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
16406d4a8577SBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1641815cbec1SBarry Smith   if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
16420de55854SLois Curfman McInnes     *matout = B;
16430de55854SLois Curfman McInnes   } else {
1644273d9f13SBarry Smith     ierr = MatHeaderCopy(A,B);CHKERRQ(ierr);
16450de55854SLois Curfman McInnes   }
16463a40ed3dSBarry Smith   PetscFunctionReturn(0);
1647b7c46309SBarry Smith }
1648b7c46309SBarry Smith 
16494a2ae208SSatish Balay #undef __FUNCT__
16504a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ"
1651dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr)
1652a008b906SSatish Balay {
16534b967eb1SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
16544b967eb1SSatish Balay   Mat            a = aij->A,b = aij->B;
1655dfbe8321SBarry Smith   PetscErrorCode ierr;
1656b1d57f15SBarry Smith   PetscInt       s1,s2,s3;
1657a008b906SSatish Balay 
16583a40ed3dSBarry Smith   PetscFunctionBegin;
16594b967eb1SSatish Balay   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
16604b967eb1SSatish Balay   if (rr) {
1661e1311b90SBarry Smith     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
166229bbc08cSBarry Smith     if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
16634b967eb1SSatish Balay     /* Overlap communication with computation. */
1664ca9f406cSSatish Balay     ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1665a008b906SSatish Balay   }
16664b967eb1SSatish Balay   if (ll) {
1667e1311b90SBarry Smith     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
166829bbc08cSBarry Smith     if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1669f830108cSBarry Smith     ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr);
16704b967eb1SSatish Balay   }
16714b967eb1SSatish Balay   /* scale  the diagonal block */
1672f830108cSBarry Smith   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
16734b967eb1SSatish Balay 
16744b967eb1SSatish Balay   if (rr) {
16754b967eb1SSatish Balay     /* Do a scatter end and then right scale the off-diagonal block */
1676ca9f406cSSatish Balay     ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1677f830108cSBarry Smith     ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr);
16784b967eb1SSatish Balay   }
16794b967eb1SSatish Balay 
16803a40ed3dSBarry Smith   PetscFunctionReturn(0);
1681a008b906SSatish Balay }
1682a008b906SSatish Balay 
16834a2ae208SSatish Balay #undef __FUNCT__
1684521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ"
1685521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs)
16865a838052SSatish Balay {
1687521d7252SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
1688521d7252SBarry Smith   PetscErrorCode ierr;
1689521d7252SBarry Smith 
16903a40ed3dSBarry Smith   PetscFunctionBegin;
1691521d7252SBarry Smith   ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr);
1692521d7252SBarry Smith   ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr);
16933a40ed3dSBarry Smith   PetscFunctionReturn(0);
16945a838052SSatish Balay }
16954a2ae208SSatish Balay #undef __FUNCT__
16964a2ae208SSatish Balay #define __FUNCT__ "MatSetUnfactored_MPIAIJ"
1697dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A)
1698bb5a7306SBarry Smith {
1699bb5a7306SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
1700dfbe8321SBarry Smith   PetscErrorCode ierr;
17013a40ed3dSBarry Smith 
17023a40ed3dSBarry Smith   PetscFunctionBegin;
1703bb5a7306SBarry Smith   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
17043a40ed3dSBarry Smith   PetscFunctionReturn(0);
1705bb5a7306SBarry Smith }
1706bb5a7306SBarry Smith 
17074a2ae208SSatish Balay #undef __FUNCT__
17084a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ"
1709dfbe8321SBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscTruth *flag)
1710d4bb536fSBarry Smith {
1711d4bb536fSBarry Smith   Mat_MPIAIJ     *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data;
1712d4bb536fSBarry Smith   Mat            a,b,c,d;
1713d4bb536fSBarry Smith   PetscTruth     flg;
1714dfbe8321SBarry Smith   PetscErrorCode ierr;
1715d4bb536fSBarry Smith 
17163a40ed3dSBarry Smith   PetscFunctionBegin;
1717d4bb536fSBarry Smith   a = matA->A; b = matA->B;
1718d4bb536fSBarry Smith   c = matB->A; d = matB->B;
1719d4bb536fSBarry Smith 
1720d4bb536fSBarry Smith   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1721abc0a331SBarry Smith   if (flg) {
1722d4bb536fSBarry Smith     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1723d4bb536fSBarry Smith   }
17247adad957SLisandro Dalcin   ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
17253a40ed3dSBarry Smith   PetscFunctionReturn(0);
1726d4bb536fSBarry Smith }
1727d4bb536fSBarry Smith 
17284a2ae208SSatish Balay #undef __FUNCT__
17294a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ"
1730dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str)
1731cb5b572fSBarry Smith {
1732dfbe8321SBarry Smith   PetscErrorCode ierr;
1733cb5b572fSBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ *)A->data;
1734cb5b572fSBarry Smith   Mat_MPIAIJ     *b = (Mat_MPIAIJ *)B->data;
1735cb5b572fSBarry Smith 
1736cb5b572fSBarry Smith   PetscFunctionBegin;
173733f4a19fSKris Buschelman   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
173833f4a19fSKris Buschelman   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1739cb5b572fSBarry Smith     /* because of the column compression in the off-processor part of the matrix a->B,
1740cb5b572fSBarry Smith        the number of columns in a->B and b->B may be different, hence we cannot call
1741cb5b572fSBarry Smith        the MatCopy() directly on the two parts. If need be, we can provide a more
1742cb5b572fSBarry Smith        efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices
1743cb5b572fSBarry Smith        then copying the submatrices */
1744cb5b572fSBarry Smith     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1745cb5b572fSBarry Smith   } else {
1746cb5b572fSBarry Smith     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1747cb5b572fSBarry Smith     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1748cb5b572fSBarry Smith   }
1749cb5b572fSBarry Smith   PetscFunctionReturn(0);
1750cb5b572fSBarry Smith }
1751cb5b572fSBarry Smith 
17524a2ae208SSatish Balay #undef __FUNCT__
17534a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ"
1754dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A)
1755273d9f13SBarry Smith {
1756dfbe8321SBarry Smith   PetscErrorCode ierr;
1757273d9f13SBarry Smith 
1758273d9f13SBarry Smith   PetscFunctionBegin;
1759273d9f13SBarry Smith   ierr =  MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1760273d9f13SBarry Smith   PetscFunctionReturn(0);
1761273d9f13SBarry Smith }
1762273d9f13SBarry Smith 
1763ac90fabeSBarry Smith #include "petscblaslapack.h"
1764ac90fabeSBarry Smith #undef __FUNCT__
1765ac90fabeSBarry Smith #define __FUNCT__ "MatAXPY_MPIAIJ"
1766f4df32b1SMatthew Knepley PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1767ac90fabeSBarry Smith {
1768dfbe8321SBarry Smith   PetscErrorCode ierr;
1769b1d57f15SBarry Smith   PetscInt       i;
1770ac90fabeSBarry Smith   Mat_MPIAIJ     *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data;
17714ce68768SBarry Smith   PetscBLASInt   bnz,one=1;
1772ac90fabeSBarry Smith   Mat_SeqAIJ     *x,*y;
1773ac90fabeSBarry Smith 
1774ac90fabeSBarry Smith   PetscFunctionBegin;
1775ac90fabeSBarry Smith   if (str == SAME_NONZERO_PATTERN) {
1776f4df32b1SMatthew Knepley     PetscScalar alpha = a;
1777ac90fabeSBarry Smith     x = (Mat_SeqAIJ *)xx->A->data;
1778ac90fabeSBarry Smith     y = (Mat_SeqAIJ *)yy->A->data;
17790805154bSBarry Smith     bnz = PetscBLASIntCast(x->nz);
1780f4df32b1SMatthew Knepley     BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
1781ac90fabeSBarry Smith     x = (Mat_SeqAIJ *)xx->B->data;
1782ac90fabeSBarry Smith     y = (Mat_SeqAIJ *)yy->B->data;
17830805154bSBarry Smith     bnz = PetscBLASIntCast(x->nz);
1784f4df32b1SMatthew Knepley     BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
1785a30b2313SHong Zhang   } else if (str == SUBSET_NONZERO_PATTERN) {
1786f4df32b1SMatthew Knepley     ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr);
1787c537a176SHong Zhang 
1788c537a176SHong Zhang     x = (Mat_SeqAIJ *)xx->B->data;
1789a30b2313SHong Zhang     y = (Mat_SeqAIJ *)yy->B->data;
1790a30b2313SHong Zhang     if (y->xtoy && y->XtoY != xx->B) {
1791a30b2313SHong Zhang       ierr = PetscFree(y->xtoy);CHKERRQ(ierr);
1792a30b2313SHong Zhang       ierr = MatDestroy(y->XtoY);CHKERRQ(ierr);
1793c537a176SHong Zhang     }
1794a30b2313SHong Zhang     if (!y->xtoy) { /* get xtoy */
1795d0f46423SBarry Smith       ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr);
1796a30b2313SHong Zhang       y->XtoY = xx->B;
1797407f6b05SHong Zhang       ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr);
1798c537a176SHong Zhang     }
1799f4df32b1SMatthew Knepley     for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]);
1800ac90fabeSBarry Smith   } else {
1801f4df32b1SMatthew Knepley     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1802ac90fabeSBarry Smith   }
1803ac90fabeSBarry Smith   PetscFunctionReturn(0);
1804ac90fabeSBarry Smith }
1805ac90fabeSBarry Smith 
1806354c94deSBarry Smith EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat);
1807354c94deSBarry Smith 
1808354c94deSBarry Smith #undef __FUNCT__
1809354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ"
1810354c94deSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat)
1811354c94deSBarry Smith {
1812354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX)
1813354c94deSBarry Smith   PetscErrorCode ierr;
1814354c94deSBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
1815354c94deSBarry Smith 
1816354c94deSBarry Smith   PetscFunctionBegin;
1817354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr);
1818354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr);
1819354c94deSBarry Smith #else
1820354c94deSBarry Smith   PetscFunctionBegin;
1821354c94deSBarry Smith #endif
1822354c94deSBarry Smith   PetscFunctionReturn(0);
1823354c94deSBarry Smith }
1824354c94deSBarry Smith 
182599cafbc1SBarry Smith #undef __FUNCT__
182699cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ"
182799cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A)
182899cafbc1SBarry Smith {
182999cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
183099cafbc1SBarry Smith   PetscErrorCode ierr;
183199cafbc1SBarry Smith 
183299cafbc1SBarry Smith   PetscFunctionBegin;
183399cafbc1SBarry Smith   ierr = MatRealPart(a->A);CHKERRQ(ierr);
183499cafbc1SBarry Smith   ierr = MatRealPart(a->B);CHKERRQ(ierr);
183599cafbc1SBarry Smith   PetscFunctionReturn(0);
183699cafbc1SBarry Smith }
183799cafbc1SBarry Smith 
183899cafbc1SBarry Smith #undef __FUNCT__
183999cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ"
184099cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A)
184199cafbc1SBarry Smith {
184299cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
184399cafbc1SBarry Smith   PetscErrorCode ierr;
184499cafbc1SBarry Smith 
184599cafbc1SBarry Smith   PetscFunctionBegin;
184699cafbc1SBarry Smith   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
184799cafbc1SBarry Smith   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
184899cafbc1SBarry Smith   PetscFunctionReturn(0);
184999cafbc1SBarry Smith }
185099cafbc1SBarry Smith 
1851103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
1852103bf8bdSMatthew Knepley 
1853103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp>
1854a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp>
1855a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp>
1856a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp>
1857103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp>
1858a2c909beSMatthew Knepley #include <boost/multi_array.hpp>
1859d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp>
1860103bf8bdSMatthew Knepley 
1861103bf8bdSMatthew Knepley #undef __FUNCT__
1862103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ"
1863103bf8bdSMatthew Knepley /*
1864103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
1865103bf8bdSMatthew Knepley */
18660481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info)
1867103bf8bdSMatthew Knepley {
1868a2c909beSMatthew Knepley   namespace petsc = boost::distributed::petsc;
1869a2c909beSMatthew Knepley 
1870a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
1871a2c909beSMatthew Knepley   using boost::graph::distributed::ilu_default::process_group_type;
1872a2c909beSMatthew Knepley   using boost::graph::ilu_permuted;
1873a2c909beSMatthew Knepley 
1874103bf8bdSMatthew Knepley   PetscTruth      row_identity, col_identity;
1875776b82aeSLisandro Dalcin   PetscContainer  c;
1876103bf8bdSMatthew Knepley   PetscInt        m, n, M, N;
1877103bf8bdSMatthew Knepley   PetscErrorCode  ierr;
1878103bf8bdSMatthew Knepley 
1879103bf8bdSMatthew Knepley   PetscFunctionBegin;
1880103bf8bdSMatthew Knepley   if (info->levels != 0) SETERRQ(PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu");
1881103bf8bdSMatthew Knepley   ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr);
1882103bf8bdSMatthew Knepley   ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr);
1883103bf8bdSMatthew Knepley   if (!row_identity || !col_identity) {
1884103bf8bdSMatthew Knepley     SETERRQ(PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU");
1885103bf8bdSMatthew Knepley   }
1886103bf8bdSMatthew Knepley 
1887103bf8bdSMatthew Knepley   process_group_type pg;
1888a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
1889a2c909beSMatthew Knepley   lgraph_type*   lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg));
1890a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
1891a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
1892a2c909beSMatthew Knepley 
1893103bf8bdSMatthew Knepley   petsc::read_matrix(A, graph, get(boost::edge_weight, graph));
1894a2c909beSMatthew Knepley   ilu_permuted(level_graph);
1895103bf8bdSMatthew Knepley 
1896103bf8bdSMatthew Knepley   /* put together the new matrix */
18977adad957SLisandro Dalcin   ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr);
1898103bf8bdSMatthew Knepley   ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr);
1899103bf8bdSMatthew Knepley   ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr);
1900719d5645SBarry Smith   ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr);
1901719d5645SBarry Smith   ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr);
1902719d5645SBarry Smith   ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1903719d5645SBarry Smith   ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1904103bf8bdSMatthew Knepley 
19057adad957SLisandro Dalcin   ierr = PetscContainerCreate(((PetscObject)A)->comm, &c);
1906776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(c, lgraph_p);
1907719d5645SBarry Smith   ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c);
1908103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1909103bf8bdSMatthew Knepley }
1910103bf8bdSMatthew Knepley 
1911103bf8bdSMatthew Knepley #undef __FUNCT__
1912103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ"
19130481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info)
1914103bf8bdSMatthew Knepley {
1915103bf8bdSMatthew Knepley   PetscFunctionBegin;
1916103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1917103bf8bdSMatthew Knepley }
1918103bf8bdSMatthew Knepley 
1919103bf8bdSMatthew Knepley #undef __FUNCT__
1920103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ"
1921103bf8bdSMatthew Knepley /*
1922103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
1923103bf8bdSMatthew Knepley */
1924103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x)
1925103bf8bdSMatthew Knepley {
1926a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
1927a2c909beSMatthew Knepley 
1928a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
1929a2c909beSMatthew Knepley   lgraph_type*   lgraph_p;
1930776b82aeSLisandro Dalcin   PetscContainer c;
1931103bf8bdSMatthew Knepley   PetscErrorCode ierr;
1932103bf8bdSMatthew Knepley 
1933103bf8bdSMatthew Knepley   PetscFunctionBegin;
1934103bf8bdSMatthew Knepley   ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr);
1935776b82aeSLisandro Dalcin   ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr);
1936103bf8bdSMatthew Knepley   ierr = VecCopy(b, x); CHKERRQ(ierr);
1937a2c909beSMatthew Knepley 
1938a2c909beSMatthew Knepley   PetscScalar* array_x;
1939a2c909beSMatthew Knepley   ierr = VecGetArray(x, &array_x);CHKERRQ(ierr);
1940a2c909beSMatthew Knepley   PetscInt sx;
1941a2c909beSMatthew Knepley   ierr = VecGetSize(x, &sx);CHKERRQ(ierr);
1942a2c909beSMatthew Knepley 
1943a2c909beSMatthew Knepley   PetscScalar* array_b;
1944a2c909beSMatthew Knepley   ierr = VecGetArray(b, &array_b);CHKERRQ(ierr);
1945a2c909beSMatthew Knepley   PetscInt sb;
1946a2c909beSMatthew Knepley   ierr = VecGetSize(b, &sb);CHKERRQ(ierr);
1947a2c909beSMatthew Knepley 
1948a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
1949a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
1950a2c909beSMatthew Knepley 
1951a2c909beSMatthew Knepley   typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type;
1952a2c909beSMatthew Knepley   array_ref_type                                 ref_b(array_b, boost::extents[num_vertices(graph)]),
1953a2c909beSMatthew Knepley                                                  ref_x(array_x, boost::extents[num_vertices(graph)]);
1954a2c909beSMatthew Knepley 
1955a2c909beSMatthew Knepley   typedef boost::iterator_property_map<array_ref_type::iterator,
1956a2c909beSMatthew Knepley                                 boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type>  gvector_type;
1957a2c909beSMatthew Knepley   gvector_type                                   vector_b(ref_b.begin(), get(boost::vertex_index, graph)),
1958a2c909beSMatthew Knepley                                                  vector_x(ref_x.begin(), get(boost::vertex_index, graph));
1959a2c909beSMatthew Knepley 
1960a2c909beSMatthew Knepley   ilu_set_solve(*lgraph_p, vector_b, vector_x);
1961a2c909beSMatthew Knepley 
1962103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1963103bf8bdSMatthew Knepley }
1964103bf8bdSMatthew Knepley #endif
1965103bf8bdSMatthew Knepley 
196669db28dcSHong Zhang typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */
196769db28dcSHong Zhang   PetscInt       nzlocal,nsends,nrecvs;
1968aa5bb8c0SSatish Balay   PetscMPIInt    *send_rank;
1969aa5bb8c0SSatish Balay   PetscInt       *sbuf_nz,*sbuf_j,**rbuf_j;
197069db28dcSHong Zhang   PetscScalar    *sbuf_a,**rbuf_a;
197169db28dcSHong Zhang   PetscErrorCode (*MatDestroy)(Mat);
197269db28dcSHong Zhang } Mat_Redundant;
197369db28dcSHong Zhang 
197469db28dcSHong Zhang #undef __FUNCT__
197569db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant"
197669db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr)
197769db28dcSHong Zhang {
197869db28dcSHong Zhang   PetscErrorCode       ierr;
197969db28dcSHong Zhang   Mat_Redundant        *redund=(Mat_Redundant*)ptr;
198069db28dcSHong Zhang   PetscInt             i;
198169db28dcSHong Zhang 
198269db28dcSHong Zhang   PetscFunctionBegin;
198369db28dcSHong Zhang   ierr = PetscFree(redund->send_rank);CHKERRQ(ierr);
198469db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr);
198569db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr);
198669db28dcSHong Zhang   for (i=0; i<redund->nrecvs; i++){
198769db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr);
198869db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr);
198969db28dcSHong Zhang   }
199069db28dcSHong Zhang   ierr = PetscFree3(redund->sbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr);
199169db28dcSHong Zhang   ierr = PetscFree(redund);CHKERRQ(ierr);
199269db28dcSHong Zhang   PetscFunctionReturn(0);
199369db28dcSHong Zhang }
199469db28dcSHong Zhang 
199569db28dcSHong Zhang #undef __FUNCT__
199669db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant"
199769db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A)
199869db28dcSHong Zhang {
199969db28dcSHong Zhang   PetscErrorCode  ierr;
200069db28dcSHong Zhang   PetscContainer  container;
200169db28dcSHong Zhang   Mat_Redundant   *redund=PETSC_NULL;
200269db28dcSHong Zhang 
200369db28dcSHong Zhang   PetscFunctionBegin;
200469db28dcSHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
200569db28dcSHong Zhang   if (container) {
200669db28dcSHong Zhang     ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
200769db28dcSHong Zhang   } else {
200869db28dcSHong Zhang     SETERRQ(PETSC_ERR_PLIB,"Container does not exit");
200969db28dcSHong Zhang   }
201069db28dcSHong Zhang   A->ops->destroy = redund->MatDestroy;
201169db28dcSHong Zhang   ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr);
201269db28dcSHong Zhang   ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
201369db28dcSHong Zhang   ierr = PetscContainerDestroy(container);CHKERRQ(ierr);
201469db28dcSHong Zhang   PetscFunctionReturn(0);
201569db28dcSHong Zhang }
201669db28dcSHong Zhang 
201769db28dcSHong Zhang #undef __FUNCT__
201869db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ"
201969db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant)
202069db28dcSHong Zhang {
202169db28dcSHong Zhang   PetscMPIInt    rank,size;
20227adad957SLisandro Dalcin   MPI_Comm       comm=((PetscObject)mat)->comm;
202369db28dcSHong Zhang   PetscErrorCode ierr;
202469db28dcSHong Zhang   PetscInt       nsends=0,nrecvs=0,i,rownz_max=0;
202569db28dcSHong Zhang   PetscMPIInt    *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL;
2026d0f46423SBarry Smith   PetscInt       *rowrange=mat->rmap->range;
202769db28dcSHong Zhang   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
202869db28dcSHong Zhang   Mat            A=aij->A,B=aij->B,C=*matredundant;
202969db28dcSHong Zhang   Mat_SeqAIJ     *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data;
203069db28dcSHong Zhang   PetscScalar    *sbuf_a;
203169db28dcSHong Zhang   PetscInt       nzlocal=a->nz+b->nz;
2032d0f46423SBarry Smith   PetscInt       j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB;
2033d0f46423SBarry Smith   PetscInt       rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N;
203469db28dcSHong Zhang   PetscInt       *cols,ctmp,lwrite,*rptr,l,*sbuf_j;
2035a77337e4SBarry Smith   MatScalar      *aworkA,*aworkB;
2036a77337e4SBarry Smith   PetscScalar    *vals;
203769db28dcSHong Zhang   PetscMPIInt    tag1,tag2,tag3,imdex;
203869db28dcSHong Zhang   MPI_Request    *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL,
203969db28dcSHong Zhang                  *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL;
204069db28dcSHong Zhang   MPI_Status     recv_status,*send_status;
204169db28dcSHong Zhang   PetscInt       *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count;
204269db28dcSHong Zhang   PetscInt       **rbuf_j=PETSC_NULL;
204369db28dcSHong Zhang   PetscScalar    **rbuf_a=PETSC_NULL;
204469db28dcSHong Zhang   Mat_Redundant  *redund=PETSC_NULL;
204569db28dcSHong Zhang   PetscContainer container;
204669db28dcSHong Zhang 
204769db28dcSHong Zhang   PetscFunctionBegin;
204869db28dcSHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
204969db28dcSHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
205069db28dcSHong Zhang 
205169db28dcSHong Zhang   if (reuse == MAT_REUSE_MATRIX) {
205269db28dcSHong Zhang     ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
2053d0f46423SBarry Smith     if (M != N || M != mat->rmap->N) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size");
205469db28dcSHong Zhang     ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr);
205569db28dcSHong Zhang     if (M != N || M != mlocal_sub) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size");
205669db28dcSHong Zhang     ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
205769db28dcSHong Zhang     if (container) {
205869db28dcSHong Zhang       ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
205969db28dcSHong Zhang     } else {
206069db28dcSHong Zhang       SETERRQ(PETSC_ERR_PLIB,"Container does not exit");
206169db28dcSHong Zhang     }
206269db28dcSHong Zhang     if (nzlocal != redund->nzlocal) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong nzlocal");
206369db28dcSHong Zhang 
206469db28dcSHong Zhang     nsends    = redund->nsends;
206569db28dcSHong Zhang     nrecvs    = redund->nrecvs;
206669db28dcSHong Zhang     send_rank = redund->send_rank; recv_rank = send_rank + size;
206769db28dcSHong Zhang     sbuf_nz   = redund->sbuf_nz;     rbuf_nz = sbuf_nz + nsends;
206869db28dcSHong Zhang     sbuf_j    = redund->sbuf_j;
206969db28dcSHong Zhang     sbuf_a    = redund->sbuf_a;
207069db28dcSHong Zhang     rbuf_j    = redund->rbuf_j;
207169db28dcSHong Zhang     rbuf_a    = redund->rbuf_a;
207269db28dcSHong Zhang   }
207369db28dcSHong Zhang 
207469db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
207569db28dcSHong Zhang     PetscMPIInt  subrank,subsize;
207669db28dcSHong Zhang     PetscInt     nleftover,np_subcomm;
207769db28dcSHong Zhang     /* get the destination processors' id send_rank, nsends and nrecvs */
207869db28dcSHong Zhang     ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
207969db28dcSHong Zhang     ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
208069db28dcSHong Zhang     ierr = PetscMalloc((2*size+1)*sizeof(PetscMPIInt),&send_rank);
208169db28dcSHong Zhang     recv_rank = send_rank + size;
208269db28dcSHong Zhang     np_subcomm = size/nsubcomm;
208369db28dcSHong Zhang     nleftover  = size - nsubcomm*np_subcomm;
208469db28dcSHong Zhang     nsends = 0; nrecvs = 0;
208569db28dcSHong Zhang     for (i=0; i<size; i++){ /* i=rank*/
208669db28dcSHong Zhang       if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */
208769db28dcSHong Zhang         send_rank[nsends] = i; nsends++;
208869db28dcSHong Zhang         recv_rank[nrecvs++] = i;
208969db28dcSHong Zhang       }
209069db28dcSHong Zhang     }
209169db28dcSHong Zhang     if (rank >= size - nleftover){/* this proc is a leftover processor */
209269db28dcSHong Zhang       i = size-nleftover-1;
209369db28dcSHong Zhang       j = 0;
209469db28dcSHong Zhang       while (j < nsubcomm - nleftover){
209569db28dcSHong Zhang         send_rank[nsends++] = i;
209669db28dcSHong Zhang         i--; j++;
209769db28dcSHong Zhang       }
209869db28dcSHong Zhang     }
209969db28dcSHong Zhang 
210069db28dcSHong Zhang     if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */
210169db28dcSHong Zhang       for (i=0; i<nleftover; i++){
210269db28dcSHong Zhang         recv_rank[nrecvs++] = size-nleftover+i;
210369db28dcSHong Zhang       }
210469db28dcSHong Zhang     }
210569db28dcSHong Zhang 
210669db28dcSHong Zhang     /* allocate sbuf_j, sbuf_a */
210769db28dcSHong Zhang     i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2;
210869db28dcSHong Zhang     ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr);
210969db28dcSHong Zhang     ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr);
211069db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
211169db28dcSHong Zhang 
211269db28dcSHong Zhang   /* copy mat's local entries into the buffers */
211369db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
211469db28dcSHong Zhang     rownz_max = 0;
211569db28dcSHong Zhang     rptr = sbuf_j;
211669db28dcSHong Zhang     cols = sbuf_j + rend-rstart + 1;
211769db28dcSHong Zhang     vals = sbuf_a;
211869db28dcSHong Zhang     rptr[0] = 0;
211969db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
212069db28dcSHong Zhang       row = i + rstart;
212169db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
212269db28dcSHong Zhang       ncols  = nzA + nzB;
212369db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
212469db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
212569db28dcSHong Zhang       /* load the column indices for this row into cols */
212669db28dcSHong Zhang       lwrite = 0;
212769db28dcSHong Zhang       for (l=0; l<nzB; l++) {
212869db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart){
212969db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
213069db28dcSHong Zhang           cols[lwrite++] = ctmp;
213169db28dcSHong Zhang         }
213269db28dcSHong Zhang       }
213369db28dcSHong Zhang       for (l=0; l<nzA; l++){
213469db28dcSHong Zhang         vals[lwrite]   = aworkA[l];
213569db28dcSHong Zhang         cols[lwrite++] = cstart + cworkA[l];
213669db28dcSHong Zhang       }
213769db28dcSHong Zhang       for (l=0; l<nzB; l++) {
213869db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend){
213969db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
214069db28dcSHong Zhang           cols[lwrite++] = ctmp;
214169db28dcSHong Zhang         }
214269db28dcSHong Zhang       }
214369db28dcSHong Zhang       vals += ncols;
214469db28dcSHong Zhang       cols += ncols;
214569db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
214669db28dcSHong Zhang       if (rownz_max < ncols) rownz_max = ncols;
214769db28dcSHong Zhang     }
214869db28dcSHong Zhang     if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(1, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz);
214969db28dcSHong Zhang   } else { /* only copy matrix values into sbuf_a */
215069db28dcSHong Zhang     rptr = sbuf_j;
215169db28dcSHong Zhang     vals = sbuf_a;
215269db28dcSHong Zhang     rptr[0] = 0;
215369db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
215469db28dcSHong Zhang       row = i + rstart;
215569db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
215669db28dcSHong Zhang       ncols  = nzA + nzB;
215769db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
215869db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
215969db28dcSHong Zhang       lwrite = 0;
216069db28dcSHong Zhang       for (l=0; l<nzB; l++) {
216169db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l];
216269db28dcSHong Zhang       }
216369db28dcSHong Zhang       for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l];
216469db28dcSHong Zhang       for (l=0; l<nzB; l++) {
216569db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l];
216669db28dcSHong Zhang       }
216769db28dcSHong Zhang       vals += ncols;
216869db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
216969db28dcSHong Zhang     }
217069db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
217169db28dcSHong Zhang 
217269db28dcSHong Zhang   /* send nzlocal to others, and recv other's nzlocal */
217369db28dcSHong Zhang   /*--------------------------------------------------*/
217469db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
217569db28dcSHong Zhang     ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
217669db28dcSHong Zhang     s_waits2 = s_waits3 + nsends;
217769db28dcSHong Zhang     s_waits1 = s_waits2 + nsends;
217869db28dcSHong Zhang     r_waits1 = s_waits1 + nsends;
217969db28dcSHong Zhang     r_waits2 = r_waits1 + nrecvs;
218069db28dcSHong Zhang     r_waits3 = r_waits2 + nrecvs;
218169db28dcSHong Zhang   } else {
218269db28dcSHong Zhang     ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
218369db28dcSHong Zhang     r_waits3 = s_waits3 + nsends;
218469db28dcSHong Zhang   }
218569db28dcSHong Zhang 
218669db28dcSHong Zhang   ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr);
218769db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
218869db28dcSHong Zhang     /* get new tags to keep the communication clean */
218969db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr);
219069db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr);
219169db28dcSHong Zhang     ierr = PetscMalloc3(nsends+nrecvs+1,PetscInt,&sbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr);
219269db28dcSHong Zhang     rbuf_nz = sbuf_nz + nsends;
219369db28dcSHong Zhang 
219469db28dcSHong Zhang     /* post receives of other's nzlocal */
219569db28dcSHong Zhang     for (i=0; i<nrecvs; i++){
219669db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr);
219769db28dcSHong Zhang     }
219869db28dcSHong Zhang     /* send nzlocal to others */
219969db28dcSHong Zhang     for (i=0; i<nsends; i++){
220069db28dcSHong Zhang       sbuf_nz[i] = nzlocal;
220169db28dcSHong Zhang       ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr);
220269db28dcSHong Zhang     }
220369db28dcSHong Zhang     /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */
220469db28dcSHong Zhang     count = nrecvs;
220569db28dcSHong Zhang     while (count) {
220669db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr);
220769db28dcSHong Zhang       recv_rank[imdex] = recv_status.MPI_SOURCE;
220869db28dcSHong Zhang       /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */
220969db28dcSHong Zhang       ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr);
221069db28dcSHong Zhang 
221169db28dcSHong Zhang       i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */
221269db28dcSHong Zhang       rbuf_nz[imdex] += i + 2;
221369db28dcSHong Zhang       ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr);
221469db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr);
221569db28dcSHong Zhang       count--;
221669db28dcSHong Zhang     }
221769db28dcSHong Zhang     /* wait on sends of nzlocal */
221869db28dcSHong Zhang     if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);}
221969db28dcSHong Zhang     /* send mat->i,j to others, and recv from other's */
222069db28dcSHong Zhang     /*------------------------------------------------*/
222169db28dcSHong Zhang     for (i=0; i<nsends; i++){
222269db28dcSHong Zhang       j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1;
222369db28dcSHong Zhang       ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr);
222469db28dcSHong Zhang     }
222569db28dcSHong Zhang     /* wait on receives of mat->i,j */
222669db28dcSHong Zhang     /*------------------------------*/
222769db28dcSHong Zhang     count = nrecvs;
222869db28dcSHong Zhang     while (count) {
222969db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr);
223069db28dcSHong Zhang       if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
223169db28dcSHong Zhang       count--;
223269db28dcSHong Zhang     }
223369db28dcSHong Zhang     /* wait on sends of mat->i,j */
223469db28dcSHong Zhang     /*---------------------------*/
223569db28dcSHong Zhang     if (nsends) {
223669db28dcSHong Zhang       ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr);
223769db28dcSHong Zhang     }
223869db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
223969db28dcSHong Zhang 
224069db28dcSHong Zhang   /* post receives, send and receive mat->a */
224169db28dcSHong Zhang   /*----------------------------------------*/
224269db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++) {
224369db28dcSHong Zhang     ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr);
224469db28dcSHong Zhang   }
224569db28dcSHong Zhang   for (i=0; i<nsends; i++){
224669db28dcSHong Zhang     ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr);
224769db28dcSHong Zhang   }
224869db28dcSHong Zhang   count = nrecvs;
224969db28dcSHong Zhang   while (count) {
225069db28dcSHong Zhang     ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr);
225169db28dcSHong Zhang     if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
225269db28dcSHong Zhang     count--;
225369db28dcSHong Zhang   }
225469db28dcSHong Zhang   if (nsends) {
225569db28dcSHong Zhang     ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr);
225669db28dcSHong Zhang   }
225769db28dcSHong Zhang 
225869db28dcSHong Zhang   ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr);
225969db28dcSHong Zhang 
226069db28dcSHong Zhang   /* create redundant matrix */
226169db28dcSHong Zhang   /*-------------------------*/
226269db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
226369db28dcSHong Zhang     /* compute rownz_max for preallocation */
226469db28dcSHong Zhang     for (imdex=0; imdex<nrecvs; imdex++){
226569db28dcSHong Zhang       j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]];
226669db28dcSHong Zhang       rptr = rbuf_j[imdex];
226769db28dcSHong Zhang       for (i=0; i<j; i++){
226869db28dcSHong Zhang         ncols = rptr[i+1] - rptr[i];
226969db28dcSHong Zhang         if (rownz_max < ncols) rownz_max = ncols;
227069db28dcSHong Zhang       }
227169db28dcSHong Zhang     }
227269db28dcSHong Zhang 
227369db28dcSHong Zhang     ierr = MatCreate(subcomm,&C);CHKERRQ(ierr);
227469db28dcSHong Zhang     ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
227569db28dcSHong Zhang     ierr = MatSetFromOptions(C);CHKERRQ(ierr);
227669db28dcSHong Zhang     ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr);
227769db28dcSHong Zhang     ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr);
227869db28dcSHong Zhang   } else {
227969db28dcSHong Zhang     C = *matredundant;
228069db28dcSHong Zhang   }
228169db28dcSHong Zhang 
228269db28dcSHong Zhang   /* insert local matrix entries */
228369db28dcSHong Zhang   rptr = sbuf_j;
228469db28dcSHong Zhang   cols = sbuf_j + rend-rstart + 1;
228569db28dcSHong Zhang   vals = sbuf_a;
228669db28dcSHong Zhang   for (i=0; i<rend-rstart; i++){
228769db28dcSHong Zhang     row   = i + rstart;
228869db28dcSHong Zhang     ncols = rptr[i+1] - rptr[i];
228969db28dcSHong Zhang     ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
229069db28dcSHong Zhang     vals += ncols;
229169db28dcSHong Zhang     cols += ncols;
229269db28dcSHong Zhang   }
229369db28dcSHong Zhang   /* insert received matrix entries */
229469db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++){
229569db28dcSHong Zhang     rstart = rowrange[recv_rank[imdex]];
229669db28dcSHong Zhang     rend   = rowrange[recv_rank[imdex]+1];
229769db28dcSHong Zhang     rptr = rbuf_j[imdex];
229869db28dcSHong Zhang     cols = rbuf_j[imdex] + rend-rstart + 1;
229969db28dcSHong Zhang     vals = rbuf_a[imdex];
230069db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
230169db28dcSHong Zhang       row   = i + rstart;
230269db28dcSHong Zhang       ncols = rptr[i+1] - rptr[i];
230369db28dcSHong Zhang       ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
230469db28dcSHong Zhang       vals += ncols;
230569db28dcSHong Zhang       cols += ncols;
230669db28dcSHong Zhang     }
230769db28dcSHong Zhang   }
230869db28dcSHong Zhang   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
230969db28dcSHong Zhang   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
231069db28dcSHong Zhang   ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
2311d0f46423SBarry Smith   if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N);
231269db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
231369db28dcSHong Zhang     PetscContainer container;
231469db28dcSHong Zhang     *matredundant = C;
231569db28dcSHong Zhang     /* create a supporting struct and attach it to C for reuse */
231638f2d2fdSLisandro Dalcin     ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr);
231769db28dcSHong Zhang     ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
231869db28dcSHong Zhang     ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr);
231969db28dcSHong Zhang     ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr);
232069db28dcSHong Zhang     ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr);
232169db28dcSHong Zhang 
232269db28dcSHong Zhang     redund->nzlocal = nzlocal;
232369db28dcSHong Zhang     redund->nsends  = nsends;
232469db28dcSHong Zhang     redund->nrecvs  = nrecvs;
232569db28dcSHong Zhang     redund->send_rank = send_rank;
232669db28dcSHong Zhang     redund->sbuf_nz = sbuf_nz;
232769db28dcSHong Zhang     redund->sbuf_j  = sbuf_j;
232869db28dcSHong Zhang     redund->sbuf_a  = sbuf_a;
232969db28dcSHong Zhang     redund->rbuf_j  = rbuf_j;
233069db28dcSHong Zhang     redund->rbuf_a  = rbuf_a;
233169db28dcSHong Zhang 
233269db28dcSHong Zhang     redund->MatDestroy = C->ops->destroy;
233369db28dcSHong Zhang     C->ops->destroy    = MatDestroy_MatRedundant;
233469db28dcSHong Zhang   }
233569db28dcSHong Zhang   PetscFunctionReturn(0);
233669db28dcSHong Zhang }
233769db28dcSHong Zhang 
233803bc72f1SMatthew Knepley #undef __FUNCT__
2339c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ"
2340c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2341c91732d9SHong Zhang {
2342c91732d9SHong Zhang   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
2343c91732d9SHong Zhang   PetscErrorCode ierr;
2344c91732d9SHong Zhang   PetscInt       i,*idxb = 0;
2345c91732d9SHong Zhang   PetscScalar    *va,*vb;
2346c91732d9SHong Zhang   Vec            vtmp;
2347c91732d9SHong Zhang 
2348c91732d9SHong Zhang   PetscFunctionBegin;
2349c91732d9SHong Zhang   ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
2350c91732d9SHong Zhang   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2351c91732d9SHong Zhang   if (idx) {
2352192daf7cSBarry Smith     for (i=0; i<A->rmap->n; i++) {
2353d0f46423SBarry Smith       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
2354c91732d9SHong Zhang     }
2355c91732d9SHong Zhang   }
2356c91732d9SHong Zhang 
2357d0f46423SBarry Smith   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
2358c91732d9SHong Zhang   if (idx) {
2359d0f46423SBarry Smith     ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);
2360c91732d9SHong Zhang   }
2361c91732d9SHong Zhang   ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
2362c91732d9SHong Zhang   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
2363c91732d9SHong Zhang 
2364d0f46423SBarry Smith   for (i=0; i<A->rmap->n; i++){
2365c91732d9SHong Zhang     if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
2366c91732d9SHong Zhang       va[i] = vb[i];
2367c91732d9SHong Zhang       if (idx) idx[i] = a->garray[idxb[i]];
2368c91732d9SHong Zhang     }
2369c91732d9SHong Zhang   }
2370c91732d9SHong Zhang 
2371c91732d9SHong Zhang   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2372c91732d9SHong Zhang   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
2373c91732d9SHong Zhang   if (idxb) {
2374c91732d9SHong Zhang     ierr = PetscFree(idxb);CHKERRQ(ierr);
2375c91732d9SHong Zhang   }
2376c91732d9SHong Zhang   ierr = VecDestroy(vtmp);CHKERRQ(ierr);
2377c91732d9SHong Zhang   PetscFunctionReturn(0);
2378c91732d9SHong Zhang }
2379c91732d9SHong Zhang 
2380c91732d9SHong Zhang #undef __FUNCT__
2381c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ"
2382c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2383c87e5d42SMatthew Knepley {
2384c87e5d42SMatthew Knepley   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
2385c87e5d42SMatthew Knepley   PetscErrorCode ierr;
2386c87e5d42SMatthew Knepley   PetscInt       i,*idxb = 0;
2387c87e5d42SMatthew Knepley   PetscScalar    *va,*vb;
2388c87e5d42SMatthew Knepley   Vec            vtmp;
2389c87e5d42SMatthew Knepley 
2390c87e5d42SMatthew Knepley   PetscFunctionBegin;
2391c87e5d42SMatthew Knepley   ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr);
2392c87e5d42SMatthew Knepley   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2393c87e5d42SMatthew Knepley   if (idx) {
2394c87e5d42SMatthew Knepley     for (i=0; i<A->cmap->n; i++) {
2395c87e5d42SMatthew Knepley       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
2396c87e5d42SMatthew Knepley     }
2397c87e5d42SMatthew Knepley   }
2398c87e5d42SMatthew Knepley 
2399c87e5d42SMatthew Knepley   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
2400c87e5d42SMatthew Knepley   if (idx) {
2401c87e5d42SMatthew Knepley     ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);
2402c87e5d42SMatthew Knepley   }
2403c87e5d42SMatthew Knepley   ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
2404c87e5d42SMatthew Knepley   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
2405c87e5d42SMatthew Knepley 
2406c87e5d42SMatthew Knepley   for (i=0; i<A->rmap->n; i++){
2407c87e5d42SMatthew Knepley     if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) {
2408c87e5d42SMatthew Knepley       va[i] = vb[i];
2409c87e5d42SMatthew Knepley       if (idx) idx[i] = a->garray[idxb[i]];
2410c87e5d42SMatthew Knepley     }
2411c87e5d42SMatthew Knepley   }
2412c87e5d42SMatthew Knepley 
2413c87e5d42SMatthew Knepley   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2414c87e5d42SMatthew Knepley   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
2415c87e5d42SMatthew Knepley   if (idxb) {
2416c87e5d42SMatthew Knepley     ierr = PetscFree(idxb);CHKERRQ(ierr);
2417c87e5d42SMatthew Knepley   }
2418c87e5d42SMatthew Knepley   ierr = VecDestroy(vtmp);CHKERRQ(ierr);
2419c87e5d42SMatthew Knepley   PetscFunctionReturn(0);
2420c87e5d42SMatthew Knepley }
2421c87e5d42SMatthew Knepley 
2422c87e5d42SMatthew Knepley #undef __FUNCT__
242303bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ"
242403bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[])
242503bc72f1SMatthew Knepley {
242603bc72f1SMatthew Knepley   Mat_MPIAIJ    *mat    = (Mat_MPIAIJ *) A->data;
2427d0f46423SBarry Smith   PetscInt       n      = A->rmap->n;
2428d0f46423SBarry Smith   PetscInt       cstart = A->cmap->rstart;
242903bc72f1SMatthew Knepley   PetscInt      *cmap   = mat->garray;
243003bc72f1SMatthew Knepley   PetscInt      *diagIdx, *offdiagIdx;
243103bc72f1SMatthew Knepley   Vec            diagV, offdiagV;
243203bc72f1SMatthew Knepley   PetscScalar   *a, *diagA, *offdiagA;
243303bc72f1SMatthew Knepley   PetscInt       r;
243403bc72f1SMatthew Knepley   PetscErrorCode ierr;
243503bc72f1SMatthew Knepley 
243603bc72f1SMatthew Knepley   PetscFunctionBegin;
243703bc72f1SMatthew Knepley   ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr);
2438e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr);
2439e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr);
244003bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->A, diagV,    diagIdx);CHKERRQ(ierr);
244103bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr);
244203bc72f1SMatthew Knepley   ierr = VecGetArray(v,        &a);CHKERRQ(ierr);
244303bc72f1SMatthew Knepley   ierr = VecGetArray(diagV,    &diagA);CHKERRQ(ierr);
244403bc72f1SMatthew Knepley   ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr);
244503bc72f1SMatthew Knepley   for(r = 0; r < n; ++r) {
2446028cd4eaSSatish Balay     if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) {
244703bc72f1SMatthew Knepley       a[r]   = diagA[r];
244803bc72f1SMatthew Knepley       idx[r] = cstart + diagIdx[r];
244903bc72f1SMatthew Knepley     } else {
245003bc72f1SMatthew Knepley       a[r]   = offdiagA[r];
245103bc72f1SMatthew Knepley       idx[r] = cmap[offdiagIdx[r]];
245203bc72f1SMatthew Knepley     }
245303bc72f1SMatthew Knepley   }
245403bc72f1SMatthew Knepley   ierr = VecRestoreArray(v,        &a);CHKERRQ(ierr);
245503bc72f1SMatthew Knepley   ierr = VecRestoreArray(diagV,    &diagA);CHKERRQ(ierr);
245603bc72f1SMatthew Knepley   ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr);
245703bc72f1SMatthew Knepley   ierr = VecDestroy(diagV);CHKERRQ(ierr);
245803bc72f1SMatthew Knepley   ierr = VecDestroy(offdiagV);CHKERRQ(ierr);
245903bc72f1SMatthew Knepley   ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr);
246003bc72f1SMatthew Knepley   PetscFunctionReturn(0);
246103bc72f1SMatthew Knepley }
246203bc72f1SMatthew Knepley 
24635494a064SHong Zhang #undef __FUNCT__
2464c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ"
2465c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2466c87e5d42SMatthew Knepley {
2467c87e5d42SMatthew Knepley   Mat_MPIAIJ    *mat    = (Mat_MPIAIJ *) A->data;
2468c87e5d42SMatthew Knepley   PetscInt       n      = A->rmap->n;
2469c87e5d42SMatthew Knepley   PetscInt       cstart = A->cmap->rstart;
2470c87e5d42SMatthew Knepley   PetscInt      *cmap   = mat->garray;
2471c87e5d42SMatthew Knepley   PetscInt      *diagIdx, *offdiagIdx;
2472c87e5d42SMatthew Knepley   Vec            diagV, offdiagV;
2473c87e5d42SMatthew Knepley   PetscScalar   *a, *diagA, *offdiagA;
2474c87e5d42SMatthew Knepley   PetscInt       r;
2475c87e5d42SMatthew Knepley   PetscErrorCode ierr;
2476c87e5d42SMatthew Knepley 
2477c87e5d42SMatthew Knepley   PetscFunctionBegin;
2478c87e5d42SMatthew Knepley   ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr);
2479c87e5d42SMatthew Knepley   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr);
2480c87e5d42SMatthew Knepley   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr);
2481c87e5d42SMatthew Knepley   ierr = MatGetRowMax(mat->A, diagV,    diagIdx);CHKERRQ(ierr);
2482c87e5d42SMatthew Knepley   ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr);
2483c87e5d42SMatthew Knepley   ierr = VecGetArray(v,        &a);CHKERRQ(ierr);
2484c87e5d42SMatthew Knepley   ierr = VecGetArray(diagV,    &diagA);CHKERRQ(ierr);
2485c87e5d42SMatthew Knepley   ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr);
2486c87e5d42SMatthew Knepley   for(r = 0; r < n; ++r) {
2487c87e5d42SMatthew Knepley     if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) {
2488c87e5d42SMatthew Knepley       a[r]   = diagA[r];
2489c87e5d42SMatthew Knepley       idx[r] = cstart + diagIdx[r];
2490c87e5d42SMatthew Knepley     } else {
2491c87e5d42SMatthew Knepley       a[r]   = offdiagA[r];
2492c87e5d42SMatthew Knepley       idx[r] = cmap[offdiagIdx[r]];
2493c87e5d42SMatthew Knepley     }
2494c87e5d42SMatthew Knepley   }
2495c87e5d42SMatthew Knepley   ierr = VecRestoreArray(v,        &a);CHKERRQ(ierr);
2496c87e5d42SMatthew Knepley   ierr = VecRestoreArray(diagV,    &diagA);CHKERRQ(ierr);
2497c87e5d42SMatthew Knepley   ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr);
2498c87e5d42SMatthew Knepley   ierr = VecDestroy(diagV);CHKERRQ(ierr);
2499c87e5d42SMatthew Knepley   ierr = VecDestroy(offdiagV);CHKERRQ(ierr);
2500c87e5d42SMatthew Knepley   ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr);
2501c87e5d42SMatthew Knepley   PetscFunctionReturn(0);
2502c87e5d42SMatthew Knepley }
2503c87e5d42SMatthew Knepley 
#undef __FUNCT__
#define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ"
/*
    Thin wrapper: obtains the nonzero structure of the parallel matrix mat as
    a new sequential matrix by forwarding to MatGetSubMatrix_MPIAIJ_All() with
    MAT_DO_NOT_GET_VALUES (structure only) and MAT_INITIAL_MATRIX (always a
    fresh result placed in *newmat).
*/
PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,newmat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
25145494a064SHong Zhang 
25158a729477SBarry Smith /* -------------------------------------------------------------------*/
2516cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ,
2517cda55fadSBarry Smith        MatGetRow_MPIAIJ,
2518cda55fadSBarry Smith        MatRestoreRow_MPIAIJ,
2519cda55fadSBarry Smith        MatMult_MPIAIJ,
252097304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ,
25217c922b88SBarry Smith        MatMultTranspose_MPIAIJ,
25227c922b88SBarry Smith        MatMultTransposeAdd_MPIAIJ,
2523103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2524103bf8bdSMatthew Knepley        MatSolve_MPIAIJ,
2525103bf8bdSMatthew Knepley #else
2526cda55fadSBarry Smith        0,
2527103bf8bdSMatthew Knepley #endif
2528cda55fadSBarry Smith        0,
2529cda55fadSBarry Smith        0,
253097304618SKris Buschelman /*10*/ 0,
2531cda55fadSBarry Smith        0,
2532cda55fadSBarry Smith        0,
253344a69424SLois Curfman McInnes        MatRelax_MPIAIJ,
2534b7c46309SBarry Smith        MatTranspose_MPIAIJ,
253597304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ,
2536cda55fadSBarry Smith        MatEqual_MPIAIJ,
2537cda55fadSBarry Smith        MatGetDiagonal_MPIAIJ,
2538cda55fadSBarry Smith        MatDiagonalScale_MPIAIJ,
2539cda55fadSBarry Smith        MatNorm_MPIAIJ,
254097304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ,
2541cda55fadSBarry Smith        MatAssemblyEnd_MPIAIJ,
25421eb62cbbSBarry Smith        0,
2543cda55fadSBarry Smith        MatSetOption_MPIAIJ,
2544cda55fadSBarry Smith        MatZeroEntries_MPIAIJ,
254597304618SKris Buschelman /*25*/ MatZeroRows_MPIAIJ,
2546cda55fadSBarry Smith        0,
2547103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2548719d5645SBarry Smith        0,
2549103bf8bdSMatthew Knepley #else
2550cda55fadSBarry Smith        0,
2551103bf8bdSMatthew Knepley #endif
2552cda55fadSBarry Smith        0,
2553cda55fadSBarry Smith        0,
255497304618SKris Buschelman /*30*/ MatSetUpPreallocation_MPIAIJ,
2555103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2556719d5645SBarry Smith        0,
2557103bf8bdSMatthew Knepley #else
2558cda55fadSBarry Smith        0,
2559103bf8bdSMatthew Knepley #endif
2560cda55fadSBarry Smith        0,
2561cda55fadSBarry Smith        0,
2562cda55fadSBarry Smith        0,
256397304618SKris Buschelman /*35*/ MatDuplicate_MPIAIJ,
2564cda55fadSBarry Smith        0,
2565cda55fadSBarry Smith        0,
2566cda55fadSBarry Smith        0,
2567cda55fadSBarry Smith        0,
256897304618SKris Buschelman /*40*/ MatAXPY_MPIAIJ,
2569cda55fadSBarry Smith        MatGetSubMatrices_MPIAIJ,
2570cda55fadSBarry Smith        MatIncreaseOverlap_MPIAIJ,
2571cda55fadSBarry Smith        MatGetValues_MPIAIJ,
2572cb5b572fSBarry Smith        MatCopy_MPIAIJ,
2573c87e5d42SMatthew Knepley /*45*/ MatGetRowMax_MPIAIJ,
2574cda55fadSBarry Smith        MatScale_MPIAIJ,
2575cda55fadSBarry Smith        0,
2576cda55fadSBarry Smith        0,
2577cda55fadSBarry Smith        0,
2578521d7252SBarry Smith /*50*/ MatSetBlockSize_MPIAIJ,
2579cda55fadSBarry Smith        0,
2580cda55fadSBarry Smith        0,
2581cda55fadSBarry Smith        0,
2582cda55fadSBarry Smith        0,
258397304618SKris Buschelman /*55*/ MatFDColoringCreate_MPIAIJ,
2584cda55fadSBarry Smith        0,
2585cda55fadSBarry Smith        MatSetUnfactored_MPIAIJ,
258642e855d1Svictor        MatPermute_MPIAIJ,
2587cda55fadSBarry Smith        0,
258897304618SKris Buschelman /*60*/ MatGetSubMatrix_MPIAIJ,
2589e03a110bSBarry Smith        MatDestroy_MPIAIJ,
2590e03a110bSBarry Smith        MatView_MPIAIJ,
2591357abbc8SBarry Smith        0,
2592a2243be0SBarry Smith        0,
259397304618SKris Buschelman /*65*/ 0,
2594a2243be0SBarry Smith        0,
2595a2243be0SBarry Smith        0,
2596a2243be0SBarry Smith        0,
2597a2243be0SBarry Smith        0,
2598c91732d9SHong Zhang /*70*/ MatGetRowMaxAbs_MPIAIJ,
2599c87e5d42SMatthew Knepley        MatGetRowMinAbs_MPIAIJ,
2600a2243be0SBarry Smith        0,
2601a2243be0SBarry Smith        MatSetColoring_MPIAIJ,
2602dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC)
2603779c1a83SBarry Smith        MatSetValuesAdic_MPIAIJ,
2604dcf5cc72SBarry Smith #else
2605dcf5cc72SBarry Smith        0,
2606dcf5cc72SBarry Smith #endif
260797304618SKris Buschelman        MatSetValuesAdifor_MPIAIJ,
260897304618SKris Buschelman /*75*/ 0,
260997304618SKris Buschelman        0,
261097304618SKris Buschelman        0,
261197304618SKris Buschelman        0,
261297304618SKris Buschelman        0,
261397304618SKris Buschelman /*80*/ 0,
261497304618SKris Buschelman        0,
261597304618SKris Buschelman        0,
261641acf15aSKris Buschelman /*84*/ MatLoad_MPIAIJ,
26176284ec50SHong Zhang        0,
26186284ec50SHong Zhang        0,
26196284ec50SHong Zhang        0,
26206284ec50SHong Zhang        0,
2621865e5f61SKris Buschelman        0,
2622865e5f61SKris Buschelman /*90*/ MatMatMult_MPIAIJ_MPIAIJ,
262326be0446SHong Zhang        MatMatMultSymbolic_MPIAIJ_MPIAIJ,
262426be0446SHong Zhang        MatMatMultNumeric_MPIAIJ_MPIAIJ,
26257a7894deSKris Buschelman        MatPtAP_Basic,
26267a7894deSKris Buschelman        MatPtAPSymbolic_MPIAIJ,
26277a7894deSKris Buschelman /*95*/ MatPtAPNumeric_MPIAIJ,
26287a7894deSKris Buschelman        0,
26297a7894deSKris Buschelman        0,
26307a7894deSKris Buschelman        0,
26317a7894deSKris Buschelman        0,
26327a7894deSKris Buschelman /*100*/0,
2633865e5f61SKris Buschelman        MatPtAPSymbolic_MPIAIJ_MPIAIJ,
26347a7894deSKris Buschelman        MatPtAPNumeric_MPIAIJ_MPIAIJ,
26352fd7e33dSBarry Smith        MatConjugate_MPIAIJ,
26362fd7e33dSBarry Smith        0,
263799cafbc1SBarry Smith /*105*/MatSetValuesRow_MPIAIJ,
263899cafbc1SBarry Smith        MatRealPart_MPIAIJ,
263969db28dcSHong Zhang        MatImaginaryPart_MPIAIJ,
264069db28dcSHong Zhang        0,
264169db28dcSHong Zhang        0,
264269db28dcSHong Zhang /*110*/0,
264303bc72f1SMatthew Knepley        MatGetRedundantMatrix_MPIAIJ,
26445494a064SHong Zhang        MatGetRowMin_MPIAIJ,
26455494a064SHong Zhang        0,
26465494a064SHong Zhang        0,
2647829201f2SHong Zhang /*115*/MatGetSeqNonzerostructure_MPIAIJ};
264836ce4990SBarry Smith 
26492e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/
26502e8a6d31SBarry Smith 
2651fb2e594dSBarry Smith EXTERN_C_BEGIN
26524a2ae208SSatish Balay #undef __FUNCT__
26534a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ"
2654be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat)
26552e8a6d31SBarry Smith {
26562e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2657dfbe8321SBarry Smith   PetscErrorCode ierr;
26582e8a6d31SBarry Smith 
26592e8a6d31SBarry Smith   PetscFunctionBegin;
26602e8a6d31SBarry Smith   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
26612e8a6d31SBarry Smith   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
26622e8a6d31SBarry Smith   PetscFunctionReturn(0);
26632e8a6d31SBarry Smith }
2664fb2e594dSBarry Smith EXTERN_C_END
26652e8a6d31SBarry Smith 
2666fb2e594dSBarry Smith EXTERN_C_BEGIN
26674a2ae208SSatish Balay #undef __FUNCT__
26684a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ"
2669be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat)
26702e8a6d31SBarry Smith {
26712e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2672dfbe8321SBarry Smith   PetscErrorCode ierr;
26732e8a6d31SBarry Smith 
26742e8a6d31SBarry Smith   PetscFunctionBegin;
26752e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
26762e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
26772e8a6d31SBarry Smith   PetscFunctionReturn(0);
26782e8a6d31SBarry Smith }
2679fb2e594dSBarry Smith EXTERN_C_END
26808a729477SBarry Smith 
2681e090d566SSatish Balay #include "petscpc.h"
268227508adbSBarry Smith EXTERN_C_BEGIN
26834a2ae208SSatish Balay #undef __FUNCT__
2684a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ"
2685be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
2686a23d5eceSKris Buschelman {
2687a23d5eceSKris Buschelman   Mat_MPIAIJ     *b;
2688dfbe8321SBarry Smith   PetscErrorCode ierr;
2689b1d57f15SBarry Smith   PetscInt       i;
2690a23d5eceSKris Buschelman 
2691a23d5eceSKris Buschelman   PetscFunctionBegin;
2692a23d5eceSKris Buschelman   B->preallocated = PETSC_TRUE;
2693a23d5eceSKris Buschelman   if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
2694a23d5eceSKris Buschelman   if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
269577431f27SBarry Smith   if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
269677431f27SBarry Smith   if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);
2697899cda47SBarry Smith 
26987408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
26997408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
2700d0f46423SBarry Smith   ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
2701d0f46423SBarry Smith   ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);
2702a23d5eceSKris Buschelman   if (d_nnz) {
2703d0f46423SBarry Smith     for (i=0; i<B->rmap->n; i++) {
270477431f27SBarry Smith       if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
2705a23d5eceSKris Buschelman     }
2706a23d5eceSKris Buschelman   }
2707a23d5eceSKris Buschelman   if (o_nnz) {
2708d0f46423SBarry Smith     for (i=0; i<B->rmap->n; i++) {
270977431f27SBarry Smith       if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
2710a23d5eceSKris Buschelman     }
2711a23d5eceSKris Buschelman   }
2712a23d5eceSKris Buschelman   b = (Mat_MPIAIJ*)B->data;
2713899cda47SBarry Smith 
2714899cda47SBarry Smith   /* Explicitly create 2 MATSEQAIJ matrices. */
2715899cda47SBarry Smith   ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
2716d0f46423SBarry Smith   ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
2717899cda47SBarry Smith   ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr);
2718899cda47SBarry Smith   ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
2719899cda47SBarry Smith   ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
2720d0f46423SBarry Smith   ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
2721899cda47SBarry Smith   ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr);
2722899cda47SBarry Smith   ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);
2723899cda47SBarry Smith 
2724c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr);
2725c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr);
2726a23d5eceSKris Buschelman 
2727a23d5eceSKris Buschelman   PetscFunctionReturn(0);
2728a23d5eceSKris Buschelman }
2729a23d5eceSKris Buschelman EXTERN_C_END
2730a23d5eceSKris Buschelman 
27314a2ae208SSatish Balay #undef __FUNCT__
27324a2ae208SSatish Balay #define __FUNCT__ "MatDuplicate_MPIAIJ"
2733dfbe8321SBarry Smith PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
2734d6dfbf8fSBarry Smith {
2735d6dfbf8fSBarry Smith   Mat            mat;
2736416022c9SBarry Smith   Mat_MPIAIJ     *a,*oldmat = (Mat_MPIAIJ*)matin->data;
2737dfbe8321SBarry Smith   PetscErrorCode ierr;
2738d6dfbf8fSBarry Smith 
27393a40ed3dSBarry Smith   PetscFunctionBegin;
2740416022c9SBarry Smith   *newmat       = 0;
27417adad957SLisandro Dalcin   ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr);
2742d0f46423SBarry Smith   ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
27437adad957SLisandro Dalcin   ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
27441d5dac46SHong Zhang   ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
2745273d9f13SBarry Smith   a    = (Mat_MPIAIJ*)mat->data;
2746e1b6402fSHong Zhang 
2747d6dfbf8fSBarry Smith   mat->factor       = matin->factor;
2748d0f46423SBarry Smith   mat->rmap->bs      = matin->rmap->bs;
2749c456f294SBarry Smith   mat->assembled    = PETSC_TRUE;
2750e7641de0SSatish Balay   mat->insertmode   = NOT_SET_VALUES;
2751273d9f13SBarry Smith   mat->preallocated = PETSC_TRUE;
2752d6dfbf8fSBarry Smith 
275317699dbbSLois Curfman McInnes   a->size           = oldmat->size;
275417699dbbSLois Curfman McInnes   a->rank           = oldmat->rank;
2755e7641de0SSatish Balay   a->donotstash     = oldmat->donotstash;
2756e7641de0SSatish Balay   a->roworiented    = oldmat->roworiented;
2757e7641de0SSatish Balay   a->rowindices     = 0;
2758bcd2baecSBarry Smith   a->rowvalues      = 0;
2759bcd2baecSBarry Smith   a->getrowactive   = PETSC_FALSE;
2760d6dfbf8fSBarry Smith 
2761d0f46423SBarry Smith   ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->rmap,mat->rmap);CHKERRQ(ierr);
2762d0f46423SBarry Smith   ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->cmap,mat->cmap);CHKERRQ(ierr);
2763899cda47SBarry Smith 
27647adad957SLisandro Dalcin   ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr);
27652ee70a88SLois Curfman McInnes   if (oldmat->colmap) {
2766aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
27670f5bd95cSBarry Smith     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
2768b1fc9764SSatish Balay #else
2769d0f46423SBarry Smith     ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
2770d0f46423SBarry Smith     ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
2771d0f46423SBarry Smith     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
2772b1fc9764SSatish Balay #endif
2773416022c9SBarry Smith   } else a->colmap = 0;
27743f41c07dSBarry Smith   if (oldmat->garray) {
2775b1d57f15SBarry Smith     PetscInt len;
2776d0f46423SBarry Smith     len  = oldmat->B->cmap->n;
2777b1d57f15SBarry Smith     ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
277852e6d16bSBarry Smith     ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
2779b1d57f15SBarry Smith     if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); }
2780416022c9SBarry Smith   } else a->garray = 0;
2781d6dfbf8fSBarry Smith 
2782416022c9SBarry Smith   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
278352e6d16bSBarry Smith   ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
2784a56f8943SBarry Smith   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
278552e6d16bSBarry Smith   ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
27862e8a6d31SBarry Smith   ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
278752e6d16bSBarry Smith   ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
27882e8a6d31SBarry Smith   ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
278952e6d16bSBarry Smith   ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
27907adad957SLisandro Dalcin   ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
27918a729477SBarry Smith   *newmat = mat;
27923a40ed3dSBarry Smith   PetscFunctionReturn(0);
27938a729477SBarry Smith }
2794416022c9SBarry Smith 
2795e090d566SSatish Balay #include "petscsys.h"
2796416022c9SBarry Smith 
27974a2ae208SSatish Balay #undef __FUNCT__
27984a2ae208SSatish Balay #define __FUNCT__ "MatLoad_MPIAIJ"
2799a313700dSBarry Smith PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, const MatType type,Mat *newmat)
2800416022c9SBarry Smith {
2801d65a2f8fSBarry Smith   Mat            A;
280287828ca2SBarry Smith   PetscScalar    *vals,*svals;
280319bcc07fSBarry Smith   MPI_Comm       comm = ((PetscObject)viewer)->comm;
2804416022c9SBarry Smith   MPI_Status     status;
28056849ba73SBarry Smith   PetscErrorCode ierr;
280613980483SBarry Smith   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,mpicnt,mpimaxnz;
280713980483SBarry Smith   PetscInt       i,nz,j,rstart,rend,mmax,maxnz;
2808b1d57f15SBarry Smith   PetscInt       header[4],*rowlengths = 0,M,N,m,*cols;
2809910ba992SMatthew Knepley   PetscInt       *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols;
2810dc231df0SBarry Smith   PetscInt       cend,cstart,n,*rowners;
2811b1d57f15SBarry Smith   int            fd;
2812416022c9SBarry Smith 
28133a40ed3dSBarry Smith   PetscFunctionBegin;
28141dab6e02SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
28151dab6e02SBarry Smith   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
281617699dbbSLois Curfman McInnes   if (!rank) {
2817b0a32e0cSBarry Smith     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
28180752156aSBarry Smith     ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
2819552e946dSBarry Smith     if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
28206c5fab8fSBarry Smith   }
28216c5fab8fSBarry Smith 
2822b1d57f15SBarry Smith   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
2823416022c9SBarry Smith   M = header[1]; N = header[2];
2824416022c9SBarry Smith   /* determine ownership of all rows */
282529cdbbc8SSatish Balay   m    = M/size + ((M % size) > rank);
2826dc231df0SBarry Smith   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
2827dc231df0SBarry Smith   ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
2828167e7480SBarry Smith 
2829167e7480SBarry Smith   /* First process needs enough room for process with most rows */
2830167e7480SBarry Smith   if (!rank) {
2831167e7480SBarry Smith     mmax       = rowners[1];
2832167e7480SBarry Smith     for (i=2; i<size; i++) {
2833167e7480SBarry Smith       mmax = PetscMax(mmax,rowners[i]);
2834167e7480SBarry Smith     }
2835167e7480SBarry Smith   } else mmax = m;
2836167e7480SBarry Smith 
2837416022c9SBarry Smith   rowners[0] = 0;
283817699dbbSLois Curfman McInnes   for (i=2; i<=size; i++) {
2839416022c9SBarry Smith     rowners[i] += rowners[i-1];
2840416022c9SBarry Smith   }
284117699dbbSLois Curfman McInnes   rstart = rowners[rank];
284217699dbbSLois Curfman McInnes   rend   = rowners[rank+1];
2843416022c9SBarry Smith 
2844416022c9SBarry Smith   /* distribute row lengths to all processors */
2845167e7480SBarry Smith   ierr    = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr);
284617699dbbSLois Curfman McInnes   if (!rank) {
2847dc231df0SBarry Smith     ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr);
2848dc231df0SBarry Smith     ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
2849b1d57f15SBarry Smith     ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
2850b1d57f15SBarry Smith     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
2851dc231df0SBarry Smith     for (j=0; j<m; j++) {
2852dc231df0SBarry Smith       procsnz[0] += ourlens[j];
2853dc231df0SBarry Smith     }
2854dc231df0SBarry Smith     for (i=1; i<size; i++) {
2855dc231df0SBarry Smith       ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr);
2856dc231df0SBarry Smith       /* calculate the number of nonzeros on each processor */
2857dc231df0SBarry Smith       for (j=0; j<rowners[i+1]-rowners[i]; j++) {
2858416022c9SBarry Smith         procsnz[i] += rowlengths[j];
2859416022c9SBarry Smith       }
286013980483SBarry Smith       mpicnt = PetscMPIIntCast(rowners[i+1]-rowners[i]);
286113980483SBarry Smith       ierr   = MPI_Send(rowlengths,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
2862416022c9SBarry Smith     }
2863606d414cSSatish Balay     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
2864dc231df0SBarry Smith   } else {
286513980483SBarry Smith     mpicnt = PetscMPIIntCast(m);CHKERRQ(ierr);
286613980483SBarry Smith     ierr   = MPI_Recv(ourlens,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
2867dc231df0SBarry Smith   }
2868416022c9SBarry Smith 
2869dc231df0SBarry Smith   if (!rank) {
2870416022c9SBarry Smith     /* determine max buffer needed and allocate it */
2871416022c9SBarry Smith     maxnz = 0;
28728a8e0b3aSBarry Smith     for (i=0; i<size; i++) {
28730452661fSBarry Smith       maxnz = PetscMax(maxnz,procsnz[i]);
2874416022c9SBarry Smith     }
2875b1d57f15SBarry Smith     ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);
2876416022c9SBarry Smith 
2877416022c9SBarry Smith     /* read in my part of the matrix column indices  */
2878416022c9SBarry Smith     nz   = procsnz[0];
2879b1d57f15SBarry Smith     ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
28800752156aSBarry Smith     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
2881d65a2f8fSBarry Smith 
2882d65a2f8fSBarry Smith     /* read in every one elses and ship off */
288317699dbbSLois Curfman McInnes     for (i=1; i<size; i++) {
2884d65a2f8fSBarry Smith       nz     = procsnz[i];
28850752156aSBarry Smith       ierr   = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
288613980483SBarry Smith       mpicnt = PetscMPIIntCast(nz);
288713980483SBarry Smith       ierr   = MPI_Send(cols,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
2888d65a2f8fSBarry Smith     }
2889606d414cSSatish Balay     ierr = PetscFree(cols);CHKERRQ(ierr);
28903a40ed3dSBarry Smith   } else {
2891416022c9SBarry Smith     /* determine buffer space needed for message */
2892416022c9SBarry Smith     nz = 0;
2893416022c9SBarry Smith     for (i=0; i<m; i++) {
2894416022c9SBarry Smith       nz += ourlens[i];
2895416022c9SBarry Smith     }
2896dc231df0SBarry Smith     ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
2897416022c9SBarry Smith 
2898416022c9SBarry Smith     /* receive message of column indices*/
289913980483SBarry Smith     mpicnt = PetscMPIIntCast(nz);CHKERRQ(ierr);
290013980483SBarry Smith     ierr = MPI_Recv(mycols,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
290113980483SBarry Smith     ierr = MPI_Get_count(&status,MPIU_INT,&mpimaxnz);CHKERRQ(ierr);
29027c533972SBarry Smith     if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);}
290313980483SBarry Smith     else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);}
290413980483SBarry Smith     else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);}
2905416022c9SBarry Smith   }
2906416022c9SBarry Smith 
2907b362ba68SBarry Smith   /* determine column ownership if matrix is not square */
2908b362ba68SBarry Smith   if (N != M) {
2909b362ba68SBarry Smith     n      = N/size + ((N % size) > rank);
2910b1d57f15SBarry Smith     ierr   = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2911b362ba68SBarry Smith     cstart = cend - n;
2912b362ba68SBarry Smith   } else {
2913b362ba68SBarry Smith     cstart = rstart;
2914b362ba68SBarry Smith     cend   = rend;
2915fb2e594dSBarry Smith     n      = cend - cstart;
2916b362ba68SBarry Smith   }
2917b362ba68SBarry Smith 
2918416022c9SBarry Smith   /* loop over local rows, determining number of off diagonal entries */
2919b1d57f15SBarry Smith   ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr);
2920416022c9SBarry Smith   jj = 0;
2921416022c9SBarry Smith   for (i=0; i<m; i++) {
2922416022c9SBarry Smith     for (j=0; j<ourlens[i]; j++) {
2923b362ba68SBarry Smith       if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
2924416022c9SBarry Smith       jj++;
2925416022c9SBarry Smith     }
2926416022c9SBarry Smith   }
2927d65a2f8fSBarry Smith 
2928d65a2f8fSBarry Smith   /* create our matrix */
2929416022c9SBarry Smith   for (i=0; i<m; i++) {
2930416022c9SBarry Smith     ourlens[i] -= offlens[i];
2931416022c9SBarry Smith   }
2932f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,&A);CHKERRQ(ierr);
2933f69a0ea3SMatthew Knepley   ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr);
2934d10c748bSKris Buschelman   ierr = MatSetType(A,type);CHKERRQ(ierr);
2935d10c748bSKris Buschelman   ierr = MatMPIAIJSetPreallocation(A,0,ourlens,0,offlens);CHKERRQ(ierr);
2936d10c748bSKris Buschelman 
2937d65a2f8fSBarry Smith   for (i=0; i<m; i++) {
2938d65a2f8fSBarry Smith     ourlens[i] += offlens[i];
2939d65a2f8fSBarry Smith   }
2940416022c9SBarry Smith 
294117699dbbSLois Curfman McInnes   if (!rank) {
2942906b51c7SHong Zhang     ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
2943416022c9SBarry Smith 
2944416022c9SBarry Smith     /* read in my part of the matrix numerical values  */
2945416022c9SBarry Smith     nz   = procsnz[0];
29460752156aSBarry Smith     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2947d65a2f8fSBarry Smith 
2948d65a2f8fSBarry Smith     /* insert into matrix */
2949d65a2f8fSBarry Smith     jj      = rstart;
2950d65a2f8fSBarry Smith     smycols = mycols;
2951d65a2f8fSBarry Smith     svals   = vals;
2952d65a2f8fSBarry Smith     for (i=0; i<m; i++) {
2953dc231df0SBarry Smith       ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
2954d65a2f8fSBarry Smith       smycols += ourlens[i];
2955d65a2f8fSBarry Smith       svals   += ourlens[i];
2956d65a2f8fSBarry Smith       jj++;
2957416022c9SBarry Smith     }
2958416022c9SBarry Smith 
2959d65a2f8fSBarry Smith     /* read in other processors and ship out */
296017699dbbSLois Curfman McInnes     for (i=1; i<size; i++) {
2961416022c9SBarry Smith       nz     = procsnz[i];
29620752156aSBarry Smith       ierr   = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
296313980483SBarry Smith       mpicnt = PetscMPIIntCast(nz);
296413980483SBarry Smith       ierr   = MPI_Send(vals,mpicnt,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr);
2965416022c9SBarry Smith     }
2966606d414cSSatish Balay     ierr = PetscFree(procsnz);CHKERRQ(ierr);
29673a40ed3dSBarry Smith   } else {
2968d65a2f8fSBarry Smith     /* receive numeric values */
296987828ca2SBarry Smith     ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
2970416022c9SBarry Smith 
2971d65a2f8fSBarry Smith     /* receive message of values*/
297213980483SBarry Smith     mpicnt = PetscMPIIntCast(nz);
297313980483SBarry Smith     ierr   = MPI_Recv(vals,mpicnt,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr);
297413980483SBarry Smith     ierr   = MPI_Get_count(&status,MPIU_SCALAR,&mpimaxnz);CHKERRQ(ierr);
29757c533972SBarry Smith     if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);}
297613980483SBarry Smith     else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);}
297713980483SBarry Smith     else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);}
2978d65a2f8fSBarry Smith 
2979d65a2f8fSBarry Smith     /* insert into matrix */
2980d65a2f8fSBarry Smith     jj      = rstart;
2981d65a2f8fSBarry Smith     smycols = mycols;
2982d65a2f8fSBarry Smith     svals   = vals;
2983d65a2f8fSBarry Smith     for (i=0; i<m; i++) {
2984dc231df0SBarry Smith       ierr     = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
2985d65a2f8fSBarry Smith       smycols += ourlens[i];
2986d65a2f8fSBarry Smith       svals   += ourlens[i];
2987d65a2f8fSBarry Smith       jj++;
2988d65a2f8fSBarry Smith     }
2989d65a2f8fSBarry Smith   }
2990dc231df0SBarry Smith   ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr);
2991606d414cSSatish Balay   ierr = PetscFree(vals);CHKERRQ(ierr);
2992606d414cSSatish Balay   ierr = PetscFree(mycols);CHKERRQ(ierr);
2993606d414cSSatish Balay   ierr = PetscFree(rowners);CHKERRQ(ierr);
2994d65a2f8fSBarry Smith 
29956d4a8577SBarry Smith   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
29966d4a8577SBarry Smith   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2997d10c748bSKris Buschelman   *newmat = A;
29983a40ed3dSBarry Smith   PetscFunctionReturn(0);
2999416022c9SBarry Smith }
3000a0ff6018SBarry Smith 
30014a2ae208SSatish Balay #undef __FUNCT__
30024a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ"
3003a0ff6018SBarry Smith /*
300429da9460SBarry Smith     Not great since it makes two copies of the submatrix, first an SeqAIJ
300529da9460SBarry Smith   in local and then by concatenating the local matrices the end result.
300629da9460SBarry Smith   Writing it directly would be much like MatGetSubMatrices_MPIAIJ()
3007a0ff6018SBarry Smith */
3008b1d57f15SBarry Smith PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
3009a0ff6018SBarry Smith {
3010dfbe8321SBarry Smith   PetscErrorCode ierr;
301132dcc486SBarry Smith   PetscMPIInt    rank,size;
3012b1d57f15SBarry Smith   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j;
3013b1d57f15SBarry Smith   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal;
3014fee21e36SBarry Smith   Mat            *local,M,Mreuse;
3015a77337e4SBarry Smith   MatScalar      *vwork,*aa;
30167adad957SLisandro Dalcin   MPI_Comm       comm = ((PetscObject)mat)->comm;
301700e6dbe6SBarry Smith   Mat_SeqAIJ     *aij;
30187e2c5f70SBarry Smith 
3019a0ff6018SBarry Smith 
3020a0ff6018SBarry Smith   PetscFunctionBegin;
30211dab6e02SBarry Smith   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
30221dab6e02SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
302300e6dbe6SBarry Smith 
3024fee21e36SBarry Smith   if (call ==  MAT_REUSE_MATRIX) {
3025fee21e36SBarry Smith     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr);
3026e005ede5SBarry Smith     if (!Mreuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
3027fee21e36SBarry Smith     local = &Mreuse;
3028fee21e36SBarry Smith     ierr  = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr);
3029fee21e36SBarry Smith   } else {
3030a0ff6018SBarry Smith     ierr   = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr);
3031fee21e36SBarry Smith     Mreuse = *local;
3032606d414cSSatish Balay     ierr   = PetscFree(local);CHKERRQ(ierr);
3033fee21e36SBarry Smith   }
3034a0ff6018SBarry Smith 
3035a0ff6018SBarry Smith   /*
3036a0ff6018SBarry Smith       m - number of local rows
3037a0ff6018SBarry Smith       n - number of columns (same on all processors)
3038a0ff6018SBarry Smith       rstart - first row in new global matrix generated
3039a0ff6018SBarry Smith   */
3040fee21e36SBarry Smith   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
3041a0ff6018SBarry Smith   if (call == MAT_INITIAL_MATRIX) {
3042fee21e36SBarry Smith     aij = (Mat_SeqAIJ*)(Mreuse)->data;
304300e6dbe6SBarry Smith     ii  = aij->i;
304400e6dbe6SBarry Smith     jj  = aij->j;
304500e6dbe6SBarry Smith 
3046a0ff6018SBarry Smith     /*
304700e6dbe6SBarry Smith         Determine the number of non-zeros in the diagonal and off-diagonal
304800e6dbe6SBarry Smith         portions of the matrix in order to do correct preallocation
3049a0ff6018SBarry Smith     */
305000e6dbe6SBarry Smith 
305100e6dbe6SBarry Smith     /* first get start and end of "diagonal" columns */
30526a6a5d1dSBarry Smith     if (csize == PETSC_DECIDE) {
3053ab50ec6bSBarry Smith       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
3054ab50ec6bSBarry Smith       if (mglobal == n) { /* square matrix */
3055e2c4fddaSBarry Smith 	nlocal = m;
30566a6a5d1dSBarry Smith       } else {
3057ab50ec6bSBarry Smith         nlocal = n/size + ((n % size) > rank);
3058ab50ec6bSBarry Smith       }
3059ab50ec6bSBarry Smith     } else {
30606a6a5d1dSBarry Smith       nlocal = csize;
30616a6a5d1dSBarry Smith     }
3062b1d57f15SBarry Smith     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
306300e6dbe6SBarry Smith     rstart = rend - nlocal;
30646a6a5d1dSBarry Smith     if (rank == size - 1 && rend != n) {
306577431f27SBarry Smith       SETERRQ2(PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
30666a6a5d1dSBarry Smith     }
306700e6dbe6SBarry Smith 
306800e6dbe6SBarry Smith     /* next, compute all the lengths */
3069b1d57f15SBarry Smith     ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
307000e6dbe6SBarry Smith     olens = dlens + m;
307100e6dbe6SBarry Smith     for (i=0; i<m; i++) {
307200e6dbe6SBarry Smith       jend = ii[i+1] - ii[i];
307300e6dbe6SBarry Smith       olen = 0;
307400e6dbe6SBarry Smith       dlen = 0;
307500e6dbe6SBarry Smith       for (j=0; j<jend; j++) {
307600e6dbe6SBarry Smith         if (*jj < rstart || *jj >= rend) olen++;
307700e6dbe6SBarry Smith         else dlen++;
307800e6dbe6SBarry Smith         jj++;
307900e6dbe6SBarry Smith       }
308000e6dbe6SBarry Smith       olens[i] = olen;
308100e6dbe6SBarry Smith       dlens[i] = dlen;
308200e6dbe6SBarry Smith     }
3083f69a0ea3SMatthew Knepley     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
3084f69a0ea3SMatthew Knepley     ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr);
30857adad957SLisandro Dalcin     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
3086e2d9671bSKris Buschelman     ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr);
3087606d414cSSatish Balay     ierr = PetscFree(dlens);CHKERRQ(ierr);
3088a0ff6018SBarry Smith   } else {
3089b1d57f15SBarry Smith     PetscInt ml,nl;
3090a0ff6018SBarry Smith 
3091a0ff6018SBarry Smith     M = *newmat;
3092a0ff6018SBarry Smith     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
309329bbc08cSBarry Smith     if (ml != m) SETERRQ(PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
3094a0ff6018SBarry Smith     ierr = MatZeroEntries(M);CHKERRQ(ierr);
3095c48de900SBarry Smith     /*
3096c48de900SBarry Smith          The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
3097c48de900SBarry Smith        rather than the slower MatSetValues().
3098c48de900SBarry Smith     */
3099c48de900SBarry Smith     M->was_assembled = PETSC_TRUE;
3100c48de900SBarry Smith     M->assembled     = PETSC_FALSE;
3101a0ff6018SBarry Smith   }
3102a0ff6018SBarry Smith   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
3103fee21e36SBarry Smith   aij = (Mat_SeqAIJ*)(Mreuse)->data;
310400e6dbe6SBarry Smith   ii  = aij->i;
310500e6dbe6SBarry Smith   jj  = aij->j;
310600e6dbe6SBarry Smith   aa  = aij->a;
3107a0ff6018SBarry Smith   for (i=0; i<m; i++) {
3108a0ff6018SBarry Smith     row   = rstart + i;
310900e6dbe6SBarry Smith     nz    = ii[i+1] - ii[i];
311000e6dbe6SBarry Smith     cwork = jj;     jj += nz;
311100e6dbe6SBarry Smith     vwork = aa;     aa += nz;
31128c638d02SBarry Smith     ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
3113a0ff6018SBarry Smith   }
3114a0ff6018SBarry Smith 
3115a0ff6018SBarry Smith   ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3116a0ff6018SBarry Smith   ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3117a0ff6018SBarry Smith   *newmat = M;
3118fee21e36SBarry Smith 
3119fee21e36SBarry Smith   /* save submatrix used in processor for next request */
3120fee21e36SBarry Smith   if (call ==  MAT_INITIAL_MATRIX) {
3121fee21e36SBarry Smith     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
3122fee21e36SBarry Smith     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
3123fee21e36SBarry Smith   }
3124fee21e36SBarry Smith 
3125a0ff6018SBarry Smith   PetscFunctionReturn(0);
3126a0ff6018SBarry Smith }
3127273d9f13SBarry Smith 
3128e2e86b8fSSatish Balay EXTERN_C_BEGIN
31294a2ae208SSatish Balay #undef __FUNCT__
3130ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ"
3131b7940d39SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[])
3132ccd8e176SBarry Smith {
3133899cda47SBarry Smith   PetscInt       m,cstart, cend,j,nnz,i,d;
3134899cda47SBarry Smith   PetscInt       *d_nnz,*o_nnz,nnz_max = 0,rstart,ii;
3135ccd8e176SBarry Smith   const PetscInt *JJ;
3136ccd8e176SBarry Smith   PetscScalar    *values;
3137ccd8e176SBarry Smith   PetscErrorCode ierr;
3138ccd8e176SBarry Smith 
3139ccd8e176SBarry Smith   PetscFunctionBegin;
3140b7940d39SSatish Balay   if (Ii[0]) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]);
3141899cda47SBarry Smith 
31427408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
31437408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
3144d0f46423SBarry Smith   ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
3145d0f46423SBarry Smith   ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);
3146d0f46423SBarry Smith   m      = B->rmap->n;
3147d0f46423SBarry Smith   cstart = B->cmap->rstart;
3148d0f46423SBarry Smith   cend   = B->cmap->rend;
3149d0f46423SBarry Smith   rstart = B->rmap->rstart;
3150899cda47SBarry Smith 
3151ccd8e176SBarry Smith   ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
3152ccd8e176SBarry Smith   o_nnz = d_nnz + m;
3153ccd8e176SBarry Smith 
3154ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING)
3155ecc77c7aSBarry Smith   for (i=0; i<m; i++) {
3156ecc77c7aSBarry Smith     nnz     = Ii[i+1]- Ii[i];
3157ecc77c7aSBarry Smith     JJ      = J + Ii[i];
3158ecc77c7aSBarry Smith     if (nnz < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz);
3159ecc77c7aSBarry Smith     if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j);
3160d0f46423SBarry Smith     if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N);
3161ecc77c7aSBarry Smith     for (j=1; j<nnz; j++) {
3162ecc77c7aSBarry Smith       if (JJ[i] <= JJ[i-1]) SETERRRQ(PETSC_ERR_ARG_WRONGSTATE,"Row %D has unsorted column index at %D location in column indices",i,j);
3163ecc77c7aSBarry Smith     }
3164ecc77c7aSBarry Smith   }
3165ecc77c7aSBarry Smith #endif
3166ecc77c7aSBarry Smith 
3167ccd8e176SBarry Smith   for (i=0; i<m; i++) {
3168b7940d39SSatish Balay     nnz     = Ii[i+1]- Ii[i];
3169b7940d39SSatish Balay     JJ      = J + Ii[i];
3170ccd8e176SBarry Smith     nnz_max = PetscMax(nnz_max,nnz);
3171ccd8e176SBarry Smith     for (j=0; j<nnz; j++) {
3172ccd8e176SBarry Smith       if (*JJ >= cstart) break;
3173ccd8e176SBarry Smith       JJ++;
3174ccd8e176SBarry Smith     }
3175ccd8e176SBarry Smith     d = 0;
3176ccd8e176SBarry Smith     for (; j<nnz; j++) {
3177ccd8e176SBarry Smith       if (*JJ++ >= cend) break;
3178ccd8e176SBarry Smith       d++;
3179ccd8e176SBarry Smith     }
3180ccd8e176SBarry Smith     d_nnz[i] = d;
3181ccd8e176SBarry Smith     o_nnz[i] = nnz - d;
3182ccd8e176SBarry Smith   }
3183ccd8e176SBarry Smith   ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
3184ccd8e176SBarry Smith   ierr = PetscFree(d_nnz);CHKERRQ(ierr);
3185ccd8e176SBarry Smith 
3186ccd8e176SBarry Smith   if (v) values = (PetscScalar*)v;
3187ccd8e176SBarry Smith   else {
3188ccd8e176SBarry Smith     ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr);
3189ccd8e176SBarry Smith     ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr);
3190ccd8e176SBarry Smith   }
3191ccd8e176SBarry Smith 
3192ccd8e176SBarry Smith   for (i=0; i<m; i++) {
3193ccd8e176SBarry Smith     ii   = i + rstart;
3194b7940d39SSatish Balay     nnz  = Ii[i+1]- Ii[i];
3195b7940d39SSatish Balay     ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr);
3196ccd8e176SBarry Smith   }
3197ccd8e176SBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3198ccd8e176SBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3199ccd8e176SBarry Smith 
3200ccd8e176SBarry Smith   if (!v) {
3201ccd8e176SBarry Smith     ierr = PetscFree(values);CHKERRQ(ierr);
3202ccd8e176SBarry Smith   }
3203ccd8e176SBarry Smith   PetscFunctionReturn(0);
3204ccd8e176SBarry Smith }
3205e2e86b8fSSatish Balay EXTERN_C_END
3206ccd8e176SBarry Smith 
3207ccd8e176SBarry Smith #undef __FUNCT__
3208ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR"
32091eea217eSSatish Balay /*@
3210ccd8e176SBarry Smith    MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format
3211ccd8e176SBarry Smith    (the default parallel PETSc format).
3212ccd8e176SBarry Smith 
3213ccd8e176SBarry Smith    Collective on MPI_Comm
3214ccd8e176SBarry Smith 
3215ccd8e176SBarry Smith    Input Parameters:
3216a1661176SMatthew Knepley +  B - the matrix
3217ccd8e176SBarry Smith .  i - the indices into j for the start of each local row (starts with zero)
3218ccd8e176SBarry Smith .  j - the column indices for each local row (starts with zero) these must be sorted for each row
3219ccd8e176SBarry Smith -  v - optional values in the matrix
3220ccd8e176SBarry Smith 
3221ccd8e176SBarry Smith    Level: developer
3222ccd8e176SBarry Smith 
322312251496SSatish Balay    Notes:
322412251496SSatish Balay        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
322512251496SSatish Balay      thus you CANNOT change the matrix entries by changing the values of a[] after you have
322612251496SSatish Balay      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
322712251496SSatish Balay 
322812251496SSatish Balay        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
322912251496SSatish Balay 
323012251496SSatish Balay        The format which is used for the sparse matrix input, is equivalent to a
    row-major ordering, i.e. for the following matrix, the input data expected is
323212251496SSatish Balay     as shown:
323312251496SSatish Balay 
323412251496SSatish Balay         1 0 0
323512251496SSatish Balay         2 0 3     P0
323612251496SSatish Balay        -------
323712251496SSatish Balay         4 5 6     P1
323812251496SSatish Balay 
323912251496SSatish Balay      Process0 [P0]: rows_owned=[0,1]
324012251496SSatish Balay         i =  {0,1,3}  [size = nrow+1  = 2+1]
324112251496SSatish Balay         j =  {0,0,2}  [size = nz = 6]
324212251496SSatish Balay         v =  {1,2,3}  [size = nz = 6]
324312251496SSatish Balay 
324412251496SSatish Balay      Process1 [P1]: rows_owned=[2]
324512251496SSatish Balay         i =  {0,3}    [size = nrow+1  = 1+1]
324612251496SSatish Balay         j =  {0,1,2}  [size = nz = 6]
324712251496SSatish Balay         v =  {4,5,6}  [size = nz = 6]
324812251496SSatish Balay 
3249ecc77c7aSBarry Smith       The column indices for each row MUST be sorted.
32502fb0ec9aSBarry Smith 
3251ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3252ccd8e176SBarry Smith 
32532fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ,
32548d7a6e47SBarry Smith           MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays()
3255ccd8e176SBarry Smith @*/
3256be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
3257ccd8e176SBarry Smith {
3258ccd8e176SBarry Smith   PetscErrorCode ierr,(*f)(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
3259ccd8e176SBarry Smith 
3260ccd8e176SBarry Smith   PetscFunctionBegin;
3261ccd8e176SBarry Smith   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",(void (**)(void))&f);CHKERRQ(ierr);
3262ccd8e176SBarry Smith   if (f) {
3263ccd8e176SBarry Smith     ierr = (*f)(B,i,j,v);CHKERRQ(ierr);
3264ccd8e176SBarry Smith   }
3265ccd8e176SBarry Smith   PetscFunctionReturn(0);
3266ccd8e176SBarry Smith }
3267ccd8e176SBarry Smith 
3268ccd8e176SBarry Smith #undef __FUNCT__
32694a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation"
3270273d9f13SBarry Smith /*@C
3271ccd8e176SBarry Smith    MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format
3272273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3273273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3274273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3275273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3276273d9f13SBarry Smith 
3277273d9f13SBarry Smith    Collective on MPI_Comm
3278273d9f13SBarry Smith 
3279273d9f13SBarry Smith    Input Parameters:
3280273d9f13SBarry Smith +  A - the matrix
3281273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3282273d9f13SBarry Smith            (same value is used for all local rows)
3283273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3284273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3285273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3286273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3287273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3288273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
3289273d9f13SBarry Smith            submatrix (same value is used for all local rows).
3290273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
3291273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
3292273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
3293273d9f13SBarry Smith            structure. The size of this array is equal to the number
3294273d9f13SBarry Smith            of local rows, i.e 'm'.
3295273d9f13SBarry Smith 
329649a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
329749a6f317SBarry Smith 
3298273d9f13SBarry Smith    The AIJ format (also called the Yale sparse matrix format or
3299ccd8e176SBarry Smith    compressed row storage (CSR)), is fully compatible with standard Fortran 77
3300ccd8e176SBarry Smith    storage.  The stored row and column indices begin with zero.  See the users manual for details.
3301273d9f13SBarry Smith 
3302273d9f13SBarry Smith    The parallel matrix is partitioned such that the first m0 rows belong to
3303273d9f13SBarry Smith    process 0, the next m1 rows belong to process 1, the next m2 rows belong
3304273d9f13SBarry Smith    to process 2 etc.. where m0,m1,m2... are the input parameter 'm'.
3305273d9f13SBarry Smith 
3306273d9f13SBarry Smith    The DIAGONAL portion of the local submatrix of a processor can be defined
3307273d9f13SBarry Smith    as the submatrix which is obtained by extraction the part corresponding
3308273d9f13SBarry Smith    to the rows r1-r2 and columns r1-r2 of the global matrix, where r1 is the
3309273d9f13SBarry Smith    first row that belongs to the processor, and r2 is the last row belonging
3310273d9f13SBarry Smith    to the this processor. This is a square mxm matrix. The remaining portion
3311273d9f13SBarry Smith    of the local submatrix (mxN) constitute the OFF-DIAGONAL portion.
3312273d9f13SBarry Smith 
3313273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
3314273d9f13SBarry Smith 
3315aa95bbe8SBarry Smith    You can call MatGetInfo() to get information on how effective the preallocation was;
3316aa95bbe8SBarry Smith    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3317aa95bbe8SBarry Smith    You can also run with the option -info and look for messages with the string
3318aa95bbe8SBarry Smith    malloc in them to see if additional memory allocation was needed.
3319aa95bbe8SBarry Smith 
3320273d9f13SBarry Smith    Example usage:
3321273d9f13SBarry Smith 
3322273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
3323273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
3324273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
3325273d9f13SBarry Smith    as follows:
3326273d9f13SBarry Smith 
3327273d9f13SBarry Smith .vb
3328273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
3329273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
3330273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
3331273d9f13SBarry Smith     -------------------------------------
3332273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
3333273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
3334273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
3335273d9f13SBarry Smith     -------------------------------------
3336273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
3337273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
3338273d9f13SBarry Smith .ve
3339273d9f13SBarry Smith 
3340273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
3341273d9f13SBarry Smith 
3342273d9f13SBarry Smith .vb
3343273d9f13SBarry Smith       A B C
3344273d9f13SBarry Smith       D E F
3345273d9f13SBarry Smith       G H I
3346273d9f13SBarry Smith .ve
3347273d9f13SBarry Smith 
3348273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
3349273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
3350273d9f13SBarry Smith 
3351273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3352273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3353273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
3354273d9f13SBarry Smith 
3355273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
3356273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
3357273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
3358273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
3359273d9f13SBarry Smith    part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.
3361273d9f13SBarry Smith 
3362273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
3363273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
3364273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
3366273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
3367273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
3368273d9f13SBarry Smith .vb
3369273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
3370273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
3371273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3372273d9f13SBarry Smith .ve
3373273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
3374273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
3375273d9f13SBarry Smith    for proc3. i.e we are using 12+15+10=37 storage locations to store
3376273d9f13SBarry Smith    34 values.
3377273d9f13SBarry Smith 
3378273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3380273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3381273d9f13SBarry Smith .vb
3382273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3383273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3384273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3385273d9f13SBarry Smith .ve
3386273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3387273d9f13SBarry Smith    hence pre-allocation is perfect.
3388273d9f13SBarry Smith 
3389273d9f13SBarry Smith    Level: intermediate
3390273d9f13SBarry Smith 
3391273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3392273d9f13SBarry Smith 
3393ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(),
3394aa95bbe8SBarry Smith           MPIAIJ, MatGetInfo()
3395273d9f13SBarry Smith @*/
3396be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3397273d9f13SBarry Smith {
3398b1d57f15SBarry Smith   PetscErrorCode ierr,(*f)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
3399273d9f13SBarry Smith 
3400273d9f13SBarry Smith   PetscFunctionBegin;
3401a23d5eceSKris Buschelman   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
3402a23d5eceSKris Buschelman   if (f) {
3403a23d5eceSKris Buschelman     ierr = (*f)(B,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3404273d9f13SBarry Smith   }
3405273d9f13SBarry Smith   PetscFunctionReturn(0);
3406273d9f13SBarry Smith }
3407273d9f13SBarry Smith 
34084a2ae208SSatish Balay #undef __FUNCT__
34092fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays"
341058d36128SBarry Smith /*@
34112fb0ec9aSBarry Smith      MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard
34122fb0ec9aSBarry Smith          CSR format the local rows.
34132fb0ec9aSBarry Smith 
34142fb0ec9aSBarry Smith    Collective on MPI_Comm
34152fb0ec9aSBarry Smith 
34162fb0ec9aSBarry Smith    Input Parameters:
34172fb0ec9aSBarry Smith +  comm - MPI communicator
34182fb0ec9aSBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
34192fb0ec9aSBarry Smith .  n - This value should be the same as the local size used in creating the
34202fb0ec9aSBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
34212fb0ec9aSBarry Smith        calculated if N is given) For square matrices n is almost always m.
34222fb0ec9aSBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
34232fb0ec9aSBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
34242fb0ec9aSBarry Smith .   i - row indices
34252fb0ec9aSBarry Smith .   j - column indices
34262fb0ec9aSBarry Smith -   a - matrix values
34272fb0ec9aSBarry Smith 
34282fb0ec9aSBarry Smith    Output Parameter:
34292fb0ec9aSBarry Smith .   mat - the matrix
343003bfb495SBarry Smith 
34312fb0ec9aSBarry Smith    Level: intermediate
34322fb0ec9aSBarry Smith 
34332fb0ec9aSBarry Smith    Notes:
34342fb0ec9aSBarry Smith        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
34352fb0ec9aSBarry Smith      thus you CANNOT change the matrix entries by changing the values of a[] after you have
34368d7a6e47SBarry Smith      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
34372fb0ec9aSBarry Smith 
343812251496SSatish Balay        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
343912251496SSatish Balay 
344012251496SSatish Balay        The format which is used for the sparse matrix input, is equivalent to a
    row-major ordering, i.e. for the following matrix, the input data expected is
344212251496SSatish Balay     as shown:
344312251496SSatish Balay 
344412251496SSatish Balay         1 0 0
344512251496SSatish Balay         2 0 3     P0
344612251496SSatish Balay        -------
344712251496SSatish Balay         4 5 6     P1
344812251496SSatish Balay 
344912251496SSatish Balay      Process0 [P0]: rows_owned=[0,1]
345012251496SSatish Balay         i =  {0,1,3}  [size = nrow+1  = 2+1]
345112251496SSatish Balay         j =  {0,0,2}  [size = nz = 6]
345212251496SSatish Balay         v =  {1,2,3}  [size = nz = 6]
345312251496SSatish Balay 
345412251496SSatish Balay      Process1 [P1]: rows_owned=[2]
345512251496SSatish Balay         i =  {0,3}    [size = nrow+1  = 1+1]
345612251496SSatish Balay         j =  {0,1,2}  [size = nz = 6]
345712251496SSatish Balay         v =  {4,5,6}  [size = nz = 6]
34582fb0ec9aSBarry Smith 
34592fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
34602fb0ec9aSBarry Smith 
34612fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
34628d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays()
34632fb0ec9aSBarry Smith @*/
346482b90586SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
34652fb0ec9aSBarry Smith {
34662fb0ec9aSBarry Smith   PetscErrorCode ierr;
34672fb0ec9aSBarry Smith 
34682fb0ec9aSBarry Smith  PetscFunctionBegin;
34692fb0ec9aSBarry Smith   if (i[0]) {
34702fb0ec9aSBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
34712fb0ec9aSBarry Smith   }
34722fb0ec9aSBarry Smith   if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
34732fb0ec9aSBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
3474d4146a68SBarry Smith   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
34752fb0ec9aSBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
34762fb0ec9aSBarry Smith   ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr);
34772fb0ec9aSBarry Smith   PetscFunctionReturn(0);
34782fb0ec9aSBarry Smith }
34792fb0ec9aSBarry Smith 
34802fb0ec9aSBarry Smith #undef __FUNCT__
34814a2ae208SSatish Balay #define __FUNCT__ "MatCreateMPIAIJ"
3482273d9f13SBarry Smith /*@C
3483273d9f13SBarry Smith    MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format
3484273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3485273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3486273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3487273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3488273d9f13SBarry Smith 
3489273d9f13SBarry Smith    Collective on MPI_Comm
3490273d9f13SBarry Smith 
3491273d9f13SBarry Smith    Input Parameters:
3492273d9f13SBarry Smith +  comm - MPI communicator
3493273d9f13SBarry Smith .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3494273d9f13SBarry Smith            This value should be the same as the local size used in creating the
3495273d9f13SBarry Smith            y vector for the matrix-vector product y = Ax.
3496273d9f13SBarry Smith .  n - This value should be the same as the local size used in creating the
3497273d9f13SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3498273d9f13SBarry Smith        calculated if N is given) For square matrices n is almost always m.
3499273d9f13SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3500273d9f13SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3501273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3502273d9f13SBarry Smith            (same value is used for all local rows)
3503273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3504273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3505273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3506273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3507273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3508273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
3509273d9f13SBarry Smith            submatrix (same value is used for all local rows).
3510273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
3511273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
3512273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
3513273d9f13SBarry Smith            structure. The size of this array is equal to the number
3514273d9f13SBarry Smith            of local rows, i.e 'm'.
3515273d9f13SBarry Smith 
3516273d9f13SBarry Smith    Output Parameter:
3517273d9f13SBarry Smith .  A - the matrix
3518273d9f13SBarry Smith 
3519175b88e8SBarry Smith    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
   MatXXXXSetPreallocation() paradigm instead of this routine directly.
3521175b88e8SBarry Smith    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3522175b88e8SBarry Smith 
3523273d9f13SBarry Smith    Notes:
352449a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
352549a6f317SBarry Smith 
3526273d9f13SBarry Smith    m,n,M,N parameters specify the size of the matrix, and its partitioning across
3527273d9f13SBarry Smith    processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate
3528273d9f13SBarry Smith    storage requirements for this matrix.
3529273d9f13SBarry Smith 
3530273d9f13SBarry Smith    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one
3531273d9f13SBarry Smith    processor than it must be used on all processors that share the object for
3532273d9f13SBarry Smith    that argument.
3533273d9f13SBarry Smith 
3534273d9f13SBarry Smith    The user MUST specify either the local or global matrix dimensions
3535273d9f13SBarry Smith    (possibly both).
3536273d9f13SBarry Smith 
353733a7c187SSatish Balay    The parallel matrix is partitioned across processors such that the
353833a7c187SSatish Balay    first m0 rows belong to process 0, the next m1 rows belong to
353933a7c187SSatish Balay    process 1, the next m2 rows belong to process 2 etc.. where
354033a7c187SSatish Balay    m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores
354133a7c187SSatish Balay    values corresponding to [m x N] submatrix.
3542273d9f13SBarry Smith 
354333a7c187SSatish Balay    The columns are logically partitioned with the n0 columns belonging
354433a7c187SSatish Balay    to 0th partition, the next n1 columns belonging to the next
   partition etc.. where n0,n1,n2... are the input parameter 'n'.
354633a7c187SSatish Balay 
354733a7c187SSatish Balay    The DIAGONAL portion of the local submatrix on any given processor
354833a7c187SSatish Balay    is the submatrix corresponding to the rows and columns m,n
354933a7c187SSatish Balay    corresponding to the given processor. i.e diagonal matrix on
355033a7c187SSatish Balay    process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1]
355133a7c187SSatish Balay    etc. The remaining portion of the local submatrix [m x (N-n)]
355233a7c187SSatish Balay    constitute the OFF-DIAGONAL portion. The example below better
355333a7c187SSatish Balay    illustrates this concept.
355433a7c187SSatish Balay 
355533a7c187SSatish Balay    For a square global matrix we define each processor's diagonal portion
355633a7c187SSatish Balay    to be its local rows and the corresponding columns (a square submatrix);
355733a7c187SSatish Balay    each processor's off-diagonal portion encompasses the remainder of the
355833a7c187SSatish Balay    local matrix (a rectangular submatrix).
3559273d9f13SBarry Smith 
3560273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
3561273d9f13SBarry Smith 
356297d05335SKris Buschelman    When calling this routine with a single process communicator, a matrix of
356397d05335SKris Buschelman    type SEQAIJ is returned.  If a matrix of type MPIAIJ is desired for this
356497d05335SKris Buschelman    type of communicator, use the construction mechanism:
3565*78102f6cSMatthew Knepley      MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...);
356697d05335SKris Buschelman 
3567273d9f13SBarry Smith    By default, this format uses inodes (identical nodes) when possible.
3568273d9f13SBarry Smith    We search for consecutive rows with the same nonzero structure, thereby
3569273d9f13SBarry Smith    reusing matrix information to achieve increased efficiency.
3570273d9f13SBarry Smith 
3571273d9f13SBarry Smith    Options Database Keys:
3572923f20ffSKris Buschelman +  -mat_no_inode  - Do not use inodes
3573923f20ffSKris Buschelman .  -mat_inode_limit <limit> - Sets inode limit (max limit=5)
3574273d9f13SBarry Smith -  -mat_aij_oneindex - Internally use indexing starting at 1
3575273d9f13SBarry Smith         rather than 0.  Note that when calling MatSetValues(),
3576273d9f13SBarry Smith         the user still MUST index entries starting at 0!
3577273d9f13SBarry Smith 
3578273d9f13SBarry Smith 
3579273d9f13SBarry Smith    Example usage:
3580273d9f13SBarry Smith 
3581273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
3582273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
3583273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
3584273d9f13SBarry Smith    as follows:
3585273d9f13SBarry Smith 
3586273d9f13SBarry Smith .vb
3587273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
3588273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
3589273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
3590273d9f13SBarry Smith     -------------------------------------
3591273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
3592273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
3593273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
3594273d9f13SBarry Smith     -------------------------------------
3595273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
3596273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
3597273d9f13SBarry Smith .ve
3598273d9f13SBarry Smith 
3599273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
3600273d9f13SBarry Smith 
3601273d9f13SBarry Smith .vb
3602273d9f13SBarry Smith       A B C
3603273d9f13SBarry Smith       D E F
3604273d9f13SBarry Smith       G H I
3605273d9f13SBarry Smith .ve
3606273d9f13SBarry Smith 
3607273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
3608273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
3609273d9f13SBarry Smith 
3610273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3611273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3612273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
3613273d9f13SBarry Smith 
3614273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
3615273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
3616273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
3617273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
3618273d9f13SBarry Smith    part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.
3620273d9f13SBarry Smith 
3621273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
3622273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
3623273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
3625273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
3626273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
3627273d9f13SBarry Smith .vb
3628273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
3629273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
3630273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3631273d9f13SBarry Smith .ve
3632273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
3633273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2. i.e we are using 12+15+10=37 storage locations to store
3635273d9f13SBarry Smith    34 values.
3636273d9f13SBarry Smith 
3637273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3639273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3640273d9f13SBarry Smith .vb
3641273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3642273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3643273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3644273d9f13SBarry Smith .ve
3645273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3646273d9f13SBarry Smith    hence pre-allocation is perfect.
3647273d9f13SBarry Smith 
3648273d9f13SBarry Smith    Level: intermediate
3649273d9f13SBarry Smith 
3650273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3651273d9f13SBarry Smith 
3652ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
36532fb0ec9aSBarry Smith           MPIAIJ, MatCreateMPIAIJWithArrays()
3654273d9f13SBarry Smith @*/
3655be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3656273d9f13SBarry Smith {
36576849ba73SBarry Smith   PetscErrorCode ierr;
3658b1d57f15SBarry Smith   PetscMPIInt    size;
3659273d9f13SBarry Smith 
3660273d9f13SBarry Smith   PetscFunctionBegin;
3661f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3662f69a0ea3SMatthew Knepley   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3663273d9f13SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3664273d9f13SBarry Smith   if (size > 1) {
3665273d9f13SBarry Smith     ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr);
3666273d9f13SBarry Smith     ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3667273d9f13SBarry Smith   } else {
3668273d9f13SBarry Smith     ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr);
3669273d9f13SBarry Smith     ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr);
3670273d9f13SBarry Smith   }
3671273d9f13SBarry Smith   PetscFunctionReturn(0);
3672273d9f13SBarry Smith }
3673195d93cdSBarry Smith 
36744a2ae208SSatish Balay #undef __FUNCT__
36754a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ"
3676be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[])
3677195d93cdSBarry Smith {
3678195d93cdSBarry Smith   Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;
3679b1d57f15SBarry Smith 
3680195d93cdSBarry Smith   PetscFunctionBegin;
3681195d93cdSBarry Smith   *Ad     = a->A;
3682195d93cdSBarry Smith   *Ao     = a->B;
3683195d93cdSBarry Smith   *colmap = a->garray;
3684195d93cdSBarry Smith   PetscFunctionReturn(0);
3685195d93cdSBarry Smith }
3686a2243be0SBarry Smith 
3687a2243be0SBarry Smith #undef __FUNCT__
3688a2243be0SBarry Smith #define __FUNCT__ "MatSetColoring_MPIAIJ"
3689dfbe8321SBarry Smith PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring)
3690a2243be0SBarry Smith {
3691dfbe8321SBarry Smith   PetscErrorCode ierr;
3692b1d57f15SBarry Smith   PetscInt       i;
3693a2243be0SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
3694a2243be0SBarry Smith 
3695a2243be0SBarry Smith   PetscFunctionBegin;
36968ee2e534SBarry Smith   if (coloring->ctype == IS_COLORING_GLOBAL) {
369708b6dcc0SBarry Smith     ISColoringValue *allcolors,*colors;
3698a2243be0SBarry Smith     ISColoring      ocoloring;
3699a2243be0SBarry Smith 
3700a2243be0SBarry Smith     /* set coloring for diagonal portion */
3701a2243be0SBarry Smith     ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr);
3702a2243be0SBarry Smith 
3703a2243be0SBarry Smith     /* set coloring for off-diagonal portion */
37047adad957SLisandro Dalcin     ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr);
3705d0f46423SBarry Smith     ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
3706d0f46423SBarry Smith     for (i=0; i<a->B->cmap->n; i++) {
3707a2243be0SBarry Smith       colors[i] = allcolors[a->garray[i]];
3708a2243be0SBarry Smith     }
3709a2243be0SBarry Smith     ierr = PetscFree(allcolors);CHKERRQ(ierr);
3710d0f46423SBarry Smith     ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
3711a2243be0SBarry Smith     ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
3712a2243be0SBarry Smith     ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
3713a2243be0SBarry Smith   } else if (coloring->ctype == IS_COLORING_GHOSTED) {
371408b6dcc0SBarry Smith     ISColoringValue *colors;
3715b1d57f15SBarry Smith     PetscInt        *larray;
3716a2243be0SBarry Smith     ISColoring      ocoloring;
3717a2243be0SBarry Smith 
3718a2243be0SBarry Smith     /* set coloring for diagonal portion */
3719d0f46423SBarry Smith     ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
3720d0f46423SBarry Smith     for (i=0; i<a->A->cmap->n; i++) {
3721d0f46423SBarry Smith       larray[i] = i + A->cmap->rstart;
3722a2243be0SBarry Smith     }
3723d0f46423SBarry Smith     ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr);
3724d0f46423SBarry Smith     ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
3725d0f46423SBarry Smith     for (i=0; i<a->A->cmap->n; i++) {
3726a2243be0SBarry Smith       colors[i] = coloring->colors[larray[i]];
3727a2243be0SBarry Smith     }
3728a2243be0SBarry Smith     ierr = PetscFree(larray);CHKERRQ(ierr);
3729d0f46423SBarry Smith     ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
3730a2243be0SBarry Smith     ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr);
3731a2243be0SBarry Smith     ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
3732a2243be0SBarry Smith 
3733a2243be0SBarry Smith     /* set coloring for off-diagonal portion */
3734d0f46423SBarry Smith     ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
3735d0f46423SBarry Smith     ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr);
3736d0f46423SBarry Smith     ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
3737d0f46423SBarry Smith     for (i=0; i<a->B->cmap->n; i++) {
3738a2243be0SBarry Smith       colors[i] = coloring->colors[larray[i]];
3739a2243be0SBarry Smith     }
3740a2243be0SBarry Smith     ierr = PetscFree(larray);CHKERRQ(ierr);
3741d0f46423SBarry Smith     ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
3742a2243be0SBarry Smith     ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
3743a2243be0SBarry Smith     ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
3744a2243be0SBarry Smith   } else {
374577431f27SBarry Smith     SETERRQ1(PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype);
3746a2243be0SBarry Smith   }
3747a2243be0SBarry Smith 
3748a2243be0SBarry Smith   PetscFunctionReturn(0);
3749a2243be0SBarry Smith }
3750a2243be0SBarry Smith 
#if defined(PETSC_HAVE_ADIC)
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesAdic_MPIAIJ"
/* Forwards ADIC-differentiated values into both the diagonal and
   off-diagonal SeqAIJ pieces of the MPIAIJ matrix. */
PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetValuesAdic_SeqAIJ(aij->A,advalues);CHKERRQ(ierr);
  ierr = MatSetValuesAdic_SeqAIJ(aij->B,advalues);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
3765779c1a83SBarry Smith 
3766779c1a83SBarry Smith #undef __FUNCT__
3767779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ"
3768b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues)
3769779c1a83SBarry Smith {
3770779c1a83SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
3771dfbe8321SBarry Smith   PetscErrorCode ierr;
3772779c1a83SBarry Smith 
3773779c1a83SBarry Smith   PetscFunctionBegin;
3774779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr);
3775779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr);
3776a2243be0SBarry Smith   PetscFunctionReturn(0);
3777a2243be0SBarry Smith }
3778c5d6d63eSBarry Smith 
3779c5d6d63eSBarry Smith #undef __FUNCT__
378051dd7536SBarry Smith #define __FUNCT__ "MatMerge"
3781bc08b0f1SBarry Smith /*@
      MatMerge - Creates a single large PETSc matrix by concatenating sequential
378351dd7536SBarry Smith                  matrices from each processor
3784c5d6d63eSBarry Smith 
3785c5d6d63eSBarry Smith     Collective on MPI_Comm
3786c5d6d63eSBarry Smith 
3787c5d6d63eSBarry Smith    Input Parameters:
378851dd7536SBarry Smith +    comm - the communicators the parallel matrix will live on
3789d6bb3c2dSHong Zhang .    inmat - the input sequential matrices
37900e36024fSHong Zhang .    n - number of local columns (or PETSC_DECIDE)
3791d6bb3c2dSHong Zhang -    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
379251dd7536SBarry Smith 
379351dd7536SBarry Smith    Output Parameter:
379451dd7536SBarry Smith .    outmat - the parallel matrix generated
3795c5d6d63eSBarry Smith 
37967e25d530SSatish Balay     Level: advanced
37977e25d530SSatish Balay 
3798f08fae4eSHong Zhang    Notes: The number of columns of the matrix in EACH processor MUST be the same.
3799c5d6d63eSBarry Smith 
3800c5d6d63eSBarry Smith @*/
3801be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
3802c5d6d63eSBarry Smith {
3803dfbe8321SBarry Smith   PetscErrorCode ierr;
3804b7940d39SSatish Balay   PetscInt       m,N,i,rstart,nnz,Ii,*dnz,*onz;
3805ba8c8a56SBarry Smith   PetscInt       *indx;
3806ba8c8a56SBarry Smith   PetscScalar    *values;
3807c5d6d63eSBarry Smith 
3808c5d6d63eSBarry Smith   PetscFunctionBegin;
38090e36024fSHong Zhang   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
3810d6bb3c2dSHong Zhang   if (scall == MAT_INITIAL_MATRIX){
3811d6bb3c2dSHong Zhang     /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */
38120e36024fSHong Zhang     if (n == PETSC_DECIDE){
3813357abbc8SBarry Smith       ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
38140e36024fSHong Zhang     }
3815357abbc8SBarry Smith     ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
3816357abbc8SBarry Smith     rstart -= m;
3817d6bb3c2dSHong Zhang 
3818d6bb3c2dSHong Zhang     ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
3819d6bb3c2dSHong Zhang     for (i=0;i<m;i++) {
3820ba8c8a56SBarry Smith       ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
3821d6bb3c2dSHong Zhang       ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr);
3822ba8c8a56SBarry Smith       ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
3823d6bb3c2dSHong Zhang     }
3824d6bb3c2dSHong Zhang     /* This routine will ONLY return MPIAIJ type matrix */
3825f69a0ea3SMatthew Knepley     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
3826f69a0ea3SMatthew Knepley     ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
3827d6bb3c2dSHong Zhang     ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr);
3828d6bb3c2dSHong Zhang     ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr);
3829d6bb3c2dSHong Zhang     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
3830d6bb3c2dSHong Zhang 
3831d6bb3c2dSHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
3832d6bb3c2dSHong Zhang     ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr);
3833d6bb3c2dSHong Zhang   } else {
383477431f27SBarry Smith     SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
3835d6bb3c2dSHong Zhang   }
3836d6bb3c2dSHong Zhang 
3837d6bb3c2dSHong Zhang   for (i=0;i<m;i++) {
3838ba8c8a56SBarry Smith     ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
3839b7940d39SSatish Balay     Ii    = i + rstart;
3840b7940d39SSatish Balay     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
3841ba8c8a56SBarry Smith     ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
3842d6bb3c2dSHong Zhang   }
3843d6bb3c2dSHong Zhang   ierr = MatDestroy(inmat);CHKERRQ(ierr);
3844d6bb3c2dSHong Zhang   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3845d6bb3c2dSHong Zhang   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
384651dd7536SBarry Smith 
3847c5d6d63eSBarry Smith   PetscFunctionReturn(0);
3848c5d6d63eSBarry Smith }
3849c5d6d63eSBarry Smith 
3850c5d6d63eSBarry Smith #undef __FUNCT__
3851c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit"
3852dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile)
3853c5d6d63eSBarry Smith {
3854dfbe8321SBarry Smith   PetscErrorCode    ierr;
385532dcc486SBarry Smith   PetscMPIInt       rank;
3856b1d57f15SBarry Smith   PetscInt          m,N,i,rstart,nnz;
3857de4209c5SBarry Smith   size_t            len;
3858b1d57f15SBarry Smith   const PetscInt    *indx;
3859c5d6d63eSBarry Smith   PetscViewer       out;
3860c5d6d63eSBarry Smith   char              *name;
3861c5d6d63eSBarry Smith   Mat               B;
3862b3cc6726SBarry Smith   const PetscScalar *values;
3863c5d6d63eSBarry Smith 
3864c5d6d63eSBarry Smith   PetscFunctionBegin;
3865c5d6d63eSBarry Smith   ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr);
3866c5d6d63eSBarry Smith   ierr = MatGetSize(A,0,&N);CHKERRQ(ierr);
3867f204ca49SKris Buschelman   /* Should this be the type of the diagonal block of A? */
3868f69a0ea3SMatthew Knepley   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
3869f69a0ea3SMatthew Knepley   ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr);
3870f204ca49SKris Buschelman   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
3871f204ca49SKris Buschelman   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
3872c5d6d63eSBarry Smith   ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr);
3873c5d6d63eSBarry Smith   for (i=0;i<m;i++) {
3874c5d6d63eSBarry Smith     ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
3875c5d6d63eSBarry Smith     ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
3876c5d6d63eSBarry Smith     ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
3877c5d6d63eSBarry Smith   }
3878c5d6d63eSBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3879c5d6d63eSBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3880c5d6d63eSBarry Smith 
38817adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);
3882c5d6d63eSBarry Smith   ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr);
3883c5d6d63eSBarry Smith   ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr);
3884c5d6d63eSBarry Smith   sprintf(name,"%s.%d",outfile,rank);
3885852598b0SBarry Smith   ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr);
3886c5d6d63eSBarry Smith   ierr = PetscFree(name);
3887c5d6d63eSBarry Smith   ierr = MatView(B,out);CHKERRQ(ierr);
3888c5d6d63eSBarry Smith   ierr = PetscViewerDestroy(out);CHKERRQ(ierr);
3889c5d6d63eSBarry Smith   ierr = MatDestroy(B);CHKERRQ(ierr);
3890c5d6d63eSBarry Smith   PetscFunctionReturn(0);
3891c5d6d63eSBarry Smith }
3892e5f2cdd8SHong Zhang 
389351a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
389451a7d1a8SHong Zhang #undef __FUNCT__
389551a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI"
3896be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A)
389751a7d1a8SHong Zhang {
389851a7d1a8SHong Zhang   PetscErrorCode       ierr;
3899671beff6SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
3900776b82aeSLisandro Dalcin   PetscContainer       container;
390151a7d1a8SHong Zhang 
390251a7d1a8SHong Zhang   PetscFunctionBegin;
3903671beff6SHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
3904671beff6SHong Zhang   if (container) {
3905776b82aeSLisandro Dalcin     ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
390651a7d1a8SHong Zhang     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
39073e06a4e6SHong Zhang     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
39083e06a4e6SHong Zhang     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
390951a7d1a8SHong Zhang     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
391051a7d1a8SHong Zhang     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
391102c68681SHong Zhang     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
391202c68681SHong Zhang     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
391305b42c5fSBarry Smith     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
391405b42c5fSBarry Smith     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
391505b42c5fSBarry Smith     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
39162c72b5baSSatish Balay     ierr = PetscFree(merge->rowmap.range);CHKERRQ(ierr);
3917671beff6SHong Zhang 
3918776b82aeSLisandro Dalcin     ierr = PetscContainerDestroy(container);CHKERRQ(ierr);
3919671beff6SHong Zhang     ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr);
3920671beff6SHong Zhang   }
392151a7d1a8SHong Zhang   ierr = PetscFree(merge);CHKERRQ(ierr);
392251a7d1a8SHong Zhang 
392351a7d1a8SHong Zhang   ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
392451a7d1a8SHong Zhang   PetscFunctionReturn(0);
392551a7d1a8SHong Zhang }
392651a7d1a8SHong Zhang 
39277c4f633dSBarry Smith #include "../src/mat/utils/freespace.h"
3928be0fcf8dSHong Zhang #include "petscbt.h"
39294ebed01fSBarry Smith 
3930e5f2cdd8SHong Zhang #undef __FUNCT__
393138f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric"
3932e5f2cdd8SHong Zhang /*@C
      MatMerge_SeqsToMPI - Creates an MPIAIJ matrix by adding sequential
3934e5f2cdd8SHong Zhang                  matrices from each processor
3935e5f2cdd8SHong Zhang 
3936e5f2cdd8SHong Zhang     Collective on MPI_Comm
3937e5f2cdd8SHong Zhang 
3938e5f2cdd8SHong Zhang    Input Parameters:
3939e5f2cdd8SHong Zhang +    comm - the communicators the parallel matrix will live on
3940f08fae4eSHong Zhang .    seqmat - the input sequential matrices
39410e36024fSHong Zhang .    m - number of local rows (or PETSC_DECIDE)
39420e36024fSHong Zhang .    n - number of local columns (or PETSC_DECIDE)
3943e5f2cdd8SHong Zhang -    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
3944e5f2cdd8SHong Zhang 
3945e5f2cdd8SHong Zhang    Output Parameter:
3946f08fae4eSHong Zhang .    mpimat - the parallel matrix generated
3947e5f2cdd8SHong Zhang 
3948e5f2cdd8SHong Zhang     Level: advanced
3949e5f2cdd8SHong Zhang 
3950affca5deSHong Zhang    Notes:
3951affca5deSHong Zhang      The dimensions of the sequential matrix in each processor MUST be the same.
3952affca5deSHong Zhang      The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be
3953affca5deSHong Zhang      destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat.
3954e5f2cdd8SHong Zhang @*/
3955be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat)
395655d1abb9SHong Zhang {
395755d1abb9SHong Zhang   PetscErrorCode       ierr;
39587adad957SLisandro Dalcin   MPI_Comm             comm=((PetscObject)mpimat)->comm;
395955d1abb9SHong Zhang   Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
3960b1d57f15SBarry Smith   PetscMPIInt          size,rank,taga,*len_s;
3961d0f46423SBarry Smith   PetscInt             N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j;
3962b1d57f15SBarry Smith   PetscInt             proc,m;
3963b1d57f15SBarry Smith   PetscInt             **buf_ri,**buf_rj;
3964b1d57f15SBarry Smith   PetscInt             k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj;
3965b1d57f15SBarry Smith   PetscInt             nrows,**buf_ri_k,**nextrow,**nextai;
396655d1abb9SHong Zhang   MPI_Request          *s_waits,*r_waits;
396755d1abb9SHong Zhang   MPI_Status           *status;
3968a77337e4SBarry Smith   MatScalar            *aa=a->a;
3969dd6ea824SBarry Smith   MatScalar            **abuf_r,*ba_i;
397055d1abb9SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
3971776b82aeSLisandro Dalcin   PetscContainer       container;
397255d1abb9SHong Zhang 
397355d1abb9SHong Zhang   PetscFunctionBegin;
39744ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
39753c2c1871SHong Zhang 
397655d1abb9SHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
397755d1abb9SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
397855d1abb9SHong Zhang 
397955d1abb9SHong Zhang   ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
398055d1abb9SHong Zhang   if (container) {
3981776b82aeSLisandro Dalcin     ierr  = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
398255d1abb9SHong Zhang   }
398355d1abb9SHong Zhang   bi     = merge->bi;
398455d1abb9SHong Zhang   bj     = merge->bj;
398555d1abb9SHong Zhang   buf_ri = merge->buf_ri;
398655d1abb9SHong Zhang   buf_rj = merge->buf_rj;
398755d1abb9SHong Zhang 
398855d1abb9SHong Zhang   ierr   = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
3989357abbc8SBarry Smith   owners = merge->rowmap.range;
399055d1abb9SHong Zhang   len_s  = merge->len_s;
399155d1abb9SHong Zhang 
399255d1abb9SHong Zhang   /* send and recv matrix values */
399355d1abb9SHong Zhang   /*-----------------------------*/
3994357abbc8SBarry Smith   ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr);
399555d1abb9SHong Zhang   ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr);
399655d1abb9SHong Zhang 
399755d1abb9SHong Zhang   ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr);
399855d1abb9SHong Zhang   for (proc=0,k=0; proc<size; proc++){
399955d1abb9SHong Zhang     if (!len_s[proc]) continue;
400055d1abb9SHong Zhang     i = owners[proc];
400155d1abb9SHong Zhang     ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr);
400255d1abb9SHong Zhang     k++;
400355d1abb9SHong Zhang   }
400455d1abb9SHong Zhang 
40050c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);}
40060c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);}
400755d1abb9SHong Zhang   ierr = PetscFree(status);CHKERRQ(ierr);
400855d1abb9SHong Zhang 
400955d1abb9SHong Zhang   ierr = PetscFree(s_waits);CHKERRQ(ierr);
401055d1abb9SHong Zhang   ierr = PetscFree(r_waits);CHKERRQ(ierr);
401155d1abb9SHong Zhang 
401255d1abb9SHong Zhang   /* insert mat values of mpimat */
401355d1abb9SHong Zhang   /*----------------------------*/
4014a77337e4SBarry Smith   ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr);
4015b1d57f15SBarry Smith   ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
401655d1abb9SHong Zhang   nextrow = buf_ri_k + merge->nrecv;
401755d1abb9SHong Zhang   nextai  = nextrow + merge->nrecv;
401855d1abb9SHong Zhang 
401955d1abb9SHong Zhang   for (k=0; k<merge->nrecv; k++){
402055d1abb9SHong Zhang     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
402155d1abb9SHong Zhang     nrows = *(buf_ri_k[k]);
402255d1abb9SHong Zhang     nextrow[k]  = buf_ri_k[k]+1;  /* next row number of k-th recved i-structure */
402355d1abb9SHong Zhang     nextai[k]   = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure  */
402455d1abb9SHong Zhang   }
402555d1abb9SHong Zhang 
402655d1abb9SHong Zhang   /* set values of ba */
4027357abbc8SBarry Smith   m = merge->rowmap.n;
402855d1abb9SHong Zhang   for (i=0; i<m; i++) {
402955d1abb9SHong Zhang     arow = owners[rank] + i;
403055d1abb9SHong Zhang     bj_i = bj+bi[i];  /* col indices of the i-th row of mpimat */
403155d1abb9SHong Zhang     bnzi = bi[i+1] - bi[i];
4032a77337e4SBarry Smith     ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr);
403355d1abb9SHong Zhang 
403455d1abb9SHong Zhang     /* add local non-zero vals of this proc's seqmat into ba */
403555d1abb9SHong Zhang     anzi = ai[arow+1] - ai[arow];
403655d1abb9SHong Zhang     aj   = a->j + ai[arow];
403755d1abb9SHong Zhang     aa   = a->a + ai[arow];
403855d1abb9SHong Zhang     nextaj = 0;
403955d1abb9SHong Zhang     for (j=0; nextaj<anzi; j++){
404055d1abb9SHong Zhang       if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
404155d1abb9SHong Zhang         ba_i[j] += aa[nextaj++];
404255d1abb9SHong Zhang       }
404355d1abb9SHong Zhang     }
404455d1abb9SHong Zhang 
404555d1abb9SHong Zhang     /* add received vals into ba */
404655d1abb9SHong Zhang     for (k=0; k<merge->nrecv; k++){ /* k-th received message */
404755d1abb9SHong Zhang       /* i-th row */
404855d1abb9SHong Zhang       if (i == *nextrow[k]) {
404955d1abb9SHong Zhang         anzi = *(nextai[k]+1) - *nextai[k];
405055d1abb9SHong Zhang         aj   = buf_rj[k] + *(nextai[k]);
405155d1abb9SHong Zhang         aa   = abuf_r[k] + *(nextai[k]);
405255d1abb9SHong Zhang         nextaj = 0;
405355d1abb9SHong Zhang         for (j=0; nextaj<anzi; j++){
405455d1abb9SHong Zhang           if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
405555d1abb9SHong Zhang             ba_i[j] += aa[nextaj++];
405655d1abb9SHong Zhang           }
405755d1abb9SHong Zhang         }
405855d1abb9SHong Zhang         nextrow[k]++; nextai[k]++;
405955d1abb9SHong Zhang       }
406055d1abb9SHong Zhang     }
406155d1abb9SHong Zhang     ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr);
406255d1abb9SHong Zhang   }
406355d1abb9SHong Zhang   ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
406455d1abb9SHong Zhang   ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
406555d1abb9SHong Zhang 
406655d1abb9SHong Zhang   ierr = PetscFree(abuf_r);CHKERRQ(ierr);
406755d1abb9SHong Zhang   ierr = PetscFree(ba_i);CHKERRQ(ierr);
406855d1abb9SHong Zhang   ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);
40694ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
407055d1abb9SHong Zhang   PetscFunctionReturn(0);
407155d1abb9SHong Zhang }
407238f152feSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPISymbolic"
/*
    MatMerge_SeqsToMPISymbolic - Symbolic phase of merging per-process SeqAIJ
    matrices into a single parallel MPIAIJ matrix.

    Each process owns a full SeqAIJ matrix seqmat; rows outside this process's
    ownership range are shipped (structure only) to their owning process, the
    union of column indices per local row is formed with a linked list, and a
    preallocated-but-unassembled MPIAIJ matrix is created.  The communication
    buffers and row structure are stashed in a Mat_Merge_SeqsToMPI struct
    attached to the result so MatMerge_SeqsToMPINumeric() can reuse them.

    Input:  comm   - communicator for the parallel matrix
            seqmat - this process's sequential matrix (must be SeqAIJ)
            m, n   - number of local rows/columns of the parallel matrix
                     (n may be PETSC_DECIDE)
    Output: mpimat - symbolic MPIAIJ matrix; NOT assembled (values come from
                     the numeric phase)
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat)
{
  PetscErrorCode       ierr;
  Mat                  B_mpi;
  Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
  PetscMPIInt          size,rank,tagi,tagj,*len_s,*len_si,*len_ri;
  PetscInt             **buf_rj,**buf_ri,**buf_ri_k;
  PetscInt             M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j;
  PetscInt             len,proc,*dnz,*onz;
  PetscInt             k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0;
  PetscInt             nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai;
  MPI_Request          *si_waits,*sj_waits,*ri_waits,*rj_waits;
  MPI_Status           *status;
  PetscFreeSpaceList   free_space=PETSC_NULL,current_space=PETSC_NULL;
  PetscBT              lnkbt;
  Mat_Merge_SeqsToMPI  *merge;
  PetscContainer       container;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);

  /* make sure it is a PETSc comm; note comm is overwritten by the duplicate
     and released again with PetscCommDestroy() at the end */
  ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);

  /* determine row ownership */
  /*---------------------------------------------------------*/
  ierr = PetscMapInitialize(comm,&merge->rowmap);CHKERRQ(ierr);
  merge->rowmap.n = m;
  merge->rowmap.N = M;
  merge->rowmap.bs = 1;
  ierr = PetscMapSetUp(&merge->rowmap);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr);

  /* local/global sizes may have been decided by PetscMapSetUp() */
  m      = merge->rowmap.n;
  M      = merge->rowmap.N;
  owners = merge->rowmap.range;

  /* determine the number of messages to send, their lengths */
  /*---------------------------------------------------------*/
  len_s  = merge->len_s;

  len = 0;  /* length of buf_si[] */
  merge->nsend = 0;
  for (proc=0; proc<size; proc++){
    len_si[proc] = 0;
    if (proc == rank){
      len_s[proc] = 0;   /* this process's own rows are not sent anywhere */
    } else {
      len_si[proc] = owners[proc+1] - owners[proc] + 1;
      len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of nonzeros to be sent to [proc] */
    }
    if (len_s[proc]) {
      merge->nsend++;
      /* only rows with at least one nonzero are encoded in the i-structure msg */
      nrows = 0;
      for (i=owners[proc]; i<owners[proc+1]; i++){
        if (ai[i+1] > ai[i]) nrows++;
      }
      len_si[proc] = 2*(nrows+1);   /* header + row indices + row offsets */
      len += len_si[proc];
    }
  }

  /* determine the number and length of messages to receive for ij-structure */
  /*-------------------------------------------------------------------------*/
  ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr);

  /* post the Irecv of j-structure */
  /*-------------------------------*/
  ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr);

  /* post the Isend of j-structure */
  /*--------------------------------*/
  ierr = PetscMalloc((2*merge->nsend+1)*sizeof(MPI_Request),&si_waits);CHKERRQ(ierr);
  sj_waits = si_waits + merge->nsend;   /* second half of the same allocation */

  for (proc=0, k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    i = owners[proc];
    /* column indices are sent directly out of seqmat's j array -- no copy */
    ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr);
    k++;
  }

  /* receives and sends of j-structure are complete */
  /*------------------------------------------------*/
  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);}

  /* send and recv i-structure */
  /*---------------------------*/
  ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr);

  ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr);
  buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
  for (proc=0,k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    /* form outgoing message for i-structure:
         buf_si[0]:                 nrows to be sent
               [1:nrows]:           row index (global)
               [nrows+1:2*nrows+1]: i-structure index
    */
    /*-------------------------------------------*/
    nrows = len_si[proc]/2 - 1;
    buf_si_i    = buf_si + nrows+1;
    buf_si[0]   = nrows;
    buf_si_i[0] = 0;
    nrows = 0;
    for (i=owners[proc]; i<owners[proc+1]; i++){
      anzi = ai[i+1] - ai[i];
      if (anzi) {
        buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */
        buf_si[nrows+1] = i-owners[proc]; /* local row index */
        nrows++;
      }
    }
    ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr);
    k++;
    buf_si += len_si[proc];
  }

  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);}

  ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr);
  for (i=0; i<merge->nrecv; i++){
    ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr);
  }

  /* temporary send-side buffers are no longer needed; buf_ri/buf_rj are kept
     in merge for the numeric phase */
  ierr = PetscFree(len_si);CHKERRQ(ierr);
  ierr = PetscFree(len_ri);CHKERRQ(ierr);
  ierr = PetscFree(rj_waits);CHKERRQ(ierr);
  ierr = PetscFree(si_waits);CHKERRQ(ierr);
  ierr = PetscFree(ri_waits);CHKERRQ(ierr);
  ierr = PetscFree(buf_s);CHKERRQ(ierr);
  ierr = PetscFree(status);CHKERRQ(ierr);

  /* compute a local seq matrix in each processor */
  /*----------------------------------------------*/
  /* allocate bi array and free space for accumulating nonzero column info */
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr);
  bi[0] = 0;

  /* create and initialize a linked list for merging sorted column-index sets */
  nlnk = N+1;
  ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);

  /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */
  len = 0;
  len  = ai[owners[rank+1]] - ai[owners[rank]];
  ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr);
  current_space = free_space;

  /* determine symbolic info for each local row */
  ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
  nextrow = buf_ri_k + merge->nrecv;   /* three cursor arrays share one allocation */
  nextai  = nextrow + merge->nrecv;
  for (k=0; k<merge->nrecv; k++){
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows = *buf_ri_k[k];
    nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
    nextai[k]   = buf_ri_k[k] + (nrows + 1);/* points to the next i-structure of k-th recved i-structure  */
  }

  ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
  len = 0;
  for (i=0;i<m;i++) {
    bnzi   = 0;
    /* add local non-zero cols of this proc's seqmat into lnk */
    arow   = owners[rank] + i;
    anzi   = ai[arow+1] - ai[arow];
    aj     = a->j + ai[arow];
    ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
    bnzi += nlnk;
    /* add received col data into lnk; received rows are visited in order,
       so a single cursor per message suffices */
    for (k=0; k<merge->nrecv; k++){ /* k-th received message */
      if (i == *nextrow[k]) { /* i-th row */
        anzi = *(nextai[k]+1) - *nextai[k];
        aj   = buf_rj[k] + *nextai[k];
        ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
        bnzi += nlnk;
        nextrow[k]++; nextai[k]++;
      }
    }
    if (len < bnzi) len = bnzi;  /* =max(bnzi) */

    /* if free space is not available, make more free space */
    if (current_space->local_remaining<bnzi) {
      ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,&current_space);CHKERRQ(ierr);
      nspacedouble++;
    }
    /* copy data into free space, then initialize lnk */
    ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr);
    ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr);

    current_space->array           += bnzi;
    current_space->local_used      += bnzi;
    current_space->local_remaining -= bnzi;

    bi[i+1] = bi[i] + bnzi;
  }

  ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);

  /* flatten the free-space chunks into the final bj column array */
  ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr);
  ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
  ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);

  /* create symbolic parallel matrix B_mpi */
  /*---------------------------------------*/
  ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr);
  if (n==PETSC_DECIDE) {
    ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr);
  } else {
    ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  }
  ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */
  B_mpi->assembled     = PETSC_FALSE;
  B_mpi->ops->destroy  = MatDestroy_MPIAIJ_SeqsToMPI;   /* custom destroy frees merge */
  merge->bi            = bi;
  merge->bj            = bj;
  merge->buf_ri        = buf_ri;
  merge->buf_rj        = buf_rj;
  merge->coi           = PETSC_NULL;
  merge->coj           = PETSC_NULL;
  merge->owners_co     = PETSC_NULL;

  /* attach the supporting struct to B_mpi for reuse */
  ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
  ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
  *mpimat = B_mpi;

  ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
432325616d81SHong Zhang 
432438f152feSBarry Smith #undef __FUNCT__
432538f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPI"
4326be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
432755d1abb9SHong Zhang {
432855d1abb9SHong Zhang   PetscErrorCode   ierr;
432955d1abb9SHong Zhang 
433055d1abb9SHong Zhang   PetscFunctionBegin;
43314ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
433255d1abb9SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
433355d1abb9SHong Zhang     ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
433455d1abb9SHong Zhang   }
433555d1abb9SHong Zhang   ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
43364ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
433755d1abb9SHong Zhang   PetscFunctionReturn(0);
433855d1abb9SHong Zhang }
43394ebed01fSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalMat"
/*@
     MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows

    Not Collective

   Input Parameters:
+    A - the matrix (MPIAIJ; rows are merged from its diagonal and off-diagonal blocks)
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    A_loc - the local sequential matrix generated

   Notes:
     With MAT_REUSE_MATRIX only the numerical values are refilled into the
     existing arrays of *A_loc; the nonzero pattern of A is assumed unchanged
     since the MAT_INITIAL_MATRIX call (TODO confirm - not checked here).

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
{
  PetscErrorCode  ierr;
  Mat_MPIAIJ      *mpimat=(Mat_MPIAIJ*)A->data;
  /* a = diagonal block, b = off-diagonal block of the MPIAIJ matrix */
  Mat_SeqAIJ      *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
  PetscInt        *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
  MatScalar       *aa=a->a,*ba=b->a,*cam;
  PetscScalar     *ca;
  PetscInt        am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
  PetscInt        *ci,*cj,col,ncols_d,ncols_o,jo;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* row pointers of the merged matrix: row i holds all of a's and b's entries */
    ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
    ci[0] = 0;
    for (i=0; i<am; i++){
      ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
    }
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
    k = 0;
    /* aj/aa and bj/ba are walked continuously across all rows; for each row the
       entries are emitted in ascending global column order:
       off-diag cols < cstart, then diag cols, then off-diag cols >= cstart */
    for (i=0; i<am; i++) {
      ncols_o = bi[i+1] - bi[i];
      ncols_d = ai[i+1] - ai[i];
      /* off-diagonal portion of A (global columns left of the diagonal block) */
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        cj[k]   = col; bj++;
        ca[k++] = *ba++;
      }
      /* diagonal portion of A (local column indices shifted to global) */
      for (j=0; j<ncols_d; j++) {
        cj[k]   = cstart + *aj++;
        ca[k++] = *aa++;
      }
      /* off-diagonal portion of A (remaining columns, right of the diagonal block) */
      for (j=jo; j<ncols_o; j++) {
        cj[k]   = cmap[*bj++];
        ca[k++] = *ba++;
      }
    }
    /* put together the new matrix */
    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
    /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
    /* Since these are PETSc arrays, change flags to free them as necessary. */
    mat          = (Mat_SeqAIJ*)(*A_loc)->data;
    mat->free_a  = PETSC_TRUE;
    mat->free_ij = PETSC_TRUE;
    mat->nonew   = 0;
  } else if (scall == MAT_REUSE_MATRIX){
    /* pattern already set up: only refill the values array in the same order */
    mat=(Mat_SeqAIJ*)(*A_loc)->data;
    ci = mat->i; cj = mat->j; cam = mat->a;
    for (i=0; i<am; i++) {
      /* off-diagonal portion of A */
      ncols_o = bi[i+1] - bi[i];
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        *cam++ = *ba++; bj++;
      }
      /* diagonal portion of A */
      ncols_d = ai[i+1] - ai[i];
      for (j=0; j<ncols_d; j++) *cam++ = *aa++;
      /* off-diagonal portion of A */
      for (j=jo; j<ncols_o; j++) {
        *cam++ = *ba++; bj++;
      }
    }
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
443425616d81SHong Zhang 
443532fba14fSHong Zhang #undef __FUNCT__
443632fba14fSHong Zhang #define __FUNCT__ "MatGetLocalMatCondensed"
443732fba14fSHong Zhang /*@C
443832fba14fSHong Zhang      MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns
443932fba14fSHong Zhang 
444032fba14fSHong Zhang     Not Collective
444132fba14fSHong Zhang 
444232fba14fSHong Zhang    Input Parameters:
444332fba14fSHong Zhang +    A - the matrix
444432fba14fSHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
444532fba14fSHong Zhang -    row, col - index sets of rows and columns to extract (or PETSC_NULL)
444632fba14fSHong Zhang 
444732fba14fSHong Zhang    Output Parameter:
444832fba14fSHong Zhang .    A_loc - the local sequential matrix generated
444932fba14fSHong Zhang 
445032fba14fSHong Zhang     Level: developer
445132fba14fSHong Zhang 
445232fba14fSHong Zhang @*/
4453be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
445432fba14fSHong Zhang {
445532fba14fSHong Zhang   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
445632fba14fSHong Zhang   PetscErrorCode    ierr;
445732fba14fSHong Zhang   PetscInt          i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
445832fba14fSHong Zhang   IS                isrowa,iscola;
445932fba14fSHong Zhang   Mat               *aloc;
446032fba14fSHong Zhang 
446132fba14fSHong Zhang   PetscFunctionBegin;
44624ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
446332fba14fSHong Zhang   if (!row){
4464d0f46423SBarry Smith     start = A->rmap->rstart; end = A->rmap->rend;
446532fba14fSHong Zhang     ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
446632fba14fSHong Zhang   } else {
446732fba14fSHong Zhang     isrowa = *row;
446832fba14fSHong Zhang   }
446932fba14fSHong Zhang   if (!col){
4470d0f46423SBarry Smith     start = A->cmap->rstart;
447132fba14fSHong Zhang     cmap  = a->garray;
4472d0f46423SBarry Smith     nzA   = a->A->cmap->n;
4473d0f46423SBarry Smith     nzB   = a->B->cmap->n;
447432fba14fSHong Zhang     ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
447532fba14fSHong Zhang     ncols = 0;
447632fba14fSHong Zhang     for (i=0; i<nzB; i++) {
447732fba14fSHong Zhang       if (cmap[i] < start) idx[ncols++] = cmap[i];
447832fba14fSHong Zhang       else break;
447932fba14fSHong Zhang     }
448032fba14fSHong Zhang     imark = i;
448132fba14fSHong Zhang     for (i=0; i<nzA; i++) idx[ncols++] = start + i;
448232fba14fSHong Zhang     for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
448332fba14fSHong Zhang     ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&iscola);CHKERRQ(ierr);
448432fba14fSHong Zhang     ierr = PetscFree(idx);CHKERRQ(ierr);
448532fba14fSHong Zhang   } else {
448632fba14fSHong Zhang     iscola = *col;
448732fba14fSHong Zhang   }
448832fba14fSHong Zhang   if (scall != MAT_INITIAL_MATRIX){
448932fba14fSHong Zhang     ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
449032fba14fSHong Zhang     aloc[0] = *A_loc;
449132fba14fSHong Zhang   }
449232fba14fSHong Zhang   ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
449332fba14fSHong Zhang   *A_loc = aloc[0];
449432fba14fSHong Zhang   ierr = PetscFree(aloc);CHKERRQ(ierr);
449532fba14fSHong Zhang   if (!row){
449632fba14fSHong Zhang     ierr = ISDestroy(isrowa);CHKERRQ(ierr);
449732fba14fSHong Zhang   }
449832fba14fSHong Zhang   if (!col){
449932fba14fSHong Zhang     ierr = ISDestroy(iscola);CHKERRQ(ierr);
450032fba14fSHong Zhang   }
45014ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
450232fba14fSHong Zhang   PetscFunctionReturn(0);
450332fba14fSHong Zhang }
450432fba14fSHong Zhang 
#undef __FUNCT__
#define __FUNCT__ "MatGetBrowsOfAcols"
/*@C
    MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A

    Collective on Mat

   Input Parameters:
+    A,B - the matrices in mpiaij format
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL);
                  required (and consumed as input) for MAT_REUSE_MATRIX

   Output Parameter:
+    rowb, colb - index sets of rows and columns of B to extract (returned for MAT_INITIAL_MATRIX)
.    brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows
               (only set for MAT_INITIAL_MATRIX)
-    B_seq - the sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq)
{
  Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode    ierr;
  PetscInt          *idx,i,start,ncols,nzA,nzB,*cmap,imark;
  IS                isrowb,iscolb;
  Mat               *bseq;

  PetscFunctionBegin;
  /* A*B requires A's column layout to match B's row layout process-by-process */
  if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
    SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
  }
  ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);

  if (scall == MAT_INITIAL_MATRIX){
    /* rows of B to fetch = nonzero columns of local A, in ascending global order */
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {  /* row < local row index */
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row index */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&isrowb);CHKERRQ(ierr);
    ierr = PetscFree(idx);CHKERRQ(ierr);
    *brstart = imark;   /* B's local rows start at position imark within B_seq */
    ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr);
  } else {
    /* reuse the index sets and the sequential matrix from the initial call */
    if (!rowb || !colb) SETERRQ(PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX");
    isrowb = *rowb; iscolb = *colb;
    /* MatGetSubMatrices() expects the existing matrix in a length-one array */
    ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr);
    bseq[0] = *B_seq;
  }
  ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr);
  *B_seq = bseq[0];
  ierr = PetscFree(bseq);CHKERRQ(ierr);
  /* either return the index sets to the caller or clean them up */
  if (!rowb){
    ierr = ISDestroy(isrowb);CHKERRQ(ierr);
  } else {
    *rowb = isrowb;
  }
  if (!colb){
    ierr = ISDestroy(iscolb);CHKERRQ(ierr);
  } else {
    *colb = iscolb;
  }
  ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
4579429d309bSHong Zhang 
4580a61c8c0fSHong Zhang #undef __FUNCT__
4581a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols"
4582429d309bSHong Zhang /*@C
4583429d309bSHong Zhang     MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns
458401b7ae99SHong Zhang     of the OFF-DIAGONAL portion of local A
4585429d309bSHong Zhang 
4586429d309bSHong Zhang     Collective on Mat
4587429d309bSHong Zhang 
4588429d309bSHong Zhang    Input Parameters:
4589429d309bSHong Zhang +    A,B - the matrices in mpiaij format
459087025532SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
459187025532SHong Zhang .    startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL)
459287025532SHong Zhang -    bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL)
4593429d309bSHong Zhang 
4594429d309bSHong Zhang    Output Parameter:
459587025532SHong Zhang +    B_oth - the sequential matrix generated
4596429d309bSHong Zhang 
4597429d309bSHong Zhang     Level: developer
4598429d309bSHong Zhang 
4599429d309bSHong Zhang @*/
4600dd6ea824SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,MatScalar **bufa_ptr,Mat *B_oth)
4601429d309bSHong Zhang {
4602a6b2eed2SHong Zhang   VecScatter_MPI_General *gen_to,*gen_from;
4603429d309bSHong Zhang   PetscErrorCode         ierr;
4604899cda47SBarry Smith   Mat_MPIAIJ             *a=(Mat_MPIAIJ*)A->data;
460587025532SHong Zhang   Mat_SeqAIJ             *b_oth;
4606a6b2eed2SHong Zhang   VecScatter             ctx=a->Mvctx;
46077adad957SLisandro Dalcin   MPI_Comm               comm=((PetscObject)ctx)->comm;
46087adad957SLisandro Dalcin   PetscMPIInt            *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank;
4609d0f46423SBarry Smith   PetscInt               *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj;
4610dd6ea824SBarry Smith   PetscScalar            *rvalues,*svalues;
4611dd6ea824SBarry Smith   MatScalar              *b_otha,*bufa,*bufA;
4612e42f35eeSHong Zhang   PetscInt               i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len;
4613910ba992SMatthew Knepley   MPI_Request            *rwaits = PETSC_NULL,*swaits = PETSC_NULL;
461487025532SHong Zhang   MPI_Status             *sstatus,rstatus;
4615aa5bb8c0SSatish Balay   PetscMPIInt            jj;
4616e42f35eeSHong Zhang   PetscInt               *cols,sbs,rbs;
4617ba8c8a56SBarry Smith   PetscScalar            *vals;
4618429d309bSHong Zhang 
4619429d309bSHong Zhang   PetscFunctionBegin;
4620d0f46423SBarry Smith   if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
4621d0f46423SBarry Smith     SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
4622429d309bSHong Zhang   }
46234ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
4624a6b2eed2SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
4625a6b2eed2SHong Zhang 
4626a6b2eed2SHong Zhang   gen_to   = (VecScatter_MPI_General*)ctx->todata;
4627a6b2eed2SHong Zhang   gen_from = (VecScatter_MPI_General*)ctx->fromdata;
4628e42f35eeSHong Zhang   rvalues  = gen_from->values; /* holds the length of receiving row */
4629e42f35eeSHong Zhang   svalues  = gen_to->values;   /* holds the length of sending row */
4630a6b2eed2SHong Zhang   nrecvs   = gen_from->n;
4631a6b2eed2SHong Zhang   nsends   = gen_to->n;
4632d7ee0231SBarry Smith 
4633d7ee0231SBarry Smith   ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr);
4634a6b2eed2SHong Zhang   srow     = gen_to->indices;   /* local row index to be sent */
4635a6b2eed2SHong Zhang   sstarts  = gen_to->starts;
4636a6b2eed2SHong Zhang   sprocs   = gen_to->procs;
4637a6b2eed2SHong Zhang   sstatus  = gen_to->sstatus;
4638e42f35eeSHong Zhang   sbs      = gen_to->bs;
4639e42f35eeSHong Zhang   rstarts  = gen_from->starts;
4640e42f35eeSHong Zhang   rprocs   = gen_from->procs;
4641e42f35eeSHong Zhang   rbs      = gen_from->bs;
4642429d309bSHong Zhang 
4643dea91ad1SHong Zhang   if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX;
4644429d309bSHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4645a6b2eed2SHong Zhang     /* i-array */
4646a6b2eed2SHong Zhang     /*---------*/
4647a6b2eed2SHong Zhang     /*  post receives */
4648a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
4649e42f35eeSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
4650e42f35eeSHong Zhang       nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */
465187025532SHong Zhang       ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
4652429d309bSHong Zhang     }
4653a6b2eed2SHong Zhang 
4654a6b2eed2SHong Zhang     /* pack the outgoing message */
465587025532SHong Zhang     ierr = PetscMalloc((nsends+nrecvs+3)*sizeof(PetscInt),&sstartsj);CHKERRQ(ierr);
4656a6b2eed2SHong Zhang     rstartsj = sstartsj + nsends +1;
4657a6b2eed2SHong Zhang     sstartsj[0] = 0;  rstartsj[0] = 0;
4658a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be sent */
4659a6b2eed2SHong Zhang     k = 0;
4660a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
4661e42f35eeSHong Zhang       rowlen = (PetscInt*)svalues + sstarts[i]*sbs;
4662e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
466387025532SHong Zhang       for (j=0; j<nrows; j++) {
4664d0f46423SBarry Smith         row = srow[k] + B->rmap->range[rank]; /* global row idx */
4665e42f35eeSHong Zhang         for (l=0; l<sbs; l++){
4666e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */
4667e42f35eeSHong Zhang           rowlen[j*sbs+l] = ncols;
4668e42f35eeSHong Zhang           len += ncols;
4669e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
4670e42f35eeSHong Zhang         }
4671a6b2eed2SHong Zhang         k++;
4672429d309bSHong Zhang       }
4673e42f35eeSHong Zhang       ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
4674dea91ad1SHong Zhang       sstartsj[i+1] = len;  /* starting point of (i+1)-th outgoing msg in bufj and bufa */
4675429d309bSHong Zhang     }
467687025532SHong Zhang     /* recvs and sends of i-array are completed */
467787025532SHong Zhang     i = nrecvs;
467887025532SHong Zhang     while (i--) {
4679aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
468087025532SHong Zhang     }
46810c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
4682e42f35eeSHong Zhang 
4683a6b2eed2SHong Zhang     /* allocate buffers for sending j and a arrays */
4684a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr);
4685a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr);
4686a6b2eed2SHong Zhang 
468787025532SHong Zhang     /* create i-array of B_oth */
468887025532SHong Zhang     ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr);
468987025532SHong Zhang     b_othi[0] = 0;
4690a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be received */
4691a6b2eed2SHong Zhang     k = 0;
4692a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
4693fd0ff01cSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
4694e42f35eeSHong Zhang       nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */
469587025532SHong Zhang       for (j=0; j<nrows; j++) {
469687025532SHong Zhang         b_othi[k+1] = b_othi[k] + rowlen[j];
4697a6b2eed2SHong Zhang         len += rowlen[j]; k++;
4698a6b2eed2SHong Zhang       }
4699dea91ad1SHong Zhang       rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */
4700a6b2eed2SHong Zhang     }
4701a6b2eed2SHong Zhang 
470287025532SHong Zhang     /* allocate space for j and a arrrays of B_oth */
470387025532SHong Zhang     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr);
4704dd6ea824SBarry Smith     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr);
4705a6b2eed2SHong Zhang 
470687025532SHong Zhang     /* j-array */
470787025532SHong Zhang     /*---------*/
4708a6b2eed2SHong Zhang     /*  post receives of j-array */
4709a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
471087025532SHong Zhang       nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
471187025532SHong Zhang       ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
4712a6b2eed2SHong Zhang     }
4713e42f35eeSHong Zhang 
4714e42f35eeSHong Zhang     /* pack the outgoing message j-array */
4715a6b2eed2SHong Zhang     k = 0;
4716a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
4717e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
4718a6b2eed2SHong Zhang       bufJ = bufj+sstartsj[i];
471987025532SHong Zhang       for (j=0; j<nrows; j++) {
4720d0f46423SBarry Smith         row  = srow[k++] + B->rmap->range[rank]; /* global row idx */
4721e42f35eeSHong Zhang         for (ll=0; ll<sbs; ll++){
4722e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
4723a6b2eed2SHong Zhang           for (l=0; l<ncols; l++){
4724a6b2eed2SHong Zhang             *bufJ++ = cols[l];
472587025532SHong Zhang           }
4726e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
4727e42f35eeSHong Zhang         }
472887025532SHong Zhang       }
472987025532SHong Zhang       ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
473087025532SHong Zhang     }
473187025532SHong Zhang 
473287025532SHong Zhang     /* recvs and sends of j-array are completed */
473387025532SHong Zhang     i = nrecvs;
473487025532SHong Zhang     while (i--) {
4735aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
473687025532SHong Zhang     }
47370c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
473887025532SHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
473987025532SHong Zhang     sstartsj = *startsj;
474087025532SHong Zhang     rstartsj = sstartsj + nsends +1;
474187025532SHong Zhang     bufa     = *bufa_ptr;
474287025532SHong Zhang     b_oth    = (Mat_SeqAIJ*)(*B_oth)->data;
474387025532SHong Zhang     b_otha   = b_oth->a;
474487025532SHong Zhang   } else {
474587025532SHong Zhang     SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container");
474687025532SHong Zhang   }
474787025532SHong Zhang 
474887025532SHong Zhang   /* a-array */
474987025532SHong Zhang   /*---------*/
475087025532SHong Zhang   /*  post receives of a-array */
475187025532SHong Zhang   for (i=0; i<nrecvs; i++){
475287025532SHong Zhang     nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
475387025532SHong Zhang     ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
475487025532SHong Zhang   }
4755e42f35eeSHong Zhang 
4756e42f35eeSHong Zhang   /* pack the outgoing message a-array */
475787025532SHong Zhang   k = 0;
475887025532SHong Zhang   for (i=0; i<nsends; i++){
4759e42f35eeSHong Zhang     nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
476087025532SHong Zhang     bufA = bufa+sstartsj[i];
476187025532SHong Zhang     for (j=0; j<nrows; j++) {
4762d0f46423SBarry Smith       row  = srow[k++] + B->rmap->range[rank]; /* global row idx */
4763e42f35eeSHong Zhang       for (ll=0; ll<sbs; ll++){
4764e42f35eeSHong Zhang         ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
476587025532SHong Zhang         for (l=0; l<ncols; l++){
4766a6b2eed2SHong Zhang           *bufA++ = vals[l];
4767a6b2eed2SHong Zhang         }
4768e42f35eeSHong Zhang         ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
4769e42f35eeSHong Zhang       }
4770a6b2eed2SHong Zhang     }
477187025532SHong Zhang     ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
4772a6b2eed2SHong Zhang   }
477387025532SHong Zhang   /* recvs and sends of a-array are completed */
477487025532SHong Zhang   i = nrecvs;
477587025532SHong Zhang   while (i--) {
4776aa5bb8c0SSatish Balay     ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
477787025532SHong Zhang   }
47780c468ba9SBarry Smith   if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
4779d7ee0231SBarry Smith   ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr);
4780a6b2eed2SHong Zhang 
478187025532SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4782a6b2eed2SHong Zhang     /* put together the new matrix */
4783d0f46423SBarry Smith     ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr);
4784a6b2eed2SHong Zhang 
4785a6b2eed2SHong Zhang     /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
4786a6b2eed2SHong Zhang     /* Since these are PETSc arrays, change flags to free them as necessary. */
478787025532SHong Zhang     b_oth          = (Mat_SeqAIJ *)(*B_oth)->data;
4788e6b907acSBarry Smith     b_oth->free_a  = PETSC_TRUE;
4789e6b907acSBarry Smith     b_oth->free_ij = PETSC_TRUE;
479087025532SHong Zhang     b_oth->nonew   = 0;
4791a6b2eed2SHong Zhang 
4792a6b2eed2SHong Zhang     ierr = PetscFree(bufj);CHKERRQ(ierr);
4793dea91ad1SHong Zhang     if (!startsj || !bufa_ptr){
4794dea91ad1SHong Zhang       ierr = PetscFree(sstartsj);CHKERRQ(ierr);
4795dea91ad1SHong Zhang       ierr = PetscFree(bufa_ptr);CHKERRQ(ierr);
4796dea91ad1SHong Zhang     } else {
479787025532SHong Zhang       *startsj  = sstartsj;
479887025532SHong Zhang       *bufa_ptr = bufa;
479987025532SHong Zhang     }
4800dea91ad1SHong Zhang   }
48014ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
4802429d309bSHong Zhang   PetscFunctionReturn(0);
4803429d309bSHong Zhang }
4804ccd8e176SBarry Smith 
480543eb5e2fSMatthew Knepley #undef __FUNCT__
480643eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs"
480743eb5e2fSMatthew Knepley /*@C
480843eb5e2fSMatthew Knepley   MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication.
480943eb5e2fSMatthew Knepley 
481043eb5e2fSMatthew Knepley   Not Collective
481143eb5e2fSMatthew Knepley 
481243eb5e2fSMatthew Knepley   Input Parameters:
481343eb5e2fSMatthew Knepley . A - The matrix in mpiaij format
481443eb5e2fSMatthew Knepley 
481543eb5e2fSMatthew Knepley   Output Parameter:
481643eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product
481743eb5e2fSMatthew Knepley . colmap - A map from global column index to local index into lvec
481843eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec
481943eb5e2fSMatthew Knepley 
482043eb5e2fSMatthew Knepley   Level: developer
482143eb5e2fSMatthew Knepley 
482243eb5e2fSMatthew Knepley @*/
482343eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE)
482443eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter)
482543eb5e2fSMatthew Knepley #else
482643eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter)
482743eb5e2fSMatthew Knepley #endif
482843eb5e2fSMatthew Knepley {
482943eb5e2fSMatthew Knepley   Mat_MPIAIJ *a;
483043eb5e2fSMatthew Knepley 
483143eb5e2fSMatthew Knepley   PetscFunctionBegin;
483243eb5e2fSMatthew Knepley   PetscValidHeaderSpecific(A, MAT_COOKIE, 1);
483343eb5e2fSMatthew Knepley   PetscValidPointer(lvec, 2)
483443eb5e2fSMatthew Knepley   PetscValidPointer(colmap, 3)
483543eb5e2fSMatthew Knepley   PetscValidPointer(multScatter, 4)
483643eb5e2fSMatthew Knepley   a = (Mat_MPIAIJ *) A->data;
483743eb5e2fSMatthew Knepley   if (lvec) *lvec = a->lvec;
483843eb5e2fSMatthew Knepley   if (colmap) *colmap = a->colmap;
483943eb5e2fSMatthew Knepley   if (multScatter) *multScatter = a->Mvctx;
484043eb5e2fSMatthew Knepley   PetscFunctionReturn(0);
484143eb5e2fSMatthew Knepley }
484243eb5e2fSMatthew Knepley 
484317667f90SBarry Smith EXTERN_C_BEGIN
48448cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICRL(Mat,const MatType,MatReuse,Mat*);
48458cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICSRPERM(Mat,const MatType,MatReuse,Mat*);
484617667f90SBarry Smith EXTERN_C_END
484717667f90SBarry Smith 
48487c4f633dSBarry Smith #include "../src/mat/impls/dense/mpi/mpidense.h"
4849fc4dec0aSBarry Smith 
4850fc4dec0aSBarry Smith #undef __FUNCT__
4851fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ"
4852fc4dec0aSBarry Smith /*
4853fc4dec0aSBarry Smith     Computes (B'*A')' since computing B*A directly is untenable
4854fc4dec0aSBarry Smith 
4855fc4dec0aSBarry Smith                n                       p                          p
4856fc4dec0aSBarry Smith         (              )       (              )         (                  )
4857fc4dec0aSBarry Smith       m (      A       )  *  n (       B      )   =   m (         C        )
4858fc4dec0aSBarry Smith         (              )       (              )         (                  )
4859fc4dec0aSBarry Smith 
4860fc4dec0aSBarry Smith */
4861fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C)
4862fc4dec0aSBarry Smith {
4863fc4dec0aSBarry Smith   PetscErrorCode     ierr;
4864fc4dec0aSBarry Smith   Mat                At,Bt,Ct;
4865fc4dec0aSBarry Smith 
4866fc4dec0aSBarry Smith   PetscFunctionBegin;
4867fc4dec0aSBarry Smith   ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
4868fc4dec0aSBarry Smith   ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr);
4869fc4dec0aSBarry Smith   ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr);
4870fc4dec0aSBarry Smith   ierr = MatDestroy(At);CHKERRQ(ierr);
4871fc4dec0aSBarry Smith   ierr = MatDestroy(Bt);CHKERRQ(ierr);
4872fc4dec0aSBarry Smith   ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr);
4873e5e4356aSBarry Smith   ierr = MatDestroy(Ct);CHKERRQ(ierr);
4874fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4875fc4dec0aSBarry Smith }
4876fc4dec0aSBarry Smith 
4877fc4dec0aSBarry Smith #undef __FUNCT__
4878fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ"
4879fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C)
4880fc4dec0aSBarry Smith {
4881fc4dec0aSBarry Smith   PetscErrorCode ierr;
4882d0f46423SBarry Smith   PetscInt       m=A->rmap->n,n=B->cmap->n;
4883fc4dec0aSBarry Smith   Mat            Cmat;
4884fc4dec0aSBarry Smith 
4885fc4dec0aSBarry Smith   PetscFunctionBegin;
4886d0f46423SBarry Smith   if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n);
488739804f7cSBarry Smith   ierr = MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr);
4888fc4dec0aSBarry Smith   ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4889fc4dec0aSBarry Smith   ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr);
4890fc4dec0aSBarry Smith   ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr);
489138556019SBarry Smith   ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
489238556019SBarry Smith   ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4893fc4dec0aSBarry Smith   *C   = Cmat;
4894fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4895fc4dec0aSBarry Smith }
4896fc4dec0aSBarry Smith 
4897fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/
4898fc4dec0aSBarry Smith #undef __FUNCT__
4899fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ"
4900fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
4901fc4dec0aSBarry Smith {
4902fc4dec0aSBarry Smith   PetscErrorCode ierr;
4903fc4dec0aSBarry Smith 
4904fc4dec0aSBarry Smith   PetscFunctionBegin;
4905fc4dec0aSBarry Smith   if (scall == MAT_INITIAL_MATRIX){
4906fc4dec0aSBarry Smith     ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr);
4907fc4dec0aSBarry Smith   }
4908fc4dec0aSBarry Smith   ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr);
4909fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4910fc4dec0aSBarry Smith }
4911fc4dec0aSBarry Smith 
49125c9eb25fSBarry Smith EXTERN_C_BEGIN
4913611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS)
49145c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_mumps(Mat,MatFactorType,Mat*);
4915611f576cSBarry Smith #endif
49163bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX)
49173bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*);
49183bf14a46SMatthew Knepley #endif
4919611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST)
49205c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*);
4921611f576cSBarry Smith #endif
4922611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES)
49235c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*);
4924611f576cSBarry Smith #endif
49255c9eb25fSBarry Smith EXTERN_C_END
49265c9eb25fSBarry Smith 
4927ccd8e176SBarry Smith /*MC
4928ccd8e176SBarry Smith    MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices.
4929ccd8e176SBarry Smith 
4930ccd8e176SBarry Smith    Options Database Keys:
4931ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions()
4932ccd8e176SBarry Smith 
4933ccd8e176SBarry Smith   Level: beginner
4934ccd8e176SBarry Smith 
4935175b88e8SBarry Smith .seealso: MatCreateMPIAIJ()
4936ccd8e176SBarry Smith M*/
4937ccd8e176SBarry Smith 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAIJ"
/*
    MatCreate_MPIAIJ - Constructor for the MATMPIAIJ type, invoked by MatSetType().
    Allocates and zero-fills the Mat_MPIAIJ implementation struct, installs the
    operation table, and composes the type-specific functions (preallocation,
    conversions, external factorization packages) that are dispatched by name.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B)
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);

  /* allocate the implementation struct and install the MPIAIJ function table */
  ierr            = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr);
  B->data         = (void*)b;
  ierr            = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->rmap->bs      = 1;
  B->assembled    = PETSC_FALSE;
  B->mapping      = 0;

  B->insertmode      = NOT_SET_VALUES;
  b->size            = size;
  ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr);

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr);
  b->donotstash  = PETSC_FALSE;
  b->colmap      = 0;
  b->garray      = 0;
  b->roworiented = PETSC_TRUE;

  /* stuff used for matrix vector multiply; created lazily at first assembly */
  b->lvec      = PETSC_NULL;
  b->Mvctx     = PETSC_NULL;

  /* stuff for MatGetRow() */
  b->rowindices   = 0;
  b->rowvalues    = 0;
  b->getrowactive = PETSC_FALSE;

  /* register external direct-solver packages enabled at configure time,
     looked up later by MatGetFactor() */
#if defined(PETSC_HAVE_SPOOLES)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_spooles_C",
                                     "MatGetFactor_mpiaij_spooles",
                                     MatGetFactor_mpiaij_spooles);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_MUMPS)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_mumps_C",
                                     "MatGetFactor_mpiaij_mumps",
                                     MatGetFactor_mpiaij_mumps);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_PASTIX)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_pastix_C",
					   "MatGetFactor_mpiaij_pastix",
					   MatGetFactor_mpiaij_pastix);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_superlu_dist_C",
                                     "MatGetFactor_mpiaij_superlu_dist",
                                     MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr);
#endif
  /* compose MPIAIJ-specific operations that generic Mat interface routines
     dispatch to by string name */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
                                     "MatStoreValues_MPIAIJ",
                                     MatStoreValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
                                     "MatRetrieveValues_MPIAIJ",
                                     MatRetrieveValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
				     "MatGetDiagonalBlock_MPIAIJ",
                                     MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C",
				     "MatIsTranspose_MPIAIJ",
				     MatIsTranspose_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C",
				     "MatMPIAIJSetPreallocation_MPIAIJ",
				     MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",
				     "MatMPIAIJSetPreallocationCSR_MPIAIJ",
				     MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C",
				     "MatDiagonalScaleLocal_MPIAIJ",
				     MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr);
  /* conversions to related AIJ-based formats */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicsrperm_C",
                                     "MatConvert_MPIAIJ_MPICSRPERM",
                                      MatConvert_MPIAIJ_MPICSRPERM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicrl_C",
                                     "MatConvert_MPIAIJ_MPICRL",
                                      MatConvert_MPIAIJ_MPICRL);CHKERRQ(ierr);
  /* dense*AIJ matrix-matrix product kernels defined above in this file */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C",
                                     "MatMatMult_MPIDense_MPIAIJ",
                                      MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C",
                                     "MatMatMultSymbolic_MPIDense_MPIAIJ",
                                      MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C",
                                     "MatMatMultNumeric_MPIDense_MPIAIJ",
                                      MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
503781824310SBarry Smith 
503803bfb495SBarry Smith #undef __FUNCT__
503903bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays"
504058d36128SBarry Smith /*@
504103bfb495SBarry Smith      MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal"
504203bfb495SBarry Smith          and "off-diagonal" part of the matrix in CSR format.
504303bfb495SBarry Smith 
504403bfb495SBarry Smith    Collective on MPI_Comm
504503bfb495SBarry Smith 
504603bfb495SBarry Smith    Input Parameters:
504703bfb495SBarry Smith +  comm - MPI communicator
504803bfb495SBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
504903bfb495SBarry Smith .  n - This value should be the same as the local size used in creating the
505003bfb495SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
505103bfb495SBarry Smith        calculated if N is given) For square matrices n is almost always m.
505203bfb495SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
505303bfb495SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
505403bfb495SBarry Smith .   i - row indices for "diagonal" portion of matrix
505503bfb495SBarry Smith .   j - column indices
505603bfb495SBarry Smith .   a - matrix values
505703bfb495SBarry Smith .   oi - row indices for "off-diagonal" portion of matrix
505803bfb495SBarry Smith .   oj - column indices
505903bfb495SBarry Smith -   oa - matrix values
506003bfb495SBarry Smith 
506103bfb495SBarry Smith    Output Parameter:
506203bfb495SBarry Smith .   mat - the matrix
506303bfb495SBarry Smith 
506403bfb495SBarry Smith    Level: advanced
506503bfb495SBarry Smith 
506603bfb495SBarry Smith    Notes:
506703bfb495SBarry Smith        The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc.
506803bfb495SBarry Smith 
506903bfb495SBarry Smith        The i and j indices are 0 based
507003bfb495SBarry Smith 
507103bfb495SBarry Smith        See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix
507203bfb495SBarry Smith 
507303bfb495SBarry Smith 
507403bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
507503bfb495SBarry Smith 
507603bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
50778d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays()
507803bfb495SBarry Smith @*/
50798d7a6e47SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[],
508003bfb495SBarry Smith 								PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat)
508103bfb495SBarry Smith {
508203bfb495SBarry Smith   PetscErrorCode ierr;
508303bfb495SBarry Smith   Mat_MPIAIJ     *maij;
508403bfb495SBarry Smith 
508503bfb495SBarry Smith  PetscFunctionBegin;
508603bfb495SBarry Smith   if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
508703bfb495SBarry Smith   if (i[0]) {
508803bfb495SBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
508903bfb495SBarry Smith   }
509003bfb495SBarry Smith   if (oi[0]) {
509103bfb495SBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0");
509203bfb495SBarry Smith   }
509303bfb495SBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
509403bfb495SBarry Smith   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
509503bfb495SBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
509603bfb495SBarry Smith   maij = (Mat_MPIAIJ*) (*mat)->data;
50978d7a6e47SBarry Smith   maij->donotstash     = PETSC_TRUE;
50988d7a6e47SBarry Smith   (*mat)->preallocated = PETSC_TRUE;
509903bfb495SBarry Smith 
51007408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr);
51017408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr);
5102d0f46423SBarry Smith   ierr = PetscMapSetUp((*mat)->rmap);CHKERRQ(ierr);
5103d0f46423SBarry Smith   ierr = PetscMapSetUp((*mat)->cmap);CHKERRQ(ierr);
510403bfb495SBarry Smith 
510503bfb495SBarry Smith   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr);
5106d0f46423SBarry Smith   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr);
510703bfb495SBarry Smith 
51088d7a6e47SBarry Smith   ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
51098d7a6e47SBarry Smith   ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
51108d7a6e47SBarry Smith   ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
51118d7a6e47SBarry Smith   ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
51128d7a6e47SBarry Smith 
511303bfb495SBarry Smith   ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
511403bfb495SBarry Smith   ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
511503bfb495SBarry Smith   PetscFunctionReturn(0);
511603bfb495SBarry Smith }
511703bfb495SBarry Smith 
/*
    Special version for direct calls from Fortran
*/
/* Map the C symbol to the name-mangling convention of the Fortran compiler:
   all-caps when the compiler upper-cases externals, bare name when it adds no
   trailing underscore; otherwise the default trailing-underscore form stands. */
#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define matsetvaluesmpiaij_ matsetvaluesmpiaij
#endif

/* Change these macros so can be used in void function */
/* The Fortran entry point below returns void, so the usual "return ierr"
   style of these macros cannot be used; redefine them to abort the
   communicator on error instead. Note SETERRQ/SETERRQ2 here discard their
   message/format arguments and only forward the error code to CHKERRABORT;
   they also rely on a variable named `mat` being in scope at the call site. */
#undef CHKERRQ
#define CHKERRQ(ierr) CHKERRABORT(((PetscObject)mat)->comm,ierr)
#undef SETERRQ2
#define SETERRQ2(ierr,b,c,d) CHKERRABORT(((PetscObject)mat)->comm,ierr)
#undef SETERRQ
#define SETERRQ(ierr,b) CHKERRABORT(((PetscObject)mat)->comm,ierr)
513581824310SBarry Smith EXTERN_C_BEGIN
513681824310SBarry Smith #undef __FUNCT__
513781824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_"
51381f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr)
513981824310SBarry Smith {
514081824310SBarry Smith   Mat             mat = *mmat;
514181824310SBarry Smith   PetscInt        m = *mm, n = *mn;
514281824310SBarry Smith   InsertMode      addv = *maddv;
514381824310SBarry Smith   Mat_MPIAIJ      *aij = (Mat_MPIAIJ*)mat->data;
514481824310SBarry Smith   PetscScalar     value;
514581824310SBarry Smith   PetscErrorCode  ierr;
5146899cda47SBarry Smith 
5147d9e2c085SLisandro Dalcin   ierr = MatPreallocated(mat);CHKERRQ(ierr);
514881824310SBarry Smith   if (mat->insertmode == NOT_SET_VALUES) {
514981824310SBarry Smith     mat->insertmode = addv;
515081824310SBarry Smith   }
515181824310SBarry Smith #if defined(PETSC_USE_DEBUG)
515281824310SBarry Smith   else if (mat->insertmode != addv) {
515381824310SBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
515481824310SBarry Smith   }
515581824310SBarry Smith #endif
515681824310SBarry Smith   {
5157d0f46423SBarry Smith   PetscInt        i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
5158d0f46423SBarry Smith   PetscInt        cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
515981824310SBarry Smith   PetscTruth      roworiented = aij->roworiented;
516081824310SBarry Smith 
516181824310SBarry Smith   /* Some Variables required in the macro */
516281824310SBarry Smith   Mat             A = aij->A;
516381824310SBarry Smith   Mat_SeqAIJ      *a = (Mat_SeqAIJ*)A->data;
516481824310SBarry Smith   PetscInt        *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
5165dd6ea824SBarry Smith   MatScalar       *aa = a->a;
516681824310SBarry Smith   PetscTruth      ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE);
516781824310SBarry Smith   Mat             B = aij->B;
516881824310SBarry Smith   Mat_SeqAIJ      *b = (Mat_SeqAIJ*)B->data;
5169d0f46423SBarry Smith   PetscInt        *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
5170dd6ea824SBarry Smith   MatScalar       *ba = b->a;
517181824310SBarry Smith 
517281824310SBarry Smith   PetscInt        *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
517381824310SBarry Smith   PetscInt        nonew = a->nonew;
5174dd6ea824SBarry Smith   MatScalar       *ap1,*ap2;
517581824310SBarry Smith 
517681824310SBarry Smith   PetscFunctionBegin;
517781824310SBarry Smith   for (i=0; i<m; i++) {
517881824310SBarry Smith     if (im[i] < 0) continue;
517981824310SBarry Smith #if defined(PETSC_USE_DEBUG)
5180d0f46423SBarry Smith     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
518181824310SBarry Smith #endif
518281824310SBarry Smith     if (im[i] >= rstart && im[i] < rend) {
518381824310SBarry Smith       row      = im[i] - rstart;
518481824310SBarry Smith       lastcol1 = -1;
518581824310SBarry Smith       rp1      = aj + ai[row];
518681824310SBarry Smith       ap1      = aa + ai[row];
518781824310SBarry Smith       rmax1    = aimax[row];
518881824310SBarry Smith       nrow1    = ailen[row];
518981824310SBarry Smith       low1     = 0;
519081824310SBarry Smith       high1    = nrow1;
519181824310SBarry Smith       lastcol2 = -1;
519281824310SBarry Smith       rp2      = bj + bi[row];
519381824310SBarry Smith       ap2      = ba + bi[row];
519481824310SBarry Smith       rmax2    = bimax[row];
519581824310SBarry Smith       nrow2    = bilen[row];
519681824310SBarry Smith       low2     = 0;
519781824310SBarry Smith       high2    = nrow2;
519881824310SBarry Smith 
519981824310SBarry Smith       for (j=0; j<n; j++) {
520081824310SBarry Smith         if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
520181824310SBarry Smith         if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
520281824310SBarry Smith         if (in[j] >= cstart && in[j] < cend){
520381824310SBarry Smith           col = in[j] - cstart;
520481824310SBarry Smith           MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
520581824310SBarry Smith         } else if (in[j] < 0) continue;
520681824310SBarry Smith #if defined(PETSC_USE_DEBUG)
5207d0f46423SBarry Smith         else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
520881824310SBarry Smith #endif
520981824310SBarry Smith         else {
521081824310SBarry Smith           if (mat->was_assembled) {
521181824310SBarry Smith             if (!aij->colmap) {
521281824310SBarry Smith               ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
521381824310SBarry Smith             }
521481824310SBarry Smith #if defined (PETSC_USE_CTABLE)
521581824310SBarry Smith             ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
521681824310SBarry Smith 	    col--;
521781824310SBarry Smith #else
521881824310SBarry Smith             col = aij->colmap[in[j]] - 1;
521981824310SBarry Smith #endif
522081824310SBarry Smith             if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
522181824310SBarry Smith               ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
522281824310SBarry Smith               col =  in[j];
522381824310SBarry Smith               /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
522481824310SBarry Smith               B = aij->B;
522581824310SBarry Smith               b = (Mat_SeqAIJ*)B->data;
522681824310SBarry Smith               bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j;
522781824310SBarry Smith               rp2      = bj + bi[row];
522881824310SBarry Smith               ap2      = ba + bi[row];
522981824310SBarry Smith               rmax2    = bimax[row];
523081824310SBarry Smith               nrow2    = bilen[row];
523181824310SBarry Smith               low2     = 0;
523281824310SBarry Smith               high2    = nrow2;
5233d0f46423SBarry Smith               bm       = aij->B->rmap->n;
523481824310SBarry Smith               ba = b->a;
523581824310SBarry Smith             }
523681824310SBarry Smith           } else col = in[j];
523781824310SBarry Smith           MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
523881824310SBarry Smith         }
523981824310SBarry Smith       }
524081824310SBarry Smith     } else {
524181824310SBarry Smith       if (!aij->donotstash) {
524281824310SBarry Smith         if (roworiented) {
524381824310SBarry Smith           if (ignorezeroentries && v[i*n] == 0.0) continue;
524481824310SBarry Smith           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
524581824310SBarry Smith         } else {
524681824310SBarry Smith           if (ignorezeroentries && v[i] == 0.0) continue;
524781824310SBarry Smith           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
524881824310SBarry Smith         }
524981824310SBarry Smith       }
525081824310SBarry Smith     }
525181824310SBarry Smith   }}
525281824310SBarry Smith   PetscFunctionReturnVoid();
525381824310SBarry Smith }
525481824310SBarry Smith EXTERN_C_END
525503bfb495SBarry Smith 
5256