xref: /petsc/src/mat/impls/aij/mpi/mpiaij.c (revision b79d042183e113449aaa39f6d7d15d2799ca2ce9)
1be1d678aSKris Buschelman #define PETSCMAT_DLL
28a729477SBarry Smith 
37c4f633dSBarry Smith #include "../src/mat/impls/aij/mpi/mpiaij.h"   /*I "petscmat.h" I*/
47c4f633dSBarry Smith #include "../src/inline/spops.h"
58a729477SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDistribute_MPIAIJ"
/*
    Distributes a SeqAIJ matrix across a set of processes. Code stolen from
    MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type.

    Only for square matrices

    comm  - communicator the distributed matrix will live on
    gmat  - the global sequential matrix (consulted only on rank 0)
    m     - number of local rows this process is to own
    reuse - MAT_INITIAL_MATRIX creates structure and values;
            MAT_REUSE_MATRIX moves only numerical values into *inmat
    inmat - resulting distributed matrix
*/
PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat)
{
  PetscMPIInt    rank,size;
  PetscInt       *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld;
  PetscErrorCode ierr;
  Mat            mat;
  Mat_SeqAIJ     *gmata;
  PetscMPIInt    tag;
  MPI_Status     status;
  PetscTruth     aij;
  MatScalar      *gmataa,*ao,*ad,*gmataarestore=0;

  PetscFunctionBegin;
  CHKMEMQ;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (!rank) {
    /* only rank 0 holds gmat, and it must be SeqAIJ */
    /* NOTE(review): erroring only on rank 0 leaves the other ranks blocked in the
       MPI_Recv calls below if this triggers -- confirm intended failure mode */
    ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr);
    if (!aij) SETERRQ1(PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name);
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(comm,&mat);CHKERRQ(ierr);
    ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr);
    ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
    ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr);
    /* gather every process's local row count, then prefix-sum into row ownership ranges */
    ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
    rowners[0] = 0;
    for (i=2; i<=size; i++) {
      rowners[i] += rowners[i-1];
    }
    rstart = rowners[rank];
    rend   = rowners[rank+1];
    ierr   = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      gmata = (Mat_SeqAIJ*) gmat->data;
      /* send row lengths to all processors */
      for (i=0; i<m; i++) dlens[i] = gmata->ilen[i];
      for (i=1; i<size; i++) {
        ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }
      /* determine number diagonal and off-diagonal counts;
         ld[i] counts entries strictly left of the diagonal block (used by the REUSE path) */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmata->j[jj] < rstart) ld[i]++;
          if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* send column indices to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
        ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }

      /* send numerical values to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* rank 0 inserts its own rows directly out of gmat's arrays below */
      gmataa = gmata->a;
      gmataj = gmata->j;

    } else {
      /* receive row lengths */
      ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* receive column indices */
      ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr);
      ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* determine number diagonal and off-diagonal counts (same scan as on rank 0) */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmataj[jj] < rstart) ld[i]++;
          if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* receive numerical values */
      /* NOTE(review): this PetscMemzero is immediately overwritten by the Recv -- looks redundant */
      ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr);
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* set preallocation: temporarily turn dlens into diagonal-only counts */
    for (i=0; i<m; i++) {
      dlens[i] -= olens[i];
    }
    ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr);

    /* restore dlens to full row lengths for the insertion loop below */
    for (i=0; i<m; i++) {
      dlens[i] += olens[i];
    }
    cnt  = 0;
    for (i=0; i<m; i++) {
      row  = rstart + i;
      ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr);
      cnt += dlens[i];
    }
    if (rank) {
      /* non-root ranks own the receive buffers; rank 0 aliased gmat's arrays */
      ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr);
    }
    ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
    ierr = PetscFree(rowners);CHKERRQ(ierr);
    /* stash the left-of-diagonal counts so the MAT_REUSE_MATRIX path can split values */
    ((Mat_MPIAIJ*)(mat->data))->ld = ld;
    *inmat = mat;
  } else {   /* column indices are already set; only need to move over numerical values from process 0 */
    Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data;
    Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data;
    mat   = *inmat;
    ierr  = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      /* send numerical values to other processes */
      gmata = (Mat_SeqAIJ*) gmat->data;
      ierr   = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr);
      gmataa = gmata->a;
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      nz   = gmata->i[rowners[1]]-gmata->i[rowners[0]];
    } else {
      /* receive numerical values from process 0*/
      nz   = Ad->nz + Ao->nz;
      ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa;
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* transfer numerical values into the diagonal A and off diagonal B parts of mat;
       each local row arrives as [left-of-diagonal B | diagonal A | right-of-diagonal B],
       with ld[] giving the left-of-diagonal count per row */
    ld = ((Mat_MPIAIJ*)(mat->data))->ld;
    ad = Ad->a;
    ao = Ao->a;
    if (mat->rmap->n) {
      /* first row: copy its left part and diagonal part */
      i  = 0;
      nz = ld[i];                                   ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    for (i=1; i<mat->rmap->n; i++) {
      /* right part of row i-1 plus left part of row i, then diagonal part of row i */
      nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    i--;
    if (mat->rmap->n) {
      /* trailing right part of the last row */
      nz = Ao->i[i+1] - Ao->i[i] - ld[i];           ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
    }
    if (rank) {
      /* gmataa was advanced during the copies; free via the saved base pointer */
      ierr = PetscFree(gmataarestore);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
175dd6ea824SBarry Smith 
/*
  Local utility routine that creates a mapping from the global column
number to the local number in the off-diagonal part of the local
storage of the matrix.  When PETSC_USE_CTABLE is used this is scalable at
a slightly higher hash table cost; without it, it is not scalable (each processor
has an order-N integer array, but lookup is fast).
*/
1834a2ae208SSatish Balay #undef __FUNCT__
1844a2ae208SSatish Balay #define __FUNCT__ "CreateColmap_MPIAIJ_Private"
185dfbe8321SBarry Smith PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat)
1869e25ed09SBarry Smith {
18744a69424SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
1886849ba73SBarry Smith   PetscErrorCode ierr;
189d0f46423SBarry Smith   PetscInt       n = aij->B->cmap->n,i;
190dbb450caSBarry Smith 
1913a40ed3dSBarry Smith   PetscFunctionBegin;
192aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
193273d9f13SBarry Smith   ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr);
194b1fc9764SSatish Balay   for (i=0; i<n; i++){
1950f5bd95cSBarry Smith     ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr);
196b1fc9764SSatish Balay   }
197b1fc9764SSatish Balay #else
198d0f46423SBarry Smith   ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr);
199d0f46423SBarry Smith   ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
200d0f46423SBarry Smith   ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
201905e6a2fSBarry Smith   for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1;
202b1fc9764SSatish Balay #endif
2033a40ed3dSBarry Smith   PetscFunctionReturn(0);
2049e25ed09SBarry Smith }
2059e25ed09SBarry Smith 
206085a36d4SBarry Smith 
#define CHUNKSIZE   15
/*
   Inserts/adds (row,col,value) into the "diagonal" block A of the MPIAIJ matrix.
   This is a macro (not a function) so it can read and update the caller's locals:
   rp1/ap1 (column/value arrays of the current row), nrow1/rmax1 (used/allocated
   length of the row), low1/high1/lastcol1 (binary-search window cached between
   successive calls), ailen, aa/ai/aj/aimax/am, a, nonew, ignorezeroentries,
   and scratch variables t, _i, ii, N.  See MatSetValues_MPIAIJ for the setup.
*/
#define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \
{ \
    /* monotone column hint: shrink the search window using the previous col */ \
    if (col <= lastcol1) low1 = 0; else high1 = nrow1; \
    lastcol1 = col;\
    while (high1-low1 > 5) { \
      t = (low1+high1)/2; \
      if (rp1[t] > col) high1 = t; \
      else             low1  = t; \
    } \
      /* linear scan of the narrowed window; overwrite if the entry exists */ \
      for (_i=low1; _i<high1; _i++) { \
        if (rp1[_i] > col) break; \
        if (rp1[_i] == col) { \
          if (addv == ADD_VALUES) ap1[_i] += value;   \
          else                    ap1[_i] = value; \
          goto a_noinsert; \
        } \
      }  \
      if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \
      if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;}		\
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      /* grow the row's storage if needed, then open a slot at _i */ \
      MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \
      N = nrow1++ - 1; a->nz++; high1++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp1[ii+1] = rp1[ii]; \
        ap1[ii+1] = ap1[ii]; \
      } \
      rp1[_i] = col;  \
      ap1[_i] = value;  \
      a_noinsert: ; \
      ailen[row] = nrow1; \
}
2400a198c4cSBarry Smith 
241085a36d4SBarry Smith 
/*
   Twin of MatSetValues_SeqAIJ_A_Private, but targeting the "off-diagonal"
   block B.  Uses the caller's rp2/ap2/nrow2/rmax2/low2/high2/lastcol2/bilen
   and b/ba/bi/bj/bimax/bm locals; distinct goto label so both macros may be
   expanded in the same function body.
*/
#define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \
{ \
    /* monotone column hint: shrink the search window using the previous col */ \
    if (col <= lastcol2) low2 = 0; else high2 = nrow2; \
    lastcol2 = col;\
    while (high2-low2 > 5) { \
      t = (low2+high2)/2; \
      if (rp2[t] > col) high2 = t; \
      else             low2  = t; \
    } \
    /* linear scan of the narrowed window; overwrite if the entry exists */ \
    for (_i=low2; _i<high2; _i++) {		\
      if (rp2[_i] > col) break;			\
      if (rp2[_i] == col) {			      \
	if (addv == ADD_VALUES) ap2[_i] += value;     \
	else                    ap2[_i] = value;      \
	goto b_noinsert;			      \
      }						      \
    }							      \
    if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
    if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;}		\
    if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
    /* grow the row's storage if needed, then open a slot at _i */ \
    MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \
    N = nrow2++ - 1; b->nz++; high2++;					\
    /* shift up all the later entries in this row */			\
    for (ii=N; ii>=_i; ii--) {						\
      rp2[ii+1] = rp2[ii];						\
      ap2[ii+1] = ap2[ii];						\
    }									\
    rp2[_i] = col;							\
    ap2[_i] = value;							\
    b_noinsert: ;								\
    bilen[row] = nrow2;							\
}
27430770e4dSSatish Balay 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesRow_MPIAIJ"
/*
   Overwrites the values of one locally-owned row, given the full row of values
   v[] in global column order.  The row arrives as one contiguous run but is
   stored split across the off-diagonal block B (left part), the diagonal block
   A, and B again (right part), so the copy is done in three pieces.
   row is a GLOBAL row index; v must have exactly as many entries as the row
   has stored nonzeros.
*/
PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[])
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data;
  PetscErrorCode ierr;
  PetscInt       l,*garray = mat->garray,diag;

  PetscFunctionBegin;
  /* code only works for square matrices A */

  /* find size of row to the left of the diagonal part */
  ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr);
  row  = row - diag;  /* convert to local row index */
  /* l = number of B entries whose global column precedes the diagonal block */
  for (l=0; l<b->i[row+1]-b->i[row]; l++) {
    if (garray[b->j[b->i[row]+l]] > diag) break;
  }
  ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr);

  /* diagonal part */
  ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr);

  /* right of diagonal part */
  ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3022fd7e33dSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIAIJ"
/*
   Inserts or adds an m x n logically-dense block of values v into the matrix.
   Locally-owned rows go straight into the diagonal (A) or off-diagonal (B)
   sequential blocks via the MatSetValues_SeqAIJ_{A,B}_Private macros; rows
   owned by other processes are stashed for communication during assembly.

   im/in are GLOBAL row/column indices; negative indices are silently skipped.
   v may be NULL, in which case zeros are inserted.  roworiented selects
   whether v is laid out row-major (v[i*n+j]) or column-major (v[i+j*m]).
*/
PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscScalar    value;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
  PetscTruth     roworiented = aij->roworiented;

  /* Some Variables required in the macro */
  Mat            A = aij->A;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data;
  PetscInt       *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
  MatScalar      *aa = a->a;
  PetscTruth     ignorezeroentries = a->ignorezeroentries;
  Mat            B = aij->B;
  Mat_SeqAIJ     *b = (Mat_SeqAIJ*)B->data;
  PetscInt       *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
  MatScalar      *ba = b->a;

  PetscInt       *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
  PetscInt       nonew = a->nonew;
  MatScalar      *ap1,*ap2;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;  /* negative rows are ignored by convention */
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* locally owned row: prime the per-row state the insertion macros use */
      row      = im[i] - rstart;
      lastcol1 = -1;
      rp1      = aj + ai[row];
      ap1      = aa + ai[row];
      rmax1    = aimax[row];
      nrow1    = ailen[row];
      low1     = 0;
      high1    = nrow1;
      lastcol2 = -1;
      rp2      = bj + bi[row];
      ap2      = ba + bi[row];
      rmax2    = bimax[row];
      nrow2    = bilen[row];
      low2     = 0;
      high2    = nrow2;

      for (j=0; j<n; j++) {
        if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0;
        if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
        if (in[j] >= cstart && in[j] < cend){
          /* column falls in the diagonal block */
          col = in[j] - cstart;
          MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
#endif
        else {
          /* column falls in the off-diagonal block B */
          if (mat->was_assembled) {
            /* B stores compressed local column indices once assembled; translate
               the global column through the (lazily built) colmap */
            if (!aij->colmap) {
              ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined (PETSC_USE_CTABLE)
            ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col--;
#else
            col = aij->colmap[in[j]] - 1;
#endif
            /* col < 0 means this global column has no slot in B yet: fall back to
               disassembling so B again accepts global column indices */
            if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
              ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
              col =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
              B = aij->B;
              b = (Mat_SeqAIJ*)B->data;
              bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a;
              rp2      = bj + bi[row];
              ap2      = ba + bi[row];
              rmax2    = bimax[row];
              nrow2    = bilen[row];
              low2     = 0;
              high2    = nrow2;
              bm       = aij->B->rmap->n;
              ba = b->a;
            }
          } else col = in[j];  /* before first assembly B uses global column indices */
          MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
        }
      }
    } else {
      /* row owned elsewhere: stash for the assembly-time scatter */
      if (!aij->donotstash) {
        if (roworiented) {
          /* NOTE(review): this zero check inspects only the row's FIRST value
             before skipping the whole row -- confirm that is intended */
          if (ignorezeroentries && v[i*n] == 0.0) continue;
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
        } else {
          if (ignorezeroentries && v[i] == 0.0) continue;
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
4078a729477SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIAIJ"
/*
   Retrieves an m x n block of values into v (row-major).  Only rows owned by
   this process may be requested; off-process rows raise an error.  idxm/idxn
   are GLOBAL indices; negative indices are skipped (their slots in v are left
   untouched).  Entries not stored in the matrix come back as 0.0.
*/
PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= rstart && idxm[i] < rend) {
      row = idxm[i] - rstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= cstart && idxn[j] < cend){
          /* diagonal block: local column index is a simple offset */
          col = idxn[j] - cstart;
          ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          /* off-diagonal block: translate global column through the colmap */
          if (!aij->colmap) {
            ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr);
          col --;
#else
          col = aij->colmap[idxn[j]] - 1;
#endif
          /* col < 0 (or a stale garray mismatch) means the entry is not stored */
          if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0;
          else {
            ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else {
      SETERRQ(PETSC_ERR_SUP,"Only local values currently supported");
    }
  }
  PetscFunctionReturn(0);
}
451bc5ccf88SSatish Balay 
4524a2ae208SSatish Balay #undef __FUNCT__
4534a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ"
454dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode)
455bc5ccf88SSatish Balay {
456bc5ccf88SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
457dfbe8321SBarry Smith   PetscErrorCode ierr;
458b1d57f15SBarry Smith   PetscInt       nstash,reallocs;
459bc5ccf88SSatish Balay   InsertMode     addv;
460bc5ccf88SSatish Balay 
461bc5ccf88SSatish Balay   PetscFunctionBegin;
462bc5ccf88SSatish Balay   if (aij->donotstash) {
463bc5ccf88SSatish Balay     PetscFunctionReturn(0);
464bc5ccf88SSatish Balay   }
465bc5ccf88SSatish Balay 
466bc5ccf88SSatish Balay   /* make sure all processors are either in INSERTMODE or ADDMODE */
4677adad957SLisandro Dalcin   ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr);
468bc5ccf88SSatish Balay   if (addv == (ADD_VALUES|INSERT_VALUES)) {
46929bbc08cSBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
470bc5ccf88SSatish Balay   }
471bc5ccf88SSatish Balay   mat->insertmode = addv; /* in case this processor had no cache */
472bc5ccf88SSatish Balay 
473d0f46423SBarry Smith   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
4748798bf22SSatish Balay   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
475ae15b995SBarry Smith   ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
476bc5ccf88SSatish Balay   PetscFunctionReturn(0);
477bc5ccf88SSatish Balay }
478bc5ccf88SSatish Balay 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIAIJ"
/*
    Completes assembly of a parallel AIJ matrix: drains the stash of
  values destined for this process and inserts them, assembles the
  "diagonal" (aij->A) and "off-diagonal" (aij->B) sequential blocks,
  and on the first final assembly builds the scatter machinery needed
  by MatMult().
*/
PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *a=(Mat_SeqAIJ *)aij->A->data;
  PetscErrorCode ierr;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscTruth     other_disassembled;
  PetscScalar    *val;
  InsertMode     addv = mat->insertmode;

  /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!aij->donotstash) {
    /* receive each incoming stash message of (row,col,value) triples until none remain */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
  }
  a->compressedrow.use     = PETSC_FALSE;
  ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselfs, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqAIJ*)aij->B->data)->nonew)  {
    /* NOTE(review): reduces a PetscTruth with MPI_INT/MPI_PROD; the product is
       zero iff some process was not assembled -- relies on PetscTruth being
       int-sized, confirm on all supported platforms */
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
    }
  }
  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    /* first final assembly: create lvec and the Mvctx scatter used by MatMult() */
    ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr);
  ((Mat_SeqAIJ *)aij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */
  ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr);

  /* rowvalues is a MatGetRow() work buffer; it is stale after assembly */
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  aij->rowvalues = 0;

  /* used by MatAXPY() */
  a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0;  /* b->xtoy = 0 */
  a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0;  /* b->XtoY = 0 */

  PetscFunctionReturn(0);
}
545bc5ccf88SSatish Balay 
5464a2ae208SSatish Balay #undef __FUNCT__
5474a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ"
548dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
5491eb62cbbSBarry Smith {
55044a69424SLois Curfman McInnes   Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
551dfbe8321SBarry Smith   PetscErrorCode ierr;
5523a40ed3dSBarry Smith 
5533a40ed3dSBarry Smith   PetscFunctionBegin;
55478b31e54SBarry Smith   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
55578b31e54SBarry Smith   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
5563a40ed3dSBarry Smith   PetscFunctionReturn(0);
5571eb62cbbSBarry Smith }
5581eb62cbbSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_MPIAIJ"
/*
    Zeros the listed global rows, optionally placing diag on the diagonal
  of each zeroed row.

    Any process may list rows it does not own, so the routine first routes
  every row index to its owning process with a hand-rolled nonblocking
  send/receive exchange (count phase, posted receives, sends, waits),
  then zeros the locally owned rows in the A and B blocks.
*/
PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscMPIInt    size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt       i,*owners = A->rmap->range;
  PetscInt       *nprocs,j,idx,nsends,row;
  PetscInt       nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt       *rvalues,count,base,slen,*source;
  PetscInt       *lens,*lrows,*values,rstart=A->rmap->rstart;
  MPI_Comm       comm = ((PetscObject)A)->comm;
  MPI_Request    *send_waits,*recv_waits;
  MPI_Status     recv_status,*send_status;
#if defined(PETSC_DEBUG)
  PetscTruth     found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  /*  first count number of contributors to each processor */
  /* nprocs[2*j] = number of rows going to process j; nprocs[2*j+1] = 1 iff any go there */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  j = 0;
  for (i=0; i<N; i++) {
    /* if rows[] is (mostly) sorted the owner search resumes where it left off;
       restart the scan only when the sequence goes backwards */
    if (lastidx > (idx = rows[i])) j = 0;
    lastidx = idx;
    for (; j<size; j++) {
      if (idx >= owners[j] && idx < owners[j+1]) {
        nprocs[2*j]++;
        nprocs[2*j+1] = 1;
        owner[i] = j;
#if defined(PETSC_DEBUG)
        found = PETSC_TRUE;
#endif
        break;
      }
    }
#if defined(PETSC_DEBUG)
    if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
    found = PETSC_FALSE;
#endif
  }
  nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);

  /* post receives:   */
  ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
      1) starts[i] gives the starting index in svalues for stuff going to
         the ith processor
  */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  /* pack the row indices grouped by destination process */
  for (i=0; i<N; i++) {
    svalues[starts[owner[i]]++] = rows[i];
  }

  /* starts[] was advanced while packing; rebuild it for the sends */
  starts[0] = 0;
  for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  count = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i+1]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  base = owners[rank];

  /*  wait on receives */
  ierr   = PetscMalloc(2*(nrecvs+1)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
  source = lens + nrecvs;
  count  = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
    source[imdex]  = recv_status.MPI_SOURCE;
    lens[imdex]    = n;
    slen          += n;
    count--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* move the data into the send scatter */
  /* lrows[] collects, in local numbering, every row this process must zero */
  ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
  count = 0;
  for (i=0; i<nrecvs; i++) {
    values = rvalues + i*nmax;
    for (j=0; j<lens[i]; j++) {
      lrows[count++] = values[j] - base;
    }
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree(lens);CHKERRQ(ierr);
  ierr = PetscFree(owner);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* actually zap the local rows */
  /*
        Zero the required rows. If the "diagonal block" of the matrix
     is square and the user wishes to set the diagonal we use separate
     code so that MatSetValues() is not called for each diagonal allocating
     new memory, thus calling lots of mallocs and slowing things down.

       Contributed by: Matthew Knepley
  */
  /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
  ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr);
  if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
    ierr      = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr);
  } else if (diag != 0.0) {
    /* rectangular diagonal block: the diagonal entry of a zeroed row may
       fall outside l->A, so insert it through the full MatSetValues() path */
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
    if (((Mat_SeqAIJ*)l->A->data)->nonew) {
      SETERRQ(PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\
MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
    }
    for (i = 0; i < slen; i++) {
      row  = lrows[i] + rstart;
      ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
  }
  ierr = PetscFree(lrows);CHKERRQ(ierr);

  /* wait on sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(svalues);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
7101eb62cbbSBarry Smith 
7114a2ae208SSatish Balay #undef __FUNCT__
7124a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ"
713dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy)
7141eb62cbbSBarry Smith {
715416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
716dfbe8321SBarry Smith   PetscErrorCode ierr;
717b1d57f15SBarry Smith   PetscInt       nt;
718416022c9SBarry Smith 
7193a40ed3dSBarry Smith   PetscFunctionBegin;
720a2ce50c7SBarry Smith   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
721d0f46423SBarry Smith   if (nt != A->cmap->n) {
722d0f46423SBarry Smith     SETERRQ2(PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt);
723fbd6ef76SBarry Smith   }
724ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
725f830108cSBarry Smith   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
726ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
727f830108cSBarry Smith   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
7283a40ed3dSBarry Smith   PetscFunctionReturn(0);
7291eb62cbbSBarry Smith }
7301eb62cbbSBarry Smith 
7314a2ae208SSatish Balay #undef __FUNCT__
7324a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIAIJ"
733dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
734da3a660dSBarry Smith {
735416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
736dfbe8321SBarry Smith   PetscErrorCode ierr;
7373a40ed3dSBarry Smith 
7383a40ed3dSBarry Smith   PetscFunctionBegin;
739ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
740f830108cSBarry Smith   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
741ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
742f830108cSBarry Smith   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
7433a40ed3dSBarry Smith   PetscFunctionReturn(0);
744da3a660dSBarry Smith }
745da3a660dSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_MPIAIJ"
/*
    Computes yy = A^T * xx. The off-diagonal block is applied first and
  its result scattered back (reverse mode, with addition) to the owning
  processes.

    The two branches differ only in call order: with a "merged" scatter
  the Begin() performs the communication itself, so the local multiply
  must precede it; otherwise the local multiply is overlapped between
  Begin() and End().
*/
PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscTruth     merged;

  PetscFunctionBegin;
  ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
  /* do nondiagonal part */
  ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
  if (!merged) {
    /* send it on its way */
    ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* do local part */
    ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
    /* receive remote parts: note this assumes the values are not actually */
    /* added in yy until the next line, */
    ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else {
    /* do local part */
    ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
    /* send it on its way */
    ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* values actually were received in the Begin() but we need to call this nop */
    ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
776da3a660dSBarry Smith 
777cd0d46ebSvictorle EXTERN_C_BEGIN
778cd0d46ebSvictorle #undef __FUNCT__
7795fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ"
78013c77408SMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscTruth *f)
781cd0d46ebSvictorle {
7824f423910Svictorle   MPI_Comm       comm;
783cd0d46ebSvictorle   Mat_MPIAIJ     *Aij = (Mat_MPIAIJ *) Amat->data, *Bij;
78466501d38Svictorle   Mat            Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs;
785cd0d46ebSvictorle   IS             Me,Notme;
7866849ba73SBarry Smith   PetscErrorCode ierr;
787b1d57f15SBarry Smith   PetscInt       M,N,first,last,*notme,i;
788b1d57f15SBarry Smith   PetscMPIInt    size;
789cd0d46ebSvictorle 
790cd0d46ebSvictorle   PetscFunctionBegin;
79142e5f5b4Svictorle 
79242e5f5b4Svictorle   /* Easy test: symmetric diagonal block */
79366501d38Svictorle   Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A;
7945485867bSBarry Smith   ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr);
795cd0d46ebSvictorle   if (!*f) PetscFunctionReturn(0);
7964f423910Svictorle   ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
797b1d57f15SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
798b1d57f15SBarry Smith   if (size == 1) PetscFunctionReturn(0);
79942e5f5b4Svictorle 
80042e5f5b4Svictorle   /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */
801cd0d46ebSvictorle   ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr);
802cd0d46ebSvictorle   ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr);
803b1d57f15SBarry Smith   ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),&notme);CHKERRQ(ierr);
804cd0d46ebSvictorle   for (i=0; i<first; i++) notme[i] = i;
805cd0d46ebSvictorle   for (i=last; i<M; i++) notme[i-last+first] = i;
806268466fbSBarry Smith   ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,&Notme);CHKERRQ(ierr);
807268466fbSBarry Smith   ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr);
808268466fbSBarry Smith   ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr);
80966501d38Svictorle   Aoff = Aoffs[0];
810268466fbSBarry Smith   ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr);
81166501d38Svictorle   Boff = Boffs[0];
8125485867bSBarry Smith   ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr);
81366501d38Svictorle   ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr);
81466501d38Svictorle   ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr);
81542e5f5b4Svictorle   ierr = ISDestroy(Me);CHKERRQ(ierr);
81642e5f5b4Svictorle   ierr = ISDestroy(Notme);CHKERRQ(ierr);
81742e5f5b4Svictorle 
818cd0d46ebSvictorle   PetscFunctionReturn(0);
819cd0d46ebSvictorle }
820cd0d46ebSvictorle EXTERN_C_END
821cd0d46ebSvictorle 
8224a2ae208SSatish Balay #undef __FUNCT__
8234a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ"
824dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
825da3a660dSBarry Smith {
826416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
827dfbe8321SBarry Smith   PetscErrorCode ierr;
828da3a660dSBarry Smith 
8293a40ed3dSBarry Smith   PetscFunctionBegin;
830da3a660dSBarry Smith   /* do nondiagonal part */
8317c922b88SBarry Smith   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
832da3a660dSBarry Smith   /* send it on its way */
833ca9f406cSSatish Balay   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
834da3a660dSBarry Smith   /* do local part */
8357c922b88SBarry Smith   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
836a5ff213dSBarry Smith   /* receive remote parts */
837ca9f406cSSatish Balay   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
8383a40ed3dSBarry Smith   PetscFunctionReturn(0);
839da3a660dSBarry Smith }
840da3a660dSBarry Smith 
8411eb62cbbSBarry Smith /*
8421eb62cbbSBarry Smith   This only works correctly for square matrices where the subblock A->A is the
8431eb62cbbSBarry Smith    diagonal block
8441eb62cbbSBarry Smith */
8454a2ae208SSatish Balay #undef __FUNCT__
8464a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ"
847dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v)
8481eb62cbbSBarry Smith {
849dfbe8321SBarry Smith   PetscErrorCode ierr;
850416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
8513a40ed3dSBarry Smith 
8523a40ed3dSBarry Smith   PetscFunctionBegin;
853d0f46423SBarry Smith   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
854d0f46423SBarry Smith   if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) {
85529bbc08cSBarry Smith     SETERRQ(PETSC_ERR_ARG_SIZ,"row partition must equal col partition");
8563a40ed3dSBarry Smith   }
8573a40ed3dSBarry Smith   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
8583a40ed3dSBarry Smith   PetscFunctionReturn(0);
8591eb62cbbSBarry Smith }
8601eb62cbbSBarry Smith 
8614a2ae208SSatish Balay #undef __FUNCT__
8624a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ"
863f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa)
864052efed2SBarry Smith {
865052efed2SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
866dfbe8321SBarry Smith   PetscErrorCode ierr;
8673a40ed3dSBarry Smith 
8683a40ed3dSBarry Smith   PetscFunctionBegin;
869f4df32b1SMatthew Knepley   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
870f4df32b1SMatthew Knepley   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
8713a40ed3dSBarry Smith   PetscFunctionReturn(0);
872052efed2SBarry Smith }
873052efed2SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ"
/*
    Destroys a parallel AIJ matrix: frees the stash, the two sequential
  blocks, the column map, the ghost vector / scatter machinery and the
  work arrays, then clears the type name and the composed methods so the
  base Mat object is left clean.
*/
PetscErrorCode MatDestroy_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatDestroy(aij->A);CHKERRQ(ierr);
  ierr = MatDestroy(aij->B);CHKERRQ(ierr);
  /* colmap is either a hash table or a plain array depending on configuration */
#if defined (PETSC_USE_CTABLE)
  if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);}
#else
  ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(aij->garray);CHKERRQ(ierr);
  /* lvec/Mvctx may be NULL if the matrix was never final-assembled */
  if (aij->lvec)   {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);}
  if (aij->Mvctx)  {ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);}
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(aij->ld);CHKERRQ(ierr);
  ierr = PetscFree(aij);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  /* remove the composed method implementations registered for this type */
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
910ee50ffe9SBarry Smith 
9114a2ae208SSatish Balay #undef __FUNCT__
9128e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary"
913dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer)
9148e2fed03SBarry Smith {
9158e2fed03SBarry Smith   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
9168e2fed03SBarry Smith   Mat_SeqAIJ*       A = (Mat_SeqAIJ*)aij->A->data;
9178e2fed03SBarry Smith   Mat_SeqAIJ*       B = (Mat_SeqAIJ*)aij->B->data;
9186849ba73SBarry Smith   PetscErrorCode    ierr;
91932dcc486SBarry Smith   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
9206f69ff64SBarry Smith   int               fd;
921a788621eSSatish Balay   PetscInt          nz,header[4],*row_lengths,*range=0,rlen,i;
922d0f46423SBarry Smith   PetscInt          nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz;
9238e2fed03SBarry Smith   PetscScalar       *column_values;
9248e2fed03SBarry Smith 
9258e2fed03SBarry Smith   PetscFunctionBegin;
9267adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
9277adad957SLisandro Dalcin   ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr);
9288e2fed03SBarry Smith   nz   = A->nz + B->nz;
929958c9bccSBarry Smith   if (!rank) {
9308e2fed03SBarry Smith     header[0] = MAT_FILE_COOKIE;
931d0f46423SBarry Smith     header[1] = mat->rmap->N;
932d0f46423SBarry Smith     header[2] = mat->cmap->N;
9337adad957SLisandro Dalcin     ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
9348e2fed03SBarry Smith     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
9356f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9368e2fed03SBarry Smith     /* get largest number of rows any processor has */
937d0f46423SBarry Smith     rlen = mat->rmap->n;
938d0f46423SBarry Smith     range = mat->rmap->range;
9398e2fed03SBarry Smith     for (i=1; i<size; i++) {
9408e2fed03SBarry Smith       rlen = PetscMax(rlen,range[i+1] - range[i]);
9418e2fed03SBarry Smith     }
9428e2fed03SBarry Smith   } else {
9437adad957SLisandro Dalcin     ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
944d0f46423SBarry Smith     rlen = mat->rmap->n;
9458e2fed03SBarry Smith   }
9468e2fed03SBarry Smith 
9478e2fed03SBarry Smith   /* load up the local row counts */
948b1d57f15SBarry Smith   ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr);
949d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
9508e2fed03SBarry Smith     row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
9518e2fed03SBarry Smith   }
9528e2fed03SBarry Smith 
9538e2fed03SBarry Smith   /* store the row lengths to the file */
954958c9bccSBarry Smith   if (!rank) {
9558e2fed03SBarry Smith     MPI_Status status;
956d0f46423SBarry Smith     ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9578e2fed03SBarry Smith     for (i=1; i<size; i++) {
9588e2fed03SBarry Smith       rlen = range[i+1] - range[i];
9597adad957SLisandro Dalcin       ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
9606f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9618e2fed03SBarry Smith     }
9628e2fed03SBarry Smith   } else {
963d0f46423SBarry Smith     ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
9648e2fed03SBarry Smith   }
9658e2fed03SBarry Smith   ierr = PetscFree(row_lengths);CHKERRQ(ierr);
9668e2fed03SBarry Smith 
9678e2fed03SBarry Smith   /* load up the local column indices */
9688e2fed03SBarry Smith   nzmax = nz; /* )th processor needs space a largest processor needs */
9697adad957SLisandro Dalcin   ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
970b1d57f15SBarry Smith   ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
9718e2fed03SBarry Smith   cnt  = 0;
972d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
9738e2fed03SBarry Smith     for (j=B->i[i]; j<B->i[i+1]; j++) {
9748e2fed03SBarry Smith       if ( (col = garray[B->j[j]]) > cstart) break;
9758e2fed03SBarry Smith       column_indices[cnt++] = col;
9768e2fed03SBarry Smith     }
9778e2fed03SBarry Smith     for (k=A->i[i]; k<A->i[i+1]; k++) {
9788e2fed03SBarry Smith       column_indices[cnt++] = A->j[k] + cstart;
9798e2fed03SBarry Smith     }
9808e2fed03SBarry Smith     for (; j<B->i[i+1]; j++) {
9818e2fed03SBarry Smith       column_indices[cnt++] = garray[B->j[j]];
9828e2fed03SBarry Smith     }
9838e2fed03SBarry Smith   }
98477431f27SBarry Smith   if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);
9858e2fed03SBarry Smith 
9868e2fed03SBarry Smith   /* store the column indices to the file */
987958c9bccSBarry Smith   if (!rank) {
9888e2fed03SBarry Smith     MPI_Status status;
9896f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9908e2fed03SBarry Smith     for (i=1; i<size; i++) {
9917adad957SLisandro Dalcin       ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
99277431f27SBarry Smith       if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
9937adad957SLisandro Dalcin       ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
9946f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
9958e2fed03SBarry Smith     }
9968e2fed03SBarry Smith   } else {
9977adad957SLisandro Dalcin     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
9987adad957SLisandro Dalcin     ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
9998e2fed03SBarry Smith   }
10008e2fed03SBarry Smith   ierr = PetscFree(column_indices);CHKERRQ(ierr);
10018e2fed03SBarry Smith 
10028e2fed03SBarry Smith   /* load up the local column values */
10038e2fed03SBarry Smith   ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
10048e2fed03SBarry Smith   cnt  = 0;
1005d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
10068e2fed03SBarry Smith     for (j=B->i[i]; j<B->i[i+1]; j++) {
10078e2fed03SBarry Smith       if ( garray[B->j[j]] > cstart) break;
10088e2fed03SBarry Smith       column_values[cnt++] = B->a[j];
10098e2fed03SBarry Smith     }
10108e2fed03SBarry Smith     for (k=A->i[i]; k<A->i[i+1]; k++) {
10118e2fed03SBarry Smith       column_values[cnt++] = A->a[k];
10128e2fed03SBarry Smith     }
10138e2fed03SBarry Smith     for (; j<B->i[i+1]; j++) {
10148e2fed03SBarry Smith       column_values[cnt++] = B->a[j];
10158e2fed03SBarry Smith     }
10168e2fed03SBarry Smith   }
101777431f27SBarry Smith   if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);
10188e2fed03SBarry Smith 
10198e2fed03SBarry Smith   /* store the column values to the file */
1020958c9bccSBarry Smith   if (!rank) {
10218e2fed03SBarry Smith     MPI_Status status;
10226f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
10238e2fed03SBarry Smith     for (i=1; i<size; i++) {
10247adad957SLisandro Dalcin       ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
102577431f27SBarry Smith       if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
10267adad957SLisandro Dalcin       ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
10276f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
10288e2fed03SBarry Smith     }
10298e2fed03SBarry Smith   } else {
10307adad957SLisandro Dalcin     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
10317adad957SLisandro Dalcin     ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
10328e2fed03SBarry Smith   }
10338e2fed03SBarry Smith   ierr = PetscFree(column_values);CHKERRQ(ierr);
10348e2fed03SBarry Smith   PetscFunctionReturn(0);
10358e2fed03SBarry Smith }
10368e2fed03SBarry Smith 
10378e2fed03SBarry Smith #undef __FUNCT__
10384a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket"
1039dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
1040416022c9SBarry Smith {
104144a69424SLois Curfman McInnes   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
1042dfbe8321SBarry Smith   PetscErrorCode    ierr;
104332dcc486SBarry Smith   PetscMPIInt       rank = aij->rank,size = aij->size;
1044d38fa0fbSBarry Smith   PetscTruth        isdraw,iascii,isbinary;
1045b0a32e0cSBarry Smith   PetscViewer       sviewer;
1046f3ef73ceSBarry Smith   PetscViewerFormat format;
1047416022c9SBarry Smith 
10483a40ed3dSBarry Smith   PetscFunctionBegin;
1049fb9695e5SSatish Balay   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
105032077d6dSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
10518e2fed03SBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
105232077d6dSBarry Smith   if (iascii) {
1053b0a32e0cSBarry Smith     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1054456192e2SBarry Smith     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
10554e220ebcSLois Curfman McInnes       MatInfo    info;
1056923f20ffSKris Buschelman       PetscTruth inodes;
1057923f20ffSKris Buschelman 
10587adad957SLisandro Dalcin       ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
1059888f2ed8SSatish Balay       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
1060923f20ffSKris Buschelman       ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr);
1061923f20ffSKris Buschelman       if (!inodes) {
106277431f27SBarry Smith         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n",
1063d0f46423SBarry Smith 					      rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
10646831982aSBarry Smith       } else {
106577431f27SBarry Smith         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n",
1066d0f46423SBarry Smith 		    rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
10676831982aSBarry Smith       }
1068888f2ed8SSatish Balay       ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
106977431f27SBarry Smith       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
1070888f2ed8SSatish Balay       ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
107177431f27SBarry Smith       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
1072b0a32e0cSBarry Smith       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
107307d81ca4SBarry Smith       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
1074a40aa06bSLois Curfman McInnes       ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr);
10753a40ed3dSBarry Smith       PetscFunctionReturn(0);
1076fb9695e5SSatish Balay     } else if (format == PETSC_VIEWER_ASCII_INFO) {
1077923f20ffSKris Buschelman       PetscInt   inodecount,inodelimit,*inodes;
1078923f20ffSKris Buschelman       ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr);
1079923f20ffSKris Buschelman       if (inodes) {
1080923f20ffSKris Buschelman         ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr);
1081d38fa0fbSBarry Smith       } else {
1082d38fa0fbSBarry Smith         ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr);
1083d38fa0fbSBarry Smith       }
10843a40ed3dSBarry Smith       PetscFunctionReturn(0);
10854aedb280SBarry Smith     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
10864aedb280SBarry Smith       PetscFunctionReturn(0);
108708480c60SBarry Smith     }
10888e2fed03SBarry Smith   } else if (isbinary) {
10898e2fed03SBarry Smith     if (size == 1) {
10907adad957SLisandro Dalcin       ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
10918e2fed03SBarry Smith       ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
10928e2fed03SBarry Smith     } else {
10938e2fed03SBarry Smith       ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr);
10948e2fed03SBarry Smith     }
10958e2fed03SBarry Smith     PetscFunctionReturn(0);
10960f5bd95cSBarry Smith   } else if (isdraw) {
1097b0a32e0cSBarry Smith     PetscDraw  draw;
109819bcc07fSBarry Smith     PetscTruth isnull;
1099b0a32e0cSBarry Smith     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
1100b0a32e0cSBarry Smith     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
110119bcc07fSBarry Smith   }
110219bcc07fSBarry Smith 
110317699dbbSLois Curfman McInnes   if (size == 1) {
11047adad957SLisandro Dalcin     ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
110578b31e54SBarry Smith     ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
11063a40ed3dSBarry Smith   } else {
110795373324SBarry Smith     /* assemble the entire matrix onto first processor. */
110895373324SBarry Smith     Mat         A;
1109ec8511deSBarry Smith     Mat_SeqAIJ  *Aloc;
1110d0f46423SBarry Smith     PetscInt    M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct;
1111dd6ea824SBarry Smith     MatScalar   *a;
11122ee70a88SLois Curfman McInnes 
111332a366e4SMatthew Knepley     if (mat->rmap->N > 1024) {
111490d69ab7SBarry Smith       PetscTruth flg = PETSC_FALSE;
111532a366e4SMatthew Knepley 
11160c235cafSBarry Smith       ierr = PetscOptionsGetTruth(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,PETSC_NULL);CHKERRQ(ierr);
111732a366e4SMatthew Knepley       if (!flg) {
111890d69ab7SBarry Smith         SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large.");
111932a366e4SMatthew Knepley       }
112032a366e4SMatthew Knepley     }
11210805154bSBarry Smith 
11227adad957SLisandro Dalcin     ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr);
112317699dbbSLois Curfman McInnes     if (!rank) {
1124f69a0ea3SMatthew Knepley       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
11253a40ed3dSBarry Smith     } else {
1126f69a0ea3SMatthew Knepley       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
112795373324SBarry Smith     }
1128f204ca49SKris Buschelman     /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */
1129f204ca49SKris Buschelman     ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
1130f204ca49SKris Buschelman     ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
113152e6d16bSBarry Smith     ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr);
1132416022c9SBarry Smith 
113395373324SBarry Smith     /* copy over the A part */
1134ec8511deSBarry Smith     Aloc = (Mat_SeqAIJ*)aij->A->data;
1135d0f46423SBarry Smith     m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1136d0f46423SBarry Smith     row = mat->rmap->rstart;
1137d0f46423SBarry Smith     for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;}
113895373324SBarry Smith     for (i=0; i<m; i++) {
1139416022c9SBarry Smith       ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr);
114095373324SBarry Smith       row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i];
114195373324SBarry Smith     }
11422ee70a88SLois Curfman McInnes     aj = Aloc->j;
1143d0f46423SBarry Smith     for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;}
114495373324SBarry Smith 
114595373324SBarry Smith     /* copy over the B part */
1146ec8511deSBarry Smith     Aloc = (Mat_SeqAIJ*)aij->B->data;
1147d0f46423SBarry Smith     m    = aij->B->rmap->n;  ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1148d0f46423SBarry Smith     row  = mat->rmap->rstart;
1149b1d57f15SBarry Smith     ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
1150b0a32e0cSBarry Smith     ct   = cols;
1151bfec09a0SHong Zhang     for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];}
115295373324SBarry Smith     for (i=0; i<m; i++) {
1153416022c9SBarry Smith       ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr);
115495373324SBarry Smith       row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i];
115595373324SBarry Smith     }
1156606d414cSSatish Balay     ierr = PetscFree(ct);CHKERRQ(ierr);
11576d4a8577SBarry Smith     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
11586d4a8577SBarry Smith     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
115955843e3eSBarry Smith     /*
116055843e3eSBarry Smith        Everyone has to call to draw the matrix since the graphics waits are
1161b0a32e0cSBarry Smith        synchronized across all processors that share the PetscDraw object
116255843e3eSBarry Smith     */
1163b0a32e0cSBarry Smith     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
1164e03a110bSBarry Smith     if (!rank) {
11657adad957SLisandro Dalcin       ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
11666831982aSBarry Smith       ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
116795373324SBarry Smith     }
1168b0a32e0cSBarry Smith     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
116978b31e54SBarry Smith     ierr = MatDestroy(A);CHKERRQ(ierr);
117095373324SBarry Smith   }
11713a40ed3dSBarry Smith   PetscFunctionReturn(0);
11721eb62cbbSBarry Smith }
11731eb62cbbSBarry Smith 
11744a2ae208SSatish Balay #undef __FUNCT__
11754a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ"
1176dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer)
1177416022c9SBarry Smith {
1178dfbe8321SBarry Smith   PetscErrorCode ierr;
117932077d6dSBarry Smith   PetscTruth     iascii,isdraw,issocket,isbinary;
1180416022c9SBarry Smith 
11813a40ed3dSBarry Smith   PetscFunctionBegin;
118232077d6dSBarry Smith   ierr  = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
1183fb9695e5SSatish Balay   ierr  = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
1184fb9695e5SSatish Balay   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
1185b0a32e0cSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr);
118632077d6dSBarry Smith   if (iascii || isdraw || isbinary || issocket) {
11877b2a1423SBarry Smith     ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
11885cd90555SBarry Smith   } else {
118979a5c55eSBarry Smith     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name);
1190416022c9SBarry Smith   }
11913a40ed3dSBarry Smith   PetscFunctionReturn(0);
1192416022c9SBarry Smith }
1193416022c9SBarry Smith 
11944a2ae208SSatish Balay #undef __FUNCT__
11954a2ae208SSatish Balay #define __FUNCT__ "MatRelax_MPIAIJ"
1196b1d57f15SBarry Smith PetscErrorCode MatRelax_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
11978a729477SBarry Smith {
119844a69424SLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
1199dfbe8321SBarry Smith   PetscErrorCode ierr;
1200c14dc6b6SHong Zhang   Vec            bb1;
12018a729477SBarry Smith 
12023a40ed3dSBarry Smith   PetscFunctionBegin;
1203c14dc6b6SHong Zhang   ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
12042798e883SHong Zhang 
1205c16cb8f2SBarry Smith   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){
1206da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
1207bd3bf7d3SHong Zhang       ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
12082798e883SHong Zhang       its--;
1209da3a660dSBarry Smith     }
12102798e883SHong Zhang 
12112798e883SHong Zhang     while (its--) {
1212ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1213ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
12142798e883SHong Zhang 
1215c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1216efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1217c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
12182798e883SHong Zhang 
1219c14dc6b6SHong Zhang       /* local sweep */
122071f1c65dSBarry Smith       ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
12212798e883SHong Zhang     }
12223a40ed3dSBarry Smith   } else if (flag & SOR_LOCAL_FORWARD_SWEEP){
1223da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
1224c14dc6b6SHong Zhang       ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
12252798e883SHong Zhang       its--;
1226da3a660dSBarry Smith     }
12272798e883SHong Zhang     while (its--) {
1228ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1229ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
12302798e883SHong Zhang 
1231c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1232efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1233c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
1234c14dc6b6SHong Zhang 
1235c14dc6b6SHong Zhang       /* local sweep */
123671f1c65dSBarry Smith       ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
12372798e883SHong Zhang     }
12383a40ed3dSBarry Smith   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){
1239da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
1240c14dc6b6SHong Zhang       ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
12412798e883SHong Zhang       its--;
1242da3a660dSBarry Smith     }
12432798e883SHong Zhang     while (its--) {
1244ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1245ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
12462798e883SHong Zhang 
1247c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1248efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1249c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
12502798e883SHong Zhang 
1251c14dc6b6SHong Zhang       /* local sweep */
125271f1c65dSBarry Smith       ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr);
12532798e883SHong Zhang     }
12543a40ed3dSBarry Smith   } else {
125529bbc08cSBarry Smith     SETERRQ(PETSC_ERR_SUP,"Parallel SOR not supported");
1256c16cb8f2SBarry Smith   }
1257c14dc6b6SHong Zhang 
1258c14dc6b6SHong Zhang   ierr = VecDestroy(bb1);CHKERRQ(ierr);
12593a40ed3dSBarry Smith   PetscFunctionReturn(0);
12608a729477SBarry Smith }
1261a66be287SLois Curfman McInnes 
12624a2ae208SSatish Balay #undef __FUNCT__
126342e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ"
126442e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B)
126542e855d1Svictor {
126642e855d1Svictor   MPI_Comm       comm,pcomm;
12675d0c19d7SBarry Smith   PetscInt       first,local_size,nrows;
12685d0c19d7SBarry Smith   const PetscInt *rows;
1269dbf0e21dSBarry Smith   PetscMPIInt    size;
127042e855d1Svictor   IS             crowp,growp,irowp,lrowp,lcolp,icolp;
127142e855d1Svictor   PetscErrorCode ierr;
127242e855d1Svictor 
127342e855d1Svictor   PetscFunctionBegin;
127442e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
127542e855d1Svictor   /* make a collective version of 'rowp' */
127642e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
127742e855d1Svictor   if (pcomm==comm) {
127842e855d1Svictor     crowp = rowp;
127942e855d1Svictor   } else {
128042e855d1Svictor     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
128142e855d1Svictor     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
128242e855d1Svictor     ierr = ISCreateGeneral(comm,nrows,rows,&crowp);CHKERRQ(ierr);
128342e855d1Svictor     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
128442e855d1Svictor   }
128542e855d1Svictor   /* collect the global row permutation and invert it */
128642e855d1Svictor   ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr);
128742e855d1Svictor   ierr = ISSetPermutation(growp);CHKERRQ(ierr);
128842e855d1Svictor   if (pcomm!=comm) {
128942e855d1Svictor     ierr = ISDestroy(crowp);CHKERRQ(ierr);
129042e855d1Svictor   }
129142e855d1Svictor   ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr);
129242e855d1Svictor   /* get the local target indices */
129342e855d1Svictor   ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr);
129442e855d1Svictor   ierr = MatGetLocalSize(A,&local_size,PETSC_NULL);CHKERRQ(ierr);
129542e855d1Svictor   ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr);
129642e855d1Svictor   ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,&lrowp);CHKERRQ(ierr);
129742e855d1Svictor   ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr);
129842e855d1Svictor   ierr = ISDestroy(irowp);CHKERRQ(ierr);
129942e855d1Svictor   /* the column permutation is so much easier;
130042e855d1Svictor      make a local version of 'colp' and invert it */
130142e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
1302dbf0e21dSBarry Smith   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
1303dbf0e21dSBarry Smith   if (size==1) {
130442e855d1Svictor     lcolp = colp;
130542e855d1Svictor   } else {
130642e855d1Svictor     ierr = ISGetSize(colp,&nrows);CHKERRQ(ierr);
130742e855d1Svictor     ierr = ISGetIndices(colp,&rows);CHKERRQ(ierr);
130842e855d1Svictor     ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,&lcolp);CHKERRQ(ierr);
130942e855d1Svictor   }
1310dbf0e21dSBarry Smith   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
131142e855d1Svictor   ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp);CHKERRQ(ierr);
13124aa3045dSJed Brown   ierr = ISSetPermutation(icolp);CHKERRQ(ierr);
1313dbf0e21dSBarry Smith   if (size>1) {
131442e855d1Svictor     ierr = ISRestoreIndices(colp,&rows);CHKERRQ(ierr);
131542e855d1Svictor     ierr = ISDestroy(lcolp);CHKERRQ(ierr);
131642e855d1Svictor   }
131742e855d1Svictor   /* now we just get the submatrix */
13184aa3045dSJed Brown   ierr = MatGetSubMatrix_MPIAIJ_Private(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
131942e855d1Svictor   /* clean up */
132042e855d1Svictor   ierr = ISDestroy(lrowp);CHKERRQ(ierr);
132142e855d1Svictor   ierr = ISDestroy(icolp);CHKERRQ(ierr);
132242e855d1Svictor   PetscFunctionReturn(0);
132342e855d1Svictor }
132442e855d1Svictor 
132542e855d1Svictor #undef __FUNCT__
13264a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ"
1327dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1328a66be287SLois Curfman McInnes {
1329a66be287SLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
1330a66be287SLois Curfman McInnes   Mat            A = mat->A,B = mat->B;
1331dfbe8321SBarry Smith   PetscErrorCode ierr;
1332329f5518SBarry Smith   PetscReal      isend[5],irecv[5];
1333a66be287SLois Curfman McInnes 
13343a40ed3dSBarry Smith   PetscFunctionBegin;
13354e220ebcSLois Curfman McInnes   info->block_size     = 1.0;
13364e220ebcSLois Curfman McInnes   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
13374e220ebcSLois Curfman McInnes   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
13384e220ebcSLois Curfman McInnes   isend[3] = info->memory;  isend[4] = info->mallocs;
13394e220ebcSLois Curfman McInnes   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
13404e220ebcSLois Curfman McInnes   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
13414e220ebcSLois Curfman McInnes   isend[3] += info->memory;  isend[4] += info->mallocs;
1342a66be287SLois Curfman McInnes   if (flag == MAT_LOCAL) {
13434e220ebcSLois Curfman McInnes     info->nz_used      = isend[0];
13444e220ebcSLois Curfman McInnes     info->nz_allocated = isend[1];
13454e220ebcSLois Curfman McInnes     info->nz_unneeded  = isend[2];
13464e220ebcSLois Curfman McInnes     info->memory       = isend[3];
13474e220ebcSLois Curfman McInnes     info->mallocs      = isend[4];
1348a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_MAX) {
13497adad957SLisandro Dalcin     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr);
13504e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
13514e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
13524e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
13534e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
13544e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1355a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_SUM) {
13567adad957SLisandro Dalcin     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr);
13574e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
13584e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
13594e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
13604e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
13614e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1362a66be287SLois Curfman McInnes   }
13634e220ebcSLois Curfman McInnes   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
13644e220ebcSLois Curfman McInnes   info->fill_ratio_needed = 0;
13654e220ebcSLois Curfman McInnes   info->factor_mallocs    = 0;
13664e220ebcSLois Curfman McInnes 
13673a40ed3dSBarry Smith   PetscFunctionReturn(0);
1368a66be287SLois Curfman McInnes }
1369a66be287SLois Curfman McInnes 
13704a2ae208SSatish Balay #undef __FUNCT__
13714a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ"
13724e0d8c25SBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscTruth flg)
1373c74985f6SBarry Smith {
1374c0bbcb79SLois Curfman McInnes   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1375dfbe8321SBarry Smith   PetscErrorCode ierr;
1376c74985f6SBarry Smith 
13773a40ed3dSBarry Smith   PetscFunctionBegin;
137812c028f9SKris Buschelman   switch (op) {
1379512a5fc5SBarry Smith   case MAT_NEW_NONZERO_LOCATIONS:
138012c028f9SKris Buschelman   case MAT_NEW_NONZERO_ALLOCATION_ERR:
138128b2fa4aSMatthew Knepley   case MAT_UNUSED_NONZERO_LOCATION_ERR:
138212c028f9SKris Buschelman   case MAT_KEEP_ZEROED_ROWS:
138312c028f9SKris Buschelman   case MAT_NEW_NONZERO_LOCATION_ERR:
138412c028f9SKris Buschelman   case MAT_USE_INODES:
138512c028f9SKris Buschelman   case MAT_IGNORE_ZERO_ENTRIES:
13864e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
13874e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
138812c028f9SKris Buschelman     break;
138912c028f9SKris Buschelman   case MAT_ROW_ORIENTED:
13904e0d8c25SBarry Smith     a->roworiented = flg;
13914e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
13924e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
139312c028f9SKris Buschelman     break;
13944e0d8c25SBarry Smith   case MAT_NEW_DIAGONALS:
1395290bbb0aSBarry Smith     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
139612c028f9SKris Buschelman     break;
139712c028f9SKris Buschelman   case MAT_IGNORE_OFF_PROC_ENTRIES:
13987c922b88SBarry Smith     a->donotstash = PETSC_TRUE;
139912c028f9SKris Buschelman     break;
140077e54ba9SKris Buschelman   case MAT_SYMMETRIC:
14014e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
140225f421beSHong Zhang     break;
140377e54ba9SKris Buschelman   case MAT_STRUCTURALLY_SYMMETRIC:
1404bf108f30SBarry Smith   case MAT_HERMITIAN:
1405bf108f30SBarry Smith   case MAT_SYMMETRY_ETERNAL:
14064e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
140777e54ba9SKris Buschelman     break;
140812c028f9SKris Buschelman   default:
1409ad86a440SBarry Smith     SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op);
14103a40ed3dSBarry Smith   }
14113a40ed3dSBarry Smith   PetscFunctionReturn(0);
1412c74985f6SBarry Smith }
1413c74985f6SBarry Smith 
14144a2ae208SSatish Balay #undef __FUNCT__
14154a2ae208SSatish Balay #define __FUNCT__ "MatGetRow_MPIAIJ"
1416b1d57f15SBarry Smith PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
141739e00950SLois Curfman McInnes {
1418154123eaSLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
141987828ca2SBarry Smith   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
14206849ba73SBarry Smith   PetscErrorCode ierr;
1421d0f46423SBarry Smith   PetscInt       i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart;
1422d0f46423SBarry Smith   PetscInt       nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend;
1423b1d57f15SBarry Smith   PetscInt       *cmap,*idx_p;
142439e00950SLois Curfman McInnes 
14253a40ed3dSBarry Smith   PetscFunctionBegin;
1426abc0a331SBarry Smith   if (mat->getrowactive) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active");
14277a0afa10SBarry Smith   mat->getrowactive = PETSC_TRUE;
14287a0afa10SBarry Smith 
142970f0671dSBarry Smith   if (!mat->rowvalues && (idx || v)) {
14307a0afa10SBarry Smith     /*
14317a0afa10SBarry Smith         allocate enough space to hold information from the longest row.
14327a0afa10SBarry Smith     */
14337a0afa10SBarry Smith     Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data;
1434b1d57f15SBarry Smith     PetscInt     max = 1,tmp;
1435d0f46423SBarry Smith     for (i=0; i<matin->rmap->n; i++) {
14367a0afa10SBarry Smith       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
14377a0afa10SBarry Smith       if (max < tmp) { max = tmp; }
14387a0afa10SBarry Smith     }
1439b1d57f15SBarry Smith     ierr = PetscMalloc(max*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr);
1440b1d57f15SBarry Smith     mat->rowindices = (PetscInt*)(mat->rowvalues + max);
14417a0afa10SBarry Smith   }
14427a0afa10SBarry Smith 
144329bbc08cSBarry Smith   if (row < rstart || row >= rend) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Only local rows")
1444abc0e9e4SLois Curfman McInnes   lrow = row - rstart;
144539e00950SLois Curfman McInnes 
1446154123eaSLois Curfman McInnes   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1447154123eaSLois Curfman McInnes   if (!v)   {pvA = 0; pvB = 0;}
1448154123eaSLois Curfman McInnes   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1449f830108cSBarry Smith   ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1450f830108cSBarry Smith   ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1451154123eaSLois Curfman McInnes   nztot = nzA + nzB;
1452154123eaSLois Curfman McInnes 
145370f0671dSBarry Smith   cmap  = mat->garray;
1454154123eaSLois Curfman McInnes   if (v  || idx) {
1455154123eaSLois Curfman McInnes     if (nztot) {
1456154123eaSLois Curfman McInnes       /* Sort by increasing column numbers, assuming A and B already sorted */
1457b1d57f15SBarry Smith       PetscInt imark = -1;
1458154123eaSLois Curfman McInnes       if (v) {
145970f0671dSBarry Smith         *v = v_p = mat->rowvalues;
146039e00950SLois Curfman McInnes         for (i=0; i<nzB; i++) {
146170f0671dSBarry Smith           if (cmap[cworkB[i]] < cstart)   v_p[i] = vworkB[i];
1462154123eaSLois Curfman McInnes           else break;
1463154123eaSLois Curfman McInnes         }
1464154123eaSLois Curfman McInnes         imark = i;
146570f0671dSBarry Smith         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
146670f0671dSBarry Smith         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1467154123eaSLois Curfman McInnes       }
1468154123eaSLois Curfman McInnes       if (idx) {
146970f0671dSBarry Smith         *idx = idx_p = mat->rowindices;
147070f0671dSBarry Smith         if (imark > -1) {
147170f0671dSBarry Smith           for (i=0; i<imark; i++) {
147270f0671dSBarry Smith             idx_p[i] = cmap[cworkB[i]];
147370f0671dSBarry Smith           }
147470f0671dSBarry Smith         } else {
1475154123eaSLois Curfman McInnes           for (i=0; i<nzB; i++) {
147670f0671dSBarry Smith             if (cmap[cworkB[i]] < cstart)   idx_p[i] = cmap[cworkB[i]];
1477154123eaSLois Curfman McInnes             else break;
1478154123eaSLois Curfman McInnes           }
1479154123eaSLois Curfman McInnes           imark = i;
148070f0671dSBarry Smith         }
148170f0671dSBarry Smith         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart + cworkA[i];
148270f0671dSBarry Smith         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]];
148339e00950SLois Curfman McInnes       }
14843f97c4b0SBarry Smith     } else {
14851ca473b0SSatish Balay       if (idx) *idx = 0;
14861ca473b0SSatish Balay       if (v)   *v   = 0;
14871ca473b0SSatish Balay     }
1488154123eaSLois Curfman McInnes   }
148939e00950SLois Curfman McInnes   *nz = nztot;
1490f830108cSBarry Smith   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1491f830108cSBarry Smith   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
14923a40ed3dSBarry Smith   PetscFunctionReturn(0);
149339e00950SLois Curfman McInnes }
149439e00950SLois Curfman McInnes 
14954a2ae208SSatish Balay #undef __FUNCT__
14964a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ"
1497b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
149839e00950SLois Curfman McInnes {
14997a0afa10SBarry Smith   Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data;
15003a40ed3dSBarry Smith 
15013a40ed3dSBarry Smith   PetscFunctionBegin;
1502abc0a331SBarry Smith   if (!aij->getrowactive) {
1503abc0a331SBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
15047a0afa10SBarry Smith   }
15057a0afa10SBarry Smith   aij->getrowactive = PETSC_FALSE;
15063a40ed3dSBarry Smith   PetscFunctionReturn(0);
150739e00950SLois Curfman McInnes }
150839e00950SLois Curfman McInnes 
15094a2ae208SSatish Balay #undef __FUNCT__
15104a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIAIJ"
1511dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm)
1512855ac2c5SLois Curfman McInnes {
1513855ac2c5SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
1514ec8511deSBarry Smith   Mat_SeqAIJ     *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data;
1515dfbe8321SBarry Smith   PetscErrorCode ierr;
1516d0f46423SBarry Smith   PetscInt       i,j,cstart = mat->cmap->rstart;
1517329f5518SBarry Smith   PetscReal      sum = 0.0;
1518a77337e4SBarry Smith   MatScalar      *v;
151904ca555eSLois Curfman McInnes 
15203a40ed3dSBarry Smith   PetscFunctionBegin;
152117699dbbSLois Curfman McInnes   if (aij->size == 1) {
152214183eadSLois Curfman McInnes     ierr =  MatNorm(aij->A,type,norm);CHKERRQ(ierr);
152337fa93a5SLois Curfman McInnes   } else {
152404ca555eSLois Curfman McInnes     if (type == NORM_FROBENIUS) {
152504ca555eSLois Curfman McInnes       v = amat->a;
152604ca555eSLois Curfman McInnes       for (i=0; i<amat->nz; i++) {
1527aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX)
1528329f5518SBarry Smith         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
152904ca555eSLois Curfman McInnes #else
153004ca555eSLois Curfman McInnes         sum += (*v)*(*v); v++;
153104ca555eSLois Curfman McInnes #endif
153204ca555eSLois Curfman McInnes       }
153304ca555eSLois Curfman McInnes       v = bmat->a;
153404ca555eSLois Curfman McInnes       for (i=0; i<bmat->nz; i++) {
1535aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX)
1536329f5518SBarry Smith         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
153704ca555eSLois Curfman McInnes #else
153804ca555eSLois Curfman McInnes         sum += (*v)*(*v); v++;
153904ca555eSLois Curfman McInnes #endif
154004ca555eSLois Curfman McInnes       }
15417adad957SLisandro Dalcin       ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
154204ca555eSLois Curfman McInnes       *norm = sqrt(*norm);
15433a40ed3dSBarry Smith     } else if (type == NORM_1) { /* max column norm */
1544329f5518SBarry Smith       PetscReal *tmp,*tmp2;
1545b1d57f15SBarry Smith       PetscInt  *jj,*garray = aij->garray;
1546d0f46423SBarry Smith       ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr);
1547d0f46423SBarry Smith       ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr);
1548d0f46423SBarry Smith       ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
154904ca555eSLois Curfman McInnes       *norm = 0.0;
155004ca555eSLois Curfman McInnes       v = amat->a; jj = amat->j;
155104ca555eSLois Curfman McInnes       for (j=0; j<amat->nz; j++) {
1552bfec09a0SHong Zhang         tmp[cstart + *jj++ ] += PetscAbsScalar(*v);  v++;
155304ca555eSLois Curfman McInnes       }
155404ca555eSLois Curfman McInnes       v = bmat->a; jj = bmat->j;
155504ca555eSLois Curfman McInnes       for (j=0; j<bmat->nz; j++) {
1556bfec09a0SHong Zhang         tmp[garray[*jj++]] += PetscAbsScalar(*v); v++;
155704ca555eSLois Curfman McInnes       }
1558d0f46423SBarry Smith       ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
1559d0f46423SBarry Smith       for (j=0; j<mat->cmap->N; j++) {
156004ca555eSLois Curfman McInnes         if (tmp2[j] > *norm) *norm = tmp2[j];
156104ca555eSLois Curfman McInnes       }
1562606d414cSSatish Balay       ierr = PetscFree(tmp);CHKERRQ(ierr);
1563606d414cSSatish Balay       ierr = PetscFree(tmp2);CHKERRQ(ierr);
15643a40ed3dSBarry Smith     } else if (type == NORM_INFINITY) { /* max row norm */
1565329f5518SBarry Smith       PetscReal ntemp = 0.0;
1566d0f46423SBarry Smith       for (j=0; j<aij->A->rmap->n; j++) {
1567bfec09a0SHong Zhang         v = amat->a + amat->i[j];
156804ca555eSLois Curfman McInnes         sum = 0.0;
156904ca555eSLois Curfman McInnes         for (i=0; i<amat->i[j+1]-amat->i[j]; i++) {
1570cddf8d76SBarry Smith           sum += PetscAbsScalar(*v); v++;
157104ca555eSLois Curfman McInnes         }
1572bfec09a0SHong Zhang         v = bmat->a + bmat->i[j];
157304ca555eSLois Curfman McInnes         for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) {
1574cddf8d76SBarry Smith           sum += PetscAbsScalar(*v); v++;
157504ca555eSLois Curfman McInnes         }
1576515d9167SLois Curfman McInnes         if (sum > ntemp) ntemp = sum;
157704ca555eSLois Curfman McInnes       }
15787adad957SLisandro Dalcin       ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
1579ca161407SBarry Smith     } else {
158029bbc08cSBarry Smith       SETERRQ(PETSC_ERR_SUP,"No support for two norm");
158104ca555eSLois Curfman McInnes     }
158237fa93a5SLois Curfman McInnes   }
15833a40ed3dSBarry Smith   PetscFunctionReturn(0);
1584855ac2c5SLois Curfman McInnes }
1585855ac2c5SLois Curfman McInnes 
15864a2ae208SSatish Balay #undef __FUNCT__
15874a2ae208SSatish Balay #define __FUNCT__ "MatTranspose_MPIAIJ"
1588fc4dec0aSBarry Smith PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout)
1589b7c46309SBarry Smith {
1590b7c46309SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1591da668accSHong Zhang   Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
1592dfbe8321SBarry Smith   PetscErrorCode ierr;
1593d0f46423SBarry Smith   PetscInt       M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz;
1594d0f46423SBarry Smith   PetscInt       cstart=A->cmap->rstart,ncol;
15953a40ed3dSBarry Smith   Mat            B;
1596a77337e4SBarry Smith   MatScalar      *array;
1597b7c46309SBarry Smith 
15983a40ed3dSBarry Smith   PetscFunctionBegin;
1599e9695a30SBarry Smith   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1600da668accSHong Zhang 
1601d0f46423SBarry Smith   ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n;
1602da668accSHong Zhang   ai = Aloc->i; aj = Aloc->j;
1603da668accSHong Zhang   bi = Bloc->i; bj = Bloc->j;
1604fc73b1b3SBarry Smith   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1605fc73b1b3SBarry Smith     /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
1606fc73b1b3SBarry Smith     ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
1607da668accSHong Zhang     ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
1608da668accSHong Zhang     for (i=0; i<ai[ma]; i++){
1609da668accSHong Zhang       d_nnz[aj[i]] ++;
1610da668accSHong Zhang       aj[i] += cstart; /* global col index to be used by MatSetValues() */
1611d4bb536fSBarry Smith     }
1612d4bb536fSBarry Smith 
16137adad957SLisandro Dalcin     ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
1614d0f46423SBarry Smith     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
16157adad957SLisandro Dalcin     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1616da668accSHong Zhang     ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);
1617fc73b1b3SBarry Smith     ierr = PetscFree(d_nnz);CHKERRQ(ierr);
1618fc4dec0aSBarry Smith   } else {
1619fc4dec0aSBarry Smith     B = *matout;
1620fc4dec0aSBarry Smith   }
1621b7c46309SBarry Smith 
1622b7c46309SBarry Smith   /* copy over the A part */
1623da668accSHong Zhang   array = Aloc->a;
1624d0f46423SBarry Smith   row = A->rmap->rstart;
1625da668accSHong Zhang   for (i=0; i<ma; i++) {
1626da668accSHong Zhang     ncol = ai[i+1]-ai[i];
1627da668accSHong Zhang     ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
1628da668accSHong Zhang     row++; array += ncol; aj += ncol;
1629b7c46309SBarry Smith   }
1630b7c46309SBarry Smith   aj = Aloc->j;
1631da668accSHong Zhang   for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */
1632b7c46309SBarry Smith 
1633b7c46309SBarry Smith   /* copy over the B part */
1634fc73b1b3SBarry Smith   ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr);
1635fc73b1b3SBarry Smith   ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr);
1636da668accSHong Zhang   array = Bloc->a;
1637d0f46423SBarry Smith   row = A->rmap->rstart;
1638da668accSHong Zhang   for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
163961a2fbbaSHong Zhang   cols_tmp = cols;
1640da668accSHong Zhang   for (i=0; i<mb; i++) {
1641da668accSHong Zhang     ncol = bi[i+1]-bi[i];
164261a2fbbaSHong Zhang     ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
164361a2fbbaSHong Zhang     row++; array += ncol; cols_tmp += ncol;
1644b7c46309SBarry Smith   }
1645fc73b1b3SBarry Smith   ierr = PetscFree(cols);CHKERRQ(ierr);
1646fc73b1b3SBarry Smith 
16476d4a8577SBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
16486d4a8577SBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1649815cbec1SBarry Smith   if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
16500de55854SLois Curfman McInnes     *matout = B;
16510de55854SLois Curfman McInnes   } else {
1652273d9f13SBarry Smith     ierr = MatHeaderCopy(A,B);CHKERRQ(ierr);
16530de55854SLois Curfman McInnes   }
16543a40ed3dSBarry Smith   PetscFunctionReturn(0);
1655b7c46309SBarry Smith }
1656b7c46309SBarry Smith 
16574a2ae208SSatish Balay #undef __FUNCT__
16584a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ"
1659dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr)
1660a008b906SSatish Balay {
16614b967eb1SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
16624b967eb1SSatish Balay   Mat            a = aij->A,b = aij->B;
1663dfbe8321SBarry Smith   PetscErrorCode ierr;
1664b1d57f15SBarry Smith   PetscInt       s1,s2,s3;
1665a008b906SSatish Balay 
16663a40ed3dSBarry Smith   PetscFunctionBegin;
16674b967eb1SSatish Balay   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
16684b967eb1SSatish Balay   if (rr) {
1669e1311b90SBarry Smith     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
167029bbc08cSBarry Smith     if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
16714b967eb1SSatish Balay     /* Overlap communication with computation. */
1672ca9f406cSSatish Balay     ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1673a008b906SSatish Balay   }
16744b967eb1SSatish Balay   if (ll) {
1675e1311b90SBarry Smith     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
167629bbc08cSBarry Smith     if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1677f830108cSBarry Smith     ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr);
16784b967eb1SSatish Balay   }
16794b967eb1SSatish Balay   /* scale  the diagonal block */
1680f830108cSBarry Smith   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
16814b967eb1SSatish Balay 
16824b967eb1SSatish Balay   if (rr) {
16834b967eb1SSatish Balay     /* Do a scatter end and then right scale the off-diagonal block */
1684ca9f406cSSatish Balay     ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1685f830108cSBarry Smith     ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr);
16864b967eb1SSatish Balay   }
16874b967eb1SSatish Balay 
16883a40ed3dSBarry Smith   PetscFunctionReturn(0);
1689a008b906SSatish Balay }
1690a008b906SSatish Balay 
16914a2ae208SSatish Balay #undef __FUNCT__
1692521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ"
1693521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs)
16945a838052SSatish Balay {
1695521d7252SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
1696521d7252SBarry Smith   PetscErrorCode ierr;
1697521d7252SBarry Smith 
16983a40ed3dSBarry Smith   PetscFunctionBegin;
1699521d7252SBarry Smith   ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr);
1700521d7252SBarry Smith   ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr);
17013a40ed3dSBarry Smith   PetscFunctionReturn(0);
17025a838052SSatish Balay }
17034a2ae208SSatish Balay #undef __FUNCT__
17044a2ae208SSatish Balay #define __FUNCT__ "MatSetUnfactored_MPIAIJ"
1705dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A)
1706bb5a7306SBarry Smith {
1707bb5a7306SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
1708dfbe8321SBarry Smith   PetscErrorCode ierr;
17093a40ed3dSBarry Smith 
17103a40ed3dSBarry Smith   PetscFunctionBegin;
1711bb5a7306SBarry Smith   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
17123a40ed3dSBarry Smith   PetscFunctionReturn(0);
1713bb5a7306SBarry Smith }
1714bb5a7306SBarry Smith 
17154a2ae208SSatish Balay #undef __FUNCT__
17164a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ"
1717dfbe8321SBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscTruth *flag)
1718d4bb536fSBarry Smith {
1719d4bb536fSBarry Smith   Mat_MPIAIJ     *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data;
1720d4bb536fSBarry Smith   Mat            a,b,c,d;
1721d4bb536fSBarry Smith   PetscTruth     flg;
1722dfbe8321SBarry Smith   PetscErrorCode ierr;
1723d4bb536fSBarry Smith 
17243a40ed3dSBarry Smith   PetscFunctionBegin;
1725d4bb536fSBarry Smith   a = matA->A; b = matA->B;
1726d4bb536fSBarry Smith   c = matB->A; d = matB->B;
1727d4bb536fSBarry Smith 
1728d4bb536fSBarry Smith   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1729abc0a331SBarry Smith   if (flg) {
1730d4bb536fSBarry Smith     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1731d4bb536fSBarry Smith   }
17327adad957SLisandro Dalcin   ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
17333a40ed3dSBarry Smith   PetscFunctionReturn(0);
1734d4bb536fSBarry Smith }
1735d4bb536fSBarry Smith 
17364a2ae208SSatish Balay #undef __FUNCT__
17374a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ"
1738dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str)
1739cb5b572fSBarry Smith {
1740dfbe8321SBarry Smith   PetscErrorCode ierr;
1741cb5b572fSBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ *)A->data;
1742cb5b572fSBarry Smith   Mat_MPIAIJ     *b = (Mat_MPIAIJ *)B->data;
1743cb5b572fSBarry Smith 
1744cb5b572fSBarry Smith   PetscFunctionBegin;
174533f4a19fSKris Buschelman   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
174633f4a19fSKris Buschelman   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1747cb5b572fSBarry Smith     /* because of the column compression in the off-processor part of the matrix a->B,
1748cb5b572fSBarry Smith        the number of columns in a->B and b->B may be different, hence we cannot call
1749cb5b572fSBarry Smith        the MatCopy() directly on the two parts. If need be, we can provide a more
1750cb5b572fSBarry Smith        efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices
1751cb5b572fSBarry Smith        then copying the submatrices */
1752cb5b572fSBarry Smith     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1753cb5b572fSBarry Smith   } else {
1754cb5b572fSBarry Smith     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1755cb5b572fSBarry Smith     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1756cb5b572fSBarry Smith   }
1757cb5b572fSBarry Smith   PetscFunctionReturn(0);
1758cb5b572fSBarry Smith }
1759cb5b572fSBarry Smith 
17604a2ae208SSatish Balay #undef __FUNCT__
17614a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ"
1762dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A)
1763273d9f13SBarry Smith {
1764dfbe8321SBarry Smith   PetscErrorCode ierr;
1765273d9f13SBarry Smith 
1766273d9f13SBarry Smith   PetscFunctionBegin;
1767273d9f13SBarry Smith   ierr =  MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1768273d9f13SBarry Smith   PetscFunctionReturn(0);
1769273d9f13SBarry Smith }
1770273d9f13SBarry Smith 
1771ac90fabeSBarry Smith #include "petscblaslapack.h"
1772ac90fabeSBarry Smith #undef __FUNCT__
1773ac90fabeSBarry Smith #define __FUNCT__ "MatAXPY_MPIAIJ"
1774f4df32b1SMatthew Knepley PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1775ac90fabeSBarry Smith {
1776dfbe8321SBarry Smith   PetscErrorCode ierr;
1777b1d57f15SBarry Smith   PetscInt       i;
1778ac90fabeSBarry Smith   Mat_MPIAIJ     *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data;
17794ce68768SBarry Smith   PetscBLASInt   bnz,one=1;
1780ac90fabeSBarry Smith   Mat_SeqAIJ     *x,*y;
1781ac90fabeSBarry Smith 
1782ac90fabeSBarry Smith   PetscFunctionBegin;
1783ac90fabeSBarry Smith   if (str == SAME_NONZERO_PATTERN) {
1784f4df32b1SMatthew Knepley     PetscScalar alpha = a;
1785ac90fabeSBarry Smith     x = (Mat_SeqAIJ *)xx->A->data;
1786ac90fabeSBarry Smith     y = (Mat_SeqAIJ *)yy->A->data;
17870805154bSBarry Smith     bnz = PetscBLASIntCast(x->nz);
1788f4df32b1SMatthew Knepley     BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
1789ac90fabeSBarry Smith     x = (Mat_SeqAIJ *)xx->B->data;
1790ac90fabeSBarry Smith     y = (Mat_SeqAIJ *)yy->B->data;
17910805154bSBarry Smith     bnz = PetscBLASIntCast(x->nz);
1792f4df32b1SMatthew Knepley     BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
1793a30b2313SHong Zhang   } else if (str == SUBSET_NONZERO_PATTERN) {
1794f4df32b1SMatthew Knepley     ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr);
1795c537a176SHong Zhang 
1796c537a176SHong Zhang     x = (Mat_SeqAIJ *)xx->B->data;
1797a30b2313SHong Zhang     y = (Mat_SeqAIJ *)yy->B->data;
1798a30b2313SHong Zhang     if (y->xtoy && y->XtoY != xx->B) {
1799a30b2313SHong Zhang       ierr = PetscFree(y->xtoy);CHKERRQ(ierr);
1800a30b2313SHong Zhang       ierr = MatDestroy(y->XtoY);CHKERRQ(ierr);
1801c537a176SHong Zhang     }
1802a30b2313SHong Zhang     if (!y->xtoy) { /* get xtoy */
1803d0f46423SBarry Smith       ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr);
1804a30b2313SHong Zhang       y->XtoY = xx->B;
1805407f6b05SHong Zhang       ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr);
1806c537a176SHong Zhang     }
1807f4df32b1SMatthew Knepley     for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]);
1808ac90fabeSBarry Smith   } else {
1809f4df32b1SMatthew Knepley     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1810ac90fabeSBarry Smith   }
1811ac90fabeSBarry Smith   PetscFunctionReturn(0);
1812ac90fabeSBarry Smith }
1813ac90fabeSBarry Smith 
1814354c94deSBarry Smith EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat);
1815354c94deSBarry Smith 
1816354c94deSBarry Smith #undef __FUNCT__
1817354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ"
1818354c94deSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat)
1819354c94deSBarry Smith {
1820354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX)
1821354c94deSBarry Smith   PetscErrorCode ierr;
1822354c94deSBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
1823354c94deSBarry Smith 
1824354c94deSBarry Smith   PetscFunctionBegin;
1825354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr);
1826354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr);
1827354c94deSBarry Smith #else
1828354c94deSBarry Smith   PetscFunctionBegin;
1829354c94deSBarry Smith #endif
1830354c94deSBarry Smith   PetscFunctionReturn(0);
1831354c94deSBarry Smith }
1832354c94deSBarry Smith 
183399cafbc1SBarry Smith #undef __FUNCT__
183499cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ"
183599cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A)
183699cafbc1SBarry Smith {
183799cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
183899cafbc1SBarry Smith   PetscErrorCode ierr;
183999cafbc1SBarry Smith 
184099cafbc1SBarry Smith   PetscFunctionBegin;
184199cafbc1SBarry Smith   ierr = MatRealPart(a->A);CHKERRQ(ierr);
184299cafbc1SBarry Smith   ierr = MatRealPart(a->B);CHKERRQ(ierr);
184399cafbc1SBarry Smith   PetscFunctionReturn(0);
184499cafbc1SBarry Smith }
184599cafbc1SBarry Smith 
184699cafbc1SBarry Smith #undef __FUNCT__
184799cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ"
184899cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A)
184999cafbc1SBarry Smith {
185099cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
185199cafbc1SBarry Smith   PetscErrorCode ierr;
185299cafbc1SBarry Smith 
185399cafbc1SBarry Smith   PetscFunctionBegin;
185499cafbc1SBarry Smith   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
185599cafbc1SBarry Smith   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
185699cafbc1SBarry Smith   PetscFunctionReturn(0);
185799cafbc1SBarry Smith }
185899cafbc1SBarry Smith 
1859103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
1860103bf8bdSMatthew Knepley 
1861103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp>
1862a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp>
1863a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp>
1864a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp>
1865103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp>
1866a2c909beSMatthew Knepley #include <boost/multi_array.hpp>
1867d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp>
1868103bf8bdSMatthew Knepley 
1869103bf8bdSMatthew Knepley #undef __FUNCT__
1870103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ"
1871103bf8bdSMatthew Knepley /*
1872103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
1873103bf8bdSMatthew Knepley */
18740481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info)
1875103bf8bdSMatthew Knepley {
1876a2c909beSMatthew Knepley   namespace petsc = boost::distributed::petsc;
1877a2c909beSMatthew Knepley 
1878a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
1879a2c909beSMatthew Knepley   using boost::graph::distributed::ilu_default::process_group_type;
1880a2c909beSMatthew Knepley   using boost::graph::ilu_permuted;
1881a2c909beSMatthew Knepley 
1882103bf8bdSMatthew Knepley   PetscTruth      row_identity, col_identity;
1883776b82aeSLisandro Dalcin   PetscContainer  c;
1884103bf8bdSMatthew Knepley   PetscInt        m, n, M, N;
1885103bf8bdSMatthew Knepley   PetscErrorCode  ierr;
1886103bf8bdSMatthew Knepley 
1887103bf8bdSMatthew Knepley   PetscFunctionBegin;
1888103bf8bdSMatthew Knepley   if (info->levels != 0) SETERRQ(PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu");
1889103bf8bdSMatthew Knepley   ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr);
1890103bf8bdSMatthew Knepley   ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr);
1891103bf8bdSMatthew Knepley   if (!row_identity || !col_identity) {
1892103bf8bdSMatthew Knepley     SETERRQ(PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU");
1893103bf8bdSMatthew Knepley   }
1894103bf8bdSMatthew Knepley 
1895103bf8bdSMatthew Knepley   process_group_type pg;
1896a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
1897a2c909beSMatthew Knepley   lgraph_type*   lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg));
1898a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
1899a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
1900a2c909beSMatthew Knepley 
1901103bf8bdSMatthew Knepley   petsc::read_matrix(A, graph, get(boost::edge_weight, graph));
1902a2c909beSMatthew Knepley   ilu_permuted(level_graph);
1903103bf8bdSMatthew Knepley 
1904103bf8bdSMatthew Knepley   /* put together the new matrix */
19057adad957SLisandro Dalcin   ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr);
1906103bf8bdSMatthew Knepley   ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr);
1907103bf8bdSMatthew Knepley   ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr);
1908719d5645SBarry Smith   ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr);
1909719d5645SBarry Smith   ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr);
1910719d5645SBarry Smith   ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1911719d5645SBarry Smith   ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1912103bf8bdSMatthew Knepley 
19137adad957SLisandro Dalcin   ierr = PetscContainerCreate(((PetscObject)A)->comm, &c);
1914776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(c, lgraph_p);
1915719d5645SBarry Smith   ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c);
1916103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1917103bf8bdSMatthew Knepley }
1918103bf8bdSMatthew Knepley 
1919103bf8bdSMatthew Knepley #undef __FUNCT__
1920103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ"
19210481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info)
1922103bf8bdSMatthew Knepley {
1923103bf8bdSMatthew Knepley   PetscFunctionBegin;
1924103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1925103bf8bdSMatthew Knepley }
1926103bf8bdSMatthew Knepley 
1927103bf8bdSMatthew Knepley #undef __FUNCT__
1928103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ"
1929103bf8bdSMatthew Knepley /*
1930103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
1931103bf8bdSMatthew Knepley */
1932103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x)
1933103bf8bdSMatthew Knepley {
1934a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
1935a2c909beSMatthew Knepley 
1936a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
1937a2c909beSMatthew Knepley   lgraph_type*   lgraph_p;
1938776b82aeSLisandro Dalcin   PetscContainer c;
1939103bf8bdSMatthew Knepley   PetscErrorCode ierr;
1940103bf8bdSMatthew Knepley 
1941103bf8bdSMatthew Knepley   PetscFunctionBegin;
1942103bf8bdSMatthew Knepley   ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr);
1943776b82aeSLisandro Dalcin   ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr);
1944103bf8bdSMatthew Knepley   ierr = VecCopy(b, x);CHKERRQ(ierr);
1945a2c909beSMatthew Knepley 
1946a2c909beSMatthew Knepley   PetscScalar* array_x;
1947a2c909beSMatthew Knepley   ierr = VecGetArray(x, &array_x);CHKERRQ(ierr);
1948a2c909beSMatthew Knepley   PetscInt sx;
1949a2c909beSMatthew Knepley   ierr = VecGetSize(x, &sx);CHKERRQ(ierr);
1950a2c909beSMatthew Knepley 
1951a2c909beSMatthew Knepley   PetscScalar* array_b;
1952a2c909beSMatthew Knepley   ierr = VecGetArray(b, &array_b);CHKERRQ(ierr);
1953a2c909beSMatthew Knepley   PetscInt sb;
1954a2c909beSMatthew Knepley   ierr = VecGetSize(b, &sb);CHKERRQ(ierr);
1955a2c909beSMatthew Knepley 
1956a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
1957a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
1958a2c909beSMatthew Knepley 
1959a2c909beSMatthew Knepley   typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type;
1960a2c909beSMatthew Knepley   array_ref_type                                 ref_b(array_b, boost::extents[num_vertices(graph)]),
1961a2c909beSMatthew Knepley                                                  ref_x(array_x, boost::extents[num_vertices(graph)]);
1962a2c909beSMatthew Knepley 
1963a2c909beSMatthew Knepley   typedef boost::iterator_property_map<array_ref_type::iterator,
1964a2c909beSMatthew Knepley                                 boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type>  gvector_type;
1965a2c909beSMatthew Knepley   gvector_type                                   vector_b(ref_b.begin(), get(boost::vertex_index, graph)),
1966a2c909beSMatthew Knepley                                                  vector_x(ref_x.begin(), get(boost::vertex_index, graph));
1967a2c909beSMatthew Knepley 
1968a2c909beSMatthew Knepley   ilu_set_solve(*lgraph_p, vector_b, vector_x);
1969a2c909beSMatthew Knepley 
1970103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
1971103bf8bdSMatthew Knepley }
1972103bf8bdSMatthew Knepley #endif
1973103bf8bdSMatthew Knepley 
197469db28dcSHong Zhang typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */
197569db28dcSHong Zhang   PetscInt       nzlocal,nsends,nrecvs;
1976aa5bb8c0SSatish Balay   PetscMPIInt    *send_rank;
1977aa5bb8c0SSatish Balay   PetscInt       *sbuf_nz,*sbuf_j,**rbuf_j;
197869db28dcSHong Zhang   PetscScalar    *sbuf_a,**rbuf_a;
197969db28dcSHong Zhang   PetscErrorCode (*MatDestroy)(Mat);
198069db28dcSHong Zhang } Mat_Redundant;
198169db28dcSHong Zhang 
198269db28dcSHong Zhang #undef __FUNCT__
198369db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant"
198469db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr)
198569db28dcSHong Zhang {
198669db28dcSHong Zhang   PetscErrorCode       ierr;
198769db28dcSHong Zhang   Mat_Redundant        *redund=(Mat_Redundant*)ptr;
198869db28dcSHong Zhang   PetscInt             i;
198969db28dcSHong Zhang 
199069db28dcSHong Zhang   PetscFunctionBegin;
199169db28dcSHong Zhang   ierr = PetscFree(redund->send_rank);CHKERRQ(ierr);
199269db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr);
199369db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr);
199469db28dcSHong Zhang   for (i=0; i<redund->nrecvs; i++){
199569db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr);
199669db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr);
199769db28dcSHong Zhang   }
199869db28dcSHong Zhang   ierr = PetscFree3(redund->sbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr);
199969db28dcSHong Zhang   ierr = PetscFree(redund);CHKERRQ(ierr);
200069db28dcSHong Zhang   PetscFunctionReturn(0);
200169db28dcSHong Zhang }
200269db28dcSHong Zhang 
200369db28dcSHong Zhang #undef __FUNCT__
200469db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant"
200569db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A)
200669db28dcSHong Zhang {
200769db28dcSHong Zhang   PetscErrorCode  ierr;
200869db28dcSHong Zhang   PetscContainer  container;
200969db28dcSHong Zhang   Mat_Redundant   *redund=PETSC_NULL;
201069db28dcSHong Zhang 
201169db28dcSHong Zhang   PetscFunctionBegin;
201269db28dcSHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
201369db28dcSHong Zhang   if (container) {
201469db28dcSHong Zhang     ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
201569db28dcSHong Zhang   } else {
201669db28dcSHong Zhang     SETERRQ(PETSC_ERR_PLIB,"Container does not exit");
201769db28dcSHong Zhang   }
201869db28dcSHong Zhang   A->ops->destroy = redund->MatDestroy;
201969db28dcSHong Zhang   ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr);
202069db28dcSHong Zhang   ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
202169db28dcSHong Zhang   ierr = PetscContainerDestroy(container);CHKERRQ(ierr);
202269db28dcSHong Zhang   PetscFunctionReturn(0);
202369db28dcSHong Zhang }
202469db28dcSHong Zhang 
202569db28dcSHong Zhang #undef __FUNCT__
202669db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ"
202769db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant)
202869db28dcSHong Zhang {
202969db28dcSHong Zhang   PetscMPIInt    rank,size;
20307adad957SLisandro Dalcin   MPI_Comm       comm=((PetscObject)mat)->comm;
203169db28dcSHong Zhang   PetscErrorCode ierr;
203269db28dcSHong Zhang   PetscInt       nsends=0,nrecvs=0,i,rownz_max=0;
203369db28dcSHong Zhang   PetscMPIInt    *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL;
2034d0f46423SBarry Smith   PetscInt       *rowrange=mat->rmap->range;
203569db28dcSHong Zhang   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
203669db28dcSHong Zhang   Mat            A=aij->A,B=aij->B,C=*matredundant;
203769db28dcSHong Zhang   Mat_SeqAIJ     *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data;
203869db28dcSHong Zhang   PetscScalar    *sbuf_a;
203969db28dcSHong Zhang   PetscInt       nzlocal=a->nz+b->nz;
2040d0f46423SBarry Smith   PetscInt       j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB;
2041d0f46423SBarry Smith   PetscInt       rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N;
204269db28dcSHong Zhang   PetscInt       *cols,ctmp,lwrite,*rptr,l,*sbuf_j;
2043a77337e4SBarry Smith   MatScalar      *aworkA,*aworkB;
2044a77337e4SBarry Smith   PetscScalar    *vals;
204569db28dcSHong Zhang   PetscMPIInt    tag1,tag2,tag3,imdex;
204669db28dcSHong Zhang   MPI_Request    *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL,
204769db28dcSHong Zhang                  *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL;
204869db28dcSHong Zhang   MPI_Status     recv_status,*send_status;
204969db28dcSHong Zhang   PetscInt       *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count;
205069db28dcSHong Zhang   PetscInt       **rbuf_j=PETSC_NULL;
205169db28dcSHong Zhang   PetscScalar    **rbuf_a=PETSC_NULL;
205269db28dcSHong Zhang   Mat_Redundant  *redund=PETSC_NULL;
205369db28dcSHong Zhang   PetscContainer container;
205469db28dcSHong Zhang 
205569db28dcSHong Zhang   PetscFunctionBegin;
205669db28dcSHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
205769db28dcSHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
205869db28dcSHong Zhang 
205969db28dcSHong Zhang   if (reuse == MAT_REUSE_MATRIX) {
206069db28dcSHong Zhang     ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
2061d0f46423SBarry Smith     if (M != N || M != mat->rmap->N) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size");
206269db28dcSHong Zhang     ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr);
206369db28dcSHong Zhang     if (M != N || M != mlocal_sub) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size");
206469db28dcSHong Zhang     ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
206569db28dcSHong Zhang     if (container) {
206669db28dcSHong Zhang       ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
206769db28dcSHong Zhang     } else {
206869db28dcSHong Zhang       SETERRQ(PETSC_ERR_PLIB,"Container does not exit");
206969db28dcSHong Zhang     }
207069db28dcSHong Zhang     if (nzlocal != redund->nzlocal) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong nzlocal");
207169db28dcSHong Zhang 
207269db28dcSHong Zhang     nsends    = redund->nsends;
207369db28dcSHong Zhang     nrecvs    = redund->nrecvs;
207469db28dcSHong Zhang     send_rank = redund->send_rank; recv_rank = send_rank + size;
207569db28dcSHong Zhang     sbuf_nz   = redund->sbuf_nz;     rbuf_nz = sbuf_nz + nsends;
207669db28dcSHong Zhang     sbuf_j    = redund->sbuf_j;
207769db28dcSHong Zhang     sbuf_a    = redund->sbuf_a;
207869db28dcSHong Zhang     rbuf_j    = redund->rbuf_j;
207969db28dcSHong Zhang     rbuf_a    = redund->rbuf_a;
208069db28dcSHong Zhang   }
208169db28dcSHong Zhang 
208269db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
208369db28dcSHong Zhang     PetscMPIInt  subrank,subsize;
208469db28dcSHong Zhang     PetscInt     nleftover,np_subcomm;
208569db28dcSHong Zhang     /* get the destination processors' id send_rank, nsends and nrecvs */
208669db28dcSHong Zhang     ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
208769db28dcSHong Zhang     ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
208869db28dcSHong Zhang     ierr = PetscMalloc((2*size+1)*sizeof(PetscMPIInt),&send_rank);
208969db28dcSHong Zhang     recv_rank = send_rank + size;
209069db28dcSHong Zhang     np_subcomm = size/nsubcomm;
209169db28dcSHong Zhang     nleftover  = size - nsubcomm*np_subcomm;
209269db28dcSHong Zhang     nsends = 0; nrecvs = 0;
209369db28dcSHong Zhang     for (i=0; i<size; i++){ /* i=rank*/
209469db28dcSHong Zhang       if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */
209569db28dcSHong Zhang         send_rank[nsends] = i; nsends++;
209669db28dcSHong Zhang         recv_rank[nrecvs++] = i;
209769db28dcSHong Zhang       }
209869db28dcSHong Zhang     }
209969db28dcSHong Zhang     if (rank >= size - nleftover){/* this proc is a leftover processor */
210069db28dcSHong Zhang       i = size-nleftover-1;
210169db28dcSHong Zhang       j = 0;
210269db28dcSHong Zhang       while (j < nsubcomm - nleftover){
210369db28dcSHong Zhang         send_rank[nsends++] = i;
210469db28dcSHong Zhang         i--; j++;
210569db28dcSHong Zhang       }
210669db28dcSHong Zhang     }
210769db28dcSHong Zhang 
210869db28dcSHong Zhang     if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */
210969db28dcSHong Zhang       for (i=0; i<nleftover; i++){
211069db28dcSHong Zhang         recv_rank[nrecvs++] = size-nleftover+i;
211169db28dcSHong Zhang       }
211269db28dcSHong Zhang     }
211369db28dcSHong Zhang 
211469db28dcSHong Zhang     /* allocate sbuf_j, sbuf_a */
211569db28dcSHong Zhang     i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2;
211669db28dcSHong Zhang     ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr);
211769db28dcSHong Zhang     ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr);
211869db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
211969db28dcSHong Zhang 
212069db28dcSHong Zhang   /* copy mat's local entries into the buffers */
212169db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
212269db28dcSHong Zhang     rownz_max = 0;
212369db28dcSHong Zhang     rptr = sbuf_j;
212469db28dcSHong Zhang     cols = sbuf_j + rend-rstart + 1;
212569db28dcSHong Zhang     vals = sbuf_a;
212669db28dcSHong Zhang     rptr[0] = 0;
212769db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
212869db28dcSHong Zhang       row = i + rstart;
212969db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
213069db28dcSHong Zhang       ncols  = nzA + nzB;
213169db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
213269db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
213369db28dcSHong Zhang       /* load the column indices for this row into cols */
213469db28dcSHong Zhang       lwrite = 0;
213569db28dcSHong Zhang       for (l=0; l<nzB; l++) {
213669db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart){
213769db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
213869db28dcSHong Zhang           cols[lwrite++] = ctmp;
213969db28dcSHong Zhang         }
214069db28dcSHong Zhang       }
214169db28dcSHong Zhang       for (l=0; l<nzA; l++){
214269db28dcSHong Zhang         vals[lwrite]   = aworkA[l];
214369db28dcSHong Zhang         cols[lwrite++] = cstart + cworkA[l];
214469db28dcSHong Zhang       }
214569db28dcSHong Zhang       for (l=0; l<nzB; l++) {
214669db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend){
214769db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
214869db28dcSHong Zhang           cols[lwrite++] = ctmp;
214969db28dcSHong Zhang         }
215069db28dcSHong Zhang       }
215169db28dcSHong Zhang       vals += ncols;
215269db28dcSHong Zhang       cols += ncols;
215369db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
215469db28dcSHong Zhang       if (rownz_max < ncols) rownz_max = ncols;
215569db28dcSHong Zhang     }
215669db28dcSHong Zhang     if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(1, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz);
215769db28dcSHong Zhang   } else { /* only copy matrix values into sbuf_a */
215869db28dcSHong Zhang     rptr = sbuf_j;
215969db28dcSHong Zhang     vals = sbuf_a;
216069db28dcSHong Zhang     rptr[0] = 0;
216169db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
216269db28dcSHong Zhang       row = i + rstart;
216369db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
216469db28dcSHong Zhang       ncols  = nzA + nzB;
216569db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
216669db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
216769db28dcSHong Zhang       lwrite = 0;
216869db28dcSHong Zhang       for (l=0; l<nzB; l++) {
216969db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l];
217069db28dcSHong Zhang       }
217169db28dcSHong Zhang       for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l];
217269db28dcSHong Zhang       for (l=0; l<nzB; l++) {
217369db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l];
217469db28dcSHong Zhang       }
217569db28dcSHong Zhang       vals += ncols;
217669db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
217769db28dcSHong Zhang     }
217869db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
217969db28dcSHong Zhang 
218069db28dcSHong Zhang   /* send nzlocal to others, and recv other's nzlocal */
218169db28dcSHong Zhang   /*--------------------------------------------------*/
218269db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
218369db28dcSHong Zhang     ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
218469db28dcSHong Zhang     s_waits2 = s_waits3 + nsends;
218569db28dcSHong Zhang     s_waits1 = s_waits2 + nsends;
218669db28dcSHong Zhang     r_waits1 = s_waits1 + nsends;
218769db28dcSHong Zhang     r_waits2 = r_waits1 + nrecvs;
218869db28dcSHong Zhang     r_waits3 = r_waits2 + nrecvs;
218969db28dcSHong Zhang   } else {
219069db28dcSHong Zhang     ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
219169db28dcSHong Zhang     r_waits3 = s_waits3 + nsends;
219269db28dcSHong Zhang   }
219369db28dcSHong Zhang 
219469db28dcSHong Zhang   ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr);
219569db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
219669db28dcSHong Zhang     /* get new tags to keep the communication clean */
219769db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr);
219869db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr);
219969db28dcSHong Zhang     ierr = PetscMalloc3(nsends+nrecvs+1,PetscInt,&sbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr);
220069db28dcSHong Zhang     rbuf_nz = sbuf_nz + nsends;
220169db28dcSHong Zhang 
220269db28dcSHong Zhang     /* post receives of other's nzlocal */
220369db28dcSHong Zhang     for (i=0; i<nrecvs; i++){
220469db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr);
220569db28dcSHong Zhang     }
220669db28dcSHong Zhang     /* send nzlocal to others */
220769db28dcSHong Zhang     for (i=0; i<nsends; i++){
220869db28dcSHong Zhang       sbuf_nz[i] = nzlocal;
220969db28dcSHong Zhang       ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr);
221069db28dcSHong Zhang     }
221169db28dcSHong Zhang     /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */
221269db28dcSHong Zhang     count = nrecvs;
221369db28dcSHong Zhang     while (count) {
221469db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr);
221569db28dcSHong Zhang       recv_rank[imdex] = recv_status.MPI_SOURCE;
221669db28dcSHong Zhang       /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */
221769db28dcSHong Zhang       ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr);
221869db28dcSHong Zhang 
221969db28dcSHong Zhang       i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */
222069db28dcSHong Zhang       rbuf_nz[imdex] += i + 2;
222169db28dcSHong Zhang       ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr);
222269db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr);
222369db28dcSHong Zhang       count--;
222469db28dcSHong Zhang     }
222569db28dcSHong Zhang     /* wait on sends of nzlocal */
222669db28dcSHong Zhang     if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);}
222769db28dcSHong Zhang     /* send mat->i,j to others, and recv from other's */
222869db28dcSHong Zhang     /*------------------------------------------------*/
222969db28dcSHong Zhang     for (i=0; i<nsends; i++){
223069db28dcSHong Zhang       j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1;
223169db28dcSHong Zhang       ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr);
223269db28dcSHong Zhang     }
223369db28dcSHong Zhang     /* wait on receives of mat->i,j */
223469db28dcSHong Zhang     /*------------------------------*/
223569db28dcSHong Zhang     count = nrecvs;
223669db28dcSHong Zhang     while (count) {
223769db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr);
223869db28dcSHong Zhang       if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
223969db28dcSHong Zhang       count--;
224069db28dcSHong Zhang     }
224169db28dcSHong Zhang     /* wait on sends of mat->i,j */
224269db28dcSHong Zhang     /*---------------------------*/
224369db28dcSHong Zhang     if (nsends) {
224469db28dcSHong Zhang       ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr);
224569db28dcSHong Zhang     }
224669db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
224769db28dcSHong Zhang 
224869db28dcSHong Zhang   /* post receives, send and receive mat->a */
224969db28dcSHong Zhang   /*----------------------------------------*/
225069db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++) {
225169db28dcSHong Zhang     ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr);
225269db28dcSHong Zhang   }
225369db28dcSHong Zhang   for (i=0; i<nsends; i++){
225469db28dcSHong Zhang     ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr);
225569db28dcSHong Zhang   }
225669db28dcSHong Zhang   count = nrecvs;
225769db28dcSHong Zhang   while (count) {
225869db28dcSHong Zhang     ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr);
225969db28dcSHong Zhang     if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
226069db28dcSHong Zhang     count--;
226169db28dcSHong Zhang   }
226269db28dcSHong Zhang   if (nsends) {
226369db28dcSHong Zhang     ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr);
226469db28dcSHong Zhang   }
226569db28dcSHong Zhang 
226669db28dcSHong Zhang   ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr);
226769db28dcSHong Zhang 
226869db28dcSHong Zhang   /* create redundant matrix */
226969db28dcSHong Zhang   /*-------------------------*/
227069db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
227169db28dcSHong Zhang     /* compute rownz_max for preallocation */
227269db28dcSHong Zhang     for (imdex=0; imdex<nrecvs; imdex++){
227369db28dcSHong Zhang       j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]];
227469db28dcSHong Zhang       rptr = rbuf_j[imdex];
227569db28dcSHong Zhang       for (i=0; i<j; i++){
227669db28dcSHong Zhang         ncols = rptr[i+1] - rptr[i];
227769db28dcSHong Zhang         if (rownz_max < ncols) rownz_max = ncols;
227869db28dcSHong Zhang       }
227969db28dcSHong Zhang     }
228069db28dcSHong Zhang 
228169db28dcSHong Zhang     ierr = MatCreate(subcomm,&C);CHKERRQ(ierr);
228269db28dcSHong Zhang     ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
228369db28dcSHong Zhang     ierr = MatSetFromOptions(C);CHKERRQ(ierr);
228469db28dcSHong Zhang     ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr);
228569db28dcSHong Zhang     ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr);
228669db28dcSHong Zhang   } else {
228769db28dcSHong Zhang     C = *matredundant;
228869db28dcSHong Zhang   }
228969db28dcSHong Zhang 
229069db28dcSHong Zhang   /* insert local matrix entries */
229169db28dcSHong Zhang   rptr = sbuf_j;
229269db28dcSHong Zhang   cols = sbuf_j + rend-rstart + 1;
229369db28dcSHong Zhang   vals = sbuf_a;
229469db28dcSHong Zhang   for (i=0; i<rend-rstart; i++){
229569db28dcSHong Zhang     row   = i + rstart;
229669db28dcSHong Zhang     ncols = rptr[i+1] - rptr[i];
229769db28dcSHong Zhang     ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
229869db28dcSHong Zhang     vals += ncols;
229969db28dcSHong Zhang     cols += ncols;
230069db28dcSHong Zhang   }
230169db28dcSHong Zhang   /* insert received matrix entries */
230269db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++){
230369db28dcSHong Zhang     rstart = rowrange[recv_rank[imdex]];
230469db28dcSHong Zhang     rend   = rowrange[recv_rank[imdex]+1];
230569db28dcSHong Zhang     rptr = rbuf_j[imdex];
230669db28dcSHong Zhang     cols = rbuf_j[imdex] + rend-rstart + 1;
230769db28dcSHong Zhang     vals = rbuf_a[imdex];
230869db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
230969db28dcSHong Zhang       row   = i + rstart;
231069db28dcSHong Zhang       ncols = rptr[i+1] - rptr[i];
231169db28dcSHong Zhang       ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
231269db28dcSHong Zhang       vals += ncols;
231369db28dcSHong Zhang       cols += ncols;
231469db28dcSHong Zhang     }
231569db28dcSHong Zhang   }
231669db28dcSHong Zhang   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
231769db28dcSHong Zhang   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
231869db28dcSHong Zhang   ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
2319d0f46423SBarry Smith   if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N);
232069db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
232169db28dcSHong Zhang     PetscContainer container;
232269db28dcSHong Zhang     *matredundant = C;
232369db28dcSHong Zhang     /* create a supporting struct and attach it to C for reuse */
232438f2d2fdSLisandro Dalcin     ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr);
232569db28dcSHong Zhang     ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
232669db28dcSHong Zhang     ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr);
232769db28dcSHong Zhang     ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr);
232869db28dcSHong Zhang     ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr);
232969db28dcSHong Zhang 
233069db28dcSHong Zhang     redund->nzlocal = nzlocal;
233169db28dcSHong Zhang     redund->nsends  = nsends;
233269db28dcSHong Zhang     redund->nrecvs  = nrecvs;
233369db28dcSHong Zhang     redund->send_rank = send_rank;
233469db28dcSHong Zhang     redund->sbuf_nz = sbuf_nz;
233569db28dcSHong Zhang     redund->sbuf_j  = sbuf_j;
233669db28dcSHong Zhang     redund->sbuf_a  = sbuf_a;
233769db28dcSHong Zhang     redund->rbuf_j  = rbuf_j;
233869db28dcSHong Zhang     redund->rbuf_a  = rbuf_a;
233969db28dcSHong Zhang 
234069db28dcSHong Zhang     redund->MatDestroy = C->ops->destroy;
234169db28dcSHong Zhang     C->ops->destroy    = MatDestroy_MatRedundant;
234269db28dcSHong Zhang   }
234369db28dcSHong Zhang   PetscFunctionReturn(0);
234469db28dcSHong Zhang }
234569db28dcSHong Zhang 
234603bc72f1SMatthew Knepley #undef __FUNCT__
2347c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ"
2348c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2349c91732d9SHong Zhang {
2350c91732d9SHong Zhang   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
2351c91732d9SHong Zhang   PetscErrorCode ierr;
2352c91732d9SHong Zhang   PetscInt       i,*idxb = 0;
2353c91732d9SHong Zhang   PetscScalar    *va,*vb;
2354c91732d9SHong Zhang   Vec            vtmp;
2355c91732d9SHong Zhang 
2356c91732d9SHong Zhang   PetscFunctionBegin;
2357c91732d9SHong Zhang   ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
2358c91732d9SHong Zhang   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2359c91732d9SHong Zhang   if (idx) {
2360192daf7cSBarry Smith     for (i=0; i<A->rmap->n; i++) {
2361d0f46423SBarry Smith       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
2362c91732d9SHong Zhang     }
2363c91732d9SHong Zhang   }
2364c91732d9SHong Zhang 
2365d0f46423SBarry Smith   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
2366c91732d9SHong Zhang   if (idx) {
2367d0f46423SBarry Smith     ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);
2368c91732d9SHong Zhang   }
2369c91732d9SHong Zhang   ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
2370c91732d9SHong Zhang   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
2371c91732d9SHong Zhang 
2372d0f46423SBarry Smith   for (i=0; i<A->rmap->n; i++){
2373c91732d9SHong Zhang     if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
2374c91732d9SHong Zhang       va[i] = vb[i];
2375c91732d9SHong Zhang       if (idx) idx[i] = a->garray[idxb[i]];
2376c91732d9SHong Zhang     }
2377c91732d9SHong Zhang   }
2378c91732d9SHong Zhang 
2379c91732d9SHong Zhang   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2380c91732d9SHong Zhang   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
2381c91732d9SHong Zhang   if (idxb) {
2382c91732d9SHong Zhang     ierr = PetscFree(idxb);CHKERRQ(ierr);
2383c91732d9SHong Zhang   }
2384c91732d9SHong Zhang   ierr = VecDestroy(vtmp);CHKERRQ(ierr);
2385c91732d9SHong Zhang   PetscFunctionReturn(0);
2386c91732d9SHong Zhang }
2387c91732d9SHong Zhang 
2388c91732d9SHong Zhang #undef __FUNCT__
2389c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ"
2390c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2391c87e5d42SMatthew Knepley {
2392c87e5d42SMatthew Knepley   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
2393c87e5d42SMatthew Knepley   PetscErrorCode ierr;
2394c87e5d42SMatthew Knepley   PetscInt       i,*idxb = 0;
2395c87e5d42SMatthew Knepley   PetscScalar    *va,*vb;
2396c87e5d42SMatthew Knepley   Vec            vtmp;
2397c87e5d42SMatthew Knepley 
2398c87e5d42SMatthew Knepley   PetscFunctionBegin;
2399c87e5d42SMatthew Knepley   ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr);
2400c87e5d42SMatthew Knepley   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2401c87e5d42SMatthew Knepley   if (idx) {
2402c87e5d42SMatthew Knepley     for (i=0; i<A->cmap->n; i++) {
2403c87e5d42SMatthew Knepley       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
2404c87e5d42SMatthew Knepley     }
2405c87e5d42SMatthew Knepley   }
2406c87e5d42SMatthew Knepley 
2407c87e5d42SMatthew Knepley   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
2408c87e5d42SMatthew Knepley   if (idx) {
2409c87e5d42SMatthew Knepley     ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);
2410c87e5d42SMatthew Knepley   }
2411c87e5d42SMatthew Knepley   ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
2412c87e5d42SMatthew Knepley   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
2413c87e5d42SMatthew Knepley 
2414c87e5d42SMatthew Knepley   for (i=0; i<A->rmap->n; i++){
2415c87e5d42SMatthew Knepley     if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) {
2416c87e5d42SMatthew Knepley       va[i] = vb[i];
2417c87e5d42SMatthew Knepley       if (idx) idx[i] = a->garray[idxb[i]];
2418c87e5d42SMatthew Knepley     }
2419c87e5d42SMatthew Knepley   }
2420c87e5d42SMatthew Knepley 
2421c87e5d42SMatthew Knepley   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2422c87e5d42SMatthew Knepley   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
2423c87e5d42SMatthew Knepley   if (idxb) {
2424c87e5d42SMatthew Knepley     ierr = PetscFree(idxb);CHKERRQ(ierr);
2425c87e5d42SMatthew Knepley   }
2426c87e5d42SMatthew Knepley   ierr = VecDestroy(vtmp);CHKERRQ(ierr);
2427c87e5d42SMatthew Knepley   PetscFunctionReturn(0);
2428c87e5d42SMatthew Knepley }
2429c87e5d42SMatthew Knepley 
2430c87e5d42SMatthew Knepley #undef __FUNCT__
243103bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ"
243203bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[])
243303bc72f1SMatthew Knepley {
243403bc72f1SMatthew Knepley   Mat_MPIAIJ    *mat    = (Mat_MPIAIJ *) A->data;
2435d0f46423SBarry Smith   PetscInt       n      = A->rmap->n;
2436d0f46423SBarry Smith   PetscInt       cstart = A->cmap->rstart;
243703bc72f1SMatthew Knepley   PetscInt      *cmap   = mat->garray;
243803bc72f1SMatthew Knepley   PetscInt      *diagIdx, *offdiagIdx;
243903bc72f1SMatthew Knepley   Vec            diagV, offdiagV;
244003bc72f1SMatthew Knepley   PetscScalar   *a, *diagA, *offdiagA;
244103bc72f1SMatthew Knepley   PetscInt       r;
244203bc72f1SMatthew Knepley   PetscErrorCode ierr;
244303bc72f1SMatthew Knepley 
244403bc72f1SMatthew Knepley   PetscFunctionBegin;
244503bc72f1SMatthew Knepley   ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr);
2446e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr);
2447e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr);
244803bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->A, diagV,    diagIdx);CHKERRQ(ierr);
244903bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr);
245003bc72f1SMatthew Knepley   ierr = VecGetArray(v,        &a);CHKERRQ(ierr);
245103bc72f1SMatthew Knepley   ierr = VecGetArray(diagV,    &diagA);CHKERRQ(ierr);
245203bc72f1SMatthew Knepley   ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr);
245303bc72f1SMatthew Knepley   for(r = 0; r < n; ++r) {
2454028cd4eaSSatish Balay     if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) {
245503bc72f1SMatthew Knepley       a[r]   = diagA[r];
245603bc72f1SMatthew Knepley       idx[r] = cstart + diagIdx[r];
245703bc72f1SMatthew Knepley     } else {
245803bc72f1SMatthew Knepley       a[r]   = offdiagA[r];
245903bc72f1SMatthew Knepley       idx[r] = cmap[offdiagIdx[r]];
246003bc72f1SMatthew Knepley     }
246103bc72f1SMatthew Knepley   }
246203bc72f1SMatthew Knepley   ierr = VecRestoreArray(v,        &a);CHKERRQ(ierr);
246303bc72f1SMatthew Knepley   ierr = VecRestoreArray(diagV,    &diagA);CHKERRQ(ierr);
246403bc72f1SMatthew Knepley   ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr);
246503bc72f1SMatthew Knepley   ierr = VecDestroy(diagV);CHKERRQ(ierr);
246603bc72f1SMatthew Knepley   ierr = VecDestroy(offdiagV);CHKERRQ(ierr);
246703bc72f1SMatthew Knepley   ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr);
246803bc72f1SMatthew Knepley   PetscFunctionReturn(0);
246903bc72f1SMatthew Knepley }
247003bc72f1SMatthew Knepley 
24715494a064SHong Zhang #undef __FUNCT__
2472c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ"
2473c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2474c87e5d42SMatthew Knepley {
2475c87e5d42SMatthew Knepley   Mat_MPIAIJ    *mat    = (Mat_MPIAIJ *) A->data;
2476c87e5d42SMatthew Knepley   PetscInt       n      = A->rmap->n;
2477c87e5d42SMatthew Knepley   PetscInt       cstart = A->cmap->rstart;
2478c87e5d42SMatthew Knepley   PetscInt      *cmap   = mat->garray;
2479c87e5d42SMatthew Knepley   PetscInt      *diagIdx, *offdiagIdx;
2480c87e5d42SMatthew Knepley   Vec            diagV, offdiagV;
2481c87e5d42SMatthew Knepley   PetscScalar   *a, *diagA, *offdiagA;
2482c87e5d42SMatthew Knepley   PetscInt       r;
2483c87e5d42SMatthew Knepley   PetscErrorCode ierr;
2484c87e5d42SMatthew Knepley 
2485c87e5d42SMatthew Knepley   PetscFunctionBegin;
2486c87e5d42SMatthew Knepley   ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr);
2487c87e5d42SMatthew Knepley   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr);
2488c87e5d42SMatthew Knepley   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr);
2489c87e5d42SMatthew Knepley   ierr = MatGetRowMax(mat->A, diagV,    diagIdx);CHKERRQ(ierr);
2490c87e5d42SMatthew Knepley   ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr);
2491c87e5d42SMatthew Knepley   ierr = VecGetArray(v,        &a);CHKERRQ(ierr);
2492c87e5d42SMatthew Knepley   ierr = VecGetArray(diagV,    &diagA);CHKERRQ(ierr);
2493c87e5d42SMatthew Knepley   ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr);
2494c87e5d42SMatthew Knepley   for(r = 0; r < n; ++r) {
2495c87e5d42SMatthew Knepley     if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) {
2496c87e5d42SMatthew Knepley       a[r]   = diagA[r];
2497c87e5d42SMatthew Knepley       idx[r] = cstart + diagIdx[r];
2498c87e5d42SMatthew Knepley     } else {
2499c87e5d42SMatthew Knepley       a[r]   = offdiagA[r];
2500c87e5d42SMatthew Knepley       idx[r] = cmap[offdiagIdx[r]];
2501c87e5d42SMatthew Knepley     }
2502c87e5d42SMatthew Knepley   }
2503c87e5d42SMatthew Knepley   ierr = VecRestoreArray(v,        &a);CHKERRQ(ierr);
2504c87e5d42SMatthew Knepley   ierr = VecRestoreArray(diagV,    &diagA);CHKERRQ(ierr);
2505c87e5d42SMatthew Knepley   ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr);
2506c87e5d42SMatthew Knepley   ierr = VecDestroy(diagV);CHKERRQ(ierr);
2507c87e5d42SMatthew Knepley   ierr = VecDestroy(offdiagV);CHKERRQ(ierr);
2508c87e5d42SMatthew Knepley   ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr);
2509c87e5d42SMatthew Knepley   PetscFunctionReturn(0);
2510c87e5d42SMatthew Knepley }
2511c87e5d42SMatthew Knepley 
2512c87e5d42SMatthew Knepley #undef __FUNCT__
2513829201f2SHong Zhang #define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ"
2514829201f2SHong Zhang PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat[])
25155494a064SHong Zhang {
25165494a064SHong Zhang   PetscErrorCode ierr;
25175494a064SHong Zhang 
25185494a064SHong Zhang   PetscFunctionBegin;
25195494a064SHong Zhang   ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,newmat);CHKERRQ(ierr);
25205494a064SHong Zhang   PetscFunctionReturn(0);
25215494a064SHong Zhang }
25225494a064SHong Zhang 
25238a729477SBarry Smith /* -------------------------------------------------------------------*/
/*
   Dispatch ("virtual function") table for the MATMPIAIJ matrix type.
   Initialization is POSITIONAL: each entry must sit at the exact slot index
   expected by struct _MatOps (the periodic index comments below mark the
   slot numbers); a 0 means the operation is unsupported for this type.
   Do not reorder entries.
*/
static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ,
       MatGetRow_MPIAIJ,
       MatRestoreRow_MPIAIJ,
       MatMult_MPIAIJ,
/* 4*/ MatMultAdd_MPIAIJ,
       MatMultTranspose_MPIAIJ,
       MatMultTransposeAdd_MPIAIJ,
#ifdef PETSC_HAVE_PBGL
       MatSolve_MPIAIJ,
#else
       0,
#endif
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_MPIAIJ,
       MatTranspose_MPIAIJ,
/*15*/ MatGetInfo_MPIAIJ,
       MatEqual_MPIAIJ,
       MatGetDiagonal_MPIAIJ,
       MatDiagonalScale_MPIAIJ,
       MatNorm_MPIAIJ,
/*20*/ MatAssemblyBegin_MPIAIJ,
       MatAssemblyEnd_MPIAIJ,
       MatSetOption_MPIAIJ,
       MatZeroEntries_MPIAIJ,
/*24*/ MatZeroRows_MPIAIJ,
       0,
#ifdef PETSC_HAVE_PBGL
       0,
#else
       0,
#endif
       0,
       0,
/*29*/ MatSetUpPreallocation_MPIAIJ,
#ifdef PETSC_HAVE_PBGL
       0,
#else
       0,
#endif
       0,
       0,
       0,
/*34*/ MatDuplicate_MPIAIJ,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_MPIAIJ,
       MatGetSubMatrices_MPIAIJ,
       MatIncreaseOverlap_MPIAIJ,
       MatGetValues_MPIAIJ,
       MatCopy_MPIAIJ,
/*44*/ MatGetRowMax_MPIAIJ,
       MatScale_MPIAIJ,
       0,
       0,
       0,
/*49*/ MatSetBlockSize_MPIAIJ,
       0,
       0,
       0,
       0,
/*54*/ MatFDColoringCreate_MPIAIJ,
       0,
       MatSetUnfactored_MPIAIJ,
       MatPermute_MPIAIJ,
       0,
/*59*/ MatGetSubMatrix_MPIAIJ,
       MatDestroy_MPIAIJ,
       MatView_MPIAIJ,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ MatGetRowMaxAbs_MPIAIJ,
       MatGetRowMinAbs_MPIAIJ,
       0,
       MatSetColoring_MPIAIJ,
#if defined(PETSC_HAVE_ADIC)
       MatSetValuesAdic_MPIAIJ,
#else
       0,
#endif
       MatSetValuesAdifor_MPIAIJ,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
/*83*/ MatLoad_MPIAIJ,
       0,
       0,
       0,
       0,
       0,
/*89*/ MatMatMult_MPIAIJ_MPIAIJ,
       MatMatMultSymbolic_MPIAIJ_MPIAIJ,
       MatMatMultNumeric_MPIAIJ_MPIAIJ,
       MatPtAP_Basic,
       MatPtAPSymbolic_MPIAIJ,
/*94*/ MatPtAPNumeric_MPIAIJ,
       0,
       0,
       0,
       0,
/*99*/ 0,
       MatPtAPSymbolic_MPIAIJ_MPIAIJ,
       MatPtAPNumeric_MPIAIJ_MPIAIJ,
       MatConjugate_MPIAIJ,
       0,
/*104*/MatSetValuesRow_MPIAIJ,
       MatRealPart_MPIAIJ,
       MatImaginaryPart_MPIAIJ,
       0,
       0,
/*109*/0,
       MatGetRedundantMatrix_MPIAIJ,
       MatGetRowMin_MPIAIJ,
       0,
       0,
/*114*/MatGetSeqNonzerostructure_MPIAIJ};
265536ce4990SBarry Smith 
26562e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/
26572e8a6d31SBarry Smith 
2658fb2e594dSBarry Smith EXTERN_C_BEGIN
26594a2ae208SSatish Balay #undef __FUNCT__
26604a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ"
2661be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat)
26622e8a6d31SBarry Smith {
26632e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2664dfbe8321SBarry Smith   PetscErrorCode ierr;
26652e8a6d31SBarry Smith 
26662e8a6d31SBarry Smith   PetscFunctionBegin;
26672e8a6d31SBarry Smith   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
26682e8a6d31SBarry Smith   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
26692e8a6d31SBarry Smith   PetscFunctionReturn(0);
26702e8a6d31SBarry Smith }
2671fb2e594dSBarry Smith EXTERN_C_END
26722e8a6d31SBarry Smith 
2673fb2e594dSBarry Smith EXTERN_C_BEGIN
26744a2ae208SSatish Balay #undef __FUNCT__
26754a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ"
2676be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat)
26772e8a6d31SBarry Smith {
26782e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2679dfbe8321SBarry Smith   PetscErrorCode ierr;
26802e8a6d31SBarry Smith 
26812e8a6d31SBarry Smith   PetscFunctionBegin;
26822e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
26832e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
26842e8a6d31SBarry Smith   PetscFunctionReturn(0);
26852e8a6d31SBarry Smith }
2686fb2e594dSBarry Smith EXTERN_C_END
26878a729477SBarry Smith 
2688e090d566SSatish Balay #include "petscpc.h"
268927508adbSBarry Smith EXTERN_C_BEGIN
26904a2ae208SSatish Balay #undef __FUNCT__
2691a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ"
2692be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
2693a23d5eceSKris Buschelman {
2694a23d5eceSKris Buschelman   Mat_MPIAIJ     *b;
2695dfbe8321SBarry Smith   PetscErrorCode ierr;
2696b1d57f15SBarry Smith   PetscInt       i;
2697a23d5eceSKris Buschelman 
2698a23d5eceSKris Buschelman   PetscFunctionBegin;
2699a23d5eceSKris Buschelman   if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
2700a23d5eceSKris Buschelman   if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
270177431f27SBarry Smith   if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
270277431f27SBarry Smith   if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);
2703899cda47SBarry Smith 
27047408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
27057408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
2706d0f46423SBarry Smith   ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
2707d0f46423SBarry Smith   ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);
2708a23d5eceSKris Buschelman   if (d_nnz) {
2709d0f46423SBarry Smith     for (i=0; i<B->rmap->n; i++) {
271077431f27SBarry Smith       if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
2711a23d5eceSKris Buschelman     }
2712a23d5eceSKris Buschelman   }
2713a23d5eceSKris Buschelman   if (o_nnz) {
2714d0f46423SBarry Smith     for (i=0; i<B->rmap->n; i++) {
271577431f27SBarry Smith       if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
2716a23d5eceSKris Buschelman     }
2717a23d5eceSKris Buschelman   }
2718a23d5eceSKris Buschelman   b = (Mat_MPIAIJ*)B->data;
2719899cda47SBarry Smith 
2720526dfc15SBarry Smith   if (!B->preallocated) {
2721899cda47SBarry Smith     /* Explicitly create 2 MATSEQAIJ matrices. */
2722899cda47SBarry Smith     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
2723d0f46423SBarry Smith     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
2724899cda47SBarry Smith     ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr);
2725899cda47SBarry Smith     ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
2726899cda47SBarry Smith     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
2727d0f46423SBarry Smith     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
2728899cda47SBarry Smith     ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr);
2729899cda47SBarry Smith     ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);
2730526dfc15SBarry Smith   }
2731899cda47SBarry Smith 
2732c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr);
2733c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr);
2734526dfc15SBarry Smith   B->preallocated = PETSC_TRUE;
2735a23d5eceSKris Buschelman   PetscFunctionReturn(0);
2736a23d5eceSKris Buschelman }
2737a23d5eceSKris Buschelman EXTERN_C_END
2738a23d5eceSKris Buschelman 
#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MPIAIJ"
/*
    Creates a new MATMPIAIJ matrix with the same parallel layout as matin,
  deep-copying all bookkeeping (row/column maps, colmap, garray, scatter
  context) and duplicating the two sequential blocks A and B; cpvalues
  controls whether the numerical values are copied (passed through to
  MatDuplicate on the blocks).
*/
PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
{
  Mat            mat;
  Mat_MPIAIJ     *a,*oldmat = (Mat_MPIAIJ*)matin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  *newmat       = 0;
  /* create an empty matrix of the same type/sizes; MatSetType fills mat->data */
  ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr);
  ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
  /* copy the whole function table so subtype overrides survive the duplicate */
  ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  a    = (Mat_MPIAIJ*)mat->data;

  /* the copy is marked assembled/preallocated since its blocks are duplicated below */
  mat->factor       = matin->factor;
  mat->rmap->bs      = matin->rmap->bs;
  mat->assembled    = PETSC_TRUE;
  mat->insertmode   = NOT_SET_VALUES;
  mat->preallocated = PETSC_TRUE;

  a->size           = oldmat->size;
  a->rank           = oldmat->rank;
  a->donotstash     = oldmat->donotstash;
  a->roworiented    = oldmat->roworiented;
  /* per-call MatGetRow() work space is rebuilt lazily, not copied */
  a->rowindices     = 0;
  a->rowvalues      = 0;
  a->getrowactive   = PETSC_FALSE;

  ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->rmap,mat->rmap);CHKERRQ(ierr);
  ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->cmap,mat->cmap);CHKERRQ(ierr);

  ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr);
  /* colmap: global column -> position in the off-diagonal block; either a
     hash table or a dense array depending on the build configuration */
  if (oldmat->colmap) {
#if defined (PETSC_USE_CTABLE)
    ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
#else
    ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  } else a->colmap = 0;
  /* garray: local column of B -> global column index */
  if (oldmat->garray) {
    PetscInt len;
    len  = oldmat->B->cmap->n;
    ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
    if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); }
  } else a->garray = 0;

  /* duplicate the vector-scatter machinery used by MatMult for ghost values */
  ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
  ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
  /* duplicate the diagonal and off-diagonal sequential blocks */
  ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
  *newmat = mat;
  PetscFunctionReturn(0);
}
2802416022c9SBarry Smith 
2803e090d566SSatish Balay #include "petscsys.h"
2804416022c9SBarry Smith 
28054a2ae208SSatish Balay #undef __FUNCT__
28064a2ae208SSatish Balay #define __FUNCT__ "MatLoad_MPIAIJ"
2807a313700dSBarry Smith PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, const MatType type,Mat *newmat)
2808416022c9SBarry Smith {
2809d65a2f8fSBarry Smith   Mat            A;
281087828ca2SBarry Smith   PetscScalar    *vals,*svals;
281119bcc07fSBarry Smith   MPI_Comm       comm = ((PetscObject)viewer)->comm;
2812416022c9SBarry Smith   MPI_Status     status;
28136849ba73SBarry Smith   PetscErrorCode ierr;
281413980483SBarry Smith   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,mpicnt,mpimaxnz;
281513980483SBarry Smith   PetscInt       i,nz,j,rstart,rend,mmax,maxnz;
2816b1d57f15SBarry Smith   PetscInt       header[4],*rowlengths = 0,M,N,m,*cols;
2817910ba992SMatthew Knepley   PetscInt       *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols;
2818dc231df0SBarry Smith   PetscInt       cend,cstart,n,*rowners;
2819b1d57f15SBarry Smith   int            fd;
2820416022c9SBarry Smith 
28213a40ed3dSBarry Smith   PetscFunctionBegin;
28221dab6e02SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
28231dab6e02SBarry Smith   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
282417699dbbSLois Curfman McInnes   if (!rank) {
2825b0a32e0cSBarry Smith     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
28260752156aSBarry Smith     ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
2827552e946dSBarry Smith     if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
28286c5fab8fSBarry Smith   }
28296c5fab8fSBarry Smith 
2830b1d57f15SBarry Smith   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
2831416022c9SBarry Smith   M = header[1]; N = header[2];
2832416022c9SBarry Smith   /* determine ownership of all rows */
283329cdbbc8SSatish Balay   m    = M/size + ((M % size) > rank);
2834dc231df0SBarry Smith   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
2835dc231df0SBarry Smith   ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
2836167e7480SBarry Smith 
2837167e7480SBarry Smith   /* First process needs enough room for process with most rows */
2838167e7480SBarry Smith   if (!rank) {
2839167e7480SBarry Smith     mmax       = rowners[1];
2840167e7480SBarry Smith     for (i=2; i<size; i++) {
2841167e7480SBarry Smith       mmax = PetscMax(mmax,rowners[i]);
2842167e7480SBarry Smith     }
2843167e7480SBarry Smith   } else mmax = m;
2844167e7480SBarry Smith 
2845416022c9SBarry Smith   rowners[0] = 0;
284617699dbbSLois Curfman McInnes   for (i=2; i<=size; i++) {
2847416022c9SBarry Smith     rowners[i] += rowners[i-1];
2848416022c9SBarry Smith   }
284917699dbbSLois Curfman McInnes   rstart = rowners[rank];
285017699dbbSLois Curfman McInnes   rend   = rowners[rank+1];
2851416022c9SBarry Smith 
2852416022c9SBarry Smith   /* distribute row lengths to all processors */
2853167e7480SBarry Smith   ierr    = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr);
285417699dbbSLois Curfman McInnes   if (!rank) {
2855dc231df0SBarry Smith     ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr);
2856dc231df0SBarry Smith     ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
2857b1d57f15SBarry Smith     ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
2858b1d57f15SBarry Smith     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
2859dc231df0SBarry Smith     for (j=0; j<m; j++) {
2860dc231df0SBarry Smith       procsnz[0] += ourlens[j];
2861dc231df0SBarry Smith     }
2862dc231df0SBarry Smith     for (i=1; i<size; i++) {
2863dc231df0SBarry Smith       ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr);
2864dc231df0SBarry Smith       /* calculate the number of nonzeros on each processor */
2865dc231df0SBarry Smith       for (j=0; j<rowners[i+1]-rowners[i]; j++) {
2866416022c9SBarry Smith         procsnz[i] += rowlengths[j];
2867416022c9SBarry Smith       }
286813980483SBarry Smith       mpicnt = PetscMPIIntCast(rowners[i+1]-rowners[i]);
286913980483SBarry Smith       ierr   = MPI_Send(rowlengths,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
2870416022c9SBarry Smith     }
2871606d414cSSatish Balay     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
2872dc231df0SBarry Smith   } else {
287313980483SBarry Smith     mpicnt = PetscMPIIntCast(m);CHKERRQ(ierr);
287413980483SBarry Smith     ierr   = MPI_Recv(ourlens,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
2875dc231df0SBarry Smith   }
2876416022c9SBarry Smith 
2877dc231df0SBarry Smith   if (!rank) {
2878416022c9SBarry Smith     /* determine max buffer needed and allocate it */
2879416022c9SBarry Smith     maxnz = 0;
28808a8e0b3aSBarry Smith     for (i=0; i<size; i++) {
28810452661fSBarry Smith       maxnz = PetscMax(maxnz,procsnz[i]);
2882416022c9SBarry Smith     }
2883b1d57f15SBarry Smith     ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);
2884416022c9SBarry Smith 
2885416022c9SBarry Smith     /* read in my part of the matrix column indices  */
2886416022c9SBarry Smith     nz   = procsnz[0];
2887b1d57f15SBarry Smith     ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
28880752156aSBarry Smith     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
2889d65a2f8fSBarry Smith 
2890d65a2f8fSBarry Smith     /* read in every one elses and ship off */
289117699dbbSLois Curfman McInnes     for (i=1; i<size; i++) {
2892d65a2f8fSBarry Smith       nz     = procsnz[i];
28930752156aSBarry Smith       ierr   = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
289413980483SBarry Smith       mpicnt = PetscMPIIntCast(nz);
289513980483SBarry Smith       ierr   = MPI_Send(cols,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
2896d65a2f8fSBarry Smith     }
2897606d414cSSatish Balay     ierr = PetscFree(cols);CHKERRQ(ierr);
28983a40ed3dSBarry Smith   } else {
2899416022c9SBarry Smith     /* determine buffer space needed for message */
2900416022c9SBarry Smith     nz = 0;
2901416022c9SBarry Smith     for (i=0; i<m; i++) {
2902416022c9SBarry Smith       nz += ourlens[i];
2903416022c9SBarry Smith     }
2904dc231df0SBarry Smith     ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
2905416022c9SBarry Smith 
2906416022c9SBarry Smith     /* receive message of column indices*/
290713980483SBarry Smith     mpicnt = PetscMPIIntCast(nz);CHKERRQ(ierr);
290813980483SBarry Smith     ierr = MPI_Recv(mycols,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
290913980483SBarry Smith     ierr = MPI_Get_count(&status,MPIU_INT,&mpimaxnz);CHKERRQ(ierr);
29107c533972SBarry Smith     if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);}
291113980483SBarry Smith     else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);}
291213980483SBarry Smith     else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);}
2913416022c9SBarry Smith   }
2914416022c9SBarry Smith 
2915b362ba68SBarry Smith   /* determine column ownership if matrix is not square */
2916b362ba68SBarry Smith   if (N != M) {
2917b362ba68SBarry Smith     n      = N/size + ((N % size) > rank);
2918b1d57f15SBarry Smith     ierr   = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2919b362ba68SBarry Smith     cstart = cend - n;
2920b362ba68SBarry Smith   } else {
2921b362ba68SBarry Smith     cstart = rstart;
2922b362ba68SBarry Smith     cend   = rend;
2923fb2e594dSBarry Smith     n      = cend - cstart;
2924b362ba68SBarry Smith   }
2925b362ba68SBarry Smith 
2926416022c9SBarry Smith   /* loop over local rows, determining number of off diagonal entries */
2927b1d57f15SBarry Smith   ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr);
2928416022c9SBarry Smith   jj = 0;
2929416022c9SBarry Smith   for (i=0; i<m; i++) {
2930416022c9SBarry Smith     for (j=0; j<ourlens[i]; j++) {
2931b362ba68SBarry Smith       if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
2932416022c9SBarry Smith       jj++;
2933416022c9SBarry Smith     }
2934416022c9SBarry Smith   }
2935d65a2f8fSBarry Smith 
2936d65a2f8fSBarry Smith   /* create our matrix */
2937416022c9SBarry Smith   for (i=0; i<m; i++) {
2938416022c9SBarry Smith     ourlens[i] -= offlens[i];
2939416022c9SBarry Smith   }
2940f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,&A);CHKERRQ(ierr);
2941f69a0ea3SMatthew Knepley   ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr);
2942d10c748bSKris Buschelman   ierr = MatSetType(A,type);CHKERRQ(ierr);
2943d10c748bSKris Buschelman   ierr = MatMPIAIJSetPreallocation(A,0,ourlens,0,offlens);CHKERRQ(ierr);
2944d10c748bSKris Buschelman 
2945d65a2f8fSBarry Smith   for (i=0; i<m; i++) {
2946d65a2f8fSBarry Smith     ourlens[i] += offlens[i];
2947d65a2f8fSBarry Smith   }
2948416022c9SBarry Smith 
294917699dbbSLois Curfman McInnes   if (!rank) {
2950906b51c7SHong Zhang     ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
2951416022c9SBarry Smith 
2952416022c9SBarry Smith     /* read in my part of the matrix numerical values  */
2953416022c9SBarry Smith     nz   = procsnz[0];
29540752156aSBarry Smith     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2955d65a2f8fSBarry Smith 
2956d65a2f8fSBarry Smith     /* insert into matrix */
2957d65a2f8fSBarry Smith     jj      = rstart;
2958d65a2f8fSBarry Smith     smycols = mycols;
2959d65a2f8fSBarry Smith     svals   = vals;
2960d65a2f8fSBarry Smith     for (i=0; i<m; i++) {
2961dc231df0SBarry Smith       ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
2962d65a2f8fSBarry Smith       smycols += ourlens[i];
2963d65a2f8fSBarry Smith       svals   += ourlens[i];
2964d65a2f8fSBarry Smith       jj++;
2965416022c9SBarry Smith     }
2966416022c9SBarry Smith 
2967d65a2f8fSBarry Smith     /* read in other processors and ship out */
296817699dbbSLois Curfman McInnes     for (i=1; i<size; i++) {
2969416022c9SBarry Smith       nz     = procsnz[i];
29700752156aSBarry Smith       ierr   = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
297113980483SBarry Smith       mpicnt = PetscMPIIntCast(nz);
297213980483SBarry Smith       ierr   = MPI_Send(vals,mpicnt,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr);
2973416022c9SBarry Smith     }
2974606d414cSSatish Balay     ierr = PetscFree(procsnz);CHKERRQ(ierr);
29753a40ed3dSBarry Smith   } else {
2976d65a2f8fSBarry Smith     /* receive numeric values */
297787828ca2SBarry Smith     ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
2978416022c9SBarry Smith 
2979d65a2f8fSBarry Smith     /* receive message of values*/
298013980483SBarry Smith     mpicnt = PetscMPIIntCast(nz);
298113980483SBarry Smith     ierr   = MPI_Recv(vals,mpicnt,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr);
298213980483SBarry Smith     ierr   = MPI_Get_count(&status,MPIU_SCALAR,&mpimaxnz);CHKERRQ(ierr);
29837c533972SBarry Smith     if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);}
298413980483SBarry Smith     else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);}
298513980483SBarry Smith     else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);}
2986d65a2f8fSBarry Smith 
2987d65a2f8fSBarry Smith     /* insert into matrix */
2988d65a2f8fSBarry Smith     jj      = rstart;
2989d65a2f8fSBarry Smith     smycols = mycols;
2990d65a2f8fSBarry Smith     svals   = vals;
2991d65a2f8fSBarry Smith     for (i=0; i<m; i++) {
2992dc231df0SBarry Smith       ierr     = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
2993d65a2f8fSBarry Smith       smycols += ourlens[i];
2994d65a2f8fSBarry Smith       svals   += ourlens[i];
2995d65a2f8fSBarry Smith       jj++;
2996d65a2f8fSBarry Smith     }
2997d65a2f8fSBarry Smith   }
2998dc231df0SBarry Smith   ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr);
2999606d414cSSatish Balay   ierr = PetscFree(vals);CHKERRQ(ierr);
3000606d414cSSatish Balay   ierr = PetscFree(mycols);CHKERRQ(ierr);
3001606d414cSSatish Balay   ierr = PetscFree(rowners);CHKERRQ(ierr);
3002d65a2f8fSBarry Smith 
30036d4a8577SBarry Smith   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
30046d4a8577SBarry Smith   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3005d10c748bSKris Buschelman   *newmat = A;
30063a40ed3dSBarry Smith   PetscFunctionReturn(0);
3007416022c9SBarry Smith }
3008a0ff6018SBarry Smith 
30094a2ae208SSatish Balay #undef __FUNCT__
30104a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ"
30114aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
30124aa3045dSJed Brown {
30134aa3045dSJed Brown   PetscErrorCode ierr;
30144aa3045dSJed Brown   IS             iscol_local;
30154aa3045dSJed Brown   PetscInt       csize;
30164aa3045dSJed Brown 
30174aa3045dSJed Brown   PetscFunctionBegin;
30184aa3045dSJed Brown   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
3019*b79d0421SJed Brown   if (call == MAT_REUSE_MATRIX) {
3020*b79d0421SJed Brown     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
3021*b79d0421SJed Brown     if (!iscol_local) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
3022*b79d0421SJed Brown   } else {
30234aa3045dSJed Brown     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
3024*b79d0421SJed Brown   }
30254aa3045dSJed Brown   ierr = MatGetSubMatrix_MPIAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
3026*b79d0421SJed Brown   if (call == MAT_INITIAL_MATRIX) {
3027*b79d0421SJed Brown     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
30284aa3045dSJed Brown     ierr = ISDestroy(iscol_local);CHKERRQ(ierr);
3029*b79d0421SJed Brown   }
30304aa3045dSJed Brown   PetscFunctionReturn(0);
30314aa3045dSJed Brown }
30324aa3045dSJed Brown 
30334aa3045dSJed Brown #undef __FUNCT__
30344aa3045dSJed Brown #define __FUNCT__ "MatGetSubMatrix_MPIAIJ_Private"
3035a0ff6018SBarry Smith /*
303629da9460SBarry Smith     Not great since it makes two copies of the submatrix, first an SeqAIJ
303729da9460SBarry Smith   in local and then by concatenating the local matrices the end result.
303829da9460SBarry Smith   Writing it directly would be much like MatGetSubMatrices_MPIAIJ()
30394aa3045dSJed Brown 
30404aa3045dSJed Brown   Note: This requires a sequential iscol with all indices.
3041a0ff6018SBarry Smith */
30424aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
3043a0ff6018SBarry Smith {
3044dfbe8321SBarry Smith   PetscErrorCode ierr;
304532dcc486SBarry Smith   PetscMPIInt    rank,size;
3046b1d57f15SBarry Smith   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j;
3047b1d57f15SBarry Smith   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal;
3048fee21e36SBarry Smith   Mat            *local,M,Mreuse;
3049a77337e4SBarry Smith   MatScalar      *vwork,*aa;
30507adad957SLisandro Dalcin   MPI_Comm       comm = ((PetscObject)mat)->comm;
305100e6dbe6SBarry Smith   Mat_SeqAIJ     *aij;
30527e2c5f70SBarry Smith 
3053a0ff6018SBarry Smith 
3054a0ff6018SBarry Smith   PetscFunctionBegin;
30551dab6e02SBarry Smith   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
30561dab6e02SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
305700e6dbe6SBarry Smith 
3058fee21e36SBarry Smith   if (call ==  MAT_REUSE_MATRIX) {
3059fee21e36SBarry Smith     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr);
3060e005ede5SBarry Smith     if (!Mreuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
3061fee21e36SBarry Smith     local = &Mreuse;
3062fee21e36SBarry Smith     ierr  = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr);
3063fee21e36SBarry Smith   } else {
3064a0ff6018SBarry Smith     ierr   = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr);
3065fee21e36SBarry Smith     Mreuse = *local;
3066606d414cSSatish Balay     ierr   = PetscFree(local);CHKERRQ(ierr);
3067fee21e36SBarry Smith   }
3068a0ff6018SBarry Smith 
3069a0ff6018SBarry Smith   /*
3070a0ff6018SBarry Smith       m - number of local rows
3071a0ff6018SBarry Smith       n - number of columns (same on all processors)
3072a0ff6018SBarry Smith       rstart - first row in new global matrix generated
3073a0ff6018SBarry Smith   */
3074fee21e36SBarry Smith   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
3075a0ff6018SBarry Smith   if (call == MAT_INITIAL_MATRIX) {
3076fee21e36SBarry Smith     aij = (Mat_SeqAIJ*)(Mreuse)->data;
307700e6dbe6SBarry Smith     ii  = aij->i;
307800e6dbe6SBarry Smith     jj  = aij->j;
307900e6dbe6SBarry Smith 
3080a0ff6018SBarry Smith     /*
308100e6dbe6SBarry Smith         Determine the number of non-zeros in the diagonal and off-diagonal
308200e6dbe6SBarry Smith         portions of the matrix in order to do correct preallocation
3083a0ff6018SBarry Smith     */
308400e6dbe6SBarry Smith 
308500e6dbe6SBarry Smith     /* first get start and end of "diagonal" columns */
30866a6a5d1dSBarry Smith     if (csize == PETSC_DECIDE) {
3087ab50ec6bSBarry Smith       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
3088ab50ec6bSBarry Smith       if (mglobal == n) { /* square matrix */
3089e2c4fddaSBarry Smith 	nlocal = m;
30906a6a5d1dSBarry Smith       } else {
3091ab50ec6bSBarry Smith         nlocal = n/size + ((n % size) > rank);
3092ab50ec6bSBarry Smith       }
3093ab50ec6bSBarry Smith     } else {
30946a6a5d1dSBarry Smith       nlocal = csize;
30956a6a5d1dSBarry Smith     }
3096b1d57f15SBarry Smith     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
309700e6dbe6SBarry Smith     rstart = rend - nlocal;
30986a6a5d1dSBarry Smith     if (rank == size - 1 && rend != n) {
309977431f27SBarry Smith       SETERRQ2(PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
31006a6a5d1dSBarry Smith     }
310100e6dbe6SBarry Smith 
310200e6dbe6SBarry Smith     /* next, compute all the lengths */
3103b1d57f15SBarry Smith     ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
310400e6dbe6SBarry Smith     olens = dlens + m;
310500e6dbe6SBarry Smith     for (i=0; i<m; i++) {
310600e6dbe6SBarry Smith       jend = ii[i+1] - ii[i];
310700e6dbe6SBarry Smith       olen = 0;
310800e6dbe6SBarry Smith       dlen = 0;
310900e6dbe6SBarry Smith       for (j=0; j<jend; j++) {
311000e6dbe6SBarry Smith         if (*jj < rstart || *jj >= rend) olen++;
311100e6dbe6SBarry Smith         else dlen++;
311200e6dbe6SBarry Smith         jj++;
311300e6dbe6SBarry Smith       }
311400e6dbe6SBarry Smith       olens[i] = olen;
311500e6dbe6SBarry Smith       dlens[i] = dlen;
311600e6dbe6SBarry Smith     }
3117f69a0ea3SMatthew Knepley     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
3118f69a0ea3SMatthew Knepley     ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr);
31197adad957SLisandro Dalcin     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
3120e2d9671bSKris Buschelman     ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr);
3121606d414cSSatish Balay     ierr = PetscFree(dlens);CHKERRQ(ierr);
3122a0ff6018SBarry Smith   } else {
3123b1d57f15SBarry Smith     PetscInt ml,nl;
3124a0ff6018SBarry Smith 
3125a0ff6018SBarry Smith     M = *newmat;
3126a0ff6018SBarry Smith     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
312729bbc08cSBarry Smith     if (ml != m) SETERRQ(PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
3128a0ff6018SBarry Smith     ierr = MatZeroEntries(M);CHKERRQ(ierr);
3129c48de900SBarry Smith     /*
3130c48de900SBarry Smith          The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
3131c48de900SBarry Smith        rather than the slower MatSetValues().
3132c48de900SBarry Smith     */
3133c48de900SBarry Smith     M->was_assembled = PETSC_TRUE;
3134c48de900SBarry Smith     M->assembled     = PETSC_FALSE;
3135a0ff6018SBarry Smith   }
3136a0ff6018SBarry Smith   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
3137fee21e36SBarry Smith   aij = (Mat_SeqAIJ*)(Mreuse)->data;
313800e6dbe6SBarry Smith   ii  = aij->i;
313900e6dbe6SBarry Smith   jj  = aij->j;
314000e6dbe6SBarry Smith   aa  = aij->a;
3141a0ff6018SBarry Smith   for (i=0; i<m; i++) {
3142a0ff6018SBarry Smith     row   = rstart + i;
314300e6dbe6SBarry Smith     nz    = ii[i+1] - ii[i];
314400e6dbe6SBarry Smith     cwork = jj;     jj += nz;
314500e6dbe6SBarry Smith     vwork = aa;     aa += nz;
31468c638d02SBarry Smith     ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
3147a0ff6018SBarry Smith   }
3148a0ff6018SBarry Smith 
3149a0ff6018SBarry Smith   ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3150a0ff6018SBarry Smith   ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3151a0ff6018SBarry Smith   *newmat = M;
3152fee21e36SBarry Smith 
3153fee21e36SBarry Smith   /* save submatrix used in processor for next request */
3154fee21e36SBarry Smith   if (call ==  MAT_INITIAL_MATRIX) {
3155fee21e36SBarry Smith     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
3156fee21e36SBarry Smith     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
3157fee21e36SBarry Smith   }
3158fee21e36SBarry Smith 
3159a0ff6018SBarry Smith   PetscFunctionReturn(0);
3160a0ff6018SBarry Smith }
3161273d9f13SBarry Smith 
3162e2e86b8fSSatish Balay EXTERN_C_BEGIN
31634a2ae208SSatish Balay #undef __FUNCT__
3164ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ"
3165b7940d39SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[])
3166ccd8e176SBarry Smith {
3167899cda47SBarry Smith   PetscInt       m,cstart, cend,j,nnz,i,d;
3168899cda47SBarry Smith   PetscInt       *d_nnz,*o_nnz,nnz_max = 0,rstart,ii;
3169ccd8e176SBarry Smith   const PetscInt *JJ;
3170ccd8e176SBarry Smith   PetscScalar    *values;
3171ccd8e176SBarry Smith   PetscErrorCode ierr;
3172ccd8e176SBarry Smith 
3173ccd8e176SBarry Smith   PetscFunctionBegin;
3174b7940d39SSatish Balay   if (Ii[0]) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]);
3175899cda47SBarry Smith 
31767408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
31777408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
3178d0f46423SBarry Smith   ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
3179d0f46423SBarry Smith   ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);
3180d0f46423SBarry Smith   m      = B->rmap->n;
3181d0f46423SBarry Smith   cstart = B->cmap->rstart;
3182d0f46423SBarry Smith   cend   = B->cmap->rend;
3183d0f46423SBarry Smith   rstart = B->rmap->rstart;
3184899cda47SBarry Smith 
3185ccd8e176SBarry Smith   ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
3186ccd8e176SBarry Smith   o_nnz = d_nnz + m;
3187ccd8e176SBarry Smith 
3188ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING)
3189ecc77c7aSBarry Smith   for (i=0; i<m; i++) {
3190ecc77c7aSBarry Smith     nnz     = Ii[i+1]- Ii[i];
3191ecc77c7aSBarry Smith     JJ      = J + Ii[i];
3192ecc77c7aSBarry Smith     if (nnz < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz);
3193ecc77c7aSBarry Smith     if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j);
3194d0f46423SBarry Smith     if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N);
3195ecc77c7aSBarry Smith     for (j=1; j<nnz; j++) {
3196ecc77c7aSBarry Smith       if (JJ[i] <= JJ[i-1]) SETERRRQ(PETSC_ERR_ARG_WRONGSTATE,"Row %D has unsorted column index at %D location in column indices",i,j);
3197ecc77c7aSBarry Smith     }
3198ecc77c7aSBarry Smith   }
3199ecc77c7aSBarry Smith #endif
3200ecc77c7aSBarry Smith 
3201ccd8e176SBarry Smith   for (i=0; i<m; i++) {
3202b7940d39SSatish Balay     nnz     = Ii[i+1]- Ii[i];
3203b7940d39SSatish Balay     JJ      = J + Ii[i];
3204ccd8e176SBarry Smith     nnz_max = PetscMax(nnz_max,nnz);
3205ccd8e176SBarry Smith     for (j=0; j<nnz; j++) {
3206ccd8e176SBarry Smith       if (*JJ >= cstart) break;
3207ccd8e176SBarry Smith       JJ++;
3208ccd8e176SBarry Smith     }
3209ccd8e176SBarry Smith     d = 0;
3210ccd8e176SBarry Smith     for (; j<nnz; j++) {
3211ccd8e176SBarry Smith       if (*JJ++ >= cend) break;
3212ccd8e176SBarry Smith       d++;
3213ccd8e176SBarry Smith     }
3214ccd8e176SBarry Smith     d_nnz[i] = d;
3215ccd8e176SBarry Smith     o_nnz[i] = nnz - d;
3216ccd8e176SBarry Smith   }
3217ccd8e176SBarry Smith   ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
3218ccd8e176SBarry Smith   ierr = PetscFree(d_nnz);CHKERRQ(ierr);
3219ccd8e176SBarry Smith 
3220ccd8e176SBarry Smith   if (v) values = (PetscScalar*)v;
3221ccd8e176SBarry Smith   else {
3222ccd8e176SBarry Smith     ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr);
3223ccd8e176SBarry Smith     ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr);
3224ccd8e176SBarry Smith   }
3225ccd8e176SBarry Smith 
3226ccd8e176SBarry Smith   for (i=0; i<m; i++) {
3227ccd8e176SBarry Smith     ii   = i + rstart;
3228b7940d39SSatish Balay     nnz  = Ii[i+1]- Ii[i];
3229b7940d39SSatish Balay     ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr);
3230ccd8e176SBarry Smith   }
3231ccd8e176SBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3232ccd8e176SBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3233ccd8e176SBarry Smith 
3234ccd8e176SBarry Smith   if (!v) {
3235ccd8e176SBarry Smith     ierr = PetscFree(values);CHKERRQ(ierr);
3236ccd8e176SBarry Smith   }
3237ccd8e176SBarry Smith   PetscFunctionReturn(0);
3238ccd8e176SBarry Smith }
3239e2e86b8fSSatish Balay EXTERN_C_END
3240ccd8e176SBarry Smith 
3241ccd8e176SBarry Smith #undef __FUNCT__
3242ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR"
32431eea217eSSatish Balay /*@
3244ccd8e176SBarry Smith    MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format
3245ccd8e176SBarry Smith    (the default parallel PETSc format).
3246ccd8e176SBarry Smith 
3247ccd8e176SBarry Smith    Collective on MPI_Comm
3248ccd8e176SBarry Smith 
3249ccd8e176SBarry Smith    Input Parameters:
3250a1661176SMatthew Knepley +  B - the matrix
3251ccd8e176SBarry Smith .  i - the indices into j for the start of each local row (starts with zero)
3252ccd8e176SBarry Smith .  j - the column indices for each local row (starts with zero) these must be sorted for each row
3253ccd8e176SBarry Smith -  v - optional values in the matrix
3254ccd8e176SBarry Smith 
3255ccd8e176SBarry Smith    Level: developer
3256ccd8e176SBarry Smith 
325712251496SSatish Balay    Notes:
325812251496SSatish Balay        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
325912251496SSatish Balay      thus you CANNOT change the matrix entries by changing the values of a[] after you have
326012251496SSatish Balay      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
326112251496SSatish Balay 
326212251496SSatish Balay        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
326312251496SSatish Balay 
326412251496SSatish Balay        The format which is used for the sparse matrix input, is equivalent to a
326512251496SSatish Balay     row-major ordering.. i.e for the following matrix, the input data expected is
326612251496SSatish Balay     as shown:
326712251496SSatish Balay 
326812251496SSatish Balay         1 0 0
326912251496SSatish Balay         2 0 3     P0
327012251496SSatish Balay        -------
327112251496SSatish Balay         4 5 6     P1
327212251496SSatish Balay 
327312251496SSatish Balay      Process0 [P0]: rows_owned=[0,1]
327412251496SSatish Balay         i =  {0,1,3}  [size = nrow+1  = 2+1]
327512251496SSatish Balay         j =  {0,0,2}  [size = nz = 6]
327612251496SSatish Balay         v =  {1,2,3}  [size = nz = 6]
327712251496SSatish Balay 
327812251496SSatish Balay      Process1 [P1]: rows_owned=[2]
327912251496SSatish Balay         i =  {0,3}    [size = nrow+1  = 1+1]
328012251496SSatish Balay         j =  {0,1,2}  [size = nz = 6]
328112251496SSatish Balay         v =  {4,5,6}  [size = nz = 6]
328212251496SSatish Balay 
3283ecc77c7aSBarry Smith       The column indices for each row MUST be sorted.
32842fb0ec9aSBarry Smith 
3285ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3286ccd8e176SBarry Smith 
32872fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ,
32888d7a6e47SBarry Smith           MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays()
3289ccd8e176SBarry Smith @*/
3290be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
3291ccd8e176SBarry Smith {
3292ccd8e176SBarry Smith   PetscErrorCode ierr,(*f)(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
3293ccd8e176SBarry Smith 
3294ccd8e176SBarry Smith   PetscFunctionBegin;
3295ccd8e176SBarry Smith   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",(void (**)(void))&f);CHKERRQ(ierr);
3296ccd8e176SBarry Smith   if (f) {
3297ccd8e176SBarry Smith     ierr = (*f)(B,i,j,v);CHKERRQ(ierr);
3298ccd8e176SBarry Smith   }
3299ccd8e176SBarry Smith   PetscFunctionReturn(0);
3300ccd8e176SBarry Smith }
3301ccd8e176SBarry Smith 
3302ccd8e176SBarry Smith #undef __FUNCT__
33034a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation"
3304273d9f13SBarry Smith /*@C
3305ccd8e176SBarry Smith    MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format
3306273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3307273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3308273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3309273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3310273d9f13SBarry Smith 
3311273d9f13SBarry Smith    Collective on MPI_Comm
3312273d9f13SBarry Smith 
3313273d9f13SBarry Smith    Input Parameters:
3314273d9f13SBarry Smith +  A - the matrix
3315273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3316273d9f13SBarry Smith            (same value is used for all local rows)
3317273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3318273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3319273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3320273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3321273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3322273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
3323273d9f13SBarry Smith            submatrix (same value is used for all local rows).
3324273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
3325273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
3326273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
3327273d9f13SBarry Smith            structure. The size of this array is equal to the number
3328273d9f13SBarry Smith            of local rows, i.e 'm'.
3329273d9f13SBarry Smith 
333049a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
333149a6f317SBarry Smith 
3332273d9f13SBarry Smith    The AIJ format (also called the Yale sparse matrix format or
3333ccd8e176SBarry Smith    compressed row storage (CSR)), is fully compatible with standard Fortran 77
3334ccd8e176SBarry Smith    storage.  The stored row and column indices begin with zero.  See the users manual for details.
3335273d9f13SBarry Smith 
3336273d9f13SBarry Smith    The parallel matrix is partitioned such that the first m0 rows belong to
3337273d9f13SBarry Smith    process 0, the next m1 rows belong to process 1, the next m2 rows belong
3338273d9f13SBarry Smith    to process 2 etc.. where m0,m1,m2... are the input parameter 'm'.
3339273d9f13SBarry Smith 
3340273d9f13SBarry Smith    The DIAGONAL portion of the local submatrix of a processor can be defined
3341273d9f13SBarry Smith    as the submatrix which is obtained by extraction the part corresponding
3342273d9f13SBarry Smith    to the rows r1-r2 and columns r1-r2 of the global matrix, where r1 is the
3343273d9f13SBarry Smith    first row that belongs to the processor, and r2 is the last row belonging
3344273d9f13SBarry Smith    to the this processor. This is a square mxm matrix. The remaining portion
3345273d9f13SBarry Smith    of the local submatrix (mxN) constitute the OFF-DIAGONAL portion.
3346273d9f13SBarry Smith 
3347273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
3348273d9f13SBarry Smith 
3349aa95bbe8SBarry Smith    You can call MatGetInfo() to get information on how effective the preallocation was;
3350aa95bbe8SBarry Smith    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3351aa95bbe8SBarry Smith    You can also run with the option -info and look for messages with the string
3352aa95bbe8SBarry Smith    malloc in them to see if additional memory allocation was needed.
3353aa95bbe8SBarry Smith 
3354273d9f13SBarry Smith    Example usage:
3355273d9f13SBarry Smith 
3356273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
3357273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
3358273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
3359273d9f13SBarry Smith    as follows:
3360273d9f13SBarry Smith 
3361273d9f13SBarry Smith .vb
3362273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
3363273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
3364273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
3365273d9f13SBarry Smith     -------------------------------------
3366273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
3367273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
3368273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
3369273d9f13SBarry Smith     -------------------------------------
3370273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
3371273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
3372273d9f13SBarry Smith .ve
3373273d9f13SBarry Smith 
3374273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
3375273d9f13SBarry Smith 
3376273d9f13SBarry Smith .vb
3377273d9f13SBarry Smith       A B C
3378273d9f13SBarry Smith       D E F
3379273d9f13SBarry Smith       G H I
3380273d9f13SBarry Smith .ve
3381273d9f13SBarry Smith 
3382273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
3383273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
3384273d9f13SBarry Smith 
3385273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3386273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3387273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
3388273d9f13SBarry Smith 
3389273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
3390273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
3391273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
3392273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
3393273d9f13SBarry Smith    part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
3394273d9f13SBarry Smith    matrix, and [DF] as another SeqAIJ matrix.
3395273d9f13SBarry Smith 
3396273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
3397273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
3398273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
3399273d9f13SBarry Smith    One way to choose d_nz and o_nz is to use the max nonzerors per local
3400273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
3401273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
3402273d9f13SBarry Smith .vb
3403273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
3404273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
3405273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3406273d9f13SBarry Smith .ve
3407273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
3408273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
3409273d9f13SBarry Smith    for proc3. i.e we are using 12+15+10=37 storage locations to store
3410273d9f13SBarry Smith    34 values.
3411273d9f13SBarry Smith 
3412273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
3413273d9f13SBarry Smith    for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3414273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3415273d9f13SBarry Smith .vb
3416273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3417273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3418273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3419273d9f13SBarry Smith .ve
3420273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3421273d9f13SBarry Smith    hence pre-allocation is perfect.
3422273d9f13SBarry Smith 
3423273d9f13SBarry Smith    Level: intermediate
3424273d9f13SBarry Smith 
3425273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3426273d9f13SBarry Smith 
3427ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(),
3428aa95bbe8SBarry Smith           MPIAIJ, MatGetInfo()
3429273d9f13SBarry Smith @*/
3430be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3431273d9f13SBarry Smith {
3432b1d57f15SBarry Smith   PetscErrorCode ierr,(*f)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
3433273d9f13SBarry Smith 
3434273d9f13SBarry Smith   PetscFunctionBegin;
3435a23d5eceSKris Buschelman   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
3436a23d5eceSKris Buschelman   if (f) {
3437a23d5eceSKris Buschelman     ierr = (*f)(B,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3438273d9f13SBarry Smith   }
3439273d9f13SBarry Smith   PetscFunctionReturn(0);
3440273d9f13SBarry Smith }
3441273d9f13SBarry Smith 
34424a2ae208SSatish Balay #undef __FUNCT__
34432fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays"
344458d36128SBarry Smith /*@
34452fb0ec9aSBarry Smith      MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard
34462fb0ec9aSBarry Smith          CSR format the local rows.
34472fb0ec9aSBarry Smith 
34482fb0ec9aSBarry Smith    Collective on MPI_Comm
34492fb0ec9aSBarry Smith 
34502fb0ec9aSBarry Smith    Input Parameters:
34512fb0ec9aSBarry Smith +  comm - MPI communicator
34522fb0ec9aSBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
34532fb0ec9aSBarry Smith .  n - This value should be the same as the local size used in creating the
34542fb0ec9aSBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
34552fb0ec9aSBarry Smith        calculated if N is given) For square matrices n is almost always m.
34562fb0ec9aSBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
34572fb0ec9aSBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
34582fb0ec9aSBarry Smith .   i - row indices
34592fb0ec9aSBarry Smith .   j - column indices
34602fb0ec9aSBarry Smith -   a - matrix values
34612fb0ec9aSBarry Smith 
34622fb0ec9aSBarry Smith    Output Parameter:
34632fb0ec9aSBarry Smith .   mat - the matrix
346403bfb495SBarry Smith 
34652fb0ec9aSBarry Smith    Level: intermediate
34662fb0ec9aSBarry Smith 
34672fb0ec9aSBarry Smith    Notes:
34682fb0ec9aSBarry Smith        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
34692fb0ec9aSBarry Smith      thus you CANNOT change the matrix entries by changing the values of a[] after you have
34708d7a6e47SBarry Smith      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
34712fb0ec9aSBarry Smith 
347212251496SSatish Balay        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
347312251496SSatish Balay 
347412251496SSatish Balay        The format which is used for the sparse matrix input, is equivalent to a
347512251496SSatish Balay     row-major ordering.. i.e for the following matrix, the input data expected is
347612251496SSatish Balay     as shown:
347712251496SSatish Balay 
347812251496SSatish Balay         1 0 0
347912251496SSatish Balay         2 0 3     P0
348012251496SSatish Balay        -------
348112251496SSatish Balay         4 5 6     P1
348212251496SSatish Balay 
348312251496SSatish Balay      Process0 [P0]: rows_owned=[0,1]
348412251496SSatish Balay         i =  {0,1,3}  [size = nrow+1  = 2+1]
348512251496SSatish Balay         j =  {0,0,2}  [size = nz = 6]
348612251496SSatish Balay         v =  {1,2,3}  [size = nz = 6]
348712251496SSatish Balay 
348812251496SSatish Balay      Process1 [P1]: rows_owned=[2]
348912251496SSatish Balay         i =  {0,3}    [size = nrow+1  = 1+1]
349012251496SSatish Balay         j =  {0,1,2}  [size = nz = 6]
349112251496SSatish Balay         v =  {4,5,6}  [size = nz = 6]
34922fb0ec9aSBarry Smith 
34932fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
34942fb0ec9aSBarry Smith 
34952fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
34968d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays()
34972fb0ec9aSBarry Smith @*/
349882b90586SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
34992fb0ec9aSBarry Smith {
35002fb0ec9aSBarry Smith   PetscErrorCode ierr;
35012fb0ec9aSBarry Smith 
35022fb0ec9aSBarry Smith  PetscFunctionBegin;
35032fb0ec9aSBarry Smith   if (i[0]) {
35042fb0ec9aSBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
35052fb0ec9aSBarry Smith   }
35062fb0ec9aSBarry Smith   if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
35072fb0ec9aSBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
3508d4146a68SBarry Smith   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
35092fb0ec9aSBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
35102fb0ec9aSBarry Smith   ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr);
35112fb0ec9aSBarry Smith   PetscFunctionReturn(0);
35122fb0ec9aSBarry Smith }
35132fb0ec9aSBarry Smith 
35142fb0ec9aSBarry Smith #undef __FUNCT__
35154a2ae208SSatish Balay #define __FUNCT__ "MatCreateMPIAIJ"
3516273d9f13SBarry Smith /*@C
3517273d9f13SBarry Smith    MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format
3518273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3519273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3520273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3521273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3522273d9f13SBarry Smith 
3523273d9f13SBarry Smith    Collective on MPI_Comm
3524273d9f13SBarry Smith 
3525273d9f13SBarry Smith    Input Parameters:
3526273d9f13SBarry Smith +  comm - MPI communicator
3527273d9f13SBarry Smith .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3528273d9f13SBarry Smith            This value should be the same as the local size used in creating the
3529273d9f13SBarry Smith            y vector for the matrix-vector product y = Ax.
3530273d9f13SBarry Smith .  n - This value should be the same as the local size used in creating the
3531273d9f13SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3532273d9f13SBarry Smith        calculated if N is given) For square matrices n is almost always m.
3533273d9f13SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3534273d9f13SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3535273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3536273d9f13SBarry Smith            (same value is used for all local rows)
3537273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3538273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3539273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3540273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3541273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3542273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
3543273d9f13SBarry Smith            submatrix (same value is used for all local rows).
3544273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
3545273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
3546273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
3547273d9f13SBarry Smith            structure. The size of this array is equal to the number
3548273d9f13SBarry Smith            of local rows, i.e 'm'.
3549273d9f13SBarry Smith 
3550273d9f13SBarry Smith    Output Parameter:
3551273d9f13SBarry Smith .  A - the matrix
3552273d9f13SBarry Smith 
3553175b88e8SBarry Smith    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3554ae1d86c5SBarry Smith    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3555175b88e8SBarry Smith    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3556175b88e8SBarry Smith 
3557273d9f13SBarry Smith    Notes:
355849a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
355949a6f317SBarry Smith 
3560273d9f13SBarry Smith    m,n,M,N parameters specify the size of the matrix, and its partitioning across
3561273d9f13SBarry Smith    processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate
3562273d9f13SBarry Smith    storage requirements for this matrix.
3563273d9f13SBarry Smith 
3564273d9f13SBarry Smith    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one
3565273d9f13SBarry Smith    processor than it must be used on all processors that share the object for
3566273d9f13SBarry Smith    that argument.
3567273d9f13SBarry Smith 
3568273d9f13SBarry Smith    The user MUST specify either the local or global matrix dimensions
3569273d9f13SBarry Smith    (possibly both).
3570273d9f13SBarry Smith 
357133a7c187SSatish Balay    The parallel matrix is partitioned across processors such that the
357233a7c187SSatish Balay    first m0 rows belong to process 0, the next m1 rows belong to
357333a7c187SSatish Balay    process 1, the next m2 rows belong to process 2 etc.. where
357433a7c187SSatish Balay    m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores
357533a7c187SSatish Balay    values corresponding to [m x N] submatrix.
3576273d9f13SBarry Smith 
   The columns are logically partitioned with the n0 columns belonging
   to 0th partition, the next n1 columns belonging to the next
   partition etc.. where n0,n1,n2... are the input parameter 'n'.
358033a7c187SSatish Balay 
358133a7c187SSatish Balay    The DIAGONAL portion of the local submatrix on any given processor
358233a7c187SSatish Balay    is the submatrix corresponding to the rows and columns m,n
358333a7c187SSatish Balay    corresponding to the given processor. i.e diagonal matrix on
358433a7c187SSatish Balay    process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1]
358533a7c187SSatish Balay    etc. The remaining portion of the local submatrix [m x (N-n)]
358633a7c187SSatish Balay    constitute the OFF-DIAGONAL portion. The example below better
358733a7c187SSatish Balay    illustrates this concept.
358833a7c187SSatish Balay 
358933a7c187SSatish Balay    For a square global matrix we define each processor's diagonal portion
359033a7c187SSatish Balay    to be its local rows and the corresponding columns (a square submatrix);
359133a7c187SSatish Balay    each processor's off-diagonal portion encompasses the remainder of the
359233a7c187SSatish Balay    local matrix (a rectangular submatrix).
3593273d9f13SBarry Smith 
3594273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
3595273d9f13SBarry Smith 
359697d05335SKris Buschelman    When calling this routine with a single process communicator, a matrix of
359797d05335SKris Buschelman    type SEQAIJ is returned.  If a matrix of type MPIAIJ is desired for this
359897d05335SKris Buschelman    type of communicator, use the construction mechanism:
359978102f6cSMatthew Knepley      MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...);
360097d05335SKris Buschelman 
3601273d9f13SBarry Smith    By default, this format uses inodes (identical nodes) when possible.
3602273d9f13SBarry Smith    We search for consecutive rows with the same nonzero structure, thereby
3603273d9f13SBarry Smith    reusing matrix information to achieve increased efficiency.
3604273d9f13SBarry Smith 
3605273d9f13SBarry Smith    Options Database Keys:
3606923f20ffSKris Buschelman +  -mat_no_inode  - Do not use inodes
3607923f20ffSKris Buschelman .  -mat_inode_limit <limit> - Sets inode limit (max limit=5)
3608273d9f13SBarry Smith -  -mat_aij_oneindex - Internally use indexing starting at 1
3609273d9f13SBarry Smith         rather than 0.  Note that when calling MatSetValues(),
3610273d9f13SBarry Smith         the user still MUST index entries starting at 0!
3611273d9f13SBarry Smith 
3612273d9f13SBarry Smith 
3613273d9f13SBarry Smith    Example usage:
3614273d9f13SBarry Smith 
3615273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
3616273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
3617273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
3618273d9f13SBarry Smith    as follows:
3619273d9f13SBarry Smith 
3620273d9f13SBarry Smith .vb
3621273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
3622273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
3623273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
3624273d9f13SBarry Smith     -------------------------------------
3625273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
3626273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
3627273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
3628273d9f13SBarry Smith     -------------------------------------
3629273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
3630273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
3631273d9f13SBarry Smith .ve
3632273d9f13SBarry Smith 
3633273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
3634273d9f13SBarry Smith 
3635273d9f13SBarry Smith .vb
3636273d9f13SBarry Smith       A B C
3637273d9f13SBarry Smith       D E F
3638273d9f13SBarry Smith       G H I
3639273d9f13SBarry Smith .ve
3640273d9f13SBarry Smith 
3641273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
3642273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
3643273d9f13SBarry Smith 
3644273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3645273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3646273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
3647273d9f13SBarry Smith 
3648273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
3649273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
3650273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
   Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
   part as SeqAIJ matrices. For example, proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.
3654273d9f13SBarry Smith 
3655273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
3656273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
3657273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
3659273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
3660273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
3661273d9f13SBarry Smith .vb
3662273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
3663273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
3664273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3665273d9f13SBarry Smith .ve
   We are allocating m*(d_nz+o_nz) storage locations for every proc. This
   translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2, i.e. we are using 12+15+10=37 storage locations to store
   34 values.
3670273d9f13SBarry Smith 
   When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3673273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3674273d9f13SBarry Smith .vb
3675273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3676273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3677273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3678273d9f13SBarry Smith .ve
3679273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3680273d9f13SBarry Smith    hence pre-allocation is perfect.
3681273d9f13SBarry Smith 
3682273d9f13SBarry Smith    Level: intermediate
3683273d9f13SBarry Smith 
3684273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3685273d9f13SBarry Smith 
3686ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
36872fb0ec9aSBarry Smith           MPIAIJ, MatCreateMPIAIJWithArrays()
3688273d9f13SBarry Smith @*/
3689be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3690273d9f13SBarry Smith {
36916849ba73SBarry Smith   PetscErrorCode ierr;
3692b1d57f15SBarry Smith   PetscMPIInt    size;
3693273d9f13SBarry Smith 
3694273d9f13SBarry Smith   PetscFunctionBegin;
3695f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3696f69a0ea3SMatthew Knepley   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3697273d9f13SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3698273d9f13SBarry Smith   if (size > 1) {
3699273d9f13SBarry Smith     ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr);
3700273d9f13SBarry Smith     ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3701273d9f13SBarry Smith   } else {
3702273d9f13SBarry Smith     ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr);
3703273d9f13SBarry Smith     ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr);
3704273d9f13SBarry Smith   }
3705273d9f13SBarry Smith   PetscFunctionReturn(0);
3706273d9f13SBarry Smith }
3707195d93cdSBarry Smith 
37084a2ae208SSatish Balay #undef __FUNCT__
37094a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ"
3710be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[])
3711195d93cdSBarry Smith {
3712195d93cdSBarry Smith   Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;
3713b1d57f15SBarry Smith 
3714195d93cdSBarry Smith   PetscFunctionBegin;
3715195d93cdSBarry Smith   *Ad     = a->A;
3716195d93cdSBarry Smith   *Ao     = a->B;
3717195d93cdSBarry Smith   *colmap = a->garray;
3718195d93cdSBarry Smith   PetscFunctionReturn(0);
3719195d93cdSBarry Smith }
3720a2243be0SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatSetColoring_MPIAIJ"
/*
   MatSetColoring_MPIAIJ - attaches a coloring to an MPIAIJ matrix by
   translating it into separate colorings for the diagonal (a->A) and
   off-diagonal (a->B) SeqAIJ blocks and applying MatSetColoring_SeqAIJ
   to each.

   Two coloring types are supported:
   - IS_COLORING_GLOBAL: colors are indexed by global column; they are
     gathered onto every process and remapped through a->garray for B.
   - IS_COLORING_GHOSTED: colors are indexed by local (ghosted) column;
     global indices are converted through A->mapping first.
*/
PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;

  PetscFunctionBegin;
  if (coloring->ctype == IS_COLORING_GLOBAL) {
    ISColoringValue *allcolors,*colors;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion: gather the full global color
       array, then pick out the entries for B's columns via garray */
    ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      /* garray[i] is the global column index of B's local column i */
      colors[i] = allcolors[a->garray[i]];
    }
    ierr = PetscFree(allcolors);CHKERRQ(ierr);
    /* NOTE(review): colors is not freed here — presumably ISColoringCreate
       takes ownership of the array; confirm against the ISColoring API */
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
  } else if (coloring->ctype == IS_COLORING_GHOSTED) {
    ISColoringValue *colors;
    PetscInt        *larray;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion: map each owned global column
       (rstart..rstart+n-1) to its local (ghosted) index, then look up its color */
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      larray[i] = i + A->cmap->rstart;
    }
    ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion: same idea, but B's columns are
       already global indices stored in garray */
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype);
  }

  PetscFunctionReturn(0);
}
3784a2243be0SBarry Smith 
#if defined(PETSC_HAVE_ADIC)
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesAdic_MPIAIJ"
/*
   MatSetValuesAdic_MPIAIJ - forwards ADIC-differentiated values into both
   the diagonal and off-diagonal sequential blocks of the parallel matrix.
*/
PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesAdic_SeqAIJ(aij->A,advalues);CHKERRQ(ierr); /* diagonal block */
  ierr = MatSetValuesAdic_SeqAIJ(aij->B,advalues);CHKERRQ(ierr); /* off-diagonal block */
  PetscFunctionReturn(0);
}
#endif
3799779c1a83SBarry Smith 
3800779c1a83SBarry Smith #undef __FUNCT__
3801779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ"
3802b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues)
3803779c1a83SBarry Smith {
3804779c1a83SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
3805dfbe8321SBarry Smith   PetscErrorCode ierr;
3806779c1a83SBarry Smith 
3807779c1a83SBarry Smith   PetscFunctionBegin;
3808779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr);
3809779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr);
3810a2243be0SBarry Smith   PetscFunctionReturn(0);
3811a2243be0SBarry Smith }
3812c5d6d63eSBarry Smith 
3813c5d6d63eSBarry Smith #undef __FUNCT__
381451dd7536SBarry Smith #define __FUNCT__ "MatMerge"
/*@
      MatMerge - Creates a single large PETSc matrix by concatenating the sequential
                 matrices from each processor

    Collective on MPI_Comm

   Input Parameters:
+    comm - the communicator the parallel matrix will live on
.    inmat - the input sequential matrix
.    n - number of local columns (or PETSC_DECIDE)
-    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    outmat - the parallel matrix generated

    Level: advanced

   Notes: The number of columns of the matrix in EACH processor MUST be the same.
     The input matrix inmat is destroyed by this routine; do not use or destroy it
     after the call.

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
{
  PetscErrorCode ierr;
  PetscInt       m,N,i,rstart,nnz,Ii,*dnz,*onz;
  PetscInt       *indx;
  PetscScalar    *values;

  PetscFunctionBegin;
  /* m = local row count of this process's piece; N = (shared) column count */
  ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */
    if (n == PETSC_DECIDE){
      ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
    }
    /* prefix-sum of local row counts gives this process's first global row */
    ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart -= m;

    /* dnz/onz are allocated and filled by the MatPreallocate* macros */
    ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
    for (i=0;i<m;i++) {
      ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr);
      ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
    }
    /* This routine will ONLY return MPIAIJ type matrix */
    ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
    ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr);
    ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  } else if (scall == MAT_REUSE_MATRIX){
    /* reuse existing structure; only the row offset is needed below */
    ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  /* copy the local rows into the parallel matrix at their global positions */
  for (i=0;i<m;i++) {
    ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
    Ii    = i + rstart;
    ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
  }
  /* the input matrix is consumed by this routine */
  ierr = MatDestroy(inmat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
3883c5d6d63eSBarry Smith 
3884c5d6d63eSBarry Smith #undef __FUNCT__
3885c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit"
3886dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile)
3887c5d6d63eSBarry Smith {
3888dfbe8321SBarry Smith   PetscErrorCode    ierr;
388932dcc486SBarry Smith   PetscMPIInt       rank;
3890b1d57f15SBarry Smith   PetscInt          m,N,i,rstart,nnz;
3891de4209c5SBarry Smith   size_t            len;
3892b1d57f15SBarry Smith   const PetscInt    *indx;
3893c5d6d63eSBarry Smith   PetscViewer       out;
3894c5d6d63eSBarry Smith   char              *name;
3895c5d6d63eSBarry Smith   Mat               B;
3896b3cc6726SBarry Smith   const PetscScalar *values;
3897c5d6d63eSBarry Smith 
3898c5d6d63eSBarry Smith   PetscFunctionBegin;
3899c5d6d63eSBarry Smith   ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr);
3900c5d6d63eSBarry Smith   ierr = MatGetSize(A,0,&N);CHKERRQ(ierr);
3901f204ca49SKris Buschelman   /* Should this be the type of the diagonal block of A? */
3902f69a0ea3SMatthew Knepley   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
3903f69a0ea3SMatthew Knepley   ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr);
3904f204ca49SKris Buschelman   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
3905f204ca49SKris Buschelman   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
3906c5d6d63eSBarry Smith   ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr);
3907c5d6d63eSBarry Smith   for (i=0;i<m;i++) {
3908c5d6d63eSBarry Smith     ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
3909c5d6d63eSBarry Smith     ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
3910c5d6d63eSBarry Smith     ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
3911c5d6d63eSBarry Smith   }
3912c5d6d63eSBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3913c5d6d63eSBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3914c5d6d63eSBarry Smith 
39157adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);
3916c5d6d63eSBarry Smith   ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr);
3917c5d6d63eSBarry Smith   ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr);
3918c5d6d63eSBarry Smith   sprintf(name,"%s.%d",outfile,rank);
3919852598b0SBarry Smith   ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr);
3920c5d6d63eSBarry Smith   ierr = PetscFree(name);
3921c5d6d63eSBarry Smith   ierr = MatView(B,out);CHKERRQ(ierr);
3922c5d6d63eSBarry Smith   ierr = PetscViewerDestroy(out);CHKERRQ(ierr);
3923c5d6d63eSBarry Smith   ierr = MatDestroy(B);CHKERRQ(ierr);
3924c5d6d63eSBarry Smith   PetscFunctionReturn(0);
3925c5d6d63eSBarry Smith }
3926e5f2cdd8SHong Zhang 
392751a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
392851a7d1a8SHong Zhang #undef __FUNCT__
392951a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI"
3930be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A)
393151a7d1a8SHong Zhang {
393251a7d1a8SHong Zhang   PetscErrorCode       ierr;
3933671beff6SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
3934776b82aeSLisandro Dalcin   PetscContainer       container;
393551a7d1a8SHong Zhang 
393651a7d1a8SHong Zhang   PetscFunctionBegin;
3937671beff6SHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
3938671beff6SHong Zhang   if (container) {
3939776b82aeSLisandro Dalcin     ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
394051a7d1a8SHong Zhang     ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
39413e06a4e6SHong Zhang     ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
39423e06a4e6SHong Zhang     ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
394351a7d1a8SHong Zhang     ierr = PetscFree(merge->bi);CHKERRQ(ierr);
394451a7d1a8SHong Zhang     ierr = PetscFree(merge->bj);CHKERRQ(ierr);
394502c68681SHong Zhang     ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
394602c68681SHong Zhang     ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
394705b42c5fSBarry Smith     ierr = PetscFree(merge->coi);CHKERRQ(ierr);
394805b42c5fSBarry Smith     ierr = PetscFree(merge->coj);CHKERRQ(ierr);
394905b42c5fSBarry Smith     ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
39502c72b5baSSatish Balay     ierr = PetscFree(merge->rowmap.range);CHKERRQ(ierr);
3951671beff6SHong Zhang 
3952776b82aeSLisandro Dalcin     ierr = PetscContainerDestroy(container);CHKERRQ(ierr);
3953671beff6SHong Zhang     ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr);
3954671beff6SHong Zhang   }
395551a7d1a8SHong Zhang   ierr = PetscFree(merge);CHKERRQ(ierr);
395651a7d1a8SHong Zhang 
395751a7d1a8SHong Zhang   ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
395851a7d1a8SHong Zhang   PetscFunctionReturn(0);
395951a7d1a8SHong Zhang }
396051a7d1a8SHong Zhang 
39617c4f633dSBarry Smith #include "../src/mat/utils/freespace.h"
3962be0fcf8dSHong Zhang #include "petscbt.h"
39634ebed01fSBarry Smith 
3964e5f2cdd8SHong Zhang #undef __FUNCT__
396538f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric"
/*@C
      MatMerge_SeqsToMPINumeric - Fills in the numerical values of a parallel
                 MPIAIJ matrix, whose nonzero structure was created by
                 MatMerge_SeqsToMPISymbolic(), by adding together the
                 sequential matrices from each processor

    Collective on MPI_Comm

   Input Parameter:
.    seqmat - the input sequential matrix

   Output Parameter:
.    mpimat - the parallel matrix (created by the symbolic phase) whose
             numerical values are set by this routine

    Level: advanced

   Notes:
     The dimensions of the sequential matrix in each processor MUST be the same.
     The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be
     destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat.
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat)
{
  PetscErrorCode       ierr;
  MPI_Comm             comm=((PetscObject)mpimat)->comm;
  Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
  PetscMPIInt          size,rank,taga,*len_s;
  PetscInt             N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j;
  PetscInt             proc,m;
  PetscInt             **buf_ri,**buf_rj;
  PetscInt             k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj;
  PetscInt             nrows,**buf_ri_k,**nextrow,**nextai;
  MPI_Request          *s_waits,*r_waits;
  MPI_Status           *status;
  MatScalar            *aa=a->a;
  MatScalar            **abuf_r,*ba_i;
  Mat_Merge_SeqsToMPI  *merge;
  PetscContainer       container;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  /* retrieve the merge work structure left by the symbolic phase.
     NOTE(review): merge is used below even if the query found no container,
     in which case it is uninitialized — this assumes callers always run the
     symbolic phase first; confirm and consider an explicit error check */
  ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
  if (container) {
    ierr  = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
  }
  bi     = merge->bi;      /* row offsets of the merged (local) structure */
  bj     = merge->bj;      /* column indices of the merged structure */
  buf_ri = merge->buf_ri;  /* received i-structures from the symbolic phase */
  buf_rj = merge->buf_rj;  /* received j-structures from the symbolic phase */

  ierr   = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
  owners = merge->rowmap.range;  /* global row-ownership ranges */
  len_s  = merge->len_s;         /* number of values to send to each process */

  /* send and recv matrix values */
  /*-----------------------------*/
  ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr);
  ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr);

  ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr);
  for (proc=0,k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    /* send the slice of seqmat's values whose rows are owned by 'proc' */
    i = owners[proc];
    ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr);
    k++;
  }

  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);}
  ierr = PetscFree(status);CHKERRQ(ierr);

  ierr = PetscFree(s_waits);CHKERRQ(ierr);
  ierr = PetscFree(r_waits);CHKERRQ(ierr);

  /* insert mat values of mpimat */
  /*----------------------------*/
  /* NOTE(review): ba_i is a MatScalar* but is sized with sizeof(PetscScalar);
     harmless over-allocation when MatScalar is narrower, but sizeof(MatScalar)
     would be the precise size — confirm intent */
  ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr);
  /* one contiguous allocation split into three parallel cursor arrays */
  ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
  nextrow = buf_ri_k + merge->nrecv;
  nextai  = nextrow + merge->nrecv;

  for (k=0; k<merge->nrecv; k++){
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows = *(buf_ri_k[k]);
    nextrow[k]  = buf_ri_k[k]+1;  /* next row number of k-th recved i-structure */
    nextai[k]   = buf_ri_k[k] + (nrows + 1);/* points to the next i-structure of k-th recved i-structure */
  }

  /* set values of ba: for each local row, sum the local contribution and all
     received contributions into ba_i, then insert into mpimat */
  m = merge->rowmap.n;
  for (i=0; i<m; i++) {
    arow = owners[rank] + i;  /* global row index */
    bj_i = bj+bi[i];  /* col indices of the i-th row of mpimat */
    bnzi = bi[i+1] - bi[i];
    ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr);

    /* add local non-zero vals of this proc's seqmat into ba */
    anzi = ai[arow+1] - ai[arow];
    aj   = a->j + ai[arow];
    aa   = a->a + ai[arow];
    nextaj = 0;
    /* merge-walk: bj_i is a superset of aj, both sorted; advance j over bj_i,
       consuming aj entries as their columns match */
    for (j=0; nextaj<anzi; j++){
      if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
        ba_i[j] += aa[nextaj++];
      }
    }

    /* add received vals into ba */
    for (k=0; k<merge->nrecv; k++){ /* k-th received message */
      /* i-th row */
      if (i == *nextrow[k]) {
        anzi = *(nextai[k]+1) - *nextai[k];
        aj   = buf_rj[k] + *(nextai[k]);
        aa   = abuf_r[k] + *(nextai[k]);
        nextaj = 0;
        for (j=0; nextaj<anzi; j++){
          if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
            ba_i[j] += aa[nextaj++];
          }
        }
        nextrow[k]++; nextai[k]++;
      }
    }
    ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = PetscFree(abuf_r);CHKERRQ(ierr);
  ierr = PetscFree(ba_i);CHKERRQ(ierr);
  ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
410638f152feSBarry Smith 
410738f152feSBarry Smith #undef __FUNCT__
410838f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic"
4109be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat)
4110e5f2cdd8SHong Zhang {
4111f08fae4eSHong Zhang   PetscErrorCode       ierr;
411255a3bba9SHong Zhang   Mat                  B_mpi;
4113c2234fe3SHong Zhang   Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
4114b1d57f15SBarry Smith   PetscMPIInt          size,rank,tagi,tagj,*len_s,*len_si,*len_ri;
4115b1d57f15SBarry Smith   PetscInt             **buf_rj,**buf_ri,**buf_ri_k;
4116d0f46423SBarry Smith   PetscInt             M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j;
4117b1d57f15SBarry Smith   PetscInt             len,proc,*dnz,*onz;
4118b1d57f15SBarry Smith   PetscInt             k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0;
4119b1d57f15SBarry Smith   PetscInt             nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai;
412055d1abb9SHong Zhang   MPI_Request          *si_waits,*sj_waits,*ri_waits,*rj_waits;
412158cb9c82SHong Zhang   MPI_Status           *status;
4122a1a86e44SBarry Smith   PetscFreeSpaceList   free_space=PETSC_NULL,current_space=PETSC_NULL;
4123be0fcf8dSHong Zhang   PetscBT              lnkbt;
412451a7d1a8SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
4125776b82aeSLisandro Dalcin   PetscContainer       container;
412602c68681SHong Zhang 
4127e5f2cdd8SHong Zhang   PetscFunctionBegin;
41284ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
41293c2c1871SHong Zhang 
413038f152feSBarry Smith   /* make sure it is a PETSc comm */
413138f152feSBarry Smith   ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr);
4132e5f2cdd8SHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
4133e5f2cdd8SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
413455d1abb9SHong Zhang 
413551a7d1a8SHong Zhang   ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr);
4136c2234fe3SHong Zhang   ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
4137e5f2cdd8SHong Zhang 
41386abd8857SHong Zhang   /* determine row ownership */
4139f08fae4eSHong Zhang   /*---------------------------------------------------------*/
4140b167c4dbSHong Zhang   ierr = PetscMapInitialize(comm,&merge->rowmap);CHKERRQ(ierr);
4141899cda47SBarry Smith   merge->rowmap.n = m;
4142899cda47SBarry Smith   merge->rowmap.N = M;
4143fc42d0c8SSatish Balay   merge->rowmap.bs = 1;
41446148ca0dSBarry Smith   ierr = PetscMapSetUp(&merge->rowmap);CHKERRQ(ierr);
4145b1d57f15SBarry Smith   ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr);
4146b1d57f15SBarry Smith   ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr);
414755d1abb9SHong Zhang 
4148357abbc8SBarry Smith   m      = merge->rowmap.n;
4149357abbc8SBarry Smith   M      = merge->rowmap.N;
4150357abbc8SBarry Smith   owners = merge->rowmap.range;
41516abd8857SHong Zhang 
41526abd8857SHong Zhang   /* determine the number of messages to send, their lengths */
41536abd8857SHong Zhang   /*---------------------------------------------------------*/
41543e06a4e6SHong Zhang   len_s  = merge->len_s;
415551a7d1a8SHong Zhang 
41562257cef7SHong Zhang   len = 0;  /* length of buf_si[] */
4157c2234fe3SHong Zhang   merge->nsend = 0;
4158409913e3SHong Zhang   for (proc=0; proc<size; proc++){
41592257cef7SHong Zhang     len_si[proc] = 0;
41603e06a4e6SHong Zhang     if (proc == rank){
41616abd8857SHong Zhang       len_s[proc] = 0;
41623e06a4e6SHong Zhang     } else {
416302c68681SHong Zhang       len_si[proc] = owners[proc+1] - owners[proc] + 1;
41643e06a4e6SHong Zhang       len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */
41653e06a4e6SHong Zhang     }
41663e06a4e6SHong Zhang     if (len_s[proc]) {
4167c2234fe3SHong Zhang       merge->nsend++;
41682257cef7SHong Zhang       nrows = 0;
41692257cef7SHong Zhang       for (i=owners[proc]; i<owners[proc+1]; i++){
41702257cef7SHong Zhang         if (ai[i+1] > ai[i]) nrows++;
41712257cef7SHong Zhang       }
41722257cef7SHong Zhang       len_si[proc] = 2*(nrows+1);
41732257cef7SHong Zhang       len += len_si[proc];
4174409913e3SHong Zhang     }
417558cb9c82SHong Zhang   }
4176409913e3SHong Zhang 
41772257cef7SHong Zhang   /* determine the number and length of messages to receive for ij-structure */
41782257cef7SHong Zhang   /*-------------------------------------------------------------------------*/
417951a7d1a8SHong Zhang   ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr);
418055d1abb9SHong Zhang   ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr);
4181671beff6SHong Zhang 
41823e06a4e6SHong Zhang   /* post the Irecv of j-structure */
41833e06a4e6SHong Zhang   /*-------------------------------*/
41842c72b5baSSatish Balay   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
41853e06a4e6SHong Zhang   ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr);
418602c68681SHong Zhang 
41873e06a4e6SHong Zhang   /* post the Isend of j-structure */
4188affca5deSHong Zhang   /*--------------------------------*/
41892257cef7SHong Zhang   ierr = PetscMalloc((2*merge->nsend+1)*sizeof(MPI_Request),&si_waits);CHKERRQ(ierr);
419002c68681SHong Zhang   sj_waits = si_waits + merge->nsend;
41913e06a4e6SHong Zhang 
41922257cef7SHong Zhang   for (proc=0, k=0; proc<size; proc++){
4193409913e3SHong Zhang     if (!len_s[proc]) continue;
419402c68681SHong Zhang     i = owners[proc];
4195b1d57f15SBarry Smith     ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr);
419651a7d1a8SHong Zhang     k++;
419751a7d1a8SHong Zhang   }
419851a7d1a8SHong Zhang 
41993e06a4e6SHong Zhang   /* receives and sends of j-structure are complete */
42003e06a4e6SHong Zhang   /*------------------------------------------------*/
42010c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);}
42020c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);}
420302c68681SHong Zhang 
420402c68681SHong Zhang   /* send and recv i-structure */
420502c68681SHong Zhang   /*---------------------------*/
42062c72b5baSSatish Balay   ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
420702c68681SHong Zhang   ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr);
420802c68681SHong Zhang 
4209b1d57f15SBarry Smith   ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr);
42103e06a4e6SHong Zhang   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
42112257cef7SHong Zhang   for (proc=0,k=0; proc<size; proc++){
421202c68681SHong Zhang     if (!len_s[proc]) continue;
42133e06a4e6SHong Zhang     /* form outgoing message for i-structure:
42143e06a4e6SHong Zhang          buf_si[0]:                 nrows to be sent
42153e06a4e6SHong Zhang                [1:nrows]:           row index (global)
42163e06a4e6SHong Zhang                [nrows+1:2*nrows+1]: i-structure index
42173e06a4e6SHong Zhang     */
42183e06a4e6SHong Zhang     /*-------------------------------------------*/
42192257cef7SHong Zhang     nrows = len_si[proc]/2 - 1;
42203e06a4e6SHong Zhang     buf_si_i    = buf_si + nrows+1;
42213e06a4e6SHong Zhang     buf_si[0]   = nrows;
42223e06a4e6SHong Zhang     buf_si_i[0] = 0;
42233e06a4e6SHong Zhang     nrows = 0;
42243e06a4e6SHong Zhang     for (i=owners[proc]; i<owners[proc+1]; i++){
42253e06a4e6SHong Zhang       anzi = ai[i+1] - ai[i];
42263e06a4e6SHong Zhang       if (anzi) {
42273e06a4e6SHong Zhang         buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */
42283e06a4e6SHong Zhang         buf_si[nrows+1] = i-owners[proc]; /* local row index */
42293e06a4e6SHong Zhang         nrows++;
42303e06a4e6SHong Zhang       }
42313e06a4e6SHong Zhang     }
4232b1d57f15SBarry Smith     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr);
423302c68681SHong Zhang     k++;
42342257cef7SHong Zhang     buf_si += len_si[proc];
423502c68681SHong Zhang   }
42362257cef7SHong Zhang 
42370c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);}
42380c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);}
423902c68681SHong Zhang 
4240ae15b995SBarry Smith   ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr);
42413e06a4e6SHong Zhang   for (i=0; i<merge->nrecv; i++){
4242ae15b995SBarry Smith     ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr);
42433e06a4e6SHong Zhang   }
42443e06a4e6SHong Zhang 
42453e06a4e6SHong Zhang   ierr = PetscFree(len_si);CHKERRQ(ierr);
424602c68681SHong Zhang   ierr = PetscFree(len_ri);CHKERRQ(ierr);
424702c68681SHong Zhang   ierr = PetscFree(rj_waits);CHKERRQ(ierr);
42483e06a4e6SHong Zhang   ierr = PetscFree(si_waits);CHKERRQ(ierr);
42492257cef7SHong Zhang   ierr = PetscFree(ri_waits);CHKERRQ(ierr);
42503e06a4e6SHong Zhang   ierr = PetscFree(buf_s);CHKERRQ(ierr);
4251bcc1bcd5SHong Zhang   ierr = PetscFree(status);CHKERRQ(ierr);
425258cb9c82SHong Zhang 
4253bcc1bcd5SHong Zhang   /* compute a local seq matrix in each processor */
4254bcc1bcd5SHong Zhang   /*----------------------------------------------*/
425558cb9c82SHong Zhang   /* allocate bi array and free space for accumulating nonzero column info */
4256b1d57f15SBarry Smith   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr);
425758cb9c82SHong Zhang   bi[0] = 0;
425858cb9c82SHong Zhang 
4259be0fcf8dSHong Zhang   /* create and initialize a linked list */
4260be0fcf8dSHong Zhang   nlnk = N+1;
4261be0fcf8dSHong Zhang   ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
426258cb9c82SHong Zhang 
4263bcc1bcd5SHong Zhang   /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */
426458cb9c82SHong Zhang   len = 0;
4265bcc1bcd5SHong Zhang   len  = ai[owners[rank+1]] - ai[owners[rank]];
4266a1a86e44SBarry Smith   ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr);
426758cb9c82SHong Zhang   current_space = free_space;
426858cb9c82SHong Zhang 
4269bcc1bcd5SHong Zhang   /* determine symbolic info for each local row */
4270b1d57f15SBarry Smith   ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
42713e06a4e6SHong Zhang   nextrow = buf_ri_k + merge->nrecv;
42723e06a4e6SHong Zhang   nextai  = nextrow + merge->nrecv;
42733e06a4e6SHong Zhang   for (k=0; k<merge->nrecv; k++){
42742257cef7SHong Zhang     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
42753e06a4e6SHong Zhang     nrows = *buf_ri_k[k];
42763e06a4e6SHong Zhang     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
42772257cef7SHong Zhang     nextai[k]   = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure  */
42783e06a4e6SHong Zhang   }
42792257cef7SHong Zhang 
4280bcc1bcd5SHong Zhang   ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
4281bcc1bcd5SHong Zhang   len = 0;
428258cb9c82SHong Zhang   for (i=0;i<m;i++) {
428358cb9c82SHong Zhang     bnzi   = 0;
428458cb9c82SHong Zhang     /* add local non-zero cols of this proc's seqmat into lnk */
428558cb9c82SHong Zhang     arow   = owners[rank] + i;
428658cb9c82SHong Zhang     anzi   = ai[arow+1] - ai[arow];
428758cb9c82SHong Zhang     aj     = a->j + ai[arow];
4288be0fcf8dSHong Zhang     ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
428958cb9c82SHong Zhang     bnzi += nlnk;
429058cb9c82SHong Zhang     /* add received col data into lnk */
429151a7d1a8SHong Zhang     for (k=0; k<merge->nrecv; k++){ /* k-th received message */
429255d1abb9SHong Zhang       if (i == *nextrow[k]) { /* i-th row */
42933e06a4e6SHong Zhang         anzi = *(nextai[k]+1) - *nextai[k];
42943e06a4e6SHong Zhang         aj   = buf_rj[k] + *nextai[k];
42953e06a4e6SHong Zhang         ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
42963e06a4e6SHong Zhang         bnzi += nlnk;
42973e06a4e6SHong Zhang         nextrow[k]++; nextai[k]++;
42983e06a4e6SHong Zhang       }
429958cb9c82SHong Zhang     }
4300bcc1bcd5SHong Zhang     if (len < bnzi) len = bnzi;  /* =max(bnzi) */
430158cb9c82SHong Zhang 
430258cb9c82SHong Zhang     /* if free space is not available, make more free space */
430358cb9c82SHong Zhang     if (current_space->local_remaining<bnzi) {
43044238b7adSHong Zhang       ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,&current_space);CHKERRQ(ierr);
430558cb9c82SHong Zhang       nspacedouble++;
430658cb9c82SHong Zhang     }
430758cb9c82SHong Zhang     /* copy data into free space, then initialize lnk */
4308be0fcf8dSHong Zhang     ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr);
4309bcc1bcd5SHong Zhang     ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr);
4310bcc1bcd5SHong Zhang 
431158cb9c82SHong Zhang     current_space->array           += bnzi;
431258cb9c82SHong Zhang     current_space->local_used      += bnzi;
431358cb9c82SHong Zhang     current_space->local_remaining -= bnzi;
431458cb9c82SHong Zhang 
431558cb9c82SHong Zhang     bi[i+1] = bi[i] + bnzi;
431658cb9c82SHong Zhang   }
4317bcc1bcd5SHong Zhang 
4318bcc1bcd5SHong Zhang   ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);
4319bcc1bcd5SHong Zhang 
4320b1d57f15SBarry Smith   ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr);
4321a1a86e44SBarry Smith   ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
4322be0fcf8dSHong Zhang   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
4323409913e3SHong Zhang 
4324bcc1bcd5SHong Zhang   /* create symbolic parallel matrix B_mpi */
4325bcc1bcd5SHong Zhang   /*---------------------------------------*/
4326f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr);
432754b84b50SHong Zhang   if (n==PETSC_DECIDE) {
4328f69a0ea3SMatthew Knepley     ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr);
432954b84b50SHong Zhang   } else {
4330f69a0ea3SMatthew Knepley     ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
433154b84b50SHong Zhang   }
4332bcc1bcd5SHong Zhang   ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr);
4333bcc1bcd5SHong Zhang   ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr);
4334bcc1bcd5SHong Zhang   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
433558cb9c82SHong Zhang 
43366abd8857SHong Zhang   /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */
43376abd8857SHong Zhang   B_mpi->assembled     = PETSC_FALSE;
4338affca5deSHong Zhang   B_mpi->ops->destroy  = MatDestroy_MPIAIJ_SeqsToMPI;
4339affca5deSHong Zhang   merge->bi            = bi;
4340affca5deSHong Zhang   merge->bj            = bj;
434102c68681SHong Zhang   merge->buf_ri        = buf_ri;
434202c68681SHong Zhang   merge->buf_rj        = buf_rj;
4343de0260b3SHong Zhang   merge->coi           = PETSC_NULL;
4344de0260b3SHong Zhang   merge->coj           = PETSC_NULL;
4345de0260b3SHong Zhang   merge->owners_co     = PETSC_NULL;
4346affca5deSHong Zhang 
4347affca5deSHong Zhang   /* attach the supporting struct to B_mpi for reuse */
4348776b82aeSLisandro Dalcin   ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
4349776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
4350affca5deSHong Zhang   ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
4351affca5deSHong Zhang   *mpimat = B_mpi;
435238f152feSBarry Smith 
435338f152feSBarry Smith   ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);
43544ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
4355e5f2cdd8SHong Zhang   PetscFunctionReturn(0);
4356e5f2cdd8SHong Zhang }
435725616d81SHong Zhang 
435838f152feSBarry Smith #undef __FUNCT__
435938f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPI"
4360be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
436155d1abb9SHong Zhang {
436255d1abb9SHong Zhang   PetscErrorCode   ierr;
436355d1abb9SHong Zhang 
436455d1abb9SHong Zhang   PetscFunctionBegin;
43654ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
436655d1abb9SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
436755d1abb9SHong Zhang     ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
436855d1abb9SHong Zhang   }
436955d1abb9SHong Zhang   ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
43704ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
437155d1abb9SHong Zhang   PetscFunctionReturn(0);
437255d1abb9SHong Zhang }
43734ebed01fSBarry Smith 
437425616d81SHong Zhang #undef __FUNCT__
437525616d81SHong Zhang #define __FUNCT__ "MatGetLocalMat"
4376bc08b0f1SBarry Smith /*@
437732fba14fSHong Zhang      MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows
437825616d81SHong Zhang 
437932fba14fSHong Zhang     Not Collective
438025616d81SHong Zhang 
438125616d81SHong Zhang    Input Parameters:
438225616d81SHong Zhang +    A - the matrix
438325616d81SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
438425616d81SHong Zhang 
438525616d81SHong Zhang    Output Parameter:
438625616d81SHong Zhang .    A_loc - the local sequential matrix generated
438725616d81SHong Zhang 
438825616d81SHong Zhang     Level: developer
438925616d81SHong Zhang 
439025616d81SHong Zhang @*/
4391be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
439225616d81SHong Zhang {
439325616d81SHong Zhang   PetscErrorCode  ierr;
439401b7ae99SHong Zhang   Mat_MPIAIJ      *mpimat=(Mat_MPIAIJ*)A->data;
439501b7ae99SHong Zhang   Mat_SeqAIJ      *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
439601b7ae99SHong Zhang   PetscInt        *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
4397a77337e4SBarry Smith   MatScalar       *aa=a->a,*ba=b->a,*cam;
4398a77337e4SBarry Smith   PetscScalar     *ca;
4399d0f46423SBarry Smith   PetscInt        am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
44005a7d977cSHong Zhang   PetscInt        *ci,*cj,col,ncols_d,ncols_o,jo;
440125616d81SHong Zhang 
440225616d81SHong Zhang   PetscFunctionBegin;
44034ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
440401b7ae99SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4405dea91ad1SHong Zhang     ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
4406dea91ad1SHong Zhang     ci[0] = 0;
440701b7ae99SHong Zhang     for (i=0; i<am; i++){
4408dea91ad1SHong Zhang       ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
440901b7ae99SHong Zhang     }
4410dea91ad1SHong Zhang     ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
4411dea91ad1SHong Zhang     ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
4412dea91ad1SHong Zhang     k = 0;
441301b7ae99SHong Zhang     for (i=0; i<am; i++) {
44145a7d977cSHong Zhang       ncols_o = bi[i+1] - bi[i];
44155a7d977cSHong Zhang       ncols_d = ai[i+1] - ai[i];
441601b7ae99SHong Zhang       /* off-diagonal portion of A */
44175a7d977cSHong Zhang       for (jo=0; jo<ncols_o; jo++) {
44185a7d977cSHong Zhang         col = cmap[*bj];
44195a7d977cSHong Zhang         if (col >= cstart) break;
44205a7d977cSHong Zhang         cj[k]   = col; bj++;
44215a7d977cSHong Zhang         ca[k++] = *ba++;
44225a7d977cSHong Zhang       }
44235a7d977cSHong Zhang       /* diagonal portion of A */
44245a7d977cSHong Zhang       for (j=0; j<ncols_d; j++) {
44255a7d977cSHong Zhang         cj[k]   = cstart + *aj++;
44265a7d977cSHong Zhang         ca[k++] = *aa++;
44275a7d977cSHong Zhang       }
44285a7d977cSHong Zhang       /* off-diagonal portion of A */
44295a7d977cSHong Zhang       for (j=jo; j<ncols_o; j++) {
44305a7d977cSHong Zhang         cj[k]   = cmap[*bj++];
44315a7d977cSHong Zhang         ca[k++] = *ba++;
44325a7d977cSHong Zhang       }
443325616d81SHong Zhang     }
4434dea91ad1SHong Zhang     /* put together the new matrix */
4435d0f46423SBarry Smith     ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
4436dea91ad1SHong Zhang     /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
4437dea91ad1SHong Zhang     /* Since these are PETSc arrays, change flags to free them as necessary. */
4438dea91ad1SHong Zhang     mat          = (Mat_SeqAIJ*)(*A_loc)->data;
4439e6b907acSBarry Smith     mat->free_a  = PETSC_TRUE;
4440e6b907acSBarry Smith     mat->free_ij = PETSC_TRUE;
4441dea91ad1SHong Zhang     mat->nonew   = 0;
44425a7d977cSHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
44435a7d977cSHong Zhang     mat=(Mat_SeqAIJ*)(*A_loc)->data;
4444a77337e4SBarry Smith     ci = mat->i; cj = mat->j; cam = mat->a;
44455a7d977cSHong Zhang     for (i=0; i<am; i++) {
44465a7d977cSHong Zhang       /* off-diagonal portion of A */
44475a7d977cSHong Zhang       ncols_o = bi[i+1] - bi[i];
44485a7d977cSHong Zhang       for (jo=0; jo<ncols_o; jo++) {
44495a7d977cSHong Zhang         col = cmap[*bj];
44505a7d977cSHong Zhang         if (col >= cstart) break;
4451a77337e4SBarry Smith         *cam++ = *ba++; bj++;
44525a7d977cSHong Zhang       }
44535a7d977cSHong Zhang       /* diagonal portion of A */
4454ecc9b87dSHong Zhang       ncols_d = ai[i+1] - ai[i];
4455a77337e4SBarry Smith       for (j=0; j<ncols_d; j++) *cam++ = *aa++;
44565a7d977cSHong Zhang       /* off-diagonal portion of A */
4457f33d1a9aSHong Zhang       for (j=jo; j<ncols_o; j++) {
4458a77337e4SBarry Smith         *cam++ = *ba++; bj++;
4459f33d1a9aSHong Zhang       }
44605a7d977cSHong Zhang     }
44615a7d977cSHong Zhang   } else {
44625a7d977cSHong Zhang     SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
446325616d81SHong Zhang   }
446401b7ae99SHong Zhang 
44654ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
446625616d81SHong Zhang   PetscFunctionReturn(0);
446725616d81SHong Zhang }
446825616d81SHong Zhang 
446932fba14fSHong Zhang #undef __FUNCT__
447032fba14fSHong Zhang #define __FUNCT__ "MatGetLocalMatCondensed"
447132fba14fSHong Zhang /*@C
447232fba14fSHong Zhang      MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns
447332fba14fSHong Zhang 
447432fba14fSHong Zhang     Not Collective
447532fba14fSHong Zhang 
447632fba14fSHong Zhang    Input Parameters:
447732fba14fSHong Zhang +    A - the matrix
447832fba14fSHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
447932fba14fSHong Zhang -    row, col - index sets of rows and columns to extract (or PETSC_NULL)
448032fba14fSHong Zhang 
448132fba14fSHong Zhang    Output Parameter:
448232fba14fSHong Zhang .    A_loc - the local sequential matrix generated
448332fba14fSHong Zhang 
448432fba14fSHong Zhang     Level: developer
448532fba14fSHong Zhang 
448632fba14fSHong Zhang @*/
4487be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
448832fba14fSHong Zhang {
448932fba14fSHong Zhang   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
449032fba14fSHong Zhang   PetscErrorCode    ierr;
449132fba14fSHong Zhang   PetscInt          i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
449232fba14fSHong Zhang   IS                isrowa,iscola;
449332fba14fSHong Zhang   Mat               *aloc;
449432fba14fSHong Zhang 
449532fba14fSHong Zhang   PetscFunctionBegin;
44964ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
449732fba14fSHong Zhang   if (!row){
4498d0f46423SBarry Smith     start = A->rmap->rstart; end = A->rmap->rend;
449932fba14fSHong Zhang     ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
450032fba14fSHong Zhang   } else {
450132fba14fSHong Zhang     isrowa = *row;
450232fba14fSHong Zhang   }
450332fba14fSHong Zhang   if (!col){
4504d0f46423SBarry Smith     start = A->cmap->rstart;
450532fba14fSHong Zhang     cmap  = a->garray;
4506d0f46423SBarry Smith     nzA   = a->A->cmap->n;
4507d0f46423SBarry Smith     nzB   = a->B->cmap->n;
450832fba14fSHong Zhang     ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
450932fba14fSHong Zhang     ncols = 0;
451032fba14fSHong Zhang     for (i=0; i<nzB; i++) {
451132fba14fSHong Zhang       if (cmap[i] < start) idx[ncols++] = cmap[i];
451232fba14fSHong Zhang       else break;
451332fba14fSHong Zhang     }
451432fba14fSHong Zhang     imark = i;
451532fba14fSHong Zhang     for (i=0; i<nzA; i++) idx[ncols++] = start + i;
451632fba14fSHong Zhang     for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
451732fba14fSHong Zhang     ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&iscola);CHKERRQ(ierr);
451832fba14fSHong Zhang     ierr = PetscFree(idx);CHKERRQ(ierr);
451932fba14fSHong Zhang   } else {
452032fba14fSHong Zhang     iscola = *col;
452132fba14fSHong Zhang   }
452232fba14fSHong Zhang   if (scall != MAT_INITIAL_MATRIX){
452332fba14fSHong Zhang     ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
452432fba14fSHong Zhang     aloc[0] = *A_loc;
452532fba14fSHong Zhang   }
452632fba14fSHong Zhang   ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
452732fba14fSHong Zhang   *A_loc = aloc[0];
452832fba14fSHong Zhang   ierr = PetscFree(aloc);CHKERRQ(ierr);
452932fba14fSHong Zhang   if (!row){
453032fba14fSHong Zhang     ierr = ISDestroy(isrowa);CHKERRQ(ierr);
453132fba14fSHong Zhang   }
453232fba14fSHong Zhang   if (!col){
453332fba14fSHong Zhang     ierr = ISDestroy(iscola);CHKERRQ(ierr);
453432fba14fSHong Zhang   }
45354ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
453632fba14fSHong Zhang   PetscFunctionReturn(0);
453732fba14fSHong Zhang }
453832fba14fSHong Zhang 
453925616d81SHong Zhang #undef __FUNCT__
454025616d81SHong Zhang #define __FUNCT__ "MatGetBrowsOfAcols"
454125616d81SHong Zhang /*@C
454232fba14fSHong Zhang     MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A
454325616d81SHong Zhang 
454425616d81SHong Zhang     Collective on Mat
454525616d81SHong Zhang 
454625616d81SHong Zhang    Input Parameters:
4547e240928fSHong Zhang +    A,B - the matrices in mpiaij format
454825616d81SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
454925616d81SHong Zhang -    rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL)
455025616d81SHong Zhang 
455125616d81SHong Zhang    Output Parameter:
455225616d81SHong Zhang +    rowb, colb - index sets of rows and columns of B to extract
4553d0f46423SBarry Smith .    brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows
455425616d81SHong Zhang -    B_seq - the sequential matrix generated
455525616d81SHong Zhang 
455625616d81SHong Zhang     Level: developer
455725616d81SHong Zhang 
455825616d81SHong Zhang @*/
4559be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq)
456025616d81SHong Zhang {
4561899cda47SBarry Smith   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
456225616d81SHong Zhang   PetscErrorCode    ierr;
4563b1d57f15SBarry Smith   PetscInt          *idx,i,start,ncols,nzA,nzB,*cmap,imark;
456425616d81SHong Zhang   IS                isrowb,iscolb;
456525616d81SHong Zhang   Mat               *bseq;
456625616d81SHong Zhang 
456725616d81SHong Zhang   PetscFunctionBegin;
4568d0f46423SBarry Smith   if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
4569d0f46423SBarry Smith     SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
457025616d81SHong Zhang   }
45714ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
457225616d81SHong Zhang 
457325616d81SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4574d0f46423SBarry Smith     start = A->cmap->rstart;
457525616d81SHong Zhang     cmap  = a->garray;
4576d0f46423SBarry Smith     nzA   = a->A->cmap->n;
4577d0f46423SBarry Smith     nzB   = a->B->cmap->n;
4578b1d57f15SBarry Smith     ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
457925616d81SHong Zhang     ncols = 0;
45800390132cSHong Zhang     for (i=0; i<nzB; i++) {  /* row < local row index */
458125616d81SHong Zhang       if (cmap[i] < start) idx[ncols++] = cmap[i];
458225616d81SHong Zhang       else break;
458325616d81SHong Zhang     }
458425616d81SHong Zhang     imark = i;
45850390132cSHong Zhang     for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
45860390132cSHong Zhang     for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row index */
458725616d81SHong Zhang     ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&isrowb);CHKERRQ(ierr);
458825616d81SHong Zhang     ierr = PetscFree(idx);CHKERRQ(ierr);
458925616d81SHong Zhang     *brstart = imark;
4590d0f46423SBarry Smith     ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr);
459125616d81SHong Zhang   } else {
459225616d81SHong Zhang     if (!rowb || !colb) SETERRQ(PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX");
459325616d81SHong Zhang     isrowb = *rowb; iscolb = *colb;
459425616d81SHong Zhang     ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr);
459525616d81SHong Zhang     bseq[0] = *B_seq;
459625616d81SHong Zhang   }
459725616d81SHong Zhang   ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr);
459825616d81SHong Zhang   *B_seq = bseq[0];
459925616d81SHong Zhang   ierr = PetscFree(bseq);CHKERRQ(ierr);
460025616d81SHong Zhang   if (!rowb){
460125616d81SHong Zhang     ierr = ISDestroy(isrowb);CHKERRQ(ierr);
460225616d81SHong Zhang   } else {
460325616d81SHong Zhang     *rowb = isrowb;
460425616d81SHong Zhang   }
460525616d81SHong Zhang   if (!colb){
460625616d81SHong Zhang     ierr = ISDestroy(iscolb);CHKERRQ(ierr);
460725616d81SHong Zhang   } else {
460825616d81SHong Zhang     *colb = iscolb;
460925616d81SHong Zhang   }
46104ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
461125616d81SHong Zhang   PetscFunctionReturn(0);
461225616d81SHong Zhang }
4613429d309bSHong Zhang 
4614a61c8c0fSHong Zhang #undef __FUNCT__
4615a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols"
4616429d309bSHong Zhang /*@C
4617429d309bSHong Zhang     MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns
461801b7ae99SHong Zhang     of the OFF-DIAGONAL portion of local A
4619429d309bSHong Zhang 
4620429d309bSHong Zhang     Collective on Mat
4621429d309bSHong Zhang 
4622429d309bSHong Zhang    Input Parameters:
4623429d309bSHong Zhang +    A,B - the matrices in mpiaij format
462487025532SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
462587025532SHong Zhang .    startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL)
462687025532SHong Zhang -    bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL)
4627429d309bSHong Zhang 
4628429d309bSHong Zhang    Output Parameter:
462987025532SHong Zhang +    B_oth - the sequential matrix generated
4630429d309bSHong Zhang 
4631429d309bSHong Zhang     Level: developer
4632429d309bSHong Zhang 
4633429d309bSHong Zhang @*/
4634dd6ea824SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,MatScalar **bufa_ptr,Mat *B_oth)
4635429d309bSHong Zhang {
4636a6b2eed2SHong Zhang   VecScatter_MPI_General *gen_to,*gen_from;
4637429d309bSHong Zhang   PetscErrorCode         ierr;
4638899cda47SBarry Smith   Mat_MPIAIJ             *a=(Mat_MPIAIJ*)A->data;
463987025532SHong Zhang   Mat_SeqAIJ             *b_oth;
4640a6b2eed2SHong Zhang   VecScatter             ctx=a->Mvctx;
46417adad957SLisandro Dalcin   MPI_Comm               comm=((PetscObject)ctx)->comm;
46427adad957SLisandro Dalcin   PetscMPIInt            *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank;
4643d0f46423SBarry Smith   PetscInt               *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj;
4644dd6ea824SBarry Smith   PetscScalar            *rvalues,*svalues;
4645dd6ea824SBarry Smith   MatScalar              *b_otha,*bufa,*bufA;
4646e42f35eeSHong Zhang   PetscInt               i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len;
4647910ba992SMatthew Knepley   MPI_Request            *rwaits = PETSC_NULL,*swaits = PETSC_NULL;
464887025532SHong Zhang   MPI_Status             *sstatus,rstatus;
4649aa5bb8c0SSatish Balay   PetscMPIInt            jj;
4650e42f35eeSHong Zhang   PetscInt               *cols,sbs,rbs;
4651ba8c8a56SBarry Smith   PetscScalar            *vals;
4652429d309bSHong Zhang 
4653429d309bSHong Zhang   PetscFunctionBegin;
4654d0f46423SBarry Smith   if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
4655d0f46423SBarry Smith     SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
4656429d309bSHong Zhang   }
46574ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
4658a6b2eed2SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
4659a6b2eed2SHong Zhang 
4660a6b2eed2SHong Zhang   gen_to   = (VecScatter_MPI_General*)ctx->todata;
4661a6b2eed2SHong Zhang   gen_from = (VecScatter_MPI_General*)ctx->fromdata;
4662e42f35eeSHong Zhang   rvalues  = gen_from->values; /* holds the length of receiving row */
4663e42f35eeSHong Zhang   svalues  = gen_to->values;   /* holds the length of sending row */
4664a6b2eed2SHong Zhang   nrecvs   = gen_from->n;
4665a6b2eed2SHong Zhang   nsends   = gen_to->n;
4666d7ee0231SBarry Smith 
4667d7ee0231SBarry Smith   ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr);
4668a6b2eed2SHong Zhang   srow     = gen_to->indices;   /* local row index to be sent */
4669a6b2eed2SHong Zhang   sstarts  = gen_to->starts;
4670a6b2eed2SHong Zhang   sprocs   = gen_to->procs;
4671a6b2eed2SHong Zhang   sstatus  = gen_to->sstatus;
4672e42f35eeSHong Zhang   sbs      = gen_to->bs;
4673e42f35eeSHong Zhang   rstarts  = gen_from->starts;
4674e42f35eeSHong Zhang   rprocs   = gen_from->procs;
4675e42f35eeSHong Zhang   rbs      = gen_from->bs;
4676429d309bSHong Zhang 
4677dea91ad1SHong Zhang   if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX;
4678429d309bSHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4679a6b2eed2SHong Zhang     /* i-array */
4680a6b2eed2SHong Zhang     /*---------*/
4681a6b2eed2SHong Zhang     /*  post receives */
4682a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
4683e42f35eeSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
4684e42f35eeSHong Zhang       nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */
468587025532SHong Zhang       ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
4686429d309bSHong Zhang     }
4687a6b2eed2SHong Zhang 
4688a6b2eed2SHong Zhang     /* pack the outgoing message */
468987025532SHong Zhang     ierr = PetscMalloc((nsends+nrecvs+3)*sizeof(PetscInt),&sstartsj);CHKERRQ(ierr);
4690a6b2eed2SHong Zhang     rstartsj = sstartsj + nsends +1;
4691a6b2eed2SHong Zhang     sstartsj[0] = 0;  rstartsj[0] = 0;
4692a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be sent */
4693a6b2eed2SHong Zhang     k = 0;
4694a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
4695e42f35eeSHong Zhang       rowlen = (PetscInt*)svalues + sstarts[i]*sbs;
4696e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
469787025532SHong Zhang       for (j=0; j<nrows; j++) {
4698d0f46423SBarry Smith         row = srow[k] + B->rmap->range[rank]; /* global row idx */
4699e42f35eeSHong Zhang         for (l=0; l<sbs; l++){
4700e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */
4701e42f35eeSHong Zhang           rowlen[j*sbs+l] = ncols;
4702e42f35eeSHong Zhang           len += ncols;
4703e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
4704e42f35eeSHong Zhang         }
4705a6b2eed2SHong Zhang         k++;
4706429d309bSHong Zhang       }
4707e42f35eeSHong Zhang       ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
4708dea91ad1SHong Zhang       sstartsj[i+1] = len;  /* starting point of (i+1)-th outgoing msg in bufj and bufa */
4709429d309bSHong Zhang     }
471087025532SHong Zhang     /* recvs and sends of i-array are completed */
471187025532SHong Zhang     i = nrecvs;
471287025532SHong Zhang     while (i--) {
4713aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
471487025532SHong Zhang     }
47150c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
4716e42f35eeSHong Zhang 
4717a6b2eed2SHong Zhang     /* allocate buffers for sending j and a arrays */
4718a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr);
4719a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr);
4720a6b2eed2SHong Zhang 
472187025532SHong Zhang     /* create i-array of B_oth */
472287025532SHong Zhang     ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr);
472387025532SHong Zhang     b_othi[0] = 0;
4724a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be received */
4725a6b2eed2SHong Zhang     k = 0;
4726a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
4727fd0ff01cSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
4728e42f35eeSHong Zhang       nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */
472987025532SHong Zhang       for (j=0; j<nrows; j++) {
473087025532SHong Zhang         b_othi[k+1] = b_othi[k] + rowlen[j];
4731a6b2eed2SHong Zhang         len += rowlen[j]; k++;
4732a6b2eed2SHong Zhang       }
4733dea91ad1SHong Zhang       rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */
4734a6b2eed2SHong Zhang     }
4735a6b2eed2SHong Zhang 
473687025532SHong Zhang     /* allocate space for j and a arrrays of B_oth */
473787025532SHong Zhang     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr);
4738dd6ea824SBarry Smith     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr);
4739a6b2eed2SHong Zhang 
474087025532SHong Zhang     /* j-array */
474187025532SHong Zhang     /*---------*/
4742a6b2eed2SHong Zhang     /*  post receives of j-array */
4743a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
474487025532SHong Zhang       nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
474587025532SHong Zhang       ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
4746a6b2eed2SHong Zhang     }
4747e42f35eeSHong Zhang 
4748e42f35eeSHong Zhang     /* pack the outgoing message j-array */
4749a6b2eed2SHong Zhang     k = 0;
4750a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
4751e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
4752a6b2eed2SHong Zhang       bufJ = bufj+sstartsj[i];
475387025532SHong Zhang       for (j=0; j<nrows; j++) {
4754d0f46423SBarry Smith         row  = srow[k++] + B->rmap->range[rank]; /* global row idx */
4755e42f35eeSHong Zhang         for (ll=0; ll<sbs; ll++){
4756e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
4757a6b2eed2SHong Zhang           for (l=0; l<ncols; l++){
4758a6b2eed2SHong Zhang             *bufJ++ = cols[l];
475987025532SHong Zhang           }
4760e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
4761e42f35eeSHong Zhang         }
476287025532SHong Zhang       }
476387025532SHong Zhang       ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
476487025532SHong Zhang     }
476587025532SHong Zhang 
476687025532SHong Zhang     /* recvs and sends of j-array are completed */
476787025532SHong Zhang     i = nrecvs;
476887025532SHong Zhang     while (i--) {
4769aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
477087025532SHong Zhang     }
47710c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
477287025532SHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
477387025532SHong Zhang     sstartsj = *startsj;
477487025532SHong Zhang     rstartsj = sstartsj + nsends +1;
477587025532SHong Zhang     bufa     = *bufa_ptr;
477687025532SHong Zhang     b_oth    = (Mat_SeqAIJ*)(*B_oth)->data;
477787025532SHong Zhang     b_otha   = b_oth->a;
477887025532SHong Zhang   } else {
477987025532SHong Zhang     SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container");
478087025532SHong Zhang   }
478187025532SHong Zhang 
478287025532SHong Zhang   /* a-array */
478387025532SHong Zhang   /*---------*/
478487025532SHong Zhang   /*  post receives of a-array */
478587025532SHong Zhang   for (i=0; i<nrecvs; i++){
478687025532SHong Zhang     nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
478787025532SHong Zhang     ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
478887025532SHong Zhang   }
4789e42f35eeSHong Zhang 
4790e42f35eeSHong Zhang   /* pack the outgoing message a-array */
479187025532SHong Zhang   k = 0;
479287025532SHong Zhang   for (i=0; i<nsends; i++){
4793e42f35eeSHong Zhang     nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
479487025532SHong Zhang     bufA = bufa+sstartsj[i];
479587025532SHong Zhang     for (j=0; j<nrows; j++) {
4796d0f46423SBarry Smith       row  = srow[k++] + B->rmap->range[rank]; /* global row idx */
4797e42f35eeSHong Zhang       for (ll=0; ll<sbs; ll++){
4798e42f35eeSHong Zhang         ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
479987025532SHong Zhang         for (l=0; l<ncols; l++){
4800a6b2eed2SHong Zhang           *bufA++ = vals[l];
4801a6b2eed2SHong Zhang         }
4802e42f35eeSHong Zhang         ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
4803e42f35eeSHong Zhang       }
4804a6b2eed2SHong Zhang     }
480587025532SHong Zhang     ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
4806a6b2eed2SHong Zhang   }
480787025532SHong Zhang   /* recvs and sends of a-array are completed */
480887025532SHong Zhang   i = nrecvs;
480987025532SHong Zhang   while (i--) {
4810aa5bb8c0SSatish Balay     ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
481187025532SHong Zhang   }
48120c468ba9SBarry Smith   if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
4813d7ee0231SBarry Smith   ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr);
4814a6b2eed2SHong Zhang 
481587025532SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4816a6b2eed2SHong Zhang     /* put together the new matrix */
4817d0f46423SBarry Smith     ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr);
4818a6b2eed2SHong Zhang 
4819a6b2eed2SHong Zhang     /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
4820a6b2eed2SHong Zhang     /* Since these are PETSc arrays, change flags to free them as necessary. */
482187025532SHong Zhang     b_oth          = (Mat_SeqAIJ *)(*B_oth)->data;
4822e6b907acSBarry Smith     b_oth->free_a  = PETSC_TRUE;
4823e6b907acSBarry Smith     b_oth->free_ij = PETSC_TRUE;
482487025532SHong Zhang     b_oth->nonew   = 0;
4825a6b2eed2SHong Zhang 
4826a6b2eed2SHong Zhang     ierr = PetscFree(bufj);CHKERRQ(ierr);
4827dea91ad1SHong Zhang     if (!startsj || !bufa_ptr){
4828dea91ad1SHong Zhang       ierr = PetscFree(sstartsj);CHKERRQ(ierr);
4829dea91ad1SHong Zhang       ierr = PetscFree(bufa_ptr);CHKERRQ(ierr);
4830dea91ad1SHong Zhang     } else {
483187025532SHong Zhang       *startsj  = sstartsj;
483287025532SHong Zhang       *bufa_ptr = bufa;
483387025532SHong Zhang     }
4834dea91ad1SHong Zhang   }
48354ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
4836429d309bSHong Zhang   PetscFunctionReturn(0);
4837429d309bSHong Zhang }
4838ccd8e176SBarry Smith 
483943eb5e2fSMatthew Knepley #undef __FUNCT__
484043eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs"
484143eb5e2fSMatthew Knepley /*@C
484243eb5e2fSMatthew Knepley   MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication.
484343eb5e2fSMatthew Knepley 
484443eb5e2fSMatthew Knepley   Not Collective
484543eb5e2fSMatthew Knepley 
484643eb5e2fSMatthew Knepley   Input Parameters:
484743eb5e2fSMatthew Knepley . A - The matrix in mpiaij format
484843eb5e2fSMatthew Knepley 
484943eb5e2fSMatthew Knepley   Output Parameter:
485043eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product
485143eb5e2fSMatthew Knepley . colmap - A map from global column index to local index into lvec
485243eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec
485343eb5e2fSMatthew Knepley 
485443eb5e2fSMatthew Knepley   Level: developer
485543eb5e2fSMatthew Knepley 
485643eb5e2fSMatthew Knepley @*/
485743eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE)
485843eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter)
485943eb5e2fSMatthew Knepley #else
486043eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter)
486143eb5e2fSMatthew Knepley #endif
486243eb5e2fSMatthew Knepley {
486343eb5e2fSMatthew Knepley   Mat_MPIAIJ *a;
486443eb5e2fSMatthew Knepley 
486543eb5e2fSMatthew Knepley   PetscFunctionBegin;
486643eb5e2fSMatthew Knepley   PetscValidHeaderSpecific(A, MAT_COOKIE, 1);
486743eb5e2fSMatthew Knepley   PetscValidPointer(lvec, 2)
486843eb5e2fSMatthew Knepley   PetscValidPointer(colmap, 3)
486943eb5e2fSMatthew Knepley   PetscValidPointer(multScatter, 4)
487043eb5e2fSMatthew Knepley   a = (Mat_MPIAIJ *) A->data;
487143eb5e2fSMatthew Knepley   if (lvec) *lvec = a->lvec;
487243eb5e2fSMatthew Knepley   if (colmap) *colmap = a->colmap;
487343eb5e2fSMatthew Knepley   if (multScatter) *multScatter = a->Mvctx;
487443eb5e2fSMatthew Knepley   PetscFunctionReturn(0);
487543eb5e2fSMatthew Knepley }
487643eb5e2fSMatthew Knepley 
487717667f90SBarry Smith EXTERN_C_BEGIN
48788cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICRL(Mat,const MatType,MatReuse,Mat*);
48798cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICSRPERM(Mat,const MatType,MatReuse,Mat*);
488017667f90SBarry Smith EXTERN_C_END
488117667f90SBarry Smith 
48827c4f633dSBarry Smith #include "../src/mat/impls/dense/mpi/mpidense.h"
4883fc4dec0aSBarry Smith 
4884fc4dec0aSBarry Smith #undef __FUNCT__
4885fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ"
4886fc4dec0aSBarry Smith /*
4887fc4dec0aSBarry Smith     Computes (B'*A')' since computing B*A directly is untenable
4888fc4dec0aSBarry Smith 
4889fc4dec0aSBarry Smith                n                       p                          p
4890fc4dec0aSBarry Smith         (              )       (              )         (                  )
4891fc4dec0aSBarry Smith       m (      A       )  *  n (       B      )   =   m (         C        )
4892fc4dec0aSBarry Smith         (              )       (              )         (                  )
4893fc4dec0aSBarry Smith 
4894fc4dec0aSBarry Smith */
4895fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C)
4896fc4dec0aSBarry Smith {
4897fc4dec0aSBarry Smith   PetscErrorCode     ierr;
4898fc4dec0aSBarry Smith   Mat                At,Bt,Ct;
4899fc4dec0aSBarry Smith 
4900fc4dec0aSBarry Smith   PetscFunctionBegin;
4901fc4dec0aSBarry Smith   ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
4902fc4dec0aSBarry Smith   ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr);
4903fc4dec0aSBarry Smith   ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr);
4904fc4dec0aSBarry Smith   ierr = MatDestroy(At);CHKERRQ(ierr);
4905fc4dec0aSBarry Smith   ierr = MatDestroy(Bt);CHKERRQ(ierr);
4906fc4dec0aSBarry Smith   ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr);
4907e5e4356aSBarry Smith   ierr = MatDestroy(Ct);CHKERRQ(ierr);
4908fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4909fc4dec0aSBarry Smith }
4910fc4dec0aSBarry Smith 
4911fc4dec0aSBarry Smith #undef __FUNCT__
4912fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ"
4913fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C)
4914fc4dec0aSBarry Smith {
4915fc4dec0aSBarry Smith   PetscErrorCode ierr;
4916d0f46423SBarry Smith   PetscInt       m=A->rmap->n,n=B->cmap->n;
4917fc4dec0aSBarry Smith   Mat            Cmat;
4918fc4dec0aSBarry Smith 
4919fc4dec0aSBarry Smith   PetscFunctionBegin;
4920d0f46423SBarry Smith   if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n);
492139804f7cSBarry Smith   ierr = MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr);
4922fc4dec0aSBarry Smith   ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4923fc4dec0aSBarry Smith   ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr);
4924fc4dec0aSBarry Smith   ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr);
492538556019SBarry Smith   ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
492638556019SBarry Smith   ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4927fc4dec0aSBarry Smith   *C   = Cmat;
4928fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4929fc4dec0aSBarry Smith }
4930fc4dec0aSBarry Smith 
4931fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/
4932fc4dec0aSBarry Smith #undef __FUNCT__
4933fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ"
4934fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
4935fc4dec0aSBarry Smith {
4936fc4dec0aSBarry Smith   PetscErrorCode ierr;
4937fc4dec0aSBarry Smith 
4938fc4dec0aSBarry Smith   PetscFunctionBegin;
4939fc4dec0aSBarry Smith   if (scall == MAT_INITIAL_MATRIX){
4940fc4dec0aSBarry Smith     ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr);
4941fc4dec0aSBarry Smith   }
4942fc4dec0aSBarry Smith   ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr);
4943fc4dec0aSBarry Smith   PetscFunctionReturn(0);
4944fc4dec0aSBarry Smith }
4945fc4dec0aSBarry Smith 
49465c9eb25fSBarry Smith EXTERN_C_BEGIN
4947611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS)
49485c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_mumps(Mat,MatFactorType,Mat*);
4949611f576cSBarry Smith #endif
49503bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX)
49513bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*);
49523bf14a46SMatthew Knepley #endif
4953611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST)
49545c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*);
4955611f576cSBarry Smith #endif
4956611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES)
49575c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*);
4958611f576cSBarry Smith #endif
49595c9eb25fSBarry Smith EXTERN_C_END
49605c9eb25fSBarry Smith 
4961ccd8e176SBarry Smith /*MC
4962ccd8e176SBarry Smith    MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices.
4963ccd8e176SBarry Smith 
4964ccd8e176SBarry Smith    Options Database Keys:
4965ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions()
4966ccd8e176SBarry Smith 
4967ccd8e176SBarry Smith   Level: beginner
4968ccd8e176SBarry Smith 
4969175b88e8SBarry Smith .seealso: MatCreateMPIAIJ()
4970ccd8e176SBarry Smith M*/
4971ccd8e176SBarry Smith 
4972ccd8e176SBarry Smith EXTERN_C_BEGIN
4973ccd8e176SBarry Smith #undef __FUNCT__
4974ccd8e176SBarry Smith #define __FUNCT__ "MatCreate_MPIAIJ"
4975be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B)
4976ccd8e176SBarry Smith {
4977ccd8e176SBarry Smith   Mat_MPIAIJ     *b;
4978ccd8e176SBarry Smith   PetscErrorCode ierr;
4979ccd8e176SBarry Smith   PetscMPIInt    size;
4980ccd8e176SBarry Smith 
4981ccd8e176SBarry Smith   PetscFunctionBegin;
49827adad957SLisandro Dalcin   ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);
4983ccd8e176SBarry Smith 
498438f2d2fdSLisandro Dalcin   ierr            = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr);
4985ccd8e176SBarry Smith   B->data         = (void*)b;
4986ccd8e176SBarry Smith   ierr            = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
4987d0f46423SBarry Smith   B->rmap->bs      = 1;
4988ccd8e176SBarry Smith   B->assembled    = PETSC_FALSE;
4989ccd8e176SBarry Smith   B->mapping      = 0;
4990ccd8e176SBarry Smith 
4991ccd8e176SBarry Smith   B->insertmode      = NOT_SET_VALUES;
4992ccd8e176SBarry Smith   b->size            = size;
49937adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr);
4994ccd8e176SBarry Smith 
4995ccd8e176SBarry Smith   /* build cache for off array entries formed */
49967adad957SLisandro Dalcin   ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr);
4997ccd8e176SBarry Smith   b->donotstash  = PETSC_FALSE;
4998ccd8e176SBarry Smith   b->colmap      = 0;
4999ccd8e176SBarry Smith   b->garray      = 0;
5000ccd8e176SBarry Smith   b->roworiented = PETSC_TRUE;
5001ccd8e176SBarry Smith 
5002ccd8e176SBarry Smith   /* stuff used for matrix vector multiply */
5003ccd8e176SBarry Smith   b->lvec      = PETSC_NULL;
5004ccd8e176SBarry Smith   b->Mvctx     = PETSC_NULL;
5005ccd8e176SBarry Smith 
5006ccd8e176SBarry Smith   /* stuff for MatGetRow() */
5007ccd8e176SBarry Smith   b->rowindices   = 0;
5008ccd8e176SBarry Smith   b->rowvalues    = 0;
5009ccd8e176SBarry Smith   b->getrowactive = PETSC_FALSE;
5010ccd8e176SBarry Smith 
5011611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES)
50125c9eb25fSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_spooles_C",
50135c9eb25fSBarry Smith                                      "MatGetFactor_mpiaij_spooles",
50145c9eb25fSBarry Smith                                      MatGetFactor_mpiaij_spooles);CHKERRQ(ierr);
5015611f576cSBarry Smith #endif
5016611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS)
50175c9eb25fSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_mumps_C",
50185c9eb25fSBarry Smith                                      "MatGetFactor_mpiaij_mumps",
50195c9eb25fSBarry Smith                                      MatGetFactor_mpiaij_mumps);CHKERRQ(ierr);
5020611f576cSBarry Smith #endif
50213bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX)
50223bf14a46SMatthew Knepley   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_pastix_C",
50233bf14a46SMatthew Knepley 					   "MatGetFactor_mpiaij_pastix",
50243bf14a46SMatthew Knepley 					   MatGetFactor_mpiaij_pastix);CHKERRQ(ierr);
50253bf14a46SMatthew Knepley #endif
5026611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST)
50275c9eb25fSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_superlu_dist_C",
50285c9eb25fSBarry Smith                                      "MatGetFactor_mpiaij_superlu_dist",
50295c9eb25fSBarry Smith                                      MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr);
5030611f576cSBarry Smith #endif
5031ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
5032ccd8e176SBarry Smith                                      "MatStoreValues_MPIAIJ",
5033ccd8e176SBarry Smith                                      MatStoreValues_MPIAIJ);CHKERRQ(ierr);
5034ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
5035ccd8e176SBarry Smith                                      "MatRetrieveValues_MPIAIJ",
5036ccd8e176SBarry Smith                                      MatRetrieveValues_MPIAIJ);CHKERRQ(ierr);
5037ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
5038ccd8e176SBarry Smith 				     "MatGetDiagonalBlock_MPIAIJ",
5039ccd8e176SBarry Smith                                      MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr);
5040ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C",
5041ccd8e176SBarry Smith 				     "MatIsTranspose_MPIAIJ",
5042ccd8e176SBarry Smith 				     MatIsTranspose_MPIAIJ);CHKERRQ(ierr);
5043ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C",
5044ccd8e176SBarry Smith 				     "MatMPIAIJSetPreallocation_MPIAIJ",
5045ccd8e176SBarry Smith 				     MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr);
5046ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",
5047ccd8e176SBarry Smith 				     "MatMPIAIJSetPreallocationCSR_MPIAIJ",
5048ccd8e176SBarry Smith 				     MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr);
5049ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C",
5050ccd8e176SBarry Smith 				     "MatDiagonalScaleLocal_MPIAIJ",
5051ccd8e176SBarry Smith 				     MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr);
505217667f90SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicsrperm_C",
505317667f90SBarry Smith                                      "MatConvert_MPIAIJ_MPICSRPERM",
505417667f90SBarry Smith                                       MatConvert_MPIAIJ_MPICSRPERM);CHKERRQ(ierr);
505517667f90SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicrl_C",
505617667f90SBarry Smith                                      "MatConvert_MPIAIJ_MPICRL",
505717667f90SBarry Smith                                       MatConvert_MPIAIJ_MPICRL);CHKERRQ(ierr);
5058fc4dec0aSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C",
5059fc4dec0aSBarry Smith                                      "MatMatMult_MPIDense_MPIAIJ",
5060fc4dec0aSBarry Smith                                       MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr);
5061fc4dec0aSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C",
5062fc4dec0aSBarry Smith                                      "MatMatMultSymbolic_MPIDense_MPIAIJ",
5063fc4dec0aSBarry Smith                                       MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr);
5064fc4dec0aSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C",
5065fc4dec0aSBarry Smith                                      "MatMatMultNumeric_MPIDense_MPIAIJ",
5066fc4dec0aSBarry Smith                                       MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr);
506717667f90SBarry Smith   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr);
5068ccd8e176SBarry Smith   PetscFunctionReturn(0);
5069ccd8e176SBarry Smith }
5070ccd8e176SBarry Smith EXTERN_C_END
507181824310SBarry Smith 
507203bfb495SBarry Smith #undef __FUNCT__
507303bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays"
507458d36128SBarry Smith /*@
507503bfb495SBarry Smith      MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal"
507603bfb495SBarry Smith          and "off-diagonal" part of the matrix in CSR format.
507703bfb495SBarry Smith 
507803bfb495SBarry Smith    Collective on MPI_Comm
507903bfb495SBarry Smith 
508003bfb495SBarry Smith    Input Parameters:
508103bfb495SBarry Smith +  comm - MPI communicator
508203bfb495SBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
508303bfb495SBarry Smith .  n - This value should be the same as the local size used in creating the
508403bfb495SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
508503bfb495SBarry Smith        calculated if N is given) For square matrices n is almost always m.
508603bfb495SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
508703bfb495SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
508803bfb495SBarry Smith .   i - row indices for "diagonal" portion of matrix
508903bfb495SBarry Smith .   j - column indices
509003bfb495SBarry Smith .   a - matrix values
509103bfb495SBarry Smith .   oi - row indices for "off-diagonal" portion of matrix
509203bfb495SBarry Smith .   oj - column indices
509303bfb495SBarry Smith -   oa - matrix values
509403bfb495SBarry Smith 
509503bfb495SBarry Smith    Output Parameter:
509603bfb495SBarry Smith .   mat - the matrix
509703bfb495SBarry Smith 
509803bfb495SBarry Smith    Level: advanced
509903bfb495SBarry Smith 
510003bfb495SBarry Smith    Notes:
510103bfb495SBarry Smith        The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc.
510203bfb495SBarry Smith 
510303bfb495SBarry Smith        The i and j indices are 0 based
510403bfb495SBarry Smith 
510503bfb495SBarry Smith        See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix
510603bfb495SBarry Smith 
510703bfb495SBarry Smith 
510803bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
510903bfb495SBarry Smith 
511003bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
51118d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays()
511203bfb495SBarry Smith @*/
51138d7a6e47SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[],
511403bfb495SBarry Smith 								PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat)
511503bfb495SBarry Smith {
511603bfb495SBarry Smith   PetscErrorCode ierr;
511703bfb495SBarry Smith   Mat_MPIAIJ     *maij;
511803bfb495SBarry Smith 
511903bfb495SBarry Smith  PetscFunctionBegin;
512003bfb495SBarry Smith   if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
512103bfb495SBarry Smith   if (i[0]) {
512203bfb495SBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
512303bfb495SBarry Smith   }
512403bfb495SBarry Smith   if (oi[0]) {
512503bfb495SBarry Smith     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0");
512603bfb495SBarry Smith   }
512703bfb495SBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
512803bfb495SBarry Smith   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
512903bfb495SBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
513003bfb495SBarry Smith   maij = (Mat_MPIAIJ*) (*mat)->data;
51318d7a6e47SBarry Smith   maij->donotstash     = PETSC_TRUE;
51328d7a6e47SBarry Smith   (*mat)->preallocated = PETSC_TRUE;
513303bfb495SBarry Smith 
51347408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr);
51357408324eSLisandro Dalcin   ierr = PetscMapSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr);
5136d0f46423SBarry Smith   ierr = PetscMapSetUp((*mat)->rmap);CHKERRQ(ierr);
5137d0f46423SBarry Smith   ierr = PetscMapSetUp((*mat)->cmap);CHKERRQ(ierr);
513803bfb495SBarry Smith 
513903bfb495SBarry Smith   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr);
5140d0f46423SBarry Smith   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr);
514103bfb495SBarry Smith 
51428d7a6e47SBarry Smith   ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
51438d7a6e47SBarry Smith   ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
51448d7a6e47SBarry Smith   ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
51458d7a6e47SBarry Smith   ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
51468d7a6e47SBarry Smith 
514703bfb495SBarry Smith   ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
514803bfb495SBarry Smith   ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
514903bfb495SBarry Smith   PetscFunctionReturn(0);
515003bfb495SBarry Smith }
515103bfb495SBarry Smith 
515281824310SBarry Smith /*
515381824310SBarry Smith     Special version for direct calls from Fortran
515481824310SBarry Smith */
515581824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS)
515681824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ
515781824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
515881824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij
515981824310SBarry Smith #endif
516081824310SBarry Smith 
516281824310SBarry Smith /* Change these macros so they can be used in a void function */
516281824310SBarry Smith #undef CHKERRQ
51637adad957SLisandro Dalcin #define CHKERRQ(ierr) CHKERRABORT(((PetscObject)mat)->comm,ierr)
516481824310SBarry Smith #undef SETERRQ2
51657adad957SLisandro Dalcin #define SETERRQ2(ierr,b,c,d) CHKERRABORT(((PetscObject)mat)->comm,ierr)
516681824310SBarry Smith #undef SETERRQ
51677adad957SLisandro Dalcin #define SETERRQ(ierr,b) CHKERRABORT(((PetscObject)mat)->comm,ierr)
516881824310SBarry Smith 
516981824310SBarry Smith EXTERN_C_BEGIN
517081824310SBarry Smith #undef __FUNCT__
517181824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_"
51721f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr)
517381824310SBarry Smith {
517481824310SBarry Smith   Mat             mat = *mmat;
517581824310SBarry Smith   PetscInt        m = *mm, n = *mn;
517681824310SBarry Smith   InsertMode      addv = *maddv;
517781824310SBarry Smith   Mat_MPIAIJ      *aij = (Mat_MPIAIJ*)mat->data;
517881824310SBarry Smith   PetscScalar     value;
517981824310SBarry Smith   PetscErrorCode  ierr;
5180899cda47SBarry Smith 
5181d9e2c085SLisandro Dalcin   ierr = MatPreallocated(mat);CHKERRQ(ierr);
518281824310SBarry Smith   if (mat->insertmode == NOT_SET_VALUES) {
518381824310SBarry Smith     mat->insertmode = addv;
518481824310SBarry Smith   }
518581824310SBarry Smith #if defined(PETSC_USE_DEBUG)
518681824310SBarry Smith   else if (mat->insertmode != addv) {
518781824310SBarry Smith     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
518881824310SBarry Smith   }
518981824310SBarry Smith #endif
519081824310SBarry Smith   {
5191d0f46423SBarry Smith   PetscInt        i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
5192d0f46423SBarry Smith   PetscInt        cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
519381824310SBarry Smith   PetscTruth      roworiented = aij->roworiented;
519481824310SBarry Smith 
519581824310SBarry Smith   /* Some Variables required in the macro */
519681824310SBarry Smith   Mat             A = aij->A;
519781824310SBarry Smith   Mat_SeqAIJ      *a = (Mat_SeqAIJ*)A->data;
519881824310SBarry Smith   PetscInt        *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
5199dd6ea824SBarry Smith   MatScalar       *aa = a->a;
520081824310SBarry Smith   PetscTruth      ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE);
520181824310SBarry Smith   Mat             B = aij->B;
520281824310SBarry Smith   Mat_SeqAIJ      *b = (Mat_SeqAIJ*)B->data;
5203d0f46423SBarry Smith   PetscInt        *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
5204dd6ea824SBarry Smith   MatScalar       *ba = b->a;
520581824310SBarry Smith 
520681824310SBarry Smith   PetscInt        *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
520781824310SBarry Smith   PetscInt        nonew = a->nonew;
5208dd6ea824SBarry Smith   MatScalar       *ap1,*ap2;
520981824310SBarry Smith 
521081824310SBarry Smith   PetscFunctionBegin;
521181824310SBarry Smith   for (i=0; i<m; i++) {
521281824310SBarry Smith     if (im[i] < 0) continue;
521381824310SBarry Smith #if defined(PETSC_USE_DEBUG)
5214d0f46423SBarry Smith     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
521581824310SBarry Smith #endif
521681824310SBarry Smith     if (im[i] >= rstart && im[i] < rend) {
521781824310SBarry Smith       row      = im[i] - rstart;
521881824310SBarry Smith       lastcol1 = -1;
521981824310SBarry Smith       rp1      = aj + ai[row];
522081824310SBarry Smith       ap1      = aa + ai[row];
522181824310SBarry Smith       rmax1    = aimax[row];
522281824310SBarry Smith       nrow1    = ailen[row];
522381824310SBarry Smith       low1     = 0;
522481824310SBarry Smith       high1    = nrow1;
522581824310SBarry Smith       lastcol2 = -1;
522681824310SBarry Smith       rp2      = bj + bi[row];
522781824310SBarry Smith       ap2      = ba + bi[row];
522881824310SBarry Smith       rmax2    = bimax[row];
522981824310SBarry Smith       nrow2    = bilen[row];
523081824310SBarry Smith       low2     = 0;
523181824310SBarry Smith       high2    = nrow2;
523281824310SBarry Smith 
523381824310SBarry Smith       for (j=0; j<n; j++) {
523481824310SBarry Smith         if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
523581824310SBarry Smith         if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
523681824310SBarry Smith         if (in[j] >= cstart && in[j] < cend){
523781824310SBarry Smith           col = in[j] - cstart;
523881824310SBarry Smith           MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
523981824310SBarry Smith         } else if (in[j] < 0) continue;
524081824310SBarry Smith #if defined(PETSC_USE_DEBUG)
5241d0f46423SBarry Smith         else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
524281824310SBarry Smith #endif
524381824310SBarry Smith         else {
524481824310SBarry Smith           if (mat->was_assembled) {
524581824310SBarry Smith             if (!aij->colmap) {
524681824310SBarry Smith               ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
524781824310SBarry Smith             }
524881824310SBarry Smith #if defined (PETSC_USE_CTABLE)
524981824310SBarry Smith             ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
525081824310SBarry Smith 	    col--;
525181824310SBarry Smith #else
525281824310SBarry Smith             col = aij->colmap[in[j]] - 1;
525381824310SBarry Smith #endif
525481824310SBarry Smith             if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
525581824310SBarry Smith               ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
525681824310SBarry Smith               col =  in[j];
525781824310SBarry Smith               /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
525881824310SBarry Smith               B = aij->B;
525981824310SBarry Smith               b = (Mat_SeqAIJ*)B->data;
526081824310SBarry Smith               bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j;
526181824310SBarry Smith               rp2      = bj + bi[row];
526281824310SBarry Smith               ap2      = ba + bi[row];
526381824310SBarry Smith               rmax2    = bimax[row];
526481824310SBarry Smith               nrow2    = bilen[row];
526581824310SBarry Smith               low2     = 0;
526681824310SBarry Smith               high2    = nrow2;
5267d0f46423SBarry Smith               bm       = aij->B->rmap->n;
526881824310SBarry Smith               ba = b->a;
526981824310SBarry Smith             }
527081824310SBarry Smith           } else col = in[j];
527181824310SBarry Smith           MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
527281824310SBarry Smith         }
527381824310SBarry Smith       }
527481824310SBarry Smith     } else {
527581824310SBarry Smith       if (!aij->donotstash) {
527681824310SBarry Smith         if (roworiented) {
527781824310SBarry Smith           if (ignorezeroentries && v[i*n] == 0.0) continue;
527881824310SBarry Smith           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
527981824310SBarry Smith         } else {
528081824310SBarry Smith           if (ignorezeroentries && v[i] == 0.0) continue;
528181824310SBarry Smith           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
528281824310SBarry Smith         }
528381824310SBarry Smith       }
528481824310SBarry Smith     }
528581824310SBarry Smith   }}
528681824310SBarry Smith   PetscFunctionReturnVoid();
528781824310SBarry Smith }
528881824310SBarry Smith EXTERN_C_END
528903bfb495SBarry Smith 
5290