xref: /petsc/src/mat/impls/aij/mpi/mpiaij.c (revision 8cdbd7576f676fd2f0cf4ce91c9fbf7f9b0d4574)
18a729477SBarry Smith 
2c6db04a5SJed Brown #include <../src/mat/impls/aij/mpi/mpiaij.h>   /*I "petscmat.h" I*/
3c6db04a5SJed Brown #include <petscblaslapack.h>
48a729477SBarry Smith 
501bebe75SBarry Smith /*MC
601bebe75SBarry Smith    MATAIJ - MATAIJ = "aij" - A matrix type to be used for sparse matrices.
701bebe75SBarry Smith 
801bebe75SBarry Smith    This matrix type is identical to MATSEQAIJ when constructed with a single process communicator,
901bebe75SBarry Smith    and MATMPIAIJ otherwise.  As a result, for single process communicators,
1001bebe75SBarry Smith   MatSeqAIJSetPreallocation is supported, and similarly MatMPIAIJSetPreallocation is supported
1101bebe75SBarry Smith   for communicators controlling multiple processes.  It is recommended that you call both of
1201bebe75SBarry Smith   the above preallocation routines for simplicity.
1301bebe75SBarry Smith 
1401bebe75SBarry Smith    Options Database Keys:
1501bebe75SBarry Smith . -mat_type aij - sets the matrix type to "aij" during a call to MatSetFromOptions()
1601bebe75SBarry Smith 
1701bebe75SBarry Smith   Developer Notes: Subclasses include MATAIJCUSP, MATAIJPERM, and MATAIJCRL; the type also automatically switches over to use inodes when
1801bebe75SBarry Smith    enough of them exist.
1901bebe75SBarry Smith 
2001bebe75SBarry Smith   Level: beginner
2101bebe75SBarry Smith 
2201bebe75SBarry Smith .seealso: MatCreateMPIAIJ(), MatCreateSeqAIJ(), MATSEQAIJ,MATMPIAIJ
2301bebe75SBarry Smith M*/
2401bebe75SBarry Smith 
2501bebe75SBarry Smith /*MC
2601bebe75SBarry Smith    MATAIJCRL - MATAIJCRL = "aijcrl" - A matrix type to be used for sparse matrices.
2701bebe75SBarry Smith 
2801bebe75SBarry Smith    This matrix type is identical to MATSEQAIJCRL when constructed with a single process communicator,
2901bebe75SBarry Smith    and MATMPIAIJCRL otherwise.  As a result, for single process communicators,
3001bebe75SBarry Smith    MatSeqAIJSetPreallocation() is supported, and similarly MatMPIAIJSetPreallocation() is supported
3101bebe75SBarry Smith   for communicators controlling multiple processes.  It is recommended that you call both of
3201bebe75SBarry Smith   the above preallocation routines for simplicity.
3301bebe75SBarry Smith 
3401bebe75SBarry Smith    Options Database Keys:
3501bebe75SBarry Smith . -mat_type aijcrl - sets the matrix type to "aijcrl" during a call to MatSetFromOptions()
3601bebe75SBarry Smith 
3701bebe75SBarry Smith   Level: beginner
3801bebe75SBarry Smith 
3901bebe75SBarry Smith .seealso: MatCreateMPIAIJCRL(), MATSEQAIJCRL, MATMPIAIJCRL
4001bebe75SBarry Smith M*/
4101bebe75SBarry Smith 
42dd6ea824SBarry Smith #undef __FUNCT__
4327d4218bSShri Abhyankar #define __FUNCT__ "MatFindNonZeroRows_MPIAIJ"
4427d4218bSShri Abhyankar PetscErrorCode MatFindNonZeroRows_MPIAIJ(Mat M,IS *keptrows)
4527d4218bSShri Abhyankar {
4627d4218bSShri Abhyankar   PetscErrorCode  ierr;
4727d4218bSShri Abhyankar   Mat_MPIAIJ      *mat = (Mat_MPIAIJ*)M->data;
4827d4218bSShri Abhyankar   Mat_SeqAIJ      *a = (Mat_SeqAIJ*)mat->A->data;
4927d4218bSShri Abhyankar   Mat_SeqAIJ      *b = (Mat_SeqAIJ*)mat->B->data;
5027d4218bSShri Abhyankar   const PetscInt  *ia,*ib;
5127d4218bSShri Abhyankar   const MatScalar *aa,*bb;
5227d4218bSShri Abhyankar   PetscInt        na,nb,i,j,*rows,cnt=0,n0rows;
5327d4218bSShri Abhyankar   PetscInt        m = M->rmap->n,rstart = M->rmap->rstart;
5427d4218bSShri Abhyankar 
5527d4218bSShri Abhyankar   PetscFunctionBegin;
5627d4218bSShri Abhyankar   *keptrows = 0;
5727d4218bSShri Abhyankar   ia = a->i;
5827d4218bSShri Abhyankar   ib = b->i;
5927d4218bSShri Abhyankar   for (i=0; i<m; i++) {
6027d4218bSShri Abhyankar     na = ia[i+1] - ia[i];
6127d4218bSShri Abhyankar     nb = ib[i+1] - ib[i];
6227d4218bSShri Abhyankar     if (!na && !nb) {
6327d4218bSShri Abhyankar       cnt++;
6427d4218bSShri Abhyankar       goto ok1;
6527d4218bSShri Abhyankar     }
6627d4218bSShri Abhyankar     aa = a->a + ia[i];
6727d4218bSShri Abhyankar     for (j=0; j<na; j++) {
6827d4218bSShri Abhyankar       if (aa[j] != 0.0) goto ok1;
6927d4218bSShri Abhyankar     }
7027d4218bSShri Abhyankar     bb = b->a + ib[i];
7127d4218bSShri Abhyankar     for (j=0; j <nb; j++) {
7227d4218bSShri Abhyankar       if (bb[j] != 0.0) goto ok1;
7327d4218bSShri Abhyankar     }
7427d4218bSShri Abhyankar     cnt++;
7527d4218bSShri Abhyankar     ok1:;
7627d4218bSShri Abhyankar   }
7727d4218bSShri Abhyankar   ierr = MPI_Allreduce(&cnt,&n0rows,1,MPIU_INT,MPI_SUM,((PetscObject)M)->comm);CHKERRQ(ierr);
7827d4218bSShri Abhyankar   if (!n0rows) PetscFunctionReturn(0);
7927d4218bSShri Abhyankar   ierr = PetscMalloc((M->rmap->n-cnt)*sizeof(PetscInt),&rows);CHKERRQ(ierr);
8027d4218bSShri Abhyankar   cnt = 0;
8127d4218bSShri Abhyankar   for (i=0; i<m; i++) {
8227d4218bSShri Abhyankar     na = ia[i+1] - ia[i];
8327d4218bSShri Abhyankar     nb = ib[i+1] - ib[i];
8427d4218bSShri Abhyankar     if (!na && !nb) continue;
8527d4218bSShri Abhyankar     aa = a->a + ia[i];
8627d4218bSShri Abhyankar     for(j=0; j<na;j++) {
8727d4218bSShri Abhyankar       if (aa[j] != 0.0) {
8827d4218bSShri Abhyankar         rows[cnt++] = rstart + i;
8927d4218bSShri Abhyankar         goto ok2;
9027d4218bSShri Abhyankar       }
9127d4218bSShri Abhyankar     }
9227d4218bSShri Abhyankar     bb = b->a + ib[i];
9327d4218bSShri Abhyankar     for (j=0; j<nb; j++) {
9427d4218bSShri Abhyankar       if (bb[j] != 0.0) {
9527d4218bSShri Abhyankar         rows[cnt++] = rstart + i;
9627d4218bSShri Abhyankar         goto ok2;
9727d4218bSShri Abhyankar       }
9827d4218bSShri Abhyankar     }
9927d4218bSShri Abhyankar     ok2:;
10027d4218bSShri Abhyankar   }
10127d4218bSShri Abhyankar   ierr = ISCreateGeneral(PETSC_COMM_WORLD,cnt,rows,PETSC_OWN_POINTER,keptrows);CHKERRQ(ierr);
10227d4218bSShri Abhyankar   PetscFunctionReturn(0);
10327d4218bSShri Abhyankar }
10427d4218bSShri Abhyankar 
10527d4218bSShri Abhyankar #undef __FUNCT__
1060716a85fSBarry Smith #define __FUNCT__ "MatGetColumnNorms_MPIAIJ"
1070716a85fSBarry Smith PetscErrorCode MatGetColumnNorms_MPIAIJ(Mat A,NormType type,PetscReal *norms)
1080716a85fSBarry Smith {
1090716a85fSBarry Smith   PetscErrorCode ierr;
1100716a85fSBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)A->data;
1110716a85fSBarry Smith   PetscInt       i,n,*garray = aij->garray;
1120716a85fSBarry Smith   Mat_SeqAIJ     *a_aij = (Mat_SeqAIJ*) aij->A->data;
1130716a85fSBarry Smith   Mat_SeqAIJ     *b_aij = (Mat_SeqAIJ*) aij->B->data;
1140716a85fSBarry Smith   PetscReal      *work;
1150716a85fSBarry Smith 
1160716a85fSBarry Smith   PetscFunctionBegin;
1170716a85fSBarry Smith   ierr = MatGetSize(A,PETSC_NULL,&n);CHKERRQ(ierr);
1180716a85fSBarry Smith   ierr = PetscMalloc(n*sizeof(PetscReal),&work);CHKERRQ(ierr);
1190716a85fSBarry Smith   ierr = PetscMemzero(work,n*sizeof(PetscReal));CHKERRQ(ierr);
1200716a85fSBarry Smith   if (type == NORM_2) {
1210716a85fSBarry Smith     for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) {
1220716a85fSBarry Smith       work[A->cmap->rstart + a_aij->j[i]] += PetscAbsScalar(a_aij->a[i]*a_aij->a[i]);
1230716a85fSBarry Smith     }
1240716a85fSBarry Smith     for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) {
1250716a85fSBarry Smith       work[garray[b_aij->j[i]]] += PetscAbsScalar(b_aij->a[i]*b_aij->a[i]);
1260716a85fSBarry Smith     }
1270716a85fSBarry Smith   } else if (type == NORM_1) {
1280716a85fSBarry Smith     for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) {
1290716a85fSBarry Smith       work[A->cmap->rstart + a_aij->j[i]] += PetscAbsScalar(a_aij->a[i]);
1300716a85fSBarry Smith     }
1310716a85fSBarry Smith     for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) {
1320716a85fSBarry Smith       work[garray[b_aij->j[i]]] += PetscAbsScalar(b_aij->a[i]);
1330716a85fSBarry Smith     }
1340716a85fSBarry Smith   } else if (type == NORM_INFINITY) {
1350716a85fSBarry Smith     for (i=0; i<a_aij->i[aij->A->rmap->n]; i++) {
1360716a85fSBarry Smith       work[A->cmap->rstart + a_aij->j[i]] = PetscMax(PetscAbsScalar(a_aij->a[i]), work[A->cmap->rstart + a_aij->j[i]]);
1370716a85fSBarry Smith     }
1380716a85fSBarry Smith     for (i=0; i<b_aij->i[aij->B->rmap->n]; i++) {
1390716a85fSBarry Smith       work[garray[b_aij->j[i]]] = PetscMax(PetscAbsScalar(b_aij->a[i]),work[garray[b_aij->j[i]]]);
1400716a85fSBarry Smith     }
1410716a85fSBarry Smith 
1420716a85fSBarry Smith   } else SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_WRONG,"Unknown NormType");
1430716a85fSBarry Smith   if (type == NORM_INFINITY) {
1440716a85fSBarry Smith     ierr = MPI_Allreduce(work,norms,n,MPIU_REAL,MPIU_MAX,A->hdr.comm);CHKERRQ(ierr);
1450716a85fSBarry Smith   } else {
1460716a85fSBarry Smith     ierr = MPI_Allreduce(work,norms,n,MPIU_REAL,MPIU_SUM,A->hdr.comm);CHKERRQ(ierr);
1470716a85fSBarry Smith   }
1480716a85fSBarry Smith   ierr = PetscFree(work);CHKERRQ(ierr);
1490716a85fSBarry Smith   if (type == NORM_2) {
1508f1a2a5eSBarry Smith     for (i=0; i<n; i++) norms[i] = PetscSqrtReal(norms[i]);
1510716a85fSBarry Smith   }
1520716a85fSBarry Smith   PetscFunctionReturn(0);
1530716a85fSBarry Smith }
1540716a85fSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDistribute_MPIAIJ"
/*
    Distributes a SeqAIJ matrix across a set of processes. Code stolen from
    MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type.

    Only for square matrices

    Process 0 holds the full sequential matrix gmat; every process receives the
    m rows it will own.  With reuse == MAT_INITIAL_MATRIX the parallel structure
    is created from scratch; otherwise only the numerical values are shipped and
    copied directly into the existing diagonal (A) and off-diagonal (B) blocks
    of *inmat, using the ld[] counts cached during the initial distribution.
*/
PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat)
{
  PetscMPIInt    rank,size;
  PetscInt       *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld;
  PetscErrorCode ierr;
  Mat            mat;
  Mat_SeqAIJ     *gmata;
  PetscMPIInt    tag;
  MPI_Status     status;
  PetscBool      aij;
  MatScalar      *gmataa,*ao,*ad,*gmataarestore=0;

  PetscFunctionBegin;
  CHKMEMQ;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (!rank) {
    /* only process 0 has the global matrix, so only it can check the type */
    ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr);
    if (!aij) SETERRQ1(((PetscObject)gmat)->comm,PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name);
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(comm,&mat);CHKERRQ(ierr);
    ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr);
    ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
    ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr);
    /* rowners[] becomes the global row-ownership ranges: prefix sum of the local sizes */
    ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
    rowners[0] = 0;
    for (i=2; i<=size; i++) {
      rowners[i] += rowners[i-1];
    }
    rstart = rowners[rank];
    rend   = rowners[rank+1];
    ierr   = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      gmata = (Mat_SeqAIJ*) gmat->data;
      /* send row lengths to all processors */
      for (i=0; i<m; i++) dlens[i] = gmata->ilen[i];
      for (i=1; i<size; i++) {
        ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }
      /* determine number diagonal and off-diagonal counts */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          /* ld[i] = entries strictly left of the diagonal block; needed by the reuse path below */
          if (gmata->j[jj] < rstart) ld[i]++;
          if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* send column indices to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
        ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }

      /* send numerical values to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* process 0's own slice is read in place from the global arrays */
      gmataa = gmata->a;
      gmataj = gmata->j;

    } else {
      /* receive row lengths */
      ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* receive column indices */
      ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr);
      ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* determine number diagonal and off-diagonal counts */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmataj[jj] < rstart) ld[i]++;
          if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* receive numerical values */
      ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr);
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* set preallocation */
    for (i=0; i<m; i++) {
      dlens[i] -= olens[i];
    }
    ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr);

    /* restore dlens[] to total row lengths for the insertion loop below */
    for (i=0; i<m; i++) {
      dlens[i] += olens[i];
    }
    cnt  = 0;
    for (i=0; i<m; i++) {
      row  = rstart + i;
      ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr);
      cnt += dlens[i];
    }
    if (rank) {
      /* non-root ranks own their receive buffers; rank 0 aliased gmat's arrays */
      ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr);
    }
    ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
    ierr = PetscFree(rowners);CHKERRQ(ierr);
    /* stash ld[] on the new matrix so MAT_REUSE_MATRIX can split rows without the indices */
    ((Mat_MPIAIJ*)(mat->data))->ld = ld;
    *inmat = mat;
  } else {   /* column indices are already set; only need to move over numerical values from process 0 */
    Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data;
    Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data;
    mat   = *inmat;
    ierr  = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      /* send numerical values to other processes */
      gmata = (Mat_SeqAIJ*) gmat->data;
      ierr   = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr);
      gmataa = gmata->a;
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      nz   = gmata->i[rowners[1]]-gmata->i[rowners[0]];
    } else {
      /* receive numerical values from process 0*/
      nz   = Ad->nz + Ao->nz;
      ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa;
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* transfer numerical values into the diagonal A and off diagonal B parts of mat;
       each global row arrives as [left-of-diagonal B | A | right-of-diagonal B],
       with ld[i] giving the size of the left B piece */
    ld = ((Mat_MPIAIJ*)(mat->data))->ld;
    ad = Ad->a;
    ao = Ao->a;
    if (mat->rmap->n) {
      i  = 0;
      nz = ld[i];                                   ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    for (i=1; i<mat->rmap->n; i++) {
      /* remainder of previous row's B plus the left B piece of row i */
      nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    i--;
    if (mat->rmap->n) {
      /* trailing right-of-diagonal B piece of the last row */
      nz = Ao->i[i+1] - Ao->i[i] - ld[i];           ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
    }
    if (rank) {
      /* gmataa was advanced while copying; free via the saved base pointer */
      ierr = PetscFree(gmataarestore);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}
324dd6ea824SBarry Smith 
3250f5bd95cSBarry Smith /*
3260f5bd95cSBarry Smith   Local utility routine that creates a mapping from the global column
3279e25ed09SBarry Smith number to the local number in the off-diagonal part of the local
3280f5bd95cSBarry Smith storage of the matrix.  When PETSC_USE_CTABLE is used this is scalable at
3290f5bd95cSBarry Smith a slightly higher hash table cost; without it it is not scalable (each process
3300f5bd95cSBarry Smith has an order N integer array) but is fast to access.
3319e25ed09SBarry Smith */
3324a2ae208SSatish Balay #undef __FUNCT__
3334a2ae208SSatish Balay #define __FUNCT__ "CreateColmap_MPIAIJ_Private"
334dfbe8321SBarry Smith PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat)
3359e25ed09SBarry Smith {
33644a69424SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
3376849ba73SBarry Smith   PetscErrorCode ierr;
338d0f46423SBarry Smith   PetscInt       n = aij->B->cmap->n,i;
339dbb450caSBarry Smith 
3403a40ed3dSBarry Smith   PetscFunctionBegin;
341aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
342e23dfa41SBarry Smith   ierr = PetscTableCreate(n,mat->cmap->N+1,&aij->colmap);CHKERRQ(ierr);
343b1fc9764SSatish Balay   for (i=0; i<n; i++){
3440f5bd95cSBarry Smith     ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr);
345b1fc9764SSatish Balay   }
346b1fc9764SSatish Balay #else
347d0f46423SBarry Smith   ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr);
348d0f46423SBarry Smith   ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
349d0f46423SBarry Smith   ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
350905e6a2fSBarry Smith   for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1;
351b1fc9764SSatish Balay #endif
3523a40ed3dSBarry Smith   PetscFunctionReturn(0);
3539e25ed09SBarry Smith }
3549e25ed09SBarry Smith 
/*
   MatSetValues_SeqAIJ_A_Private - open-coded insert/add of one value at (row,col)
   of the diagonal (A) block.  Relies on caller-prepared locals: rp1/ap1 point at
   the row's column indices and values, nrow1/rmax1 are its used/allocated lengths,
   low1/high1/lastcol1 carry search state across consecutive calls, and the
   a_noinsert label is shared with the caller's loop body.
*/
#define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \
{ \
    /* columns usually arrive sorted: only rewind the search window when col goes backward */ \
    if (col <= lastcol1) low1 = 0; else high1 = nrow1; \
    lastcol1 = col;\
    /* binary search down to a short interval, then finish with a linear scan */ \
    while (high1-low1 > 5) { \
      t = (low1+high1)/2; \
      if (rp1[t] > col) high1 = t; \
      else             low1  = t; \
    } \
      for (_i=low1; _i<high1; _i++) { \
        if (rp1[_i] > col) break; \
        if (rp1[_i] == col) { \
          /* entry already exists: add or overwrite in place */ \
          if (addv == ADD_VALUES) ap1[_i] += value;   \
          else                    ap1[_i] = value; \
          goto a_noinsert; \
        } \
      }  \
      /* new entry: optionally skip zeros, honor the no-new-nonzeros options */ \
      if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \
      if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;}		\
      if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      /* grow the row's storage if it is full */ \
      MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \
      N = nrow1++ - 1; a->nz++; high1++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp1[ii+1] = rp1[ii]; \
        ap1[ii+1] = ap1[ii]; \
      } \
      rp1[_i] = col;  \
      ap1[_i] = value;  \
      a_noinsert: ; \
      ailen[row] = nrow1; \
}
3870a198c4cSBarry Smith 

/*
   MatSetValues_SeqAIJ_B_Private - twin of MatSetValues_SeqAIJ_A_Private, operating
   on the off-diagonal (B) block via the rp2/ap2/nrow2/low2/high2/lastcol2 locals
   and the shared b_noinsert label.  See the A variant above for the algorithm.
*/
#define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \
{ \
    /* reuse the previous search window while columns arrive in increasing order */ \
    if (col <= lastcol2) low2 = 0; else high2 = nrow2; \
    lastcol2 = col;\
    while (high2-low2 > 5) { \
      t = (low2+high2)/2; \
      if (rp2[t] > col) high2 = t; \
      else             low2  = t; \
    } \
    for (_i=low2; _i<high2; _i++) {		\
      if (rp2[_i] > col) break;			\
      if (rp2[_i] == col) {			      \
	if (addv == ADD_VALUES) ap2[_i] += value;     \
	else                    ap2[_i] = value;      \
	goto b_noinsert;			      \
      }						      \
    }							      \
    /* new entry: optionally skip zeros, honor the no-new-nonzeros options */ \
    if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
    if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;}		\
    if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
    MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \
    N = nrow2++ - 1; b->nz++; high2++;					\
    /* shift up all the later entries in this row */			\
    for (ii=N; ii>=_i; ii--) {						\
      rp2[ii+1] = rp2[ii];						\
      ap2[ii+1] = ap2[ii];						\
    }									\
    rp2[_i] = col;							\
    ap2[_i] = value;							\
    b_noinsert: ;								\
    bilen[row] = nrow2;							\
}
42130770e4dSSatish Balay 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesRow_MPIAIJ"
/*
   MatSetValuesRow_MPIAIJ - overwrites the values of one locally owned row,
   where v[] holds the row's stored entries ordered by global column index.
   The row is split as [left-of-diagonal B | diagonal A | right-of-diagonal B]
   and each piece is memcpy'd into the corresponding block; the sparsity
   pattern must already be set.
*/
PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[])
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data;
  PetscErrorCode ierr;
  PetscInt       l,*garray = mat->garray,diag;

  PetscFunctionBegin;
  /* code only works for square matrices A */

  /* find size of row to the left of the diagonal part */
  ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr);
  row  = row - diag;  /* convert global row number to local */
  /* l = number of B-block entries whose global column precedes the diagonal block
     (NOTE(review): compares against the row-ownership start, which equals the
     column start only for square matrices — consistent with the note above) */
  for (l=0; l<b->i[row+1]-b->i[row]; l++) {
    if (garray[b->j[b->i[row]+l]] > diag) break;
  }
  ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr);

  /* diagonal part */
  ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr);

  /* right of diagonal part */
  ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
4492fd7e33dSBarry Smith 
4502fd7e33dSBarry Smith #undef __FUNCT__
4514a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIAIJ"
452b1d57f15SBarry Smith PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
4538a729477SBarry Smith {
45444a69424SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
45587828ca2SBarry Smith   PetscScalar    value;
456dfbe8321SBarry Smith   PetscErrorCode ierr;
457d0f46423SBarry Smith   PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
458d0f46423SBarry Smith   PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
459ace3abfcSBarry Smith   PetscBool      roworiented = aij->roworiented;
4608a729477SBarry Smith 
4610520107fSSatish Balay   /* Some Variables required in the macro */
4624ee7247eSSatish Balay   Mat            A = aij->A;
4634ee7247eSSatish Balay   Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data;
46457809a77SBarry Smith   PetscInt       *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
465a77337e4SBarry Smith   MatScalar      *aa = a->a;
466ace3abfcSBarry Smith   PetscBool      ignorezeroentries = a->ignorezeroentries;
46730770e4dSSatish Balay   Mat            B = aij->B;
46830770e4dSSatish Balay   Mat_SeqAIJ     *b = (Mat_SeqAIJ*)B->data;
469d0f46423SBarry Smith   PetscInt       *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
470a77337e4SBarry Smith   MatScalar      *ba = b->a;
47130770e4dSSatish Balay 
472fd3458f5SBarry Smith   PetscInt       *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
473fd3458f5SBarry Smith   PetscInt       nonew = a->nonew;
474a77337e4SBarry Smith   MatScalar      *ap1,*ap2;
4754ee7247eSSatish Balay 
4763a40ed3dSBarry Smith   PetscFunctionBegin;
47771fd2e92SBarry Smith   if (v) PetscValidScalarPointer(v,6);
4788a729477SBarry Smith   for (i=0; i<m; i++) {
4795ef9f2a5SBarry Smith     if (im[i] < 0) continue;
4802515c552SBarry Smith #if defined(PETSC_USE_DEBUG)
481e32f2f54SBarry Smith     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
4820a198c4cSBarry Smith #endif
4834b0e389bSBarry Smith     if (im[i] >= rstart && im[i] < rend) {
4844b0e389bSBarry Smith       row      = im[i] - rstart;
485fd3458f5SBarry Smith       lastcol1 = -1;
486fd3458f5SBarry Smith       rp1      = aj + ai[row];
487fd3458f5SBarry Smith       ap1      = aa + ai[row];
488fd3458f5SBarry Smith       rmax1    = aimax[row];
489fd3458f5SBarry Smith       nrow1    = ailen[row];
490fd3458f5SBarry Smith       low1     = 0;
491fd3458f5SBarry Smith       high1    = nrow1;
492fd3458f5SBarry Smith       lastcol2 = -1;
493fd3458f5SBarry Smith       rp2      = bj + bi[row];
494d498b1e9SBarry Smith       ap2      = ba + bi[row];
495fd3458f5SBarry Smith       rmax2    = bimax[row];
496d498b1e9SBarry Smith       nrow2    = bilen[row];
497fd3458f5SBarry Smith       low2     = 0;
498fd3458f5SBarry Smith       high2    = nrow2;
499fd3458f5SBarry Smith 
5001eb62cbbSBarry Smith       for (j=0; j<n; j++) {
50116371a99SBarry Smith         if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0;
502abc0a331SBarry Smith         if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
503fd3458f5SBarry Smith         if (in[j] >= cstart && in[j] < cend){
504fd3458f5SBarry Smith           col = in[j] - cstart;
50530770e4dSSatish Balay           MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
506273d9f13SBarry Smith         } else if (in[j] < 0) continue;
5072515c552SBarry Smith #if defined(PETSC_USE_DEBUG)
508cb9801acSJed Brown         else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
5090a198c4cSBarry Smith #endif
5101eb62cbbSBarry Smith         else {
511227d817aSBarry Smith           if (mat->was_assembled) {
512905e6a2fSBarry Smith             if (!aij->colmap) {
513905e6a2fSBarry Smith               ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
514905e6a2fSBarry Smith             }
515aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
5160f5bd95cSBarry Smith             ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
517fa46199cSSatish Balay 	    col--;
518b1fc9764SSatish Balay #else
519905e6a2fSBarry Smith             col = aij->colmap[in[j]] - 1;
520b1fc9764SSatish Balay #endif
521ec8511deSBarry Smith             if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
5222493cbb0SBarry Smith               ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
5234b0e389bSBarry Smith               col =  in[j];
5249bf004c3SSatish Balay               /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
525f9508a3cSSatish Balay               B = aij->B;
526f9508a3cSSatish Balay               b = (Mat_SeqAIJ*)B->data;
527e44c0bd4SBarry Smith               bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a;
528d498b1e9SBarry Smith               rp2      = bj + bi[row];
529d498b1e9SBarry Smith               ap2      = ba + bi[row];
530d498b1e9SBarry Smith               rmax2    = bimax[row];
531d498b1e9SBarry Smith               nrow2    = bilen[row];
532d498b1e9SBarry Smith               low2     = 0;
533d498b1e9SBarry Smith               high2    = nrow2;
534d0f46423SBarry Smith               bm       = aij->B->rmap->n;
535f9508a3cSSatish Balay               ba = b->a;
536d6dfbf8fSBarry Smith             }
537c48de900SBarry Smith           } else col = in[j];
53830770e4dSSatish Balay           MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
5391eb62cbbSBarry Smith         }
5401eb62cbbSBarry Smith       }
5415ef9f2a5SBarry Smith     } else {
5424cb17eb5SBarry Smith       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
54390f02eecSBarry Smith       if (!aij->donotstash) {
544d36fbae8SSatish Balay         if (roworiented) {
545ace3abfcSBarry Smith           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr);
546d36fbae8SSatish Balay         } else {
547ace3abfcSBarry Smith           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr);
5484b0e389bSBarry Smith         }
5491eb62cbbSBarry Smith       }
5508a729477SBarry Smith     }
55190f02eecSBarry Smith   }
5523a40ed3dSBarry Smith   PetscFunctionReturn(0);
5538a729477SBarry Smith }
5548a729477SBarry Smith 
5554a2ae208SSatish Balay #undef __FUNCT__
5564a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIAIJ"
557b1d57f15SBarry Smith PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
558b49de8d1SLois Curfman McInnes {
559b49de8d1SLois Curfman McInnes   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
560dfbe8321SBarry Smith   PetscErrorCode ierr;
561d0f46423SBarry Smith   PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
562d0f46423SBarry Smith   PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
563b49de8d1SLois Curfman McInnes 
5643a40ed3dSBarry Smith   PetscFunctionBegin;
565b49de8d1SLois Curfman McInnes   for (i=0; i<m; i++) {
566e32f2f54SBarry Smith     if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
567e32f2f54SBarry Smith     if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
568b49de8d1SLois Curfman McInnes     if (idxm[i] >= rstart && idxm[i] < rend) {
569b49de8d1SLois Curfman McInnes       row = idxm[i] - rstart;
570b49de8d1SLois Curfman McInnes       for (j=0; j<n; j++) {
571e32f2f54SBarry Smith         if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
572e32f2f54SBarry Smith         if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
573b49de8d1SLois Curfman McInnes         if (idxn[j] >= cstart && idxn[j] < cend){
574b49de8d1SLois Curfman McInnes           col = idxn[j] - cstart;
575b49de8d1SLois Curfman McInnes           ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
576fa852ad4SSatish Balay         } else {
577905e6a2fSBarry Smith           if (!aij->colmap) {
578905e6a2fSBarry Smith             ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
579905e6a2fSBarry Smith           }
580aa482453SBarry Smith #if defined (PETSC_USE_CTABLE)
5810f5bd95cSBarry Smith           ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr);
582fa46199cSSatish Balay           col --;
583b1fc9764SSatish Balay #else
584905e6a2fSBarry Smith           col = aij->colmap[idxn[j]] - 1;
585b1fc9764SSatish Balay #endif
586e60e1c95SSatish Balay           if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0;
587d9d09a02SSatish Balay           else {
588b49de8d1SLois Curfman McInnes             ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
589b49de8d1SLois Curfman McInnes           }
590b49de8d1SLois Curfman McInnes         }
591b49de8d1SLois Curfman McInnes       }
592a8c6a408SBarry Smith     } else {
593e32f2f54SBarry Smith       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
594b49de8d1SLois Curfman McInnes     }
595b49de8d1SLois Curfman McInnes   }
5963a40ed3dSBarry Smith   PetscFunctionReturn(0);
597b49de8d1SLois Curfman McInnes }
598bc5ccf88SSatish Balay 
599bd0c2dcbSBarry Smith extern PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat,Vec,Vec);
600bd0c2dcbSBarry Smith 
6014a2ae208SSatish Balay #undef __FUNCT__
6024a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ"
603dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode)
604bc5ccf88SSatish Balay {
605bc5ccf88SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
606dfbe8321SBarry Smith   PetscErrorCode ierr;
607b1d57f15SBarry Smith   PetscInt       nstash,reallocs;
608bc5ccf88SSatish Balay   InsertMode     addv;
609bc5ccf88SSatish Balay 
610bc5ccf88SSatish Balay   PetscFunctionBegin;
6114cb17eb5SBarry Smith   if (aij->donotstash || mat->nooffprocentries) {
612bc5ccf88SSatish Balay     PetscFunctionReturn(0);
613bc5ccf88SSatish Balay   }
614bc5ccf88SSatish Balay 
615bc5ccf88SSatish Balay   /* make sure all processors are either in INSERTMODE or ADDMODE */
6167adad957SLisandro Dalcin   ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr);
617e7e72b3dSBarry Smith   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
618bc5ccf88SSatish Balay   mat->insertmode = addv; /* in case this processor had no cache */
619bc5ccf88SSatish Balay 
620d0f46423SBarry Smith   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
6218798bf22SSatish Balay   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
622ae15b995SBarry Smith   ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
623bc5ccf88SSatish Balay   PetscFunctionReturn(0);
624bc5ccf88SSatish Balay }
625bc5ccf88SSatish Balay 
6264a2ae208SSatish Balay #undef __FUNCT__
6274a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyEnd_MPIAIJ"
628dfbe8321SBarry Smith PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode)
629bc5ccf88SSatish Balay {
630bc5ccf88SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
63191c97fd4SSatish Balay   Mat_SeqAIJ     *a=(Mat_SeqAIJ *)aij->A->data;
6326849ba73SBarry Smith   PetscErrorCode ierr;
633b1d57f15SBarry Smith   PetscMPIInt    n;
634b1d57f15SBarry Smith   PetscInt       i,j,rstart,ncols,flg;
635e44c0bd4SBarry Smith   PetscInt       *row,*col;
636ace3abfcSBarry Smith   PetscBool      other_disassembled;
63787828ca2SBarry Smith   PetscScalar    *val;
638bc5ccf88SSatish Balay   InsertMode     addv = mat->insertmode;
639bc5ccf88SSatish Balay 
64091c97fd4SSatish Balay   /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */
641bc5ccf88SSatish Balay   PetscFunctionBegin;
6424cb17eb5SBarry Smith   if (!aij->donotstash && !mat->nooffprocentries) {
643a2d1c673SSatish Balay     while (1) {
6448798bf22SSatish Balay       ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
645a2d1c673SSatish Balay       if (!flg) break;
646a2d1c673SSatish Balay 
647bc5ccf88SSatish Balay       for (i=0; i<n;) {
648bc5ccf88SSatish Balay         /* Now identify the consecutive vals belonging to the same row */
649bc5ccf88SSatish Balay         for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
650bc5ccf88SSatish Balay         if (j < n) ncols = j-i;
651bc5ccf88SSatish Balay         else       ncols = n-i;
652bc5ccf88SSatish Balay         /* Now assemble all these values with a single function call */
653bc5ccf88SSatish Balay         ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
654bc5ccf88SSatish Balay         i = j;
655bc5ccf88SSatish Balay       }
656bc5ccf88SSatish Balay     }
6578798bf22SSatish Balay     ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
658bc5ccf88SSatish Balay   }
659bc5ccf88SSatish Balay   ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr);
660bc5ccf88SSatish Balay   ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr);
661bc5ccf88SSatish Balay 
662bc5ccf88SSatish Balay   /* determine if any processor has disassembled, if so we must
663bc5ccf88SSatish Balay      also disassemble ourselfs, in order that we may reassemble. */
664bc5ccf88SSatish Balay   /*
665bc5ccf88SSatish Balay      if nonzero structure of submatrix B cannot change then we know that
666bc5ccf88SSatish Balay      no processor disassembled thus we can skip this stuff
667bc5ccf88SSatish Balay   */
668bc5ccf88SSatish Balay   if (!((Mat_SeqAIJ*)aij->B->data)->nonew)  {
6697adad957SLisandro Dalcin     ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr);
670bc5ccf88SSatish Balay     if (mat->was_assembled && !other_disassembled) {
671bc5ccf88SSatish Balay       ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
672ad59fb31SSatish Balay     }
673ad59fb31SSatish Balay   }
674bc5ccf88SSatish Balay   if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
675bc5ccf88SSatish Balay     ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
676bc5ccf88SSatish Balay   }
6774e0d8c25SBarry Smith   ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr);
6784e35b6f3SSatish Balay   ierr = MatSetOption(aij->B,MAT_CHECK_COMPRESSED_ROW,PETSC_FALSE);CHKERRQ(ierr);
679bc5ccf88SSatish Balay   ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr);
680bc5ccf88SSatish Balay   ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr);
681bc5ccf88SSatish Balay 
6821d79065fSBarry Smith   ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr);
683606d414cSSatish Balay   aij->rowvalues = 0;
684a30b2313SHong Zhang 
685a30b2313SHong Zhang   /* used by MatAXPY() */
68691c97fd4SSatish Balay   a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0;  /* b->xtoy = 0 */
68791c97fd4SSatish Balay   a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0;  /* b->XtoY = 0 */
688a30b2313SHong Zhang 
6896bf464f9SBarry Smith   ierr = VecDestroy(&aij->diag);CHKERRQ(ierr);
690bd0c2dcbSBarry Smith   if (a->inode.size) mat->ops->multdiagonalblock = MatMultDiagonalBlock_MPIAIJ;
691bc5ccf88SSatish Balay   PetscFunctionReturn(0);
692bc5ccf88SSatish Balay }
693bc5ccf88SSatish Balay 
6944a2ae208SSatish Balay #undef __FUNCT__
6954a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ"
696dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
6971eb62cbbSBarry Smith {
69844a69424SLois Curfman McInnes   Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
699dfbe8321SBarry Smith   PetscErrorCode ierr;
7003a40ed3dSBarry Smith 
7013a40ed3dSBarry Smith   PetscFunctionBegin;
70278b31e54SBarry Smith   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
70378b31e54SBarry Smith   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
7043a40ed3dSBarry Smith   PetscFunctionReturn(0);
7051eb62cbbSBarry Smith }
7061eb62cbbSBarry Smith 
7074a2ae208SSatish Balay #undef __FUNCT__
7084a2ae208SSatish Balay #define __FUNCT__ "MatZeroRows_MPIAIJ"
7092b40b63fSBarry Smith PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
7101eb62cbbSBarry Smith {
71144a69424SLois Curfman McInnes   Mat_MPIAIJ        *l = (Mat_MPIAIJ*)A->data;
7126849ba73SBarry Smith   PetscErrorCode    ierr;
7137adad957SLisandro Dalcin   PetscMPIInt       size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
714d0f46423SBarry Smith   PetscInt          i,*owners = A->rmap->range;
715b1d57f15SBarry Smith   PetscInt          *nprocs,j,idx,nsends,row;
716b1d57f15SBarry Smith   PetscInt          nmax,*svalues,*starts,*owner,nrecvs;
717b1d57f15SBarry Smith   PetscInt          *rvalues,count,base,slen,*source;
718d0f46423SBarry Smith   PetscInt          *lens,*lrows,*values,rstart=A->rmap->rstart;
7197adad957SLisandro Dalcin   MPI_Comm          comm = ((PetscObject)A)->comm;
7201eb62cbbSBarry Smith   MPI_Request       *send_waits,*recv_waits;
7211eb62cbbSBarry Smith   MPI_Status        recv_status,*send_status;
72297b48c8fSBarry Smith   const PetscScalar *xx;
72397b48c8fSBarry Smith   PetscScalar       *bb;
7246543fbbaSBarry Smith #if defined(PETSC_DEBUG)
725ace3abfcSBarry Smith   PetscBool      found = PETSC_FALSE;
7266543fbbaSBarry Smith #endif
7271eb62cbbSBarry Smith 
7283a40ed3dSBarry Smith   PetscFunctionBegin;
7291eb62cbbSBarry Smith   /*  first count number of contributors to each processor */
730b1d57f15SBarry Smith   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
731b1d57f15SBarry Smith   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
732b1d57f15SBarry Smith   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
7336543fbbaSBarry Smith   j = 0;
7341eb62cbbSBarry Smith   for (i=0; i<N; i++) {
7356543fbbaSBarry Smith     if (lastidx > (idx = rows[i])) j = 0;
7366543fbbaSBarry Smith     lastidx = idx;
7376543fbbaSBarry Smith     for (; j<size; j++) {
7381eb62cbbSBarry Smith       if (idx >= owners[j] && idx < owners[j+1]) {
7396543fbbaSBarry Smith         nprocs[2*j]++;
7406543fbbaSBarry Smith         nprocs[2*j+1] = 1;
7416543fbbaSBarry Smith         owner[i] = j;
7426543fbbaSBarry Smith #if defined(PETSC_DEBUG)
7436543fbbaSBarry Smith         found = PETSC_TRUE;
7446543fbbaSBarry Smith #endif
7456543fbbaSBarry Smith         break;
7461eb62cbbSBarry Smith       }
7471eb62cbbSBarry Smith     }
7486543fbbaSBarry Smith #if defined(PETSC_DEBUG)
749e32f2f54SBarry Smith     if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
7506543fbbaSBarry Smith     found = PETSC_FALSE;
7516543fbbaSBarry Smith #endif
7521eb62cbbSBarry Smith   }
753c1dc657dSBarry Smith   nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}
7541eb62cbbSBarry Smith 
7557367270fSBarry Smith   if (A->nooffproczerorows) {
7567367270fSBarry Smith     if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but set an off process zero row");
7577367270fSBarry Smith     nrecvs = nsends;
7587367270fSBarry Smith     nmax   = N;
7597367270fSBarry Smith   } else {
7601eb62cbbSBarry Smith     /* inform other processors of number of messages and max length*/
761c1dc657dSBarry Smith     ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
7627367270fSBarry Smith   }
7631eb62cbbSBarry Smith 
7641eb62cbbSBarry Smith   /* post receives:   */
765b1d57f15SBarry Smith   ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
766b0a32e0cSBarry Smith   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
7671eb62cbbSBarry Smith   for (i=0; i<nrecvs; i++) {
768b1d57f15SBarry Smith     ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
7691eb62cbbSBarry Smith   }
7701eb62cbbSBarry Smith 
7711eb62cbbSBarry Smith   /* do sends:
7721eb62cbbSBarry Smith       1) starts[i] gives the starting index in svalues for stuff going to
7731eb62cbbSBarry Smith          the ith processor
7741eb62cbbSBarry Smith   */
775b1d57f15SBarry Smith   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
776b0a32e0cSBarry Smith   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
777b1d57f15SBarry Smith   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
7781eb62cbbSBarry Smith   starts[0] = 0;
779c1dc657dSBarry Smith   for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
7801eb62cbbSBarry Smith   for (i=0; i<N; i++) {
7811eb62cbbSBarry Smith     svalues[starts[owner[i]]++] = rows[i];
7821eb62cbbSBarry Smith   }
7831eb62cbbSBarry Smith 
7841eb62cbbSBarry Smith   starts[0] = 0;
785c1dc657dSBarry Smith   for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
7861eb62cbbSBarry Smith   count = 0;
78717699dbbSLois Curfman McInnes   for (i=0; i<size; i++) {
788c1dc657dSBarry Smith     if (nprocs[2*i+1]) {
789b1d57f15SBarry Smith       ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
7901eb62cbbSBarry Smith     }
7911eb62cbbSBarry Smith   }
792606d414cSSatish Balay   ierr = PetscFree(starts);CHKERRQ(ierr);
7931eb62cbbSBarry Smith 
79417699dbbSLois Curfman McInnes   base = owners[rank];
7951eb62cbbSBarry Smith 
7961eb62cbbSBarry Smith   /*  wait on receives */
7971d79065fSBarry Smith   ierr   = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr);
7981eb62cbbSBarry Smith   count  = nrecvs; slen = 0;
7991eb62cbbSBarry Smith   while (count) {
800ca161407SBarry Smith     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
8011eb62cbbSBarry Smith     /* unpack receives into our local space */
802b1d57f15SBarry Smith     ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
803d6dfbf8fSBarry Smith     source[imdex]  = recv_status.MPI_SOURCE;
804d6dfbf8fSBarry Smith     lens[imdex]    = n;
8051eb62cbbSBarry Smith     slen          += n;
8061eb62cbbSBarry Smith     count--;
8071eb62cbbSBarry Smith   }
808606d414cSSatish Balay   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
8091eb62cbbSBarry Smith 
8101eb62cbbSBarry Smith   /* move the data into the send scatter */
811b1d57f15SBarry Smith   ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
8121eb62cbbSBarry Smith   count = 0;
8131eb62cbbSBarry Smith   for (i=0; i<nrecvs; i++) {
8141eb62cbbSBarry Smith     values = rvalues + i*nmax;
8151eb62cbbSBarry Smith     for (j=0; j<lens[i]; j++) {
8161eb62cbbSBarry Smith       lrows[count++] = values[j] - base;
8171eb62cbbSBarry Smith     }
8181eb62cbbSBarry Smith   }
819606d414cSSatish Balay   ierr = PetscFree(rvalues);CHKERRQ(ierr);
8201d79065fSBarry Smith   ierr = PetscFree2(lens,source);CHKERRQ(ierr);
821606d414cSSatish Balay   ierr = PetscFree(owner);CHKERRQ(ierr);
822606d414cSSatish Balay   ierr = PetscFree(nprocs);CHKERRQ(ierr);
8231eb62cbbSBarry Smith 
82497b48c8fSBarry Smith   /* fix right hand side if needed */
82597b48c8fSBarry Smith   if (x && b) {
82697b48c8fSBarry Smith     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
82797b48c8fSBarry Smith     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
828564f14d6SBarry Smith     for (i=0; i<slen; i++) {
82997b48c8fSBarry Smith       bb[lrows[i]] = diag*xx[lrows[i]];
83097b48c8fSBarry Smith     }
83197b48c8fSBarry Smith     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
83297b48c8fSBarry Smith     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
83397b48c8fSBarry Smith   }
8346eb55b6aSBarry Smith   /*
8356eb55b6aSBarry Smith         Zero the required rows. If the "diagonal block" of the matrix
836a8c7a070SBarry Smith      is square and the user wishes to set the diagonal we use separate
8376eb55b6aSBarry Smith      code so that MatSetValues() is not called for each diagonal allocating
8386eb55b6aSBarry Smith      new memory, thus calling lots of mallocs and slowing things down.
8396eb55b6aSBarry Smith 
8406eb55b6aSBarry Smith   */
841e2d53e46SBarry Smith   /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
8422b40b63fSBarry Smith   ierr = MatZeroRows(l->B,slen,lrows,0.0,0,0);CHKERRQ(ierr);
843d0f46423SBarry Smith   if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
8442b40b63fSBarry Smith     ierr = MatZeroRows(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr);
845f4df32b1SMatthew Knepley   } else if (diag != 0.0) {
8462b40b63fSBarry Smith     ierr = MatZeroRows(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
847fa46199cSSatish Balay     if (((Mat_SeqAIJ*)l->A->data)->nonew) {
848e32f2f54SBarry Smith       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\
849512a5fc5SBarry Smith MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
8506525c446SSatish Balay     }
851e2d53e46SBarry Smith     for (i = 0; i < slen; i++) {
852e2d53e46SBarry Smith       row  = lrows[i] + rstart;
853f4df32b1SMatthew Knepley       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
854e2d53e46SBarry Smith     }
855e2d53e46SBarry Smith     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
856e2d53e46SBarry Smith     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
8576eb55b6aSBarry Smith   } else {
8582b40b63fSBarry Smith     ierr = MatZeroRows(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
8596eb55b6aSBarry Smith   }
860606d414cSSatish Balay   ierr = PetscFree(lrows);CHKERRQ(ierr);
86172dacd9aSBarry Smith 
8621eb62cbbSBarry Smith   /* wait on sends */
8631eb62cbbSBarry Smith   if (nsends) {
864b0a32e0cSBarry Smith     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
865ca161407SBarry Smith     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
866606d414cSSatish Balay     ierr = PetscFree(send_status);CHKERRQ(ierr);
8671eb62cbbSBarry Smith   }
868606d414cSSatish Balay   ierr = PetscFree(send_waits);CHKERRQ(ierr);
869606d414cSSatish Balay   ierr = PetscFree(svalues);CHKERRQ(ierr);
8703a40ed3dSBarry Smith   PetscFunctionReturn(0);
8711eb62cbbSBarry Smith }
8721eb62cbbSBarry Smith 
8734a2ae208SSatish Balay #undef __FUNCT__
8749c7c4993SBarry Smith #define __FUNCT__ "MatZeroRowsColumns_MPIAIJ"
8759c7c4993SBarry Smith PetscErrorCode MatZeroRowsColumns_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
8769c7c4993SBarry Smith {
8779c7c4993SBarry Smith   Mat_MPIAIJ        *l = (Mat_MPIAIJ*)A->data;
8789c7c4993SBarry Smith   PetscErrorCode    ierr;
8799c7c4993SBarry Smith   PetscMPIInt       size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
8809c7c4993SBarry Smith   PetscInt          i,*owners = A->rmap->range;
881564f14d6SBarry Smith   PetscInt          *nprocs,j,idx,nsends;
8829c7c4993SBarry Smith   PetscInt          nmax,*svalues,*starts,*owner,nrecvs;
8839c7c4993SBarry Smith   PetscInt          *rvalues,count,base,slen,*source;
884564f14d6SBarry Smith   PetscInt          *lens,*lrows,*values,m;
8859c7c4993SBarry Smith   MPI_Comm          comm = ((PetscObject)A)->comm;
8869c7c4993SBarry Smith   MPI_Request       *send_waits,*recv_waits;
8879c7c4993SBarry Smith   MPI_Status        recv_status,*send_status;
8889c7c4993SBarry Smith   const PetscScalar *xx;
889564f14d6SBarry Smith   PetscScalar       *bb,*mask;
890564f14d6SBarry Smith   Vec               xmask,lmask;
891564f14d6SBarry Smith   Mat_SeqAIJ        *aij = (Mat_SeqAIJ*)l->B->data;
892564f14d6SBarry Smith   const PetscInt    *aj, *ii,*ridx;
893564f14d6SBarry Smith   PetscScalar       *aa;
8949c7c4993SBarry Smith #if defined(PETSC_DEBUG)
8959c7c4993SBarry Smith   PetscBool         found = PETSC_FALSE;
8969c7c4993SBarry Smith #endif
8979c7c4993SBarry Smith 
8989c7c4993SBarry Smith   PetscFunctionBegin;
8999c7c4993SBarry Smith   /*  first count number of contributors to each processor */
9009c7c4993SBarry Smith   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
9019c7c4993SBarry Smith   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
9029c7c4993SBarry Smith   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
9039c7c4993SBarry Smith   j = 0;
9049c7c4993SBarry Smith   for (i=0; i<N; i++) {
9059c7c4993SBarry Smith     if (lastidx > (idx = rows[i])) j = 0;
9069c7c4993SBarry Smith     lastidx = idx;
9079c7c4993SBarry Smith     for (; j<size; j++) {
9089c7c4993SBarry Smith       if (idx >= owners[j] && idx < owners[j+1]) {
9099c7c4993SBarry Smith         nprocs[2*j]++;
9109c7c4993SBarry Smith         nprocs[2*j+1] = 1;
9119c7c4993SBarry Smith         owner[i] = j;
9129c7c4993SBarry Smith #if defined(PETSC_DEBUG)
9139c7c4993SBarry Smith         found = PETSC_TRUE;
9149c7c4993SBarry Smith #endif
9159c7c4993SBarry Smith         break;
9169c7c4993SBarry Smith       }
9179c7c4993SBarry Smith     }
9189c7c4993SBarry Smith #if defined(PETSC_DEBUG)
9199c7c4993SBarry Smith     if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
9209c7c4993SBarry Smith     found = PETSC_FALSE;
9219c7c4993SBarry Smith #endif
9229c7c4993SBarry Smith   }
9239c7c4993SBarry Smith   nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}
9249c7c4993SBarry Smith 
9259c7c4993SBarry Smith   /* inform other processors of number of messages and max length*/
9269c7c4993SBarry Smith   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
9279c7c4993SBarry Smith 
9289c7c4993SBarry Smith   /* post receives:   */
9299c7c4993SBarry Smith   ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
9309c7c4993SBarry Smith   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
9319c7c4993SBarry Smith   for (i=0; i<nrecvs; i++) {
9329c7c4993SBarry Smith     ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
9339c7c4993SBarry Smith   }
9349c7c4993SBarry Smith 
9359c7c4993SBarry Smith   /* do sends:
9369c7c4993SBarry Smith       1) starts[i] gives the starting index in svalues for stuff going to
9379c7c4993SBarry Smith          the ith processor
9389c7c4993SBarry Smith   */
9399c7c4993SBarry Smith   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
9409c7c4993SBarry Smith   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
9419c7c4993SBarry Smith   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
9429c7c4993SBarry Smith   starts[0] = 0;
9439c7c4993SBarry Smith   for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
9449c7c4993SBarry Smith   for (i=0; i<N; i++) {
9459c7c4993SBarry Smith     svalues[starts[owner[i]]++] = rows[i];
9469c7c4993SBarry Smith   }
9479c7c4993SBarry Smith 
9489c7c4993SBarry Smith   starts[0] = 0;
9499c7c4993SBarry Smith   for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
9509c7c4993SBarry Smith   count = 0;
9519c7c4993SBarry Smith   for (i=0; i<size; i++) {
9529c7c4993SBarry Smith     if (nprocs[2*i+1]) {
9539c7c4993SBarry Smith       ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
9549c7c4993SBarry Smith     }
9559c7c4993SBarry Smith   }
9569c7c4993SBarry Smith   ierr = PetscFree(starts);CHKERRQ(ierr);
9579c7c4993SBarry Smith 
9589c7c4993SBarry Smith   base = owners[rank];
9599c7c4993SBarry Smith 
9609c7c4993SBarry Smith   /*  wait on receives */
9619c7c4993SBarry Smith   ierr   = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr);
9629c7c4993SBarry Smith   count  = nrecvs; slen = 0;
9639c7c4993SBarry Smith   while (count) {
9649c7c4993SBarry Smith     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
9659c7c4993SBarry Smith     /* unpack receives into our local space */
9669c7c4993SBarry Smith     ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
9679c7c4993SBarry Smith     source[imdex]  = recv_status.MPI_SOURCE;
9689c7c4993SBarry Smith     lens[imdex]    = n;
9699c7c4993SBarry Smith     slen          += n;
9709c7c4993SBarry Smith     count--;
9719c7c4993SBarry Smith   }
9729c7c4993SBarry Smith   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
9739c7c4993SBarry Smith 
9749c7c4993SBarry Smith   /* move the data into the send scatter */
9759c7c4993SBarry Smith   ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
9769c7c4993SBarry Smith   count = 0;
9779c7c4993SBarry Smith   for (i=0; i<nrecvs; i++) {
9789c7c4993SBarry Smith     values = rvalues + i*nmax;
9799c7c4993SBarry Smith     for (j=0; j<lens[i]; j++) {
9809c7c4993SBarry Smith       lrows[count++] = values[j] - base;
9819c7c4993SBarry Smith     }
9829c7c4993SBarry Smith   }
9839c7c4993SBarry Smith   ierr = PetscFree(rvalues);CHKERRQ(ierr);
9849c7c4993SBarry Smith   ierr = PetscFree2(lens,source);CHKERRQ(ierr);
9859c7c4993SBarry Smith   ierr = PetscFree(owner);CHKERRQ(ierr);
9869c7c4993SBarry Smith   ierr = PetscFree(nprocs);CHKERRQ(ierr);
987564f14d6SBarry Smith   /* lrows are the local rows to be zeroed, slen is the number of local rows */
9889c7c4993SBarry Smith 
989564f14d6SBarry Smith   /* zero diagonal part of matrix */
990564f14d6SBarry Smith   ierr = MatZeroRowsColumns(l->A,slen,lrows,diag,x,b);CHKERRQ(ierr);
9919c7c4993SBarry Smith 
992564f14d6SBarry Smith   /* handle off diagonal part of matrix */
993564f14d6SBarry Smith   ierr = MatGetVecs(A,&xmask,PETSC_NULL);CHKERRQ(ierr);
994564f14d6SBarry Smith   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
995564f14d6SBarry Smith   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
9969c7c4993SBarry Smith   for (i=0; i<slen; i++) {
997564f14d6SBarry Smith     bb[lrows[i]] = 1;
9989c7c4993SBarry Smith   }
999564f14d6SBarry Smith   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1000564f14d6SBarry Smith   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1001564f14d6SBarry Smith   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
10026bf464f9SBarry Smith   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1003564f14d6SBarry Smith   ierr = VecScatterBegin(l->Mvctx,x,l->lvec,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1004564f14d6SBarry Smith   ierr = VecScatterEnd(l->Mvctx,x,l->lvec,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1005564f14d6SBarry Smith   ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1006564f14d6SBarry Smith   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1007564f14d6SBarry Smith   ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1008564f14d6SBarry Smith 
1009564f14d6SBarry Smith   /* remove zeroed rows of off diagonal matrix */
1010564f14d6SBarry Smith   ii = aij->i;
1011564f14d6SBarry Smith   for (i=0; i<slen; i++) {
1012564f14d6SBarry Smith     ierr = PetscMemzero(aij->a + ii[lrows[i]],(ii[lrows[i]+1] - ii[lrows[i]])*sizeof(PetscScalar));CHKERRQ(ierr);
10139c7c4993SBarry Smith   }
1014564f14d6SBarry Smith 
1015564f14d6SBarry Smith   /* loop over all elements of off process part of matrix zeroing removed columns*/
1016564f14d6SBarry Smith   if (aij->compressedrow.use){
1017564f14d6SBarry Smith     m    = aij->compressedrow.nrows;
1018564f14d6SBarry Smith     ii   = aij->compressedrow.i;
1019564f14d6SBarry Smith     ridx = aij->compressedrow.rindex;
1020564f14d6SBarry Smith     for (i=0; i<m; i++){
1021564f14d6SBarry Smith       n   = ii[i+1] - ii[i];
1022564f14d6SBarry Smith       aj  = aij->j + ii[i];
1023564f14d6SBarry Smith       aa  = aij->a + ii[i];
1024564f14d6SBarry Smith 
1025564f14d6SBarry Smith       for (j=0; j<n; j++) {
102625266a92SSatish Balay         if (PetscAbsScalar(mask[*aj])) {
1027564f14d6SBarry Smith           bb[*ridx] -= *aa*xx[*aj];
1028564f14d6SBarry Smith           *aa        = 0.0;
1029564f14d6SBarry Smith         }
1030564f14d6SBarry Smith         aa++;
1031564f14d6SBarry Smith         aj++;
1032564f14d6SBarry Smith       }
1033564f14d6SBarry Smith       ridx++;
1034564f14d6SBarry Smith     }
1035564f14d6SBarry Smith   } else { /* do not use compressed row format */
1036564f14d6SBarry Smith     m = l->B->rmap->n;
1037564f14d6SBarry Smith     for (i=0; i<m; i++) {
1038564f14d6SBarry Smith       n   = ii[i+1] - ii[i];
1039564f14d6SBarry Smith       aj  = aij->j + ii[i];
1040564f14d6SBarry Smith       aa  = aij->a + ii[i];
1041564f14d6SBarry Smith       for (j=0; j<n; j++) {
104225266a92SSatish Balay         if (PetscAbsScalar(mask[*aj])) {
1043564f14d6SBarry Smith           bb[i] -= *aa*xx[*aj];
1044564f14d6SBarry Smith           *aa    = 0.0;
1045564f14d6SBarry Smith         }
1046564f14d6SBarry Smith         aa++;
1047564f14d6SBarry Smith         aj++;
1048564f14d6SBarry Smith       }
1049564f14d6SBarry Smith     }
1050564f14d6SBarry Smith   }
1051564f14d6SBarry Smith   ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1052564f14d6SBarry Smith   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1053564f14d6SBarry Smith   ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
10546bf464f9SBarry Smith   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
10559c7c4993SBarry Smith   ierr = PetscFree(lrows);CHKERRQ(ierr);
10569c7c4993SBarry Smith 
10579c7c4993SBarry Smith   /* wait on sends */
10589c7c4993SBarry Smith   if (nsends) {
10599c7c4993SBarry Smith     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
10609c7c4993SBarry Smith     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
10619c7c4993SBarry Smith     ierr = PetscFree(send_status);CHKERRQ(ierr);
10629c7c4993SBarry Smith   }
10639c7c4993SBarry Smith   ierr = PetscFree(send_waits);CHKERRQ(ierr);
10649c7c4993SBarry Smith   ierr = PetscFree(svalues);CHKERRQ(ierr);
10659c7c4993SBarry Smith 
10669c7c4993SBarry Smith   PetscFunctionReturn(0);
10679c7c4993SBarry Smith }
10689c7c4993SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMult_MPIAIJ"
/*
   MatMult_MPIAIJ - Computes yy = A*xx for a parallel AIJ matrix.

   The product is split between the on-process (diagonal) block a->A and the
   off-process block a->B.  The scatter that gathers the needed off-process
   entries of xx into the ghost vector a->lvec is overlapped with the local
   product: Begin-scatter / local mult / End-scatter / off-diagonal multadd.
   This statement ordering is essential and must not be changed.
*/
PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       nt;

  PetscFunctionBegin;
  ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
  if (nt != A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt);
  /* start communicating ghost values of xx needed by the off-diagonal block */
  ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* overlap the communication with the local (diagonal block) product */
  ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
  ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* add the contribution of the off-process columns */
  ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
10861eb62cbbSBarry Smith 
10874a2ae208SSatish Balay #undef __FUNCT__
1088bd0c2dcbSBarry Smith #define __FUNCT__ "MatMultDiagonalBlock_MPIAIJ"
1089bd0c2dcbSBarry Smith PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat A,Vec bb,Vec xx)
1090bd0c2dcbSBarry Smith {
1091bd0c2dcbSBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1092bd0c2dcbSBarry Smith   PetscErrorCode ierr;
1093bd0c2dcbSBarry Smith 
1094bd0c2dcbSBarry Smith   PetscFunctionBegin;
1095bd0c2dcbSBarry Smith   ierr = MatMultDiagonalBlock(a->A,bb,xx);CHKERRQ(ierr);
1096bd0c2dcbSBarry Smith   PetscFunctionReturn(0);
1097bd0c2dcbSBarry Smith }
1098bd0c2dcbSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_MPIAIJ"
/*
   MatMultAdd_MPIAIJ - Computes zz = A*xx + yy for a parallel AIJ matrix.

   As in MatMult_MPIAIJ, the forward scatter of ghost values of xx into
   a->lvec is overlapped with the local multadd on the diagonal block; the
   off-process contribution is then accumulated into zz.  The ordering of
   the scatter and multiply calls must not be changed.
*/
PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* start gathering ghost values of xx */
  ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* local part: zz = (diag block)*xx + yy, overlapped with communication */
  ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
  ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* add off-process contribution: zz += (off-diag block)*lvec */
  ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1113da3a660dSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_MPIAIJ"
/*
   MatMultTranspose_MPIAIJ - Computes yy = A^T * xx.

   The transpose of the off-process block a->B applied to xx produces
   contributions destined for other processes; these are placed in a->lvec
   and summed into yy with a reverse scatter.  When the scatter context was
   created "merged", VecScatterBegin() already moves the data, so the local
   transpose product must run BEFORE the Begin()/End() pair; otherwise the
   local product is overlapped between Begin() and End().  The two branches
   therefore differ only in statement order, and that order is essential.
*/
PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscBool      merged;

  PetscFunctionBegin;
  ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
  /* do nondiagonal part */
  ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
  if (!merged) {
    /* send it on its way */
    ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* do local part, overlapped with the communication */
    ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
    /* receive remote parts: note this assumes the values are not actually */
    /* added in yy until the next line, */
    ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else {
    /* do local part first: a merged Begin() would immediately add into yy */
    ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
    /* send it on its way */
    ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* values actually were received in the Begin() but we need to call this nop */
    ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1144da3a660dSBarry Smith 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatIsTranspose_MPIAIJ"
/*
   MatIsTranspose_MPIAIJ - Tests whether Bmat equals the transpose of Amat
   (to within tol), setting *f accordingly.

   Strategy: first the cheap local test that the diagonal blocks are
   transposes of each other; only if that passes (and there is more than one
   process) is the expensive test performed, which extracts the off-diagonal
   parts of both matrices via MatGetSubMatrices() and compares them.

   NOTE(review): notme is allocated with N-last+first entries but the second
   fill loop runs i from last to M, writing M-last+first entries; the two
   agree only when M == N.  The ownership range of Amat is also reused for
   Bmat.  This looks like it assumes square matrices with matching row
   partitions -- confirm before using on rectangular operands.
*/
PetscErrorCode  MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscBool  *f)
{
  MPI_Comm       comm;
  Mat_MPIAIJ     *Aij = (Mat_MPIAIJ *) Amat->data, *Bij;
  Mat            Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs;
  IS             Me,Notme;
  PetscErrorCode ierr;
  PetscInt       M,N,first,last,*notme,i;
  PetscMPIInt    size;

  PetscFunctionBegin;

  /* Easy test: symmetric diagonal block */
  Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A;
  ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr);
  if (!*f) PetscFunctionReturn(0);
  ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) PetscFunctionReturn(0);

  /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */
  ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr);
  /* notme = all global indices outside this process's ownership range */
  ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),&notme);CHKERRQ(ierr);
  for (i=0; i<first; i++) notme[i] = i;
  for (i=last; i<M; i++) notme[i-last+first] = i;
  ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,PETSC_COPY_VALUES,&Notme);CHKERRQ(ierr);
  ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr);
  /* A(Me,Notme) must equal B(Notme,Me)^T for the matrices to be transposes */
  ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr);
  Aoff = Aoffs[0];
  ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr);
  Boff = Boffs[0];
  ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr);
  ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr);
  ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr);
  ierr = ISDestroy(&Me);CHKERRQ(ierr);
  ierr = ISDestroy(&Notme);CHKERRQ(ierr);
  ierr = PetscFree(notme);CHKERRQ(ierr);
   PetscFunctionReturn(0);
}
EXTERN_C_END
1189cd0d46ebSvictorle 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_MPIAIJ"
/*
   MatMultTransposeAdd_MPIAIJ - Computes zz = A^T * xx + yy.

   The transpose of the off-process block applied to xx yields values owned
   by other processes; a reverse scatter with ADD_VALUES accumulates them
   into zz while the local transpose multadd runs in between Begin() and
   End() to overlap communication with computation.  The call order must
   not be changed.
*/
PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* do nondiagonal part */
  ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
  /* send it on its way */
  ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* do local part: zz = (diag block)^T * xx + yy */
  ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
  /* receive remote parts */
  ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1208da3a660dSBarry Smith 
12091eb62cbbSBarry Smith /*
12101eb62cbbSBarry Smith   This only works correctly for square matrices where the subblock A->A is the
12111eb62cbbSBarry Smith    diagonal block
12121eb62cbbSBarry Smith */
12134a2ae208SSatish Balay #undef __FUNCT__
12144a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ"
1215dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v)
12161eb62cbbSBarry Smith {
1217dfbe8321SBarry Smith   PetscErrorCode ierr;
1218416022c9SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
12193a40ed3dSBarry Smith 
12203a40ed3dSBarry Smith   PetscFunctionBegin;
1221e7e72b3dSBarry Smith   if (A->rmap->N != A->cmap->N) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1222e7e72b3dSBarry Smith   if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"row partition must equal col partition");
12233a40ed3dSBarry Smith   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
12243a40ed3dSBarry Smith   PetscFunctionReturn(0);
12251eb62cbbSBarry Smith }
12261eb62cbbSBarry Smith 
12274a2ae208SSatish Balay #undef __FUNCT__
12284a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ"
1229f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa)
1230052efed2SBarry Smith {
1231052efed2SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1232dfbe8321SBarry Smith   PetscErrorCode ierr;
12333a40ed3dSBarry Smith 
12343a40ed3dSBarry Smith   PetscFunctionBegin;
1235f4df32b1SMatthew Knepley   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1236f4df32b1SMatthew Knepley   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
12373a40ed3dSBarry Smith   PetscFunctionReturn(0);
1238052efed2SBarry Smith }
1239052efed2SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ"
/*
   MatDestroy_MPIAIJ - Frees all storage held by a parallel AIJ matrix:
   the assembly stash, the cached diagonal vector, the sequential diagonal
   (A) and off-diagonal (B) blocks, the global-to-local column map, the
   ghost vector and its scatter context, row-access work arrays, and the
   Mat_MPIAIJ data structure itself; finally clears the type name and
   detaches the composed methods registered at creation.
*/
PetscErrorCode MatDestroy_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = VecDestroy(&aij->diag);CHKERRQ(ierr);
  ierr = MatDestroy(&aij->A);CHKERRQ(ierr);
  ierr = MatDestroy(&aij->B);CHKERRQ(ierr);
  /* colmap maps global column numbers to local indices of the B block;
     its representation depends on the PETSC_USE_CTABLE configure option */
#if defined (PETSC_USE_CTABLE)
  ierr = PetscTableDestroy(&aij->colmap);CHKERRQ(ierr);
#else
  ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(aij->garray);CHKERRQ(ierr);
  ierr = VecDestroy(&aij->lvec);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&aij->Mvctx);CHKERRQ(ierr);
  ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr);
  ierr = PetscFree(aij->ld);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);

  /* reset the type name and detach composed methods so no dangling
     function pointers remain on the (now typeless) object */
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpiaij_mpisbaij_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1278ee50ffe9SBarry Smith 
12794a2ae208SSatish Balay #undef __FUNCT__
12808e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary"
1281dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer)
12828e2fed03SBarry Smith {
12838e2fed03SBarry Smith   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
12848e2fed03SBarry Smith   Mat_SeqAIJ*       A = (Mat_SeqAIJ*)aij->A->data;
12858e2fed03SBarry Smith   Mat_SeqAIJ*       B = (Mat_SeqAIJ*)aij->B->data;
12866849ba73SBarry Smith   PetscErrorCode    ierr;
128732dcc486SBarry Smith   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
12886f69ff64SBarry Smith   int               fd;
1289a788621eSSatish Balay   PetscInt          nz,header[4],*row_lengths,*range=0,rlen,i;
1290d0f46423SBarry Smith   PetscInt          nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz;
12918e2fed03SBarry Smith   PetscScalar       *column_values;
129285ebf7a4SBarry Smith   PetscInt          message_count,flowcontrolcount;
12938e2fed03SBarry Smith 
12948e2fed03SBarry Smith   PetscFunctionBegin;
12957adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
12967adad957SLisandro Dalcin   ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr);
12978e2fed03SBarry Smith   nz   = A->nz + B->nz;
1298958c9bccSBarry Smith   if (!rank) {
12990700a824SBarry Smith     header[0] = MAT_FILE_CLASSID;
1300d0f46423SBarry Smith     header[1] = mat->rmap->N;
1301d0f46423SBarry Smith     header[2] = mat->cmap->N;
13027adad957SLisandro Dalcin     ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
13038e2fed03SBarry Smith     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
13046f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
13058e2fed03SBarry Smith     /* get largest number of rows any processor has */
1306d0f46423SBarry Smith     rlen = mat->rmap->n;
1307d0f46423SBarry Smith     range = mat->rmap->range;
13088e2fed03SBarry Smith     for (i=1; i<size; i++) {
13098e2fed03SBarry Smith       rlen = PetscMax(rlen,range[i+1] - range[i]);
13108e2fed03SBarry Smith     }
13118e2fed03SBarry Smith   } else {
13127adad957SLisandro Dalcin     ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
1313d0f46423SBarry Smith     rlen = mat->rmap->n;
13148e2fed03SBarry Smith   }
13158e2fed03SBarry Smith 
13168e2fed03SBarry Smith   /* load up the local row counts */
1317b1d57f15SBarry Smith   ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr);
1318d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
13198e2fed03SBarry Smith     row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
13208e2fed03SBarry Smith   }
13218e2fed03SBarry Smith 
13228e2fed03SBarry Smith   /* store the row lengths to the file */
132385ebf7a4SBarry Smith   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1324958c9bccSBarry Smith   if (!rank) {
1325d0f46423SBarry Smith     ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
13268e2fed03SBarry Smith     for (i=1; i<size; i++) {
132785ebf7a4SBarry Smith       ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr);
13288e2fed03SBarry Smith       rlen = range[i+1] - range[i];
13295de62a1aSBarry Smith       ierr = MPILong_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
13306f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
13318e2fed03SBarry Smith     }
133285ebf7a4SBarry Smith     ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr);
13338e2fed03SBarry Smith   } else {
133485ebf7a4SBarry Smith     ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr);
13355de62a1aSBarry Smith     ierr = MPILong_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
133685ebf7a4SBarry Smith     ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr);
13378e2fed03SBarry Smith   }
13388e2fed03SBarry Smith   ierr = PetscFree(row_lengths);CHKERRQ(ierr);
13398e2fed03SBarry Smith 
13408e2fed03SBarry Smith   /* load up the local column indices */
13418e2fed03SBarry Smith   nzmax = nz; /* )th processor needs space a largest processor needs */
13427adad957SLisandro Dalcin   ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr);
1343b1d57f15SBarry Smith   ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
13448e2fed03SBarry Smith   cnt  = 0;
1345d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
13468e2fed03SBarry Smith     for (j=B->i[i]; j<B->i[i+1]; j++) {
13478e2fed03SBarry Smith       if ( (col = garray[B->j[j]]) > cstart) break;
13488e2fed03SBarry Smith       column_indices[cnt++] = col;
13498e2fed03SBarry Smith     }
13508e2fed03SBarry Smith     for (k=A->i[i]; k<A->i[i+1]; k++) {
13518e2fed03SBarry Smith       column_indices[cnt++] = A->j[k] + cstart;
13528e2fed03SBarry Smith     }
13538e2fed03SBarry Smith     for (; j<B->i[i+1]; j++) {
13548e2fed03SBarry Smith       column_indices[cnt++] = garray[B->j[j]];
13558e2fed03SBarry Smith     }
13568e2fed03SBarry Smith   }
1357e32f2f54SBarry Smith   if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);
13588e2fed03SBarry Smith 
13598e2fed03SBarry Smith   /* store the column indices to the file */
136085ebf7a4SBarry Smith    ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1361958c9bccSBarry Smith   if (!rank) {
13628e2fed03SBarry Smith     MPI_Status status;
13636f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
13648e2fed03SBarry Smith     for (i=1; i<size; i++) {
136585ebf7a4SBarry Smith       ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr);
13667adad957SLisandro Dalcin       ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
1367e32f2f54SBarry Smith       if (rnz > nzmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
13685de62a1aSBarry Smith       ierr = MPILong_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
13696f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
13708e2fed03SBarry Smith     }
137185ebf7a4SBarry Smith      ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr);
13728e2fed03SBarry Smith   } else {
137385ebf7a4SBarry Smith     ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr);
13747adad957SLisandro Dalcin     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
13755de62a1aSBarry Smith     ierr = MPILong_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
137685ebf7a4SBarry Smith     ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr);
13778e2fed03SBarry Smith   }
13788e2fed03SBarry Smith   ierr = PetscFree(column_indices);CHKERRQ(ierr);
13798e2fed03SBarry Smith 
13808e2fed03SBarry Smith   /* load up the local column values */
13818e2fed03SBarry Smith   ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
13828e2fed03SBarry Smith   cnt  = 0;
1383d0f46423SBarry Smith   for (i=0; i<mat->rmap->n; i++) {
13848e2fed03SBarry Smith     for (j=B->i[i]; j<B->i[i+1]; j++) {
13858e2fed03SBarry Smith       if ( garray[B->j[j]] > cstart) break;
13868e2fed03SBarry Smith       column_values[cnt++] = B->a[j];
13878e2fed03SBarry Smith     }
13888e2fed03SBarry Smith     for (k=A->i[i]; k<A->i[i+1]; k++) {
13898e2fed03SBarry Smith       column_values[cnt++] = A->a[k];
13908e2fed03SBarry Smith     }
13918e2fed03SBarry Smith     for (; j<B->i[i+1]; j++) {
13928e2fed03SBarry Smith       column_values[cnt++] = B->a[j];
13938e2fed03SBarry Smith     }
13948e2fed03SBarry Smith   }
1395e32f2f54SBarry Smith   if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz);
13968e2fed03SBarry Smith 
13978e2fed03SBarry Smith   /* store the column values to the file */
139885ebf7a4SBarry Smith    ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1399958c9bccSBarry Smith   if (!rank) {
14008e2fed03SBarry Smith     MPI_Status status;
14016f69ff64SBarry Smith     ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
14028e2fed03SBarry Smith     for (i=1; i<size; i++) {
140385ebf7a4SBarry Smith        ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr);
14047adad957SLisandro Dalcin       ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr);
1405e32f2f54SBarry Smith       if (rnz > nzmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax);
14065de62a1aSBarry Smith       ierr = MPILong_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
14076f69ff64SBarry Smith       ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
14088e2fed03SBarry Smith     }
140985ebf7a4SBarry Smith     ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr);
14108e2fed03SBarry Smith   } else {
141185ebf7a4SBarry Smith     ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr);
14127adad957SLisandro Dalcin     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
14135de62a1aSBarry Smith     ierr = MPILong_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr);
141485ebf7a4SBarry Smith     ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr);
14158e2fed03SBarry Smith   }
14168e2fed03SBarry Smith   ierr = PetscFree(column_values);CHKERRQ(ierr);
14178e2fed03SBarry Smith   PetscFunctionReturn(0);
14188e2fed03SBarry Smith }
14198e2fed03SBarry Smith 
14208e2fed03SBarry Smith #undef __FUNCT__
14214a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket"
1422dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
1423416022c9SBarry Smith {
142444a69424SLois Curfman McInnes   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)mat->data;
1425dfbe8321SBarry Smith   PetscErrorCode    ierr;
142632dcc486SBarry Smith   PetscMPIInt       rank = aij->rank,size = aij->size;
1427ace3abfcSBarry Smith   PetscBool         isdraw,iascii,isbinary;
1428b0a32e0cSBarry Smith   PetscViewer       sviewer;
1429f3ef73ceSBarry Smith   PetscViewerFormat format;
1430416022c9SBarry Smith 
14313a40ed3dSBarry Smith   PetscFunctionBegin;
14322692d6eeSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
14332692d6eeSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
14342692d6eeSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
143532077d6dSBarry Smith   if (iascii) {
1436b0a32e0cSBarry Smith     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1437456192e2SBarry Smith     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
14384e220ebcSLois Curfman McInnes       MatInfo    info;
1439ace3abfcSBarry Smith       PetscBool  inodes;
1440923f20ffSKris Buschelman 
14417adad957SLisandro Dalcin       ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
1442888f2ed8SSatish Balay       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
1443923f20ffSKris Buschelman       ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr);
14447b23a99aSBarry Smith       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
1445923f20ffSKris Buschelman       if (!inodes) {
144677431f27SBarry Smith         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n",
1447d0f46423SBarry Smith 					      rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
14486831982aSBarry Smith       } else {
144977431f27SBarry Smith         ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n",
1450d0f46423SBarry Smith 		    rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
14516831982aSBarry Smith       }
1452888f2ed8SSatish Balay       ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
145377431f27SBarry Smith       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
1454888f2ed8SSatish Balay       ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
145577431f27SBarry Smith       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
1456b0a32e0cSBarry Smith       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
14577b23a99aSBarry Smith       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
145807d81ca4SBarry Smith       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
1459a40aa06bSLois Curfman McInnes       ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr);
14603a40ed3dSBarry Smith       PetscFunctionReturn(0);
1461fb9695e5SSatish Balay     } else if (format == PETSC_VIEWER_ASCII_INFO) {
1462923f20ffSKris Buschelman       PetscInt   inodecount,inodelimit,*inodes;
1463923f20ffSKris Buschelman       ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr);
1464923f20ffSKris Buschelman       if (inodes) {
1465923f20ffSKris Buschelman         ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr);
1466d38fa0fbSBarry Smith       } else {
1467d38fa0fbSBarry Smith         ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr);
1468d38fa0fbSBarry Smith       }
14693a40ed3dSBarry Smith       PetscFunctionReturn(0);
14704aedb280SBarry Smith     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
14714aedb280SBarry Smith       PetscFunctionReturn(0);
147208480c60SBarry Smith     }
14738e2fed03SBarry Smith   } else if (isbinary) {
14748e2fed03SBarry Smith     if (size == 1) {
14757adad957SLisandro Dalcin       ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
14768e2fed03SBarry Smith       ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
14778e2fed03SBarry Smith     } else {
14788e2fed03SBarry Smith       ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr);
14798e2fed03SBarry Smith     }
14808e2fed03SBarry Smith     PetscFunctionReturn(0);
14810f5bd95cSBarry Smith   } else if (isdraw) {
1482b0a32e0cSBarry Smith     PetscDraw  draw;
1483ace3abfcSBarry Smith     PetscBool  isnull;
1484b0a32e0cSBarry Smith     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
1485b0a32e0cSBarry Smith     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
148619bcc07fSBarry Smith   }
148719bcc07fSBarry Smith 
148817699dbbSLois Curfman McInnes   if (size == 1) {
14897adad957SLisandro Dalcin     ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
149078b31e54SBarry Smith     ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
14913a40ed3dSBarry Smith   } else {
149295373324SBarry Smith     /* assemble the entire matrix onto first processor. */
149395373324SBarry Smith     Mat         A;
1494ec8511deSBarry Smith     Mat_SeqAIJ  *Aloc;
1495d0f46423SBarry Smith     PetscInt    M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct;
1496dd6ea824SBarry Smith     MatScalar   *a;
14972ee70a88SLois Curfman McInnes 
149832a366e4SMatthew Knepley     if (mat->rmap->N > 1024) {
1499ace3abfcSBarry Smith       PetscBool  flg = PETSC_FALSE;
150032a366e4SMatthew Knepley 
1501acfcf0e5SJed Brown       ierr = PetscOptionsGetBool(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,PETSC_NULL);CHKERRQ(ierr);
150232a366e4SMatthew Knepley       if (!flg) {
1503e7e72b3dSBarry Smith         SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large.");
150432a366e4SMatthew Knepley       }
150532a366e4SMatthew Knepley     }
15060805154bSBarry Smith 
15077adad957SLisandro Dalcin     ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr);
150817699dbbSLois Curfman McInnes     if (!rank) {
1509f69a0ea3SMatthew Knepley       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
15103a40ed3dSBarry Smith     } else {
1511f69a0ea3SMatthew Knepley       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
151295373324SBarry Smith     }
1513f204ca49SKris Buschelman     /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */
1514f204ca49SKris Buschelman     ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
1515f204ca49SKris Buschelman     ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
151652e6d16bSBarry Smith     ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr);
1517416022c9SBarry Smith 
151895373324SBarry Smith     /* copy over the A part */
1519ec8511deSBarry Smith     Aloc = (Mat_SeqAIJ*)aij->A->data;
1520d0f46423SBarry Smith     m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1521d0f46423SBarry Smith     row = mat->rmap->rstart;
1522d0f46423SBarry Smith     for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;}
152395373324SBarry Smith     for (i=0; i<m; i++) {
1524416022c9SBarry Smith       ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr);
152595373324SBarry Smith       row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i];
152695373324SBarry Smith     }
15272ee70a88SLois Curfman McInnes     aj = Aloc->j;
1528d0f46423SBarry Smith     for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;}
152995373324SBarry Smith 
153095373324SBarry Smith     /* copy over the B part */
1531ec8511deSBarry Smith     Aloc = (Mat_SeqAIJ*)aij->B->data;
1532d0f46423SBarry Smith     m    = aij->B->rmap->n;  ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1533d0f46423SBarry Smith     row  = mat->rmap->rstart;
1534b1d57f15SBarry Smith     ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
1535b0a32e0cSBarry Smith     ct   = cols;
1536bfec09a0SHong Zhang     for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];}
153795373324SBarry Smith     for (i=0; i<m; i++) {
1538416022c9SBarry Smith       ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr);
153995373324SBarry Smith       row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i];
154095373324SBarry Smith     }
1541606d414cSSatish Balay     ierr = PetscFree(ct);CHKERRQ(ierr);
15426d4a8577SBarry Smith     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
15436d4a8577SBarry Smith     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
154455843e3eSBarry Smith     /*
154555843e3eSBarry Smith        Everyone has to call to draw the matrix since the graphics waits are
1546b0a32e0cSBarry Smith        synchronized across all processors that share the PetscDraw object
154755843e3eSBarry Smith     */
1548b0a32e0cSBarry Smith     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
1549e03a110bSBarry Smith     if (!rank) {
15507adad957SLisandro Dalcin       ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
15517566de4bSShri Abhyankar       /* Set the type name to MATMPIAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqAIJ_ASCII()*/
15527566de4bSShri Abhyankar       PetscStrcpy(((PetscObject)((Mat_MPIAIJ*)(A->data))->A)->type_name,MATMPIAIJ);
15536831982aSBarry Smith       ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
155495373324SBarry Smith     }
1555b0a32e0cSBarry Smith     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
15566bf464f9SBarry Smith     ierr = MatDestroy(&A);CHKERRQ(ierr);
155795373324SBarry Smith   }
15583a40ed3dSBarry Smith   PetscFunctionReturn(0);
15591eb62cbbSBarry Smith }
15601eb62cbbSBarry Smith 
15614a2ae208SSatish Balay #undef __FUNCT__
15624a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ"
1563dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer)
1564416022c9SBarry Smith {
1565dfbe8321SBarry Smith   PetscErrorCode ierr;
1566ace3abfcSBarry Smith   PetscBool      iascii,isdraw,issocket,isbinary;
1567416022c9SBarry Smith 
15683a40ed3dSBarry Smith   PetscFunctionBegin;
15692692d6eeSBarry Smith   ierr  = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
15702692d6eeSBarry Smith   ierr  = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
15712692d6eeSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
15722692d6eeSBarry Smith   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
157332077d6dSBarry Smith   if (iascii || isdraw || isbinary || issocket) {
15747b2a1423SBarry Smith     ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
15755cd90555SBarry Smith   } else {
1576e32f2f54SBarry Smith     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name);
1577416022c9SBarry Smith   }
15783a40ed3dSBarry Smith   PetscFunctionReturn(0);
1579416022c9SBarry Smith }
1580416022c9SBarry Smith 
15814a2ae208SSatish Balay #undef __FUNCT__
158241f059aeSBarry Smith #define __FUNCT__ "MatSOR_MPIAIJ"
158341f059aeSBarry Smith PetscErrorCode MatSOR_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
15848a729477SBarry Smith {
158544a69424SLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
1586dfbe8321SBarry Smith   PetscErrorCode ierr;
15876987fefcSBarry Smith   Vec            bb1 = 0;
1588ace3abfcSBarry Smith   PetscBool      hasop;
15898a729477SBarry Smith 
15903a40ed3dSBarry Smith   PetscFunctionBegin;
159185911e72SJed Brown   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS || flag & SOR_EISENSTAT) {
159285911e72SJed Brown     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
159385911e72SJed Brown   }
15942798e883SHong Zhang 
1595a2b30743SBarry Smith   if (flag == SOR_APPLY_UPPER) {
159641f059aeSBarry Smith     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
1597a2b30743SBarry Smith     PetscFunctionReturn(0);
1598a2b30743SBarry Smith   }
1599a2b30743SBarry Smith 
1600c16cb8f2SBarry Smith   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){
1601da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
160241f059aeSBarry Smith       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
16032798e883SHong Zhang       its--;
1604da3a660dSBarry Smith     }
16052798e883SHong Zhang 
16062798e883SHong Zhang     while (its--) {
1607ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1608ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
16092798e883SHong Zhang 
1610c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1611efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1612c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
16132798e883SHong Zhang 
1614c14dc6b6SHong Zhang       /* local sweep */
161541f059aeSBarry Smith       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
16162798e883SHong Zhang     }
16173a40ed3dSBarry Smith   } else if (flag & SOR_LOCAL_FORWARD_SWEEP){
1618da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
161941f059aeSBarry Smith       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
16202798e883SHong Zhang       its--;
1621da3a660dSBarry Smith     }
16222798e883SHong Zhang     while (its--) {
1623ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1624ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
16252798e883SHong Zhang 
1626c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1627efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1628c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
1629c14dc6b6SHong Zhang 
1630c14dc6b6SHong Zhang       /* local sweep */
163141f059aeSBarry Smith       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
16322798e883SHong Zhang     }
16333a40ed3dSBarry Smith   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){
1634da3a660dSBarry Smith     if (flag & SOR_ZERO_INITIAL_GUESS) {
163541f059aeSBarry Smith       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
16362798e883SHong Zhang       its--;
1637da3a660dSBarry Smith     }
16382798e883SHong Zhang     while (its--) {
1639ca9f406cSSatish Balay       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1640ca9f406cSSatish Balay       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
16412798e883SHong Zhang 
1642c14dc6b6SHong Zhang       /* update rhs: bb1 = bb - B*x */
1643efb30889SBarry Smith       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
1644c14dc6b6SHong Zhang       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
16452798e883SHong Zhang 
1646c14dc6b6SHong Zhang       /* local sweep */
164741f059aeSBarry Smith       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
16482798e883SHong Zhang     }
1649a7420bb7SBarry Smith   }  else if (flag & SOR_EISENSTAT) {
1650a7420bb7SBarry Smith     Vec         xx1;
1651a7420bb7SBarry Smith 
1652a7420bb7SBarry Smith     ierr = VecDuplicate(bb,&xx1);CHKERRQ(ierr);
165341f059aeSBarry Smith     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr);
1654a7420bb7SBarry Smith 
1655a7420bb7SBarry Smith     ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1656a7420bb7SBarry Smith     ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1657a7420bb7SBarry Smith     if (!mat->diag) {
1658a7420bb7SBarry Smith       ierr = MatGetVecs(matin,&mat->diag,PETSC_NULL);CHKERRQ(ierr);
1659a7420bb7SBarry Smith       ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr);
1660a7420bb7SBarry Smith     }
1661bd0c2dcbSBarry Smith     ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
1662bd0c2dcbSBarry Smith     if (hasop) {
1663bd0c2dcbSBarry Smith       ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr);
1664bd0c2dcbSBarry Smith     } else {
1665a7420bb7SBarry Smith       ierr = VecPointwiseMult(bb1,mat->diag,xx);CHKERRQ(ierr);
1666bd0c2dcbSBarry Smith     }
1667887ee2caSBarry Smith     ierr = VecAYPX(bb1,(omega-2.0)/omega,bb);CHKERRQ(ierr);
1668887ee2caSBarry Smith 
1669a7420bb7SBarry Smith     ierr = MatMultAdd(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr);
1670a7420bb7SBarry Smith 
1671a7420bb7SBarry Smith     /* local sweep */
167241f059aeSBarry Smith     ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr);
1673a7420bb7SBarry Smith     ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr);
16746bf464f9SBarry Smith     ierr = VecDestroy(&xx1);CHKERRQ(ierr);
167544b1af1bSBarry Smith   } else SETERRQ(((PetscObject)matin)->comm,PETSC_ERR_SUP,"Parallel SOR not supported");
1676c14dc6b6SHong Zhang 
16776bf464f9SBarry Smith   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
16783a40ed3dSBarry Smith   PetscFunctionReturn(0);
16798a729477SBarry Smith }
1680a66be287SLois Curfman McInnes 
16814a2ae208SSatish Balay #undef __FUNCT__
168242e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ"
168342e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B)
168442e855d1Svictor {
168542e855d1Svictor   MPI_Comm       comm,pcomm;
16865d0c19d7SBarry Smith   PetscInt       first,local_size,nrows;
16875d0c19d7SBarry Smith   const PetscInt *rows;
1688dbf0e21dSBarry Smith   PetscMPIInt    size;
168942e855d1Svictor   IS             crowp,growp,irowp,lrowp,lcolp,icolp;
169042e855d1Svictor   PetscErrorCode ierr;
169142e855d1Svictor 
169242e855d1Svictor   PetscFunctionBegin;
169342e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
169442e855d1Svictor   /* make a collective version of 'rowp' */
169542e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
169642e855d1Svictor   if (pcomm==comm) {
169742e855d1Svictor     crowp = rowp;
169842e855d1Svictor   } else {
169942e855d1Svictor     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
170042e855d1Svictor     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
170170b3c8c7SBarry Smith     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
170242e855d1Svictor     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
170342e855d1Svictor   }
170442e855d1Svictor   /* collect the global row permutation and invert it */
170542e855d1Svictor   ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr);
170642e855d1Svictor   ierr = ISSetPermutation(growp);CHKERRQ(ierr);
170742e855d1Svictor   if (pcomm!=comm) {
17086bf464f9SBarry Smith     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
170942e855d1Svictor   }
171042e855d1Svictor   ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr);
171142e855d1Svictor   /* get the local target indices */
171242e855d1Svictor   ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr);
171342e855d1Svictor   ierr = MatGetLocalSize(A,&local_size,PETSC_NULL);CHKERRQ(ierr);
171442e855d1Svictor   ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr);
171570b3c8c7SBarry Smith   ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,PETSC_COPY_VALUES,&lrowp);CHKERRQ(ierr);
171642e855d1Svictor   ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr);
17176bf464f9SBarry Smith   ierr = ISDestroy(&irowp);CHKERRQ(ierr);
171842e855d1Svictor   /* the column permutation is so much easier;
171942e855d1Svictor      make a local version of 'colp' and invert it */
172042e855d1Svictor   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
1721dbf0e21dSBarry Smith   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
1722dbf0e21dSBarry Smith   if (size==1) {
172342e855d1Svictor     lcolp = colp;
172442e855d1Svictor   } else {
172542e855d1Svictor     ierr = ISGetSize(colp,&nrows);CHKERRQ(ierr);
172642e855d1Svictor     ierr = ISGetIndices(colp,&rows);CHKERRQ(ierr);
172770b3c8c7SBarry Smith     ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,PETSC_COPY_VALUES,&lcolp);CHKERRQ(ierr);
172842e855d1Svictor   }
1729dbf0e21dSBarry Smith   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
173042e855d1Svictor   ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp);CHKERRQ(ierr);
17314aa3045dSJed Brown   ierr = ISSetPermutation(icolp);CHKERRQ(ierr);
1732dbf0e21dSBarry Smith   if (size>1) {
173342e855d1Svictor     ierr = ISRestoreIndices(colp,&rows);CHKERRQ(ierr);
17346bf464f9SBarry Smith     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
173542e855d1Svictor   }
173642e855d1Svictor   /* now we just get the submatrix */
17374aa3045dSJed Brown   ierr = MatGetSubMatrix_MPIAIJ_Private(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
173842e855d1Svictor   /* clean up */
17396bf464f9SBarry Smith   ierr = ISDestroy(&lrowp);CHKERRQ(ierr);
17406bf464f9SBarry Smith   ierr = ISDestroy(&icolp);CHKERRQ(ierr);
174142e855d1Svictor   PetscFunctionReturn(0);
174242e855d1Svictor }
174342e855d1Svictor 
174442e855d1Svictor #undef __FUNCT__
17454a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ"
1746dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1747a66be287SLois Curfman McInnes {
1748a66be287SLois Curfman McInnes   Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
1749a66be287SLois Curfman McInnes   Mat            A = mat->A,B = mat->B;
1750dfbe8321SBarry Smith   PetscErrorCode ierr;
1751329f5518SBarry Smith   PetscReal      isend[5],irecv[5];
1752a66be287SLois Curfman McInnes 
17533a40ed3dSBarry Smith   PetscFunctionBegin;
17544e220ebcSLois Curfman McInnes   info->block_size     = 1.0;
17554e220ebcSLois Curfman McInnes   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
17564e220ebcSLois Curfman McInnes   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
17574e220ebcSLois Curfman McInnes   isend[3] = info->memory;  isend[4] = info->mallocs;
17584e220ebcSLois Curfman McInnes   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
17594e220ebcSLois Curfman McInnes   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
17604e220ebcSLois Curfman McInnes   isend[3] += info->memory;  isend[4] += info->mallocs;
1761a66be287SLois Curfman McInnes   if (flag == MAT_LOCAL) {
17624e220ebcSLois Curfman McInnes     info->nz_used      = isend[0];
17634e220ebcSLois Curfman McInnes     info->nz_allocated = isend[1];
17644e220ebcSLois Curfman McInnes     info->nz_unneeded  = isend[2];
17654e220ebcSLois Curfman McInnes     info->memory       = isend[3];
17664e220ebcSLois Curfman McInnes     info->mallocs      = isend[4];
1767a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_MAX) {
1768d9822059SBarry Smith     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr);
17694e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
17704e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
17714e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
17724e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
17734e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1774a66be287SLois Curfman McInnes   } else if (flag == MAT_GLOBAL_SUM) {
1775d9822059SBarry Smith     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr);
17764e220ebcSLois Curfman McInnes     info->nz_used      = irecv[0];
17774e220ebcSLois Curfman McInnes     info->nz_allocated = irecv[1];
17784e220ebcSLois Curfman McInnes     info->nz_unneeded  = irecv[2];
17794e220ebcSLois Curfman McInnes     info->memory       = irecv[3];
17804e220ebcSLois Curfman McInnes     info->mallocs      = irecv[4];
1781a66be287SLois Curfman McInnes   }
17824e220ebcSLois Curfman McInnes   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
17834e220ebcSLois Curfman McInnes   info->fill_ratio_needed = 0;
17844e220ebcSLois Curfman McInnes   info->factor_mallocs    = 0;
17854e220ebcSLois Curfman McInnes 
17863a40ed3dSBarry Smith   PetscFunctionReturn(0);
1787a66be287SLois Curfman McInnes }
1788a66be287SLois Curfman McInnes 
17894a2ae208SSatish Balay #undef __FUNCT__
17904a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ"
1791ace3abfcSBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscBool  flg)
1792c74985f6SBarry Smith {
1793c0bbcb79SLois Curfman McInnes   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
1794dfbe8321SBarry Smith   PetscErrorCode ierr;
1795c74985f6SBarry Smith 
17963a40ed3dSBarry Smith   PetscFunctionBegin;
179712c028f9SKris Buschelman   switch (op) {
1798512a5fc5SBarry Smith   case MAT_NEW_NONZERO_LOCATIONS:
179912c028f9SKris Buschelman   case MAT_NEW_NONZERO_ALLOCATION_ERR:
180028b2fa4aSMatthew Knepley   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1801a9817697SBarry Smith   case MAT_KEEP_NONZERO_PATTERN:
180212c028f9SKris Buschelman   case MAT_NEW_NONZERO_LOCATION_ERR:
180312c028f9SKris Buschelman   case MAT_USE_INODES:
180412c028f9SKris Buschelman   case MAT_IGNORE_ZERO_ENTRIES:
18054e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
18064e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
180712c028f9SKris Buschelman     break;
180812c028f9SKris Buschelman   case MAT_ROW_ORIENTED:
18094e0d8c25SBarry Smith     a->roworiented = flg;
18104e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
18114e0d8c25SBarry Smith     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
181212c028f9SKris Buschelman     break;
18134e0d8c25SBarry Smith   case MAT_NEW_DIAGONALS:
1814290bbb0aSBarry Smith     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
181512c028f9SKris Buschelman     break;
181612c028f9SKris Buschelman   case MAT_IGNORE_OFF_PROC_ENTRIES:
18175c0f0b64SBarry Smith     a->donotstash = flg;
181812c028f9SKris Buschelman     break;
1819ffa07934SHong Zhang   case MAT_SPD:
1820ffa07934SHong Zhang     A->spd_set                         = PETSC_TRUE;
1821ffa07934SHong Zhang     A->spd                             = flg;
1822ffa07934SHong Zhang     if (flg) {
1823ffa07934SHong Zhang       A->symmetric                     = PETSC_TRUE;
1824ffa07934SHong Zhang       A->structurally_symmetric        = PETSC_TRUE;
1825ffa07934SHong Zhang       A->symmetric_set                 = PETSC_TRUE;
1826ffa07934SHong Zhang       A->structurally_symmetric_set    = PETSC_TRUE;
1827ffa07934SHong Zhang     }
1828ffa07934SHong Zhang     break;
182977e54ba9SKris Buschelman   case MAT_SYMMETRIC:
18304e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
183125f421beSHong Zhang     break;
183277e54ba9SKris Buschelman   case MAT_STRUCTURALLY_SYMMETRIC:
1833eeffb40dSHong Zhang     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1834eeffb40dSHong Zhang     break;
1835bf108f30SBarry Smith   case MAT_HERMITIAN:
1836eeffb40dSHong Zhang     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1837eeffb40dSHong Zhang     break;
1838bf108f30SBarry Smith   case MAT_SYMMETRY_ETERNAL:
18394e0d8c25SBarry Smith     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
184077e54ba9SKris Buschelman     break;
184112c028f9SKris Buschelman   default:
1842e32f2f54SBarry Smith     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %d",op);
18433a40ed3dSBarry Smith   }
18443a40ed3dSBarry Smith   PetscFunctionReturn(0);
1845c74985f6SBarry Smith }
1846c74985f6SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAIJ"
/*
   MatGetRow_MPIAIJ - Returns one locally owned row of the matrix with global
   column indices, merging the entries of the local diagonal block (A) and the
   off-diagonal block (B).

   Input Parameters:
.  matin - the matrix
.  row   - global row number; must lie in [rmap->rstart, rmap->rend)
.  idx   - if non-null, set to the global column indices of the row
.  v     - if non-null, set to the numerical values of the row

   Output Parameter:
.  nz - number of nonzeros in the row

   Notes:
   idx/v point into internal buffers (mat->rowindices/mat->rowvalues) that are
   reused by the next call; the caller must call MatRestoreRow_MPIAIJ() and
   may not hold two rows at once (guarded by mat->getrowactive).
*/
PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
  PetscErrorCode ierr;
  PetscInt       i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart;
  PetscInt       nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend;
  PetscInt       *cmap,*idx_p;

  PetscFunctionBegin;
  if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
  mat->getrowactive = PETSC_TRUE;

  /* lazily create the merge buffers, sized for the widest local row */
  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data;
    PetscInt   max = 1,tmp;
    for (i=0; i<matin->rmap->n; i++) {
      tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
      if (max < tmp) { max = tmp; }
    }
    ierr = PetscMalloc2(max,PetscScalar,&mat->rowvalues,max,PetscInt,&mat->rowindices);CHKERRQ(ierr);
  }

  if (row < rstart || row >= rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only local rows");
  lrow = row - rstart;

  /* request from the blocks only what the caller asked for: null pvX/pcX
     tells the block's getrow not to return values/columns */
  pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
  if (!v)   {pvA = 0; pvB = 0;}
  if (!idx) {pcA = 0; if (!v) pcB = 0;}
  ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  nztot = nzA + nzB;

  /* cmap (garray) translates B's compressed column numbers to global ones */
  cmap  = mat->garray;
  if (v  || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      /* imark = number of B entries whose global column precedes the diagonal
         block; shared between the value and index merges below */
      PetscInt imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        for (i=0; i<nzB; i++) {
          if (cmap[cworkB[i]] < cstart)   v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        /* A's entries in the middle, then B's entries right of the diagonal block */
        for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
        for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* split point already computed by the value merge above */
          for (i=0; i<imark; i++) {
            idx_p[i] = cmap[cworkB[i]];
          }
        } else {
          /* v was not requested, so find the split point here */
          for (i=0; i<nzB; i++) {
            if (cmap[cworkB[i]] < cstart)   idx_p[i] = cmap[cworkB[i]];
            else break;
          }
          imark = i;
        }
        for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart + cworkA[i];
        for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]];
      }
    } else {
      /* empty row: signal with null pointers */
      if (idx) *idx = 0;
      if (v)   *v   = 0;
    }
  }
  *nz = nztot;
  ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
192639e00950SLois Curfman McInnes 
19274a2ae208SSatish Balay #undef __FUNCT__
19284a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ"
1929b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
193039e00950SLois Curfman McInnes {
19317a0afa10SBarry Smith   Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data;
19323a40ed3dSBarry Smith 
19333a40ed3dSBarry Smith   PetscFunctionBegin;
1934e7e72b3dSBarry Smith   if (!aij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
19357a0afa10SBarry Smith   aij->getrowactive = PETSC_FALSE;
19363a40ed3dSBarry Smith   PetscFunctionReturn(0);
193739e00950SLois Curfman McInnes }
193839e00950SLois Curfman McInnes 
#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIAIJ"
/*
   MatNorm_MPIAIJ - Computes the Frobenius, 1- or infinity-norm of an MPIAIJ
   matrix by combining the entries of the local diagonal (A) and off-diagonal
   (B) blocks and reducing across the communicator.

   NORM_FROBENIUS: sum of squared moduli, MPI_SUM reduced, then sqrt.
   NORM_1:         per-column absolute sums, MPI_SUM reduced, then max.
   NORM_INFINITY:  per-row absolute sums locally, then MPI_MAX reduced.
   NORM_2 is not supported.
*/
PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,cstart = mat->cmap->rstart;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (aij->size == 1) {
    /* single process: defer entirely to the sequential implementation */
    ierr =  MatNorm(aij->A,type,norm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      /* walk the raw nonzero arrays of both blocks, accumulating |a_ij|^2 */
      v = amat->a;
      for (i=0; i<amat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      v = bmat->a;
      for (i=0; i<bmat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPIU_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      *norm = PetscSqrtReal(*norm);
    } else if (type == NORM_1) { /* max column norm */
      /* tmp holds this process's contribution to every global column sum;
         note this allocates two arrays of the full global column count */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray = aij->garray;
      ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr);
      ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      *norm = 0.0;
      /* A's column indices are local: shift by cstart to get global columns */
      v = amat->a; jj = amat->j;
      for (j=0; j<amat->nz; j++) {
        tmp[cstart + *jj++ ] += PetscAbsScalar(*v);  v++;
      }
      /* B's column indices are compressed: map through garray */
      v = bmat->a; jj = bmat->j;
      for (j=0; j<bmat->nz; j++) {
        tmp[garray[*jj++]] += PetscAbsScalar(*v); v++;
      }
      ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *norm) *norm = tmp2[j];
      }
      ierr = PetscFree(tmp);CHKERRQ(ierr);
      ierr = PetscFree(tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row norm */
      PetscReal ntemp = 0.0;
      /* each local row is fully owned, so its absolute sum is the sum over
         both blocks; reduce the local maxima with MPI_MAX */
      for (j=0; j<aij->A->rmap->n; j++) {
        v = amat->a + amat->i[j];
        sum = 0.0;
        for (i=0; i<amat->i[j+1]-amat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        v = bmat->a + bmat->i[j];
        for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        if (sum > ntemp) ntemp = sum;
      }
      ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPIU_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_SUP,"No support for two norm");
    }
  }
  PetscFunctionReturn(0);
}
2015855ac2c5SLois Curfman McInnes 
#undef __FUNCT__
#define __FUNCT__ "MatTranspose_MPIAIJ"
/*
   MatTranspose_MPIAIJ - Forms the transpose of a parallel AIJ matrix.

   Input Parameters:
+  A     - the MPIAIJ matrix to transpose
.  reuse - MAT_INITIAL_MATRIX to create a new matrix, or MAT_REUSE_MATRIX to
           fill an existing one (in-place when *matout == A, square only)
-  matout - on output the transpose (or A itself for the in-place case)

   Notes:
   Works by inserting every local row of A (diagonal part Aloc and
   off-diagonal part Bloc) as a column of B via MatSetValues(), then
   assembling B.  The column indices of Aloc are temporarily shifted to
   global numbering and restored afterwards, so A is unchanged on return.

   NOTE(review): in the MAT_REUSE_MATRIX path with *matout != A the shift
   of aj to global indices is skipped (it only happens in the first branch),
   yet the copy loop below still passes aj to MatSetValues() as global
   column indices -- verify that this path is exercised/correct.
*/
PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz;
  PetscInt       cstart=A->cmap->rstart,ncol;
  Mat            B;
  MatScalar      *array;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");

  ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n;
  ai = Aloc->i; aj = Aloc->j;
  bi = Bloc->i; bj = Bloc->j;
  if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
    /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
    ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
    ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0; i<ai[ma]; i++){
      d_nnz[aj[i]] ++;                 /* column j of A becomes row j of B: count its entries */
      aj[i] += cstart; /* global col index to be used by MatSetValues() */
    }

    ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
    ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);  /* sizes of the transpose */
    ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);
    ierr = PetscFree(d_nnz);CHKERRQ(ierr);
  } else {
    B = *matout;
  }

  /* copy over the A part */
  array = Aloc->a;
  row = A->rmap->rstart;              /* global row of A = global column of B */
  for (i=0; i<ma; i++) {
    ncol = ai[i+1]-ai[i];
    /* insert row i of A as column 'row' of B (note swapped row/column arguments) */
    ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; aj += ncol;
  }
  aj = Aloc->j;
  for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */

  /* copy over the B part */
  ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr);
  ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr);
  array = Bloc->a;
  row = A->rmap->rstart;
  /* garray maps the compressed off-diagonal column numbering back to global columns */
  for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
  cols_tmp = cols;
  for (i=0; i<mb; i++) {
    ncol = bi[i+1]-bi[i];
    ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; cols_tmp += ncol;
  }
  ierr = PetscFree(cols);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
    *matout = B;
  } else {
    /* in-place: move B's data into A and discard the shell of B */
    ierr = MatHeaderMerge(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2086b7c46309SBarry Smith 
20874a2ae208SSatish Balay #undef __FUNCT__
20884a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ"
2089dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr)
2090a008b906SSatish Balay {
20914b967eb1SSatish Balay   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
20924b967eb1SSatish Balay   Mat            a = aij->A,b = aij->B;
2093dfbe8321SBarry Smith   PetscErrorCode ierr;
2094b1d57f15SBarry Smith   PetscInt       s1,s2,s3;
2095a008b906SSatish Balay 
20963a40ed3dSBarry Smith   PetscFunctionBegin;
20974b967eb1SSatish Balay   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
20984b967eb1SSatish Balay   if (rr) {
2099e1311b90SBarry Smith     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
2100e32f2f54SBarry Smith     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
21014b967eb1SSatish Balay     /* Overlap communication with computation. */
2102ca9f406cSSatish Balay     ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2103a008b906SSatish Balay   }
21044b967eb1SSatish Balay   if (ll) {
2105e1311b90SBarry Smith     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
2106e32f2f54SBarry Smith     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
2107f830108cSBarry Smith     ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr);
21084b967eb1SSatish Balay   }
21094b967eb1SSatish Balay   /* scale  the diagonal block */
2110f830108cSBarry Smith   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
21114b967eb1SSatish Balay 
21124b967eb1SSatish Balay   if (rr) {
21134b967eb1SSatish Balay     /* Do a scatter end and then right scale the off-diagonal block */
2114ca9f406cSSatish Balay     ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2115f830108cSBarry Smith     ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr);
21164b967eb1SSatish Balay   }
21174b967eb1SSatish Balay 
21183a40ed3dSBarry Smith   PetscFunctionReturn(0);
2119a008b906SSatish Balay }
2120a008b906SSatish Balay 
21214a2ae208SSatish Balay #undef __FUNCT__
2122521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ"
2123521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs)
21245a838052SSatish Balay {
2125521d7252SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
2126521d7252SBarry Smith   PetscErrorCode ierr;
2127521d7252SBarry Smith 
21283a40ed3dSBarry Smith   PetscFunctionBegin;
2129521d7252SBarry Smith   ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr);
2130521d7252SBarry Smith   ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr);
2131829b6ff0SJed Brown   ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr);
2132829b6ff0SJed Brown   ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr);
21333a40ed3dSBarry Smith   PetscFunctionReturn(0);
21345a838052SSatish Balay }
21354a2ae208SSatish Balay #undef __FUNCT__
21364a2ae208SSatish Balay #define __FUNCT__ "MatSetUnfactored_MPIAIJ"
2137dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A)
2138bb5a7306SBarry Smith {
2139bb5a7306SBarry Smith   Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
2140dfbe8321SBarry Smith   PetscErrorCode ierr;
21413a40ed3dSBarry Smith 
21423a40ed3dSBarry Smith   PetscFunctionBegin;
2143bb5a7306SBarry Smith   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
21443a40ed3dSBarry Smith   PetscFunctionReturn(0);
2145bb5a7306SBarry Smith }
2146bb5a7306SBarry Smith 
21474a2ae208SSatish Balay #undef __FUNCT__
21484a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ"
2149ace3abfcSBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscBool  *flag)
2150d4bb536fSBarry Smith {
2151d4bb536fSBarry Smith   Mat_MPIAIJ     *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data;
2152d4bb536fSBarry Smith   Mat            a,b,c,d;
2153ace3abfcSBarry Smith   PetscBool      flg;
2154dfbe8321SBarry Smith   PetscErrorCode ierr;
2155d4bb536fSBarry Smith 
21563a40ed3dSBarry Smith   PetscFunctionBegin;
2157d4bb536fSBarry Smith   a = matA->A; b = matA->B;
2158d4bb536fSBarry Smith   c = matB->A; d = matB->B;
2159d4bb536fSBarry Smith 
2160d4bb536fSBarry Smith   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
2161abc0a331SBarry Smith   if (flg) {
2162d4bb536fSBarry Smith     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
2163d4bb536fSBarry Smith   }
21647adad957SLisandro Dalcin   ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
21653a40ed3dSBarry Smith   PetscFunctionReturn(0);
2166d4bb536fSBarry Smith }
2167d4bb536fSBarry Smith 
21684a2ae208SSatish Balay #undef __FUNCT__
21694a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ"
2170dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str)
2171cb5b572fSBarry Smith {
2172dfbe8321SBarry Smith   PetscErrorCode ierr;
2173cb5b572fSBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ *)A->data;
2174cb5b572fSBarry Smith   Mat_MPIAIJ     *b = (Mat_MPIAIJ *)B->data;
2175cb5b572fSBarry Smith 
2176cb5b572fSBarry Smith   PetscFunctionBegin;
217733f4a19fSKris Buschelman   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
217833f4a19fSKris Buschelman   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
2179cb5b572fSBarry Smith     /* because of the column compression in the off-processor part of the matrix a->B,
2180cb5b572fSBarry Smith        the number of columns in a->B and b->B may be different, hence we cannot call
2181cb5b572fSBarry Smith        the MatCopy() directly on the two parts. If need be, we can provide a more
2182cb5b572fSBarry Smith        efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices
2183cb5b572fSBarry Smith        then copying the submatrices */
2184cb5b572fSBarry Smith     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2185cb5b572fSBarry Smith   } else {
2186cb5b572fSBarry Smith     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
2187cb5b572fSBarry Smith     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
2188cb5b572fSBarry Smith   }
2189cb5b572fSBarry Smith   PetscFunctionReturn(0);
2190cb5b572fSBarry Smith }
2191cb5b572fSBarry Smith 
21924a2ae208SSatish Balay #undef __FUNCT__
21934a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ"
2194dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A)
2195273d9f13SBarry Smith {
2196dfbe8321SBarry Smith   PetscErrorCode ierr;
2197273d9f13SBarry Smith 
2198273d9f13SBarry Smith   PetscFunctionBegin;
2199273d9f13SBarry Smith   ierr =  MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
2200273d9f13SBarry Smith   PetscFunctionReturn(0);
2201273d9f13SBarry Smith }
2202273d9f13SBarry Smith 
2203ac90fabeSBarry Smith #undef __FUNCT__
2204ac90fabeSBarry Smith #define __FUNCT__ "MatAXPY_MPIAIJ"
2205f4df32b1SMatthew Knepley PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
2206ac90fabeSBarry Smith {
2207dfbe8321SBarry Smith   PetscErrorCode ierr;
2208b1d57f15SBarry Smith   PetscInt       i;
2209ac90fabeSBarry Smith   Mat_MPIAIJ     *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data;
22104ce68768SBarry Smith   PetscBLASInt   bnz,one=1;
2211ac90fabeSBarry Smith   Mat_SeqAIJ     *x,*y;
2212ac90fabeSBarry Smith 
2213ac90fabeSBarry Smith   PetscFunctionBegin;
2214ac90fabeSBarry Smith   if (str == SAME_NONZERO_PATTERN) {
2215f4df32b1SMatthew Knepley     PetscScalar alpha = a;
2216ac90fabeSBarry Smith     x = (Mat_SeqAIJ *)xx->A->data;
2217ac90fabeSBarry Smith     y = (Mat_SeqAIJ *)yy->A->data;
22180805154bSBarry Smith     bnz = PetscBLASIntCast(x->nz);
2219f4df32b1SMatthew Knepley     BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
2220ac90fabeSBarry Smith     x = (Mat_SeqAIJ *)xx->B->data;
2221ac90fabeSBarry Smith     y = (Mat_SeqAIJ *)yy->B->data;
22220805154bSBarry Smith     bnz = PetscBLASIntCast(x->nz);
2223f4df32b1SMatthew Knepley     BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
2224a30b2313SHong Zhang   } else if (str == SUBSET_NONZERO_PATTERN) {
2225f4df32b1SMatthew Knepley     ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr);
2226c537a176SHong Zhang 
2227c537a176SHong Zhang     x = (Mat_SeqAIJ *)xx->B->data;
2228a30b2313SHong Zhang     y = (Mat_SeqAIJ *)yy->B->data;
2229a30b2313SHong Zhang     if (y->xtoy && y->XtoY != xx->B) {
2230a30b2313SHong Zhang       ierr = PetscFree(y->xtoy);CHKERRQ(ierr);
22316bf464f9SBarry Smith       ierr = MatDestroy(&y->XtoY);CHKERRQ(ierr);
2232c537a176SHong Zhang     }
2233a30b2313SHong Zhang     if (!y->xtoy) { /* get xtoy */
2234d0f46423SBarry Smith       ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr);
2235a30b2313SHong Zhang       y->XtoY = xx->B;
2236407f6b05SHong Zhang       ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr);
2237c537a176SHong Zhang     }
2238f4df32b1SMatthew Knepley     for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]);
2239ac90fabeSBarry Smith   } else {
22409f5f6813SShri Abhyankar     Mat B;
22419f5f6813SShri Abhyankar     PetscInt *nnz_d,*nnz_o;
22429f5f6813SShri Abhyankar     ierr = PetscMalloc(yy->A->rmap->N*sizeof(PetscInt),&nnz_d);CHKERRQ(ierr);
22439f5f6813SShri Abhyankar     ierr = PetscMalloc(yy->B->rmap->N*sizeof(PetscInt),&nnz_o);CHKERRQ(ierr);
22449f5f6813SShri Abhyankar     ierr = MatCreate(((PetscObject)Y)->comm,&B);CHKERRQ(ierr);
2245bc5a2726SShri Abhyankar     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
22469f5f6813SShri Abhyankar     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
22479f5f6813SShri Abhyankar     ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
22489f5f6813SShri Abhyankar     ierr = MatAXPYGetPreallocation_SeqAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
22499f5f6813SShri Abhyankar     ierr = MatAXPYGetPreallocation_SeqAIJ(yy->B,xx->B,nnz_o);CHKERRQ(ierr);
2250ecd8bba6SJed Brown     ierr = MatMPIAIJSetPreallocation(B,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
22519f5f6813SShri Abhyankar     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
22529f5f6813SShri Abhyankar     ierr = MatHeaderReplace(Y,B);
22539f5f6813SShri Abhyankar     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
22549f5f6813SShri Abhyankar     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2255ac90fabeSBarry Smith   }
2256ac90fabeSBarry Smith   PetscFunctionReturn(0);
2257ac90fabeSBarry Smith }
2258ac90fabeSBarry Smith 
22597087cfbeSBarry Smith extern PetscErrorCode  MatConjugate_SeqAIJ(Mat);
2260354c94deSBarry Smith 
2261354c94deSBarry Smith #undef __FUNCT__
2262354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ"
22637087cfbeSBarry Smith PetscErrorCode  MatConjugate_MPIAIJ(Mat mat)
2264354c94deSBarry Smith {
2265354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX)
2266354c94deSBarry Smith   PetscErrorCode ierr;
2267354c94deSBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
2268354c94deSBarry Smith 
2269354c94deSBarry Smith   PetscFunctionBegin;
2270354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr);
2271354c94deSBarry Smith   ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr);
2272354c94deSBarry Smith #else
2273354c94deSBarry Smith   PetscFunctionBegin;
2274354c94deSBarry Smith #endif
2275354c94deSBarry Smith   PetscFunctionReturn(0);
2276354c94deSBarry Smith }
2277354c94deSBarry Smith 
227899cafbc1SBarry Smith #undef __FUNCT__
227999cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ"
228099cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A)
228199cafbc1SBarry Smith {
228299cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
228399cafbc1SBarry Smith   PetscErrorCode ierr;
228499cafbc1SBarry Smith 
228599cafbc1SBarry Smith   PetscFunctionBegin;
228699cafbc1SBarry Smith   ierr = MatRealPart(a->A);CHKERRQ(ierr);
228799cafbc1SBarry Smith   ierr = MatRealPart(a->B);CHKERRQ(ierr);
228899cafbc1SBarry Smith   PetscFunctionReturn(0);
228999cafbc1SBarry Smith }
229099cafbc1SBarry Smith 
229199cafbc1SBarry Smith #undef __FUNCT__
229299cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ"
229399cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A)
229499cafbc1SBarry Smith {
229599cafbc1SBarry Smith   Mat_MPIAIJ   *a = (Mat_MPIAIJ*)A->data;
229699cafbc1SBarry Smith   PetscErrorCode ierr;
229799cafbc1SBarry Smith 
229899cafbc1SBarry Smith   PetscFunctionBegin;
229999cafbc1SBarry Smith   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
230099cafbc1SBarry Smith   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
230199cafbc1SBarry Smith   PetscFunctionReturn(0);
230299cafbc1SBarry Smith }
230399cafbc1SBarry Smith 
2304103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL
2305103bf8bdSMatthew Knepley 
2306103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp>
2307a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp>
2308a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp>
2309a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp>
2310103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp>
2311a2c909beSMatthew Knepley #include <boost/multi_array.hpp>
2312d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp>
2313103bf8bdSMatthew Knepley 
2314103bf8bdSMatthew Knepley #undef __FUNCT__
2315103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ"
2316103bf8bdSMatthew Knepley /*
2317103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
2318103bf8bdSMatthew Knepley */
23190481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info)
2320103bf8bdSMatthew Knepley {
2321a2c909beSMatthew Knepley   namespace petsc = boost::distributed::petsc;
2322a2c909beSMatthew Knepley 
2323a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
2324a2c909beSMatthew Knepley   using boost::graph::distributed::ilu_default::process_group_type;
2325a2c909beSMatthew Knepley   using boost::graph::ilu_permuted;
2326a2c909beSMatthew Knepley 
2327ace3abfcSBarry Smith   PetscBool       row_identity, col_identity;
2328776b82aeSLisandro Dalcin   PetscContainer  c;
2329103bf8bdSMatthew Knepley   PetscInt        m, n, M, N;
2330103bf8bdSMatthew Knepley   PetscErrorCode  ierr;
2331103bf8bdSMatthew Knepley 
2332103bf8bdSMatthew Knepley   PetscFunctionBegin;
2333e32f2f54SBarry Smith   if (info->levels != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu");
2334103bf8bdSMatthew Knepley   ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr);
2335103bf8bdSMatthew Knepley   ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr);
2336103bf8bdSMatthew Knepley   if (!row_identity || !col_identity) {
2337e32f2f54SBarry Smith     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU");
2338103bf8bdSMatthew Knepley   }
2339103bf8bdSMatthew Knepley 
2340103bf8bdSMatthew Knepley   process_group_type pg;
2341a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
2342a2c909beSMatthew Knepley   lgraph_type*   lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg));
2343a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
2344a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
2345a2c909beSMatthew Knepley 
2346103bf8bdSMatthew Knepley   petsc::read_matrix(A, graph, get(boost::edge_weight, graph));
2347a2c909beSMatthew Knepley   ilu_permuted(level_graph);
2348103bf8bdSMatthew Knepley 
2349103bf8bdSMatthew Knepley   /* put together the new matrix */
23507adad957SLisandro Dalcin   ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr);
2351103bf8bdSMatthew Knepley   ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr);
2352103bf8bdSMatthew Knepley   ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr);
2353719d5645SBarry Smith   ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr);
2354719d5645SBarry Smith   ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr);
2355719d5645SBarry Smith   ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2356719d5645SBarry Smith   ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2357103bf8bdSMatthew Knepley 
23587adad957SLisandro Dalcin   ierr = PetscContainerCreate(((PetscObject)A)->comm, &c);
2359776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(c, lgraph_p);
2360719d5645SBarry Smith   ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c);
2361bf0cc555SLisandro Dalcin   ierr = PetscContainerDestroy(&c);
2362103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
2363103bf8bdSMatthew Knepley }
2364103bf8bdSMatthew Knepley 
#undef __FUNCT__
#define __FUNCT__ "MatLUFactorNumeric_MPIAIJ"
/*
  Intentionally a no-op: with the PBGL backend the factorization is
  presumably performed during MatILUFactorSymbolic_MPIAIJ() (which calls
  ilu_permuted() on the level graph), so there is no separate numeric
  phase -- NOTE(review): confirm against the PBGL ILU interface.
*/
PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
2372103bf8bdSMatthew Knepley 
2373103bf8bdSMatthew Knepley #undef __FUNCT__
2374103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ"
2375103bf8bdSMatthew Knepley /*
2376103bf8bdSMatthew Knepley   This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu>
2377103bf8bdSMatthew Knepley */
2378103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x)
2379103bf8bdSMatthew Knepley {
2380a2c909beSMatthew Knepley   namespace graph_dist = boost::graph::distributed;
2381a2c909beSMatthew Knepley 
2382a2c909beSMatthew Knepley   typedef graph_dist::ilu_default::ilu_level_graph_type  lgraph_type;
2383a2c909beSMatthew Knepley   lgraph_type*   lgraph_p;
2384776b82aeSLisandro Dalcin   PetscContainer c;
2385103bf8bdSMatthew Knepley   PetscErrorCode ierr;
2386103bf8bdSMatthew Knepley 
2387103bf8bdSMatthew Knepley   PetscFunctionBegin;
2388103bf8bdSMatthew Knepley   ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr);
2389776b82aeSLisandro Dalcin   ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr);
2390103bf8bdSMatthew Knepley   ierr = VecCopy(b, x);CHKERRQ(ierr);
2391a2c909beSMatthew Knepley 
2392a2c909beSMatthew Knepley   PetscScalar* array_x;
2393a2c909beSMatthew Knepley   ierr = VecGetArray(x, &array_x);CHKERRQ(ierr);
2394a2c909beSMatthew Knepley   PetscInt sx;
2395a2c909beSMatthew Knepley   ierr = VecGetSize(x, &sx);CHKERRQ(ierr);
2396a2c909beSMatthew Knepley 
2397a2c909beSMatthew Knepley   PetscScalar* array_b;
2398a2c909beSMatthew Knepley   ierr = VecGetArray(b, &array_b);CHKERRQ(ierr);
2399a2c909beSMatthew Knepley   PetscInt sb;
2400a2c909beSMatthew Knepley   ierr = VecGetSize(b, &sb);CHKERRQ(ierr);
2401a2c909beSMatthew Knepley 
2402a2c909beSMatthew Knepley   lgraph_type&   level_graph = *lgraph_p;
2403a2c909beSMatthew Knepley   graph_dist::ilu_default::graph_type&            graph(level_graph.graph);
2404a2c909beSMatthew Knepley 
2405a2c909beSMatthew Knepley   typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type;
2406a2c909beSMatthew Knepley   array_ref_type                                 ref_b(array_b, boost::extents[num_vertices(graph)]),
2407a2c909beSMatthew Knepley                                                  ref_x(array_x, boost::extents[num_vertices(graph)]);
2408a2c909beSMatthew Knepley 
2409a2c909beSMatthew Knepley   typedef boost::iterator_property_map<array_ref_type::iterator,
2410a2c909beSMatthew Knepley                                 boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type>  gvector_type;
2411a2c909beSMatthew Knepley   gvector_type                                   vector_b(ref_b.begin(), get(boost::vertex_index, graph)),
2412a2c909beSMatthew Knepley                                                  vector_x(ref_x.begin(), get(boost::vertex_index, graph));
2413a2c909beSMatthew Knepley 
2414a2c909beSMatthew Knepley   ilu_set_solve(*lgraph_p, vector_b, vector_x);
2415a2c909beSMatthew Knepley 
2416103bf8bdSMatthew Knepley   PetscFunctionReturn(0);
2417103bf8bdSMatthew Knepley }
2418103bf8bdSMatthew Knepley #endif
2419103bf8bdSMatthew Knepley 
typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */
  PetscInt       nzlocal,nsends,nrecvs;          /* local nonzero count; number of sends/receives set up */
  PetscMPIInt    *send_rank,*recv_rank;          /* ranks this process sends to / receives from */
  PetscInt       *sbuf_nz,*rbuf_nz,*sbuf_j,**rbuf_j; /* send/recv nonzero counts and column-index buffers (one rbuf_j[i] per receive) */
  PetscScalar    *sbuf_a,**rbuf_a;               /* send/recv numerical-value buffers (one rbuf_a[i] per receive) */
  PetscErrorCode (*Destroy)(Mat);                /* original destroy routine, restored by MatDestroy_MatRedundant() */
} Mat_Redundant;
242769db28dcSHong Zhang 
242869db28dcSHong Zhang #undef __FUNCT__
242969db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant"
243069db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr)
243169db28dcSHong Zhang {
243269db28dcSHong Zhang   PetscErrorCode       ierr;
243369db28dcSHong Zhang   Mat_Redundant        *redund=(Mat_Redundant*)ptr;
243469db28dcSHong Zhang   PetscInt             i;
243569db28dcSHong Zhang 
243669db28dcSHong Zhang   PetscFunctionBegin;
24371d79065fSBarry Smith   ierr = PetscFree2(redund->send_rank,redund->recv_rank);CHKERRQ(ierr);
243869db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr);
243969db28dcSHong Zhang   ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr);
244069db28dcSHong Zhang   for (i=0; i<redund->nrecvs; i++){
244169db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr);
244269db28dcSHong Zhang     ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr);
244369db28dcSHong Zhang   }
24441d79065fSBarry Smith   ierr = PetscFree4(redund->sbuf_nz,redund->rbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr);
244569db28dcSHong Zhang   ierr = PetscFree(redund);CHKERRQ(ierr);
244669db28dcSHong Zhang   PetscFunctionReturn(0);
244769db28dcSHong Zhang }
244869db28dcSHong Zhang 
244969db28dcSHong Zhang #undef __FUNCT__
245069db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant"
245169db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A)
245269db28dcSHong Zhang {
245369db28dcSHong Zhang   PetscErrorCode  ierr;
245469db28dcSHong Zhang   PetscContainer  container;
245569db28dcSHong Zhang   Mat_Redundant   *redund=PETSC_NULL;
245669db28dcSHong Zhang 
245769db28dcSHong Zhang   PetscFunctionBegin;
245869db28dcSHong Zhang   ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
2459bf0cc555SLisandro Dalcin   if (!container) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Container does not exit");
246069db28dcSHong Zhang   ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
2461bf0cc555SLisandro Dalcin   A->ops->destroy = redund->Destroy;
246269db28dcSHong Zhang   ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr);
2463bf0cc555SLisandro Dalcin   if (A->ops->destroy) {
246469db28dcSHong Zhang     ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
2465bf0cc555SLisandro Dalcin   }
246669db28dcSHong Zhang   PetscFunctionReturn(0);
246769db28dcSHong Zhang }
246869db28dcSHong Zhang 
246969db28dcSHong Zhang #undef __FUNCT__
247069db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ"
247169db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant)
247269db28dcSHong Zhang {
247369db28dcSHong Zhang   PetscMPIInt    rank,size;
24747adad957SLisandro Dalcin   MPI_Comm       comm=((PetscObject)mat)->comm;
247569db28dcSHong Zhang   PetscErrorCode ierr;
247669db28dcSHong Zhang   PetscInt       nsends=0,nrecvs=0,i,rownz_max=0;
247769db28dcSHong Zhang   PetscMPIInt    *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL;
2478d0f46423SBarry Smith   PetscInt       *rowrange=mat->rmap->range;
247969db28dcSHong Zhang   Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
248069db28dcSHong Zhang   Mat            A=aij->A,B=aij->B,C=*matredundant;
248169db28dcSHong Zhang   Mat_SeqAIJ     *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data;
248269db28dcSHong Zhang   PetscScalar    *sbuf_a;
248369db28dcSHong Zhang   PetscInt       nzlocal=a->nz+b->nz;
2484d0f46423SBarry Smith   PetscInt       j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB;
2485d0f46423SBarry Smith   PetscInt       rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N;
248669db28dcSHong Zhang   PetscInt       *cols,ctmp,lwrite,*rptr,l,*sbuf_j;
2487a77337e4SBarry Smith   MatScalar      *aworkA,*aworkB;
2488a77337e4SBarry Smith   PetscScalar    *vals;
248969db28dcSHong Zhang   PetscMPIInt    tag1,tag2,tag3,imdex;
249069db28dcSHong Zhang   MPI_Request    *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL,
249169db28dcSHong Zhang                  *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL;
249269db28dcSHong Zhang   MPI_Status     recv_status,*send_status;
249369db28dcSHong Zhang   PetscInt       *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count;
249469db28dcSHong Zhang   PetscInt       **rbuf_j=PETSC_NULL;
249569db28dcSHong Zhang   PetscScalar    **rbuf_a=PETSC_NULL;
249669db28dcSHong Zhang   Mat_Redundant  *redund=PETSC_NULL;
249769db28dcSHong Zhang   PetscContainer container;
249869db28dcSHong Zhang 
249969db28dcSHong Zhang   PetscFunctionBegin;
250069db28dcSHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
250169db28dcSHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
250269db28dcSHong Zhang 
250369db28dcSHong Zhang   if (reuse == MAT_REUSE_MATRIX) {
250469db28dcSHong Zhang     ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
2505e32f2f54SBarry Smith     if (M != N || M != mat->rmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size");
250669db28dcSHong Zhang     ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr);
2507e32f2f54SBarry Smith     if (M != N || M != mlocal_sub) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size");
250869db28dcSHong Zhang     ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr);
2509bf0cc555SLisandro Dalcin     if (!container) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Container does not exit");
251069db28dcSHong Zhang     ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr);
2511e32f2f54SBarry Smith     if (nzlocal != redund->nzlocal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong nzlocal");
251269db28dcSHong Zhang 
251369db28dcSHong Zhang     nsends    = redund->nsends;
251469db28dcSHong Zhang     nrecvs    = redund->nrecvs;
25151d79065fSBarry Smith     send_rank = redund->send_rank;
25161d79065fSBarry Smith     recv_rank = redund->recv_rank;
25171d79065fSBarry Smith     sbuf_nz   = redund->sbuf_nz;
25181d79065fSBarry Smith     rbuf_nz   = redund->rbuf_nz;
251969db28dcSHong Zhang     sbuf_j    = redund->sbuf_j;
252069db28dcSHong Zhang     sbuf_a    = redund->sbuf_a;
252169db28dcSHong Zhang     rbuf_j    = redund->rbuf_j;
252269db28dcSHong Zhang     rbuf_a    = redund->rbuf_a;
252369db28dcSHong Zhang   }
252469db28dcSHong Zhang 
252569db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
252669db28dcSHong Zhang     PetscMPIInt  subrank,subsize;
252769db28dcSHong Zhang     PetscInt     nleftover,np_subcomm;
252869db28dcSHong Zhang     /* get the destination processors' id send_rank, nsends and nrecvs */
252969db28dcSHong Zhang     ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr);
253069db28dcSHong Zhang     ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr);
25311d79065fSBarry Smith     ierr = PetscMalloc2(size,PetscMPIInt,&send_rank,size,PetscMPIInt,&recv_rank);
253269db28dcSHong Zhang     np_subcomm = size/nsubcomm;
253369db28dcSHong Zhang     nleftover  = size - nsubcomm*np_subcomm;
253469db28dcSHong Zhang     nsends = 0; nrecvs = 0;
253569db28dcSHong Zhang     for (i=0; i<size; i++){ /* i=rank*/
253669db28dcSHong Zhang       if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */
253769db28dcSHong Zhang         send_rank[nsends] = i; nsends++;
253869db28dcSHong Zhang         recv_rank[nrecvs++] = i;
253969db28dcSHong Zhang       }
254069db28dcSHong Zhang     }
254169db28dcSHong Zhang     if (rank >= size - nleftover){/* this proc is a leftover processor */
254269db28dcSHong Zhang       i = size-nleftover-1;
254369db28dcSHong Zhang       j = 0;
254469db28dcSHong Zhang       while (j < nsubcomm - nleftover){
254569db28dcSHong Zhang         send_rank[nsends++] = i;
254669db28dcSHong Zhang         i--; j++;
254769db28dcSHong Zhang       }
254869db28dcSHong Zhang     }
254969db28dcSHong Zhang 
255069db28dcSHong Zhang     if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */
255169db28dcSHong Zhang       for (i=0; i<nleftover; i++){
255269db28dcSHong Zhang         recv_rank[nrecvs++] = size-nleftover+i;
255369db28dcSHong Zhang       }
255469db28dcSHong Zhang     }
255569db28dcSHong Zhang 
255669db28dcSHong Zhang     /* allocate sbuf_j, sbuf_a */
255769db28dcSHong Zhang     i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2;
255869db28dcSHong Zhang     ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr);
255969db28dcSHong Zhang     ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr);
256069db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
256169db28dcSHong Zhang 
256269db28dcSHong Zhang   /* copy mat's local entries into the buffers */
256369db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
256469db28dcSHong Zhang     rownz_max = 0;
256569db28dcSHong Zhang     rptr = sbuf_j;
256669db28dcSHong Zhang     cols = sbuf_j + rend-rstart + 1;
256769db28dcSHong Zhang     vals = sbuf_a;
256869db28dcSHong Zhang     rptr[0] = 0;
256969db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
257069db28dcSHong Zhang       row = i + rstart;
257169db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
257269db28dcSHong Zhang       ncols  = nzA + nzB;
257369db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
257469db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
257569db28dcSHong Zhang       /* load the column indices for this row into cols */
257669db28dcSHong Zhang       lwrite = 0;
257769db28dcSHong Zhang       for (l=0; l<nzB; l++) {
257869db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart){
257969db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
258069db28dcSHong Zhang           cols[lwrite++] = ctmp;
258169db28dcSHong Zhang         }
258269db28dcSHong Zhang       }
258369db28dcSHong Zhang       for (l=0; l<nzA; l++){
258469db28dcSHong Zhang         vals[lwrite]   = aworkA[l];
258569db28dcSHong Zhang         cols[lwrite++] = cstart + cworkA[l];
258669db28dcSHong Zhang       }
258769db28dcSHong Zhang       for (l=0; l<nzB; l++) {
258869db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend){
258969db28dcSHong Zhang           vals[lwrite]   = aworkB[l];
259069db28dcSHong Zhang           cols[lwrite++] = ctmp;
259169db28dcSHong Zhang         }
259269db28dcSHong Zhang       }
259369db28dcSHong Zhang       vals += ncols;
259469db28dcSHong Zhang       cols += ncols;
259569db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
259669db28dcSHong Zhang       if (rownz_max < ncols) rownz_max = ncols;
259769db28dcSHong Zhang     }
2598e32f2f54SBarry Smith     if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz);
259969db28dcSHong Zhang   } else { /* only copy matrix values into sbuf_a */
260069db28dcSHong Zhang     rptr = sbuf_j;
260169db28dcSHong Zhang     vals = sbuf_a;
260269db28dcSHong Zhang     rptr[0] = 0;
260369db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
260469db28dcSHong Zhang       row = i + rstart;
260569db28dcSHong Zhang       nzA    = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i];
260669db28dcSHong Zhang       ncols  = nzA + nzB;
260769db28dcSHong Zhang       cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i];
260869db28dcSHong Zhang       aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i];
260969db28dcSHong Zhang       lwrite = 0;
261069db28dcSHong Zhang       for (l=0; l<nzB; l++) {
261169db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l];
261269db28dcSHong Zhang       }
261369db28dcSHong Zhang       for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l];
261469db28dcSHong Zhang       for (l=0; l<nzB; l++) {
261569db28dcSHong Zhang         if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l];
261669db28dcSHong Zhang       }
261769db28dcSHong Zhang       vals += ncols;
261869db28dcSHong Zhang       rptr[i+1] = rptr[i] + ncols;
261969db28dcSHong Zhang     }
262069db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
262169db28dcSHong Zhang 
262269db28dcSHong Zhang   /* send nzlocal to others, and recv other's nzlocal */
262369db28dcSHong Zhang   /*--------------------------------------------------*/
262469db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
262569db28dcSHong Zhang     ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
262669db28dcSHong Zhang     s_waits2 = s_waits3 + nsends;
262769db28dcSHong Zhang     s_waits1 = s_waits2 + nsends;
262869db28dcSHong Zhang     r_waits1 = s_waits1 + nsends;
262969db28dcSHong Zhang     r_waits2 = r_waits1 + nrecvs;
263069db28dcSHong Zhang     r_waits3 = r_waits2 + nrecvs;
263169db28dcSHong Zhang   } else {
263269db28dcSHong Zhang     ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr);
263369db28dcSHong Zhang     r_waits3 = s_waits3 + nsends;
263469db28dcSHong Zhang   }
263569db28dcSHong Zhang 
263669db28dcSHong Zhang   ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr);
263769db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
263869db28dcSHong Zhang     /* get new tags to keep the communication clean */
263969db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr);
264069db28dcSHong Zhang     ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr);
26411d79065fSBarry Smith     ierr = PetscMalloc4(nsends,PetscInt,&sbuf_nz,nrecvs,PetscInt,&rbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr);
264269db28dcSHong Zhang 
264369db28dcSHong Zhang     /* post receives of other's nzlocal */
264469db28dcSHong Zhang     for (i=0; i<nrecvs; i++){
264569db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr);
264669db28dcSHong Zhang     }
264769db28dcSHong Zhang     /* send nzlocal to others */
264869db28dcSHong Zhang     for (i=0; i<nsends; i++){
264969db28dcSHong Zhang       sbuf_nz[i] = nzlocal;
265069db28dcSHong Zhang       ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr);
265169db28dcSHong Zhang     }
265269db28dcSHong Zhang     /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */
265369db28dcSHong Zhang     count = nrecvs;
265469db28dcSHong Zhang     while (count) {
265569db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr);
265669db28dcSHong Zhang       recv_rank[imdex] = recv_status.MPI_SOURCE;
265769db28dcSHong Zhang       /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */
265869db28dcSHong Zhang       ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr);
265969db28dcSHong Zhang 
266069db28dcSHong Zhang       i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */
266169db28dcSHong Zhang       rbuf_nz[imdex] += i + 2;
266269db28dcSHong Zhang       ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr);
266369db28dcSHong Zhang       ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr);
266469db28dcSHong Zhang       count--;
266569db28dcSHong Zhang     }
266669db28dcSHong Zhang     /* wait on sends of nzlocal */
266769db28dcSHong Zhang     if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);}
266869db28dcSHong Zhang     /* send mat->i,j to others, and recv from other's */
266969db28dcSHong Zhang     /*------------------------------------------------*/
267069db28dcSHong Zhang     for (i=0; i<nsends; i++){
267169db28dcSHong Zhang       j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1;
267269db28dcSHong Zhang       ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr);
267369db28dcSHong Zhang     }
267469db28dcSHong Zhang     /* wait on receives of mat->i,j */
267569db28dcSHong Zhang     /*------------------------------*/
267669db28dcSHong Zhang     count = nrecvs;
267769db28dcSHong Zhang     while (count) {
267869db28dcSHong Zhang       ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr);
2679e32f2f54SBarry Smith       if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
268069db28dcSHong Zhang       count--;
268169db28dcSHong Zhang     }
268269db28dcSHong Zhang     /* wait on sends of mat->i,j */
268369db28dcSHong Zhang     /*---------------------------*/
268469db28dcSHong Zhang     if (nsends) {
268569db28dcSHong Zhang       ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr);
268669db28dcSHong Zhang     }
268769db28dcSHong Zhang   } /* endof if (reuse == MAT_INITIAL_MATRIX) */
268869db28dcSHong Zhang 
268969db28dcSHong Zhang   /* post receives, send and receive mat->a */
269069db28dcSHong Zhang   /*----------------------------------------*/
269169db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++) {
269269db28dcSHong Zhang     ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr);
269369db28dcSHong Zhang   }
269469db28dcSHong Zhang   for (i=0; i<nsends; i++){
269569db28dcSHong Zhang     ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr);
269669db28dcSHong Zhang   }
269769db28dcSHong Zhang   count = nrecvs;
269869db28dcSHong Zhang   while (count) {
269969db28dcSHong Zhang     ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr);
2700e32f2f54SBarry Smith     if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE);
270169db28dcSHong Zhang     count--;
270269db28dcSHong Zhang   }
270369db28dcSHong Zhang   if (nsends) {
270469db28dcSHong Zhang     ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr);
270569db28dcSHong Zhang   }
270669db28dcSHong Zhang 
270769db28dcSHong Zhang   ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr);
270869db28dcSHong Zhang 
270969db28dcSHong Zhang   /* create redundant matrix */
271069db28dcSHong Zhang   /*-------------------------*/
271169db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX){
271269db28dcSHong Zhang     /* compute rownz_max for preallocation */
271369db28dcSHong Zhang     for (imdex=0; imdex<nrecvs; imdex++){
271469db28dcSHong Zhang       j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]];
271569db28dcSHong Zhang       rptr = rbuf_j[imdex];
271669db28dcSHong Zhang       for (i=0; i<j; i++){
271769db28dcSHong Zhang         ncols = rptr[i+1] - rptr[i];
271869db28dcSHong Zhang         if (rownz_max < ncols) rownz_max = ncols;
271969db28dcSHong Zhang       }
272069db28dcSHong Zhang     }
272169db28dcSHong Zhang 
272269db28dcSHong Zhang     ierr = MatCreate(subcomm,&C);CHKERRQ(ierr);
272369db28dcSHong Zhang     ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
272469db28dcSHong Zhang     ierr = MatSetFromOptions(C);CHKERRQ(ierr);
272569db28dcSHong Zhang     ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr);
272669db28dcSHong Zhang     ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr);
272769db28dcSHong Zhang   } else {
272869db28dcSHong Zhang     C = *matredundant;
272969db28dcSHong Zhang   }
273069db28dcSHong Zhang 
273169db28dcSHong Zhang   /* insert local matrix entries */
273269db28dcSHong Zhang   rptr = sbuf_j;
273369db28dcSHong Zhang   cols = sbuf_j + rend-rstart + 1;
273469db28dcSHong Zhang   vals = sbuf_a;
273569db28dcSHong Zhang   for (i=0; i<rend-rstart; i++){
273669db28dcSHong Zhang     row   = i + rstart;
273769db28dcSHong Zhang     ncols = rptr[i+1] - rptr[i];
273869db28dcSHong Zhang     ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
273969db28dcSHong Zhang     vals += ncols;
274069db28dcSHong Zhang     cols += ncols;
274169db28dcSHong Zhang   }
274269db28dcSHong Zhang   /* insert received matrix entries */
274369db28dcSHong Zhang   for (imdex=0; imdex<nrecvs; imdex++){
274469db28dcSHong Zhang     rstart = rowrange[recv_rank[imdex]];
274569db28dcSHong Zhang     rend   = rowrange[recv_rank[imdex]+1];
274669db28dcSHong Zhang     rptr = rbuf_j[imdex];
274769db28dcSHong Zhang     cols = rbuf_j[imdex] + rend-rstart + 1;
274869db28dcSHong Zhang     vals = rbuf_a[imdex];
274969db28dcSHong Zhang     for (i=0; i<rend-rstart; i++){
275069db28dcSHong Zhang       row   = i + rstart;
275169db28dcSHong Zhang       ncols = rptr[i+1] - rptr[i];
275269db28dcSHong Zhang       ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
275369db28dcSHong Zhang       vals += ncols;
275469db28dcSHong Zhang       cols += ncols;
275569db28dcSHong Zhang     }
275669db28dcSHong Zhang   }
275769db28dcSHong Zhang   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
275869db28dcSHong Zhang   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
275969db28dcSHong Zhang   ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr);
2760e32f2f54SBarry Smith   if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N);
276169db28dcSHong Zhang   if (reuse == MAT_INITIAL_MATRIX) {
276269db28dcSHong Zhang     PetscContainer container;
276369db28dcSHong Zhang     *matredundant = C;
276469db28dcSHong Zhang     /* create a supporting struct and attach it to C for reuse */
276538f2d2fdSLisandro Dalcin     ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr);
276669db28dcSHong Zhang     ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
276769db28dcSHong Zhang     ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr);
276869db28dcSHong Zhang     ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr);
2769bf0cc555SLisandro Dalcin     ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr);
2770bf0cc555SLisandro Dalcin     ierr = PetscContainerDestroy(&container);CHKERRQ(ierr);
277169db28dcSHong Zhang 
277269db28dcSHong Zhang     redund->nzlocal = nzlocal;
277369db28dcSHong Zhang     redund->nsends  = nsends;
277469db28dcSHong Zhang     redund->nrecvs  = nrecvs;
277569db28dcSHong Zhang     redund->send_rank = send_rank;
27761d79065fSBarry Smith     redund->recv_rank = recv_rank;
277769db28dcSHong Zhang     redund->sbuf_nz = sbuf_nz;
27781d79065fSBarry Smith     redund->rbuf_nz = rbuf_nz;
277969db28dcSHong Zhang     redund->sbuf_j  = sbuf_j;
278069db28dcSHong Zhang     redund->sbuf_a  = sbuf_a;
278169db28dcSHong Zhang     redund->rbuf_j  = rbuf_j;
278269db28dcSHong Zhang     redund->rbuf_a  = rbuf_a;
278369db28dcSHong Zhang 
2784bf0cc555SLisandro Dalcin     redund->Destroy = C->ops->destroy;
278569db28dcSHong Zhang     C->ops->destroy = MatDestroy_MatRedundant;
278669db28dcSHong Zhang   }
278769db28dcSHong Zhang   PetscFunctionReturn(0);
278869db28dcSHong Zhang }
278969db28dcSHong Zhang 
279003bc72f1SMatthew Knepley #undef __FUNCT__
2791c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ"
2792c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2793c91732d9SHong Zhang {
2794c91732d9SHong Zhang   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
2795c91732d9SHong Zhang   PetscErrorCode ierr;
2796c91732d9SHong Zhang   PetscInt       i,*idxb = 0;
2797c91732d9SHong Zhang   PetscScalar    *va,*vb;
2798c91732d9SHong Zhang   Vec            vtmp;
2799c91732d9SHong Zhang 
2800c91732d9SHong Zhang   PetscFunctionBegin;
2801c91732d9SHong Zhang   ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
2802c91732d9SHong Zhang   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2803c91732d9SHong Zhang   if (idx) {
2804192daf7cSBarry Smith     for (i=0; i<A->rmap->n; i++) {
2805d0f46423SBarry Smith       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
2806c91732d9SHong Zhang     }
2807c91732d9SHong Zhang   }
2808c91732d9SHong Zhang 
2809d0f46423SBarry Smith   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
2810c91732d9SHong Zhang   if (idx) {
2811d0f46423SBarry Smith     ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);
2812c91732d9SHong Zhang   }
2813c91732d9SHong Zhang   ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
2814c91732d9SHong Zhang   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
2815c91732d9SHong Zhang 
2816d0f46423SBarry Smith   for (i=0; i<A->rmap->n; i++){
2817c91732d9SHong Zhang     if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
2818c91732d9SHong Zhang       va[i] = vb[i];
2819c91732d9SHong Zhang       if (idx) idx[i] = a->garray[idxb[i]];
2820c91732d9SHong Zhang     }
2821c91732d9SHong Zhang   }
2822c91732d9SHong Zhang 
2823c91732d9SHong Zhang   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2824c91732d9SHong Zhang   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
2825c91732d9SHong Zhang   ierr = PetscFree(idxb);CHKERRQ(ierr);
28266bf464f9SBarry Smith   ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
2827c91732d9SHong Zhang   PetscFunctionReturn(0);
2828c91732d9SHong Zhang }
2829c91732d9SHong Zhang 
2830c91732d9SHong Zhang #undef __FUNCT__
2831c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ"
2832c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2833c87e5d42SMatthew Knepley {
2834c87e5d42SMatthew Knepley   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
2835c87e5d42SMatthew Knepley   PetscErrorCode ierr;
2836c87e5d42SMatthew Knepley   PetscInt       i,*idxb = 0;
2837c87e5d42SMatthew Knepley   PetscScalar    *va,*vb;
2838c87e5d42SMatthew Knepley   Vec            vtmp;
2839c87e5d42SMatthew Knepley 
2840c87e5d42SMatthew Knepley   PetscFunctionBegin;
2841c87e5d42SMatthew Knepley   ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr);
2842c87e5d42SMatthew Knepley   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2843c87e5d42SMatthew Knepley   if (idx) {
2844c87e5d42SMatthew Knepley     for (i=0; i<A->cmap->n; i++) {
2845c87e5d42SMatthew Knepley       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
2846c87e5d42SMatthew Knepley     }
2847c87e5d42SMatthew Knepley   }
2848c87e5d42SMatthew Knepley 
2849c87e5d42SMatthew Knepley   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
2850c87e5d42SMatthew Knepley   if (idx) {
2851c87e5d42SMatthew Knepley     ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);
2852c87e5d42SMatthew Knepley   }
2853c87e5d42SMatthew Knepley   ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
2854c87e5d42SMatthew Knepley   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
2855c87e5d42SMatthew Knepley 
2856c87e5d42SMatthew Knepley   for (i=0; i<A->rmap->n; i++){
2857c87e5d42SMatthew Knepley     if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) {
2858c87e5d42SMatthew Knepley       va[i] = vb[i];
2859c87e5d42SMatthew Knepley       if (idx) idx[i] = a->garray[idxb[i]];
2860c87e5d42SMatthew Knepley     }
2861c87e5d42SMatthew Knepley   }
2862c87e5d42SMatthew Knepley 
2863c87e5d42SMatthew Knepley   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2864c87e5d42SMatthew Knepley   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
2865c87e5d42SMatthew Knepley   ierr = PetscFree(idxb);CHKERRQ(ierr);
28666bf464f9SBarry Smith   ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
2867c87e5d42SMatthew Knepley   PetscFunctionReturn(0);
2868c87e5d42SMatthew Knepley }
2869c87e5d42SMatthew Knepley 
2870c87e5d42SMatthew Knepley #undef __FUNCT__
287103bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ"
287203bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[])
287303bc72f1SMatthew Knepley {
287403bc72f1SMatthew Knepley   Mat_MPIAIJ    *mat    = (Mat_MPIAIJ *) A->data;
2875d0f46423SBarry Smith   PetscInt       n      = A->rmap->n;
2876d0f46423SBarry Smith   PetscInt       cstart = A->cmap->rstart;
287703bc72f1SMatthew Knepley   PetscInt      *cmap   = mat->garray;
287803bc72f1SMatthew Knepley   PetscInt      *diagIdx, *offdiagIdx;
287903bc72f1SMatthew Knepley   Vec            diagV, offdiagV;
288003bc72f1SMatthew Knepley   PetscScalar   *a, *diagA, *offdiagA;
288103bc72f1SMatthew Knepley   PetscInt       r;
288203bc72f1SMatthew Knepley   PetscErrorCode ierr;
288303bc72f1SMatthew Knepley 
288403bc72f1SMatthew Knepley   PetscFunctionBegin;
288503bc72f1SMatthew Knepley   ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr);
2886e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr);
2887e64afeacSLisandro Dalcin   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr);
288803bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->A, diagV,    diagIdx);CHKERRQ(ierr);
288903bc72f1SMatthew Knepley   ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr);
289003bc72f1SMatthew Knepley   ierr = VecGetArray(v,        &a);CHKERRQ(ierr);
289103bc72f1SMatthew Knepley   ierr = VecGetArray(diagV,    &diagA);CHKERRQ(ierr);
289203bc72f1SMatthew Knepley   ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr);
289303bc72f1SMatthew Knepley   for(r = 0; r < n; ++r) {
2894028cd4eaSSatish Balay     if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) {
289503bc72f1SMatthew Knepley       a[r]   = diagA[r];
289603bc72f1SMatthew Knepley       idx[r] = cstart + diagIdx[r];
289703bc72f1SMatthew Knepley     } else {
289803bc72f1SMatthew Knepley       a[r]   = offdiagA[r];
289903bc72f1SMatthew Knepley       idx[r] = cmap[offdiagIdx[r]];
290003bc72f1SMatthew Knepley     }
290103bc72f1SMatthew Knepley   }
290203bc72f1SMatthew Knepley   ierr = VecRestoreArray(v,        &a);CHKERRQ(ierr);
290303bc72f1SMatthew Knepley   ierr = VecRestoreArray(diagV,    &diagA);CHKERRQ(ierr);
290403bc72f1SMatthew Knepley   ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr);
29056bf464f9SBarry Smith   ierr = VecDestroy(&diagV);CHKERRQ(ierr);
29066bf464f9SBarry Smith   ierr = VecDestroy(&offdiagV);CHKERRQ(ierr);
290703bc72f1SMatthew Knepley   ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr);
290803bc72f1SMatthew Knepley   PetscFunctionReturn(0);
290903bc72f1SMatthew Knepley }
291003bc72f1SMatthew Knepley 
29115494a064SHong Zhang #undef __FUNCT__
2912c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ"
2913c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[])
2914c87e5d42SMatthew Knepley {
2915c87e5d42SMatthew Knepley   Mat_MPIAIJ    *mat    = (Mat_MPIAIJ *) A->data;
2916c87e5d42SMatthew Knepley   PetscInt       n      = A->rmap->n;
2917c87e5d42SMatthew Knepley   PetscInt       cstart = A->cmap->rstart;
2918c87e5d42SMatthew Knepley   PetscInt      *cmap   = mat->garray;
2919c87e5d42SMatthew Knepley   PetscInt      *diagIdx, *offdiagIdx;
2920c87e5d42SMatthew Knepley   Vec            diagV, offdiagV;
2921c87e5d42SMatthew Knepley   PetscScalar   *a, *diagA, *offdiagA;
2922c87e5d42SMatthew Knepley   PetscInt       r;
2923c87e5d42SMatthew Knepley   PetscErrorCode ierr;
2924c87e5d42SMatthew Knepley 
2925c87e5d42SMatthew Knepley   PetscFunctionBegin;
2926c87e5d42SMatthew Knepley   ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr);
2927c87e5d42SMatthew Knepley   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr);
2928c87e5d42SMatthew Knepley   ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr);
2929c87e5d42SMatthew Knepley   ierr = MatGetRowMax(mat->A, diagV,    diagIdx);CHKERRQ(ierr);
2930c87e5d42SMatthew Knepley   ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr);
2931c87e5d42SMatthew Knepley   ierr = VecGetArray(v,        &a);CHKERRQ(ierr);
2932c87e5d42SMatthew Knepley   ierr = VecGetArray(diagV,    &diagA);CHKERRQ(ierr);
2933c87e5d42SMatthew Knepley   ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr);
2934c87e5d42SMatthew Knepley   for(r = 0; r < n; ++r) {
2935c87e5d42SMatthew Knepley     if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) {
2936c87e5d42SMatthew Knepley       a[r]   = diagA[r];
2937c87e5d42SMatthew Knepley       idx[r] = cstart + diagIdx[r];
2938c87e5d42SMatthew Knepley     } else {
2939c87e5d42SMatthew Knepley       a[r]   = offdiagA[r];
2940c87e5d42SMatthew Knepley       idx[r] = cmap[offdiagIdx[r]];
2941c87e5d42SMatthew Knepley     }
2942c87e5d42SMatthew Knepley   }
2943c87e5d42SMatthew Knepley   ierr = VecRestoreArray(v,        &a);CHKERRQ(ierr);
2944c87e5d42SMatthew Knepley   ierr = VecRestoreArray(diagV,    &diagA);CHKERRQ(ierr);
2945c87e5d42SMatthew Knepley   ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr);
29466bf464f9SBarry Smith   ierr = VecDestroy(&diagV);CHKERRQ(ierr);
29476bf464f9SBarry Smith   ierr = VecDestroy(&offdiagV);CHKERRQ(ierr);
2948c87e5d42SMatthew Knepley   ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr);
2949c87e5d42SMatthew Knepley   PetscFunctionReturn(0);
2950c87e5d42SMatthew Knepley }
2951c87e5d42SMatthew Knepley 
2952c87e5d42SMatthew Knepley #undef __FUNCT__
2953d1adec66SJed Brown #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIAIJ"
2954d1adec66SJed Brown PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat mat,Mat *newmat)
29555494a064SHong Zhang {
29565494a064SHong Zhang   PetscErrorCode ierr;
2957f6d58c54SBarry Smith   Mat            *dummy;
29585494a064SHong Zhang 
29595494a064SHong Zhang   PetscFunctionBegin;
2960f6d58c54SBarry Smith   ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,&dummy);CHKERRQ(ierr);
2961f6d58c54SBarry Smith   *newmat = *dummy;
2962f6d58c54SBarry Smith   ierr = PetscFree(dummy);CHKERRQ(ierr);
29635494a064SHong Zhang   PetscFunctionReturn(0);
29645494a064SHong Zhang }
29655494a064SHong Zhang 
29667087cfbeSBarry Smith extern PetscErrorCode  MatFDColoringApply_AIJ(Mat,MatFDColoring,Vec,MatStructure*,void*);
2967bbead8a2SBarry Smith 
2968bbead8a2SBarry Smith #undef __FUNCT__
2969bbead8a2SBarry Smith #define __FUNCT__ "MatInvertBlockDiagonal_MPIAIJ"
2970bbead8a2SBarry Smith PetscErrorCode  MatInvertBlockDiagonal_MPIAIJ(Mat A,PetscScalar **values)
2971bbead8a2SBarry Smith {
2972bbead8a2SBarry Smith   Mat_MPIAIJ    *a = (Mat_MPIAIJ*) A->data;
2973bbead8a2SBarry Smith   PetscErrorCode ierr;
2974bbead8a2SBarry Smith 
2975bbead8a2SBarry Smith   PetscFunctionBegin;
2976bbead8a2SBarry Smith   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2977bbead8a2SBarry Smith   PetscFunctionReturn(0);
2978bbead8a2SBarry Smith }
2979bbead8a2SBarry Smith 
2980bbead8a2SBarry Smith 
29818a729477SBarry Smith /* -------------------------------------------------------------------*/
/*
   MatOps_Values - function dispatch table ("virtual function table") for the
   MATMPIAIJ matrix type.  Each slot corresponds to a MatOperation index (the
   numbered comments mark every fifth slot); a 0 entry means the operation is
   not implemented for this type.  The slot order must match struct _MatOps
   in the private matrix implementation header.
*/
static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ,
       MatGetRow_MPIAIJ,
       MatRestoreRow_MPIAIJ,
       MatMult_MPIAIJ,
/* 4*/ MatMultAdd_MPIAIJ,
       MatMultTranspose_MPIAIJ,
       MatMultTransposeAdd_MPIAIJ,
#ifdef PETSC_HAVE_PBGL
       MatSolve_MPIAIJ,
#else
       0,
#endif
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatSOR_MPIAIJ,
       MatTranspose_MPIAIJ,
/*15*/ MatGetInfo_MPIAIJ,
       MatEqual_MPIAIJ,
       MatGetDiagonal_MPIAIJ,
       MatDiagonalScale_MPIAIJ,
       MatNorm_MPIAIJ,
/*20*/ MatAssemblyBegin_MPIAIJ,
       MatAssemblyEnd_MPIAIJ,
       MatSetOption_MPIAIJ,
       MatZeroEntries_MPIAIJ,
/*24*/ MatZeroRows_MPIAIJ,
       0,
#ifdef PETSC_HAVE_PBGL
       0,
#else
       0,
#endif
       0,
       0,
/*29*/ MatSetUpPreallocation_MPIAIJ,
#ifdef PETSC_HAVE_PBGL
       0,
#else
       0,
#endif
       0,
       0,
       0,
/*34*/ MatDuplicate_MPIAIJ,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_MPIAIJ,
       MatGetSubMatrices_MPIAIJ,
       MatIncreaseOverlap_MPIAIJ,
       MatGetValues_MPIAIJ,
       MatCopy_MPIAIJ,
/*44*/ MatGetRowMax_MPIAIJ,
       MatScale_MPIAIJ,
       0,
       0,
       MatZeroRowsColumns_MPIAIJ,
/*49*/ MatSetBlockSize_MPIAIJ,
       0,
       0,
       0,
       0,
/*54*/ MatFDColoringCreate_MPIAIJ,
       0,
       MatSetUnfactored_MPIAIJ,
       MatPermute_MPIAIJ,
       0,
/*59*/ MatGetSubMatrix_MPIAIJ,
       MatDestroy_MPIAIJ,
       MatView_MPIAIJ,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ MatGetRowMaxAbs_MPIAIJ,
       MatGetRowMinAbs_MPIAIJ,
       0,
       MatSetColoring_MPIAIJ,
#if defined(PETSC_HAVE_ADIC)
       MatSetValuesAdic_MPIAIJ,
#else
       0,
#endif
       MatSetValuesAdifor_MPIAIJ,
/*75*/ MatFDColoringApply_AIJ,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
/*83*/ MatLoad_MPIAIJ,
       0,
       0,
       0,
       0,
       0,
/*89*/ MatMatMult_MPIAIJ_MPIAIJ,
       MatMatMultSymbolic_MPIAIJ_MPIAIJ,
       MatMatMultNumeric_MPIAIJ_MPIAIJ,
       MatPtAP_Basic,
       MatPtAPSymbolic_MPIAIJ,
/*94*/ MatPtAPNumeric_MPIAIJ,
       0,
       0,
       0,
       0,
/*99*/ 0,
       MatPtAPSymbolic_MPIAIJ_MPIAIJ,
       MatPtAPNumeric_MPIAIJ_MPIAIJ,
       MatConjugate_MPIAIJ,
       0,
/*104*/MatSetValuesRow_MPIAIJ,
       MatRealPart_MPIAIJ,
       MatImaginaryPart_MPIAIJ,
       0,
       0,
/*109*/0,
       MatGetRedundantMatrix_MPIAIJ,
       MatGetRowMin_MPIAIJ,
       0,
       0,
/*114*/MatGetSeqNonzeroStructure_MPIAIJ,
       0,
       0,
       0,
       0,
/*119*/0,
       0,
       0,
       0,
       MatGetMultiProcBlock_MPIAIJ,
/*124*/MatFindNonZeroRows_MPIAIJ,
       MatGetColumnNorms_MPIAIJ,
       MatInvertBlockDiagonal_MPIAIJ,
       0,
       MatGetSubMatricesParallel_MPIAIJ,
/*129*/0
};
312936ce4990SBarry Smith 
31302e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/
31312e8a6d31SBarry Smith 
3132fb2e594dSBarry Smith EXTERN_C_BEGIN
31334a2ae208SSatish Balay #undef __FUNCT__
31344a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ"
31357087cfbeSBarry Smith PetscErrorCode  MatStoreValues_MPIAIJ(Mat mat)
31362e8a6d31SBarry Smith {
31372e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
3138dfbe8321SBarry Smith   PetscErrorCode ierr;
31392e8a6d31SBarry Smith 
31402e8a6d31SBarry Smith   PetscFunctionBegin;
31412e8a6d31SBarry Smith   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
31422e8a6d31SBarry Smith   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
31432e8a6d31SBarry Smith   PetscFunctionReturn(0);
31442e8a6d31SBarry Smith }
3145fb2e594dSBarry Smith EXTERN_C_END
31462e8a6d31SBarry Smith 
3147fb2e594dSBarry Smith EXTERN_C_BEGIN
31484a2ae208SSatish Balay #undef __FUNCT__
31494a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ"
31507087cfbeSBarry Smith PetscErrorCode  MatRetrieveValues_MPIAIJ(Mat mat)
31512e8a6d31SBarry Smith {
31522e8a6d31SBarry Smith   Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
3153dfbe8321SBarry Smith   PetscErrorCode ierr;
31542e8a6d31SBarry Smith 
31552e8a6d31SBarry Smith   PetscFunctionBegin;
31562e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
31572e8a6d31SBarry Smith   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
31582e8a6d31SBarry Smith   PetscFunctionReturn(0);
31592e8a6d31SBarry Smith }
3160fb2e594dSBarry Smith EXTERN_C_END
31618a729477SBarry Smith 
316227508adbSBarry Smith EXTERN_C_BEGIN
31634a2ae208SSatish Balay #undef __FUNCT__
3164a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ"
31657087cfbeSBarry Smith PetscErrorCode  MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3166a23d5eceSKris Buschelman {
3167a23d5eceSKris Buschelman   Mat_MPIAIJ     *b;
3168dfbe8321SBarry Smith   PetscErrorCode ierr;
3169b1d57f15SBarry Smith   PetscInt       i;
3170a23d5eceSKris Buschelman 
3171a23d5eceSKris Buschelman   PetscFunctionBegin;
3172a23d5eceSKris Buschelman   if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
3173a23d5eceSKris Buschelman   if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
3174e32f2f54SBarry Smith   if (d_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
3175e32f2f54SBarry Smith   if (o_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);
3176899cda47SBarry Smith 
317726283091SBarry Smith   ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr);
317826283091SBarry Smith   ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr);
317926283091SBarry Smith   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
318026283091SBarry Smith   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
3181a23d5eceSKris Buschelman   if (d_nnz) {
3182d0f46423SBarry Smith     for (i=0; i<B->rmap->n; i++) {
3183e32f2f54SBarry Smith       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
3184a23d5eceSKris Buschelman     }
3185a23d5eceSKris Buschelman   }
3186a23d5eceSKris Buschelman   if (o_nnz) {
3187d0f46423SBarry Smith     for (i=0; i<B->rmap->n; i++) {
3188e32f2f54SBarry Smith       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
3189a23d5eceSKris Buschelman     }
3190a23d5eceSKris Buschelman   }
3191a23d5eceSKris Buschelman   b = (Mat_MPIAIJ*)B->data;
3192899cda47SBarry Smith 
3193526dfc15SBarry Smith   if (!B->preallocated) {
3194899cda47SBarry Smith     /* Explicitly create 2 MATSEQAIJ matrices. */
3195899cda47SBarry Smith     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3196d0f46423SBarry Smith     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3197899cda47SBarry Smith     ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr);
3198899cda47SBarry Smith     ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
3199899cda47SBarry Smith     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3200d0f46423SBarry Smith     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3201899cda47SBarry Smith     ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr);
3202899cda47SBarry Smith     ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);
3203526dfc15SBarry Smith   }
3204899cda47SBarry Smith 
3205c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr);
3206c60e587dSKris Buschelman   ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr);
3207526dfc15SBarry Smith   B->preallocated = PETSC_TRUE;
3208a23d5eceSKris Buschelman   PetscFunctionReturn(0);
3209a23d5eceSKris Buschelman }
3210a23d5eceSKris Buschelman EXTERN_C_END
3211a23d5eceSKris Buschelman 
#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MPIAIJ"
/*
   MatDuplicate_MPIAIJ - Creates a new MPIAIJ matrix with the same layout and
   nonzero structure as matin; cpvalues controls whether the numerical values
   of the two sequential blocks A and B are copied (see MatDuplicate()).
*/
PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
{
  Mat            mat;
  Mat_MPIAIJ     *a,*oldmat = (Mat_MPIAIJ*)matin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  *newmat       = 0;
  /* Create an empty matrix of the same type and sizes on the same communicator */
  ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr);
  ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
  /* Copy the whole function table so any type-specific overrides carry over */
  ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  a    = (Mat_MPIAIJ*)mat->data;

  mat->factortype    = matin->factortype;
  mat->rmap->bs      = matin->rmap->bs;
  mat->assembled    = PETSC_TRUE;          /* the duplicate is born assembled */
  mat->insertmode   = NOT_SET_VALUES;
  mat->preallocated = PETSC_TRUE;

  a->size           = oldmat->size;
  a->rank           = oldmat->rank;
  a->donotstash     = oldmat->donotstash;
  a->roworiented    = oldmat->roworiented;
  /* MatGetRow() work space is created lazily; start with none active */
  a->rowindices     = 0;
  a->rowvalues      = 0;
  a->getrowactive   = PETSC_FALSE;

  /* Share (reference-count) the row/column layouts rather than copying them */
  ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);

  /* Deep-copy the global-to-local column map, whose representation depends
     on whether PETSc was configured with ctable support */
  if (oldmat->colmap) {
#if defined (PETSC_USE_CTABLE)
    ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
#else
    ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  } else a->colmap = 0;
  /* Deep-copy garray: global column numbers of the off-diagonal block B */
  if (oldmat->garray) {
    PetscInt len;
    len  = oldmat->B->cmap->n;
    ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
    if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); }
  } else a->garray = 0;

  /* Duplicate the communication machinery (local work vector and scatter)
     and the two sequential blocks; cpvalues is forwarded to the blocks */
  ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
  ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
  *newmat = mat;
  PetscFunctionReturn(0);
}
3274416022c9SBarry Smith 
32751a4ee126SBarry Smith 
32761a4ee126SBarry Smith 
32774a2ae208SSatish Balay #undef __FUNCT__
32785bba2384SShri Abhyankar #define __FUNCT__ "MatLoad_MPIAIJ"
3279112444f4SShri Abhyankar PetscErrorCode MatLoad_MPIAIJ(Mat newMat, PetscViewer viewer)
32808fb81238SShri Abhyankar {
32818fb81238SShri Abhyankar   PetscScalar    *vals,*svals;
32828fb81238SShri Abhyankar   MPI_Comm       comm = ((PetscObject)viewer)->comm;
32838fb81238SShri Abhyankar   PetscErrorCode ierr;
32841a4ee126SBarry Smith   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
32858fb81238SShri Abhyankar   PetscInt       i,nz,j,rstart,rend,mmax,maxnz = 0,grows,gcols;
32868fb81238SShri Abhyankar   PetscInt       header[4],*rowlengths = 0,M,N,m,*cols;
32878fb81238SShri Abhyankar   PetscInt       *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols;
32888fb81238SShri Abhyankar   PetscInt       cend,cstart,n,*rowners,sizesset=1;
32898fb81238SShri Abhyankar   int            fd;
32908fb81238SShri Abhyankar 
32918fb81238SShri Abhyankar   PetscFunctionBegin;
32928fb81238SShri Abhyankar   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
32938fb81238SShri Abhyankar   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
32948fb81238SShri Abhyankar   if (!rank) {
32958fb81238SShri Abhyankar     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
32968fb81238SShri Abhyankar     ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
32978fb81238SShri Abhyankar     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
32988fb81238SShri Abhyankar   }
32998fb81238SShri Abhyankar 
33008fb81238SShri Abhyankar   if (newMat->rmap->n < 0 && newMat->rmap->N < 0 && newMat->cmap->n < 0 && newMat->cmap->N < 0) sizesset = 0;
33018fb81238SShri Abhyankar 
33028fb81238SShri Abhyankar   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
33038fb81238SShri Abhyankar   M = header[1]; N = header[2];
33048fb81238SShri Abhyankar   /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
33058fb81238SShri Abhyankar   if (sizesset && newMat->rmap->N < 0) newMat->rmap->N = M;
33068fb81238SShri Abhyankar   if (sizesset && newMat->cmap->N < 0) newMat->cmap->N = N;
33078fb81238SShri Abhyankar 
33088fb81238SShri Abhyankar   /* If global sizes are set, check if they are consistent with that given in the file */
33098fb81238SShri Abhyankar   if (sizesset) {
33108fb81238SShri Abhyankar     ierr = MatGetSize(newMat,&grows,&gcols);CHKERRQ(ierr);
33118fb81238SShri Abhyankar   }
3312abd38a8fSBarry Smith   if (sizesset && newMat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%d) and input matrix has (%d)",M,grows);
3313abd38a8fSBarry Smith   if (sizesset && newMat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols:Matrix in file has (%d) and input matrix has (%d)",N,gcols);
33148fb81238SShri Abhyankar 
33158fb81238SShri Abhyankar   /* determine ownership of all rows */
33168fb81238SShri Abhyankar   if (newMat->rmap->n < 0 ) m    = M/size + ((M % size) > rank); /* PETSC_DECIDE */
33174683f7a4SShri Abhyankar   else m = newMat->rmap->n; /* Set by user */
33188fb81238SShri Abhyankar 
33198fb81238SShri Abhyankar   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
33208fb81238SShri Abhyankar   ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
33218fb81238SShri Abhyankar 
33228fb81238SShri Abhyankar   /* First process needs enough room for process with most rows */
33238fb81238SShri Abhyankar   if (!rank) {
33248fb81238SShri Abhyankar     mmax       = rowners[1];
33258fb81238SShri Abhyankar     for (i=2; i<size; i++) {
33268fb81238SShri Abhyankar       mmax = PetscMax(mmax,rowners[i]);
33278fb81238SShri Abhyankar     }
33288fb81238SShri Abhyankar   } else mmax = m;
33298fb81238SShri Abhyankar 
33308fb81238SShri Abhyankar   rowners[0] = 0;
33318fb81238SShri Abhyankar   for (i=2; i<=size; i++) {
33328fb81238SShri Abhyankar     rowners[i] += rowners[i-1];
33338fb81238SShri Abhyankar   }
33348fb81238SShri Abhyankar   rstart = rowners[rank];
33358fb81238SShri Abhyankar   rend   = rowners[rank+1];
33368fb81238SShri Abhyankar 
33378fb81238SShri Abhyankar   /* distribute row lengths to all processors */
33388fb81238SShri Abhyankar   ierr    = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr);
33398fb81238SShri Abhyankar   if (!rank) {
33408fb81238SShri Abhyankar     ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr);
33418fb81238SShri Abhyankar     ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
33428fb81238SShri Abhyankar     ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
33438fb81238SShri Abhyankar     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
33448fb81238SShri Abhyankar     for (j=0; j<m; j++) {
33458fb81238SShri Abhyankar       procsnz[0] += ourlens[j];
33468fb81238SShri Abhyankar     }
33478fb81238SShri Abhyankar     for (i=1; i<size; i++) {
33488fb81238SShri Abhyankar       ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr);
33498fb81238SShri Abhyankar       /* calculate the number of nonzeros on each processor */
33508fb81238SShri Abhyankar       for (j=0; j<rowners[i+1]-rowners[i]; j++) {
33518fb81238SShri Abhyankar         procsnz[i] += rowlengths[j];
33528fb81238SShri Abhyankar       }
33531a4ee126SBarry Smith       ierr = MPILong_Send(rowlengths,rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
33548fb81238SShri Abhyankar     }
33558fb81238SShri Abhyankar     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
33568fb81238SShri Abhyankar   } else {
33571a4ee126SBarry Smith     ierr = MPILong_Recv(ourlens,m,MPIU_INT,0,tag,comm);CHKERRQ(ierr);
33588fb81238SShri Abhyankar   }
33598fb81238SShri Abhyankar 
33608fb81238SShri Abhyankar   if (!rank) {
33618fb81238SShri Abhyankar     /* determine max buffer needed and allocate it */
33628fb81238SShri Abhyankar     maxnz = 0;
33638fb81238SShri Abhyankar     for (i=0; i<size; i++) {
33648fb81238SShri Abhyankar       maxnz = PetscMax(maxnz,procsnz[i]);
33658fb81238SShri Abhyankar     }
33668fb81238SShri Abhyankar     ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);
33678fb81238SShri Abhyankar 
33688fb81238SShri Abhyankar     /* read in my part of the matrix column indices  */
33698fb81238SShri Abhyankar     nz   = procsnz[0];
33708fb81238SShri Abhyankar     ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
33718fb81238SShri Abhyankar     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
33728fb81238SShri Abhyankar 
33738fb81238SShri Abhyankar     /* read in every one elses and ship off */
33748fb81238SShri Abhyankar     for (i=1; i<size; i++) {
33758fb81238SShri Abhyankar       nz     = procsnz[i];
33768fb81238SShri Abhyankar       ierr   = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
33771a4ee126SBarry Smith       ierr   = MPILong_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
33788fb81238SShri Abhyankar     }
33798fb81238SShri Abhyankar     ierr = PetscFree(cols);CHKERRQ(ierr);
33808fb81238SShri Abhyankar   } else {
33818fb81238SShri Abhyankar     /* determine buffer space needed for message */
33828fb81238SShri Abhyankar     nz = 0;
33838fb81238SShri Abhyankar     for (i=0; i<m; i++) {
33848fb81238SShri Abhyankar       nz += ourlens[i];
33858fb81238SShri Abhyankar     }
33868fb81238SShri Abhyankar     ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
33878fb81238SShri Abhyankar 
33888fb81238SShri Abhyankar     /* receive message of column indices*/
33891a4ee126SBarry Smith     ierr = MPILong_Recv(mycols,nz,MPIU_INT,0,tag,comm);CHKERRQ(ierr);
33908fb81238SShri Abhyankar   }
33918fb81238SShri Abhyankar 
33928fb81238SShri Abhyankar   /* determine column ownership if matrix is not square */
33938fb81238SShri Abhyankar   if (N != M) {
33948fb81238SShri Abhyankar     if (newMat->cmap->n < 0) n      = N/size + ((N % size) > rank);
33958fb81238SShri Abhyankar     else n = newMat->cmap->n;
33968fb81238SShri Abhyankar     ierr   = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
33978fb81238SShri Abhyankar     cstart = cend - n;
33988fb81238SShri Abhyankar   } else {
33998fb81238SShri Abhyankar     cstart = rstart;
34008fb81238SShri Abhyankar     cend   = rend;
34018fb81238SShri Abhyankar     n      = cend - cstart;
34028fb81238SShri Abhyankar   }
34038fb81238SShri Abhyankar 
34048fb81238SShri Abhyankar   /* loop over local rows, determining number of off diagonal entries */
34058fb81238SShri Abhyankar   ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr);
34068fb81238SShri Abhyankar   jj = 0;
34078fb81238SShri Abhyankar   for (i=0; i<m; i++) {
34088fb81238SShri Abhyankar     for (j=0; j<ourlens[i]; j++) {
34098fb81238SShri Abhyankar       if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
34108fb81238SShri Abhyankar       jj++;
34118fb81238SShri Abhyankar     }
34128fb81238SShri Abhyankar   }
34138fb81238SShri Abhyankar 
34148fb81238SShri Abhyankar   for (i=0; i<m; i++) {
34158fb81238SShri Abhyankar     ourlens[i] -= offlens[i];
34168fb81238SShri Abhyankar   }
34178fb81238SShri Abhyankar   if (!sizesset) {
34188fb81238SShri Abhyankar     ierr = MatSetSizes(newMat,m,n,M,N);CHKERRQ(ierr);
34198fb81238SShri Abhyankar   }
34208fb81238SShri Abhyankar   ierr = MatMPIAIJSetPreallocation(newMat,0,ourlens,0,offlens);CHKERRQ(ierr);
34218fb81238SShri Abhyankar 
34228fb81238SShri Abhyankar   for (i=0; i<m; i++) {
34238fb81238SShri Abhyankar     ourlens[i] += offlens[i];
34248fb81238SShri Abhyankar   }
34258fb81238SShri Abhyankar 
34268fb81238SShri Abhyankar   if (!rank) {
34278fb81238SShri Abhyankar     ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
34288fb81238SShri Abhyankar 
34298fb81238SShri Abhyankar     /* read in my part of the matrix numerical values  */
34308fb81238SShri Abhyankar     nz   = procsnz[0];
34318fb81238SShri Abhyankar     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
34328fb81238SShri Abhyankar 
34338fb81238SShri Abhyankar     /* insert into matrix */
34348fb81238SShri Abhyankar     jj      = rstart;
34358fb81238SShri Abhyankar     smycols = mycols;
34368fb81238SShri Abhyankar     svals   = vals;
34378fb81238SShri Abhyankar     for (i=0; i<m; i++) {
34388fb81238SShri Abhyankar       ierr = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
34398fb81238SShri Abhyankar       smycols += ourlens[i];
34408fb81238SShri Abhyankar       svals   += ourlens[i];
34418fb81238SShri Abhyankar       jj++;
34428fb81238SShri Abhyankar     }
34438fb81238SShri Abhyankar 
34448fb81238SShri Abhyankar     /* read in other processors and ship out */
34458fb81238SShri Abhyankar     for (i=1; i<size; i++) {
34468fb81238SShri Abhyankar       nz     = procsnz[i];
34478fb81238SShri Abhyankar       ierr   = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
34481a4ee126SBarry Smith       ierr   = MPILong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr);
34498fb81238SShri Abhyankar     }
34508fb81238SShri Abhyankar     ierr = PetscFree(procsnz);CHKERRQ(ierr);
34518fb81238SShri Abhyankar   } else {
34528fb81238SShri Abhyankar     /* receive numeric values */
34538fb81238SShri Abhyankar     ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);
34548fb81238SShri Abhyankar 
34558fb81238SShri Abhyankar     /* receive message of values*/
34561a4ee126SBarry Smith     ierr   = MPILong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr);
34578fb81238SShri Abhyankar 
34588fb81238SShri Abhyankar     /* insert into matrix */
34598fb81238SShri Abhyankar     jj      = rstart;
34608fb81238SShri Abhyankar     smycols = mycols;
34618fb81238SShri Abhyankar     svals   = vals;
34628fb81238SShri Abhyankar     for (i=0; i<m; i++) {
34638fb81238SShri Abhyankar       ierr     = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
34648fb81238SShri Abhyankar       smycols += ourlens[i];
34658fb81238SShri Abhyankar       svals   += ourlens[i];
34668fb81238SShri Abhyankar       jj++;
34678fb81238SShri Abhyankar     }
34688fb81238SShri Abhyankar   }
34698fb81238SShri Abhyankar   ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr);
34708fb81238SShri Abhyankar   ierr = PetscFree(vals);CHKERRQ(ierr);
34718fb81238SShri Abhyankar   ierr = PetscFree(mycols);CHKERRQ(ierr);
34728fb81238SShri Abhyankar   ierr = PetscFree(rowners);CHKERRQ(ierr);
34738fb81238SShri Abhyankar 
34748fb81238SShri Abhyankar   ierr = MatAssemblyBegin(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
34758fb81238SShri Abhyankar   ierr = MatAssemblyEnd(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
34768fb81238SShri Abhyankar   PetscFunctionReturn(0);
34778fb81238SShri Abhyankar }
34788fb81238SShri Abhyankar 
34798fb81238SShri Abhyankar #undef __FUNCT__
34804a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ"
34814aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
34824aa3045dSJed Brown {
34834aa3045dSJed Brown   PetscErrorCode ierr;
34844aa3045dSJed Brown   IS             iscol_local;
34854aa3045dSJed Brown   PetscInt       csize;
34864aa3045dSJed Brown 
34874aa3045dSJed Brown   PetscFunctionBegin;
34884aa3045dSJed Brown   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
3489b79d0421SJed Brown   if (call == MAT_REUSE_MATRIX) {
3490b79d0421SJed Brown     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
3491e32f2f54SBarry Smith     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
3492b79d0421SJed Brown   } else {
34934aa3045dSJed Brown     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
3494b79d0421SJed Brown   }
34954aa3045dSJed Brown   ierr = MatGetSubMatrix_MPIAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
3496b79d0421SJed Brown   if (call == MAT_INITIAL_MATRIX) {
3497b79d0421SJed Brown     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
34986bf464f9SBarry Smith     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
3499b79d0421SJed Brown   }
35004aa3045dSJed Brown   PetscFunctionReturn(0);
35014aa3045dSJed Brown }
35024aa3045dSJed Brown 
35034aa3045dSJed Brown #undef __FUNCT__
35044aa3045dSJed Brown #define __FUNCT__ "MatGetSubMatrix_MPIAIJ_Private"
3505a0ff6018SBarry Smith /*
350629da9460SBarry Smith     Not great since it makes two copies of the submatrix, first an SeqAIJ
350729da9460SBarry Smith   in local and then by concatenating the local matrices the end result.
350829da9460SBarry Smith   Writing it directly would be much like MatGetSubMatrices_MPIAIJ()
35094aa3045dSJed Brown 
35104aa3045dSJed Brown   Note: This requires a sequential iscol with all indices.
3511a0ff6018SBarry Smith */
35124aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
3513a0ff6018SBarry Smith {
3514dfbe8321SBarry Smith   PetscErrorCode ierr;
351532dcc486SBarry Smith   PetscMPIInt    rank,size;
3516b1d57f15SBarry Smith   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j;
3517b1d57f15SBarry Smith   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal;
3518fee21e36SBarry Smith   Mat            *local,M,Mreuse;
3519a77337e4SBarry Smith   MatScalar      *vwork,*aa;
35207adad957SLisandro Dalcin   MPI_Comm       comm = ((PetscObject)mat)->comm;
352100e6dbe6SBarry Smith   Mat_SeqAIJ     *aij;
35227e2c5f70SBarry Smith 
3523a0ff6018SBarry Smith 
3524a0ff6018SBarry Smith   PetscFunctionBegin;
35251dab6e02SBarry Smith   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
35261dab6e02SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
352700e6dbe6SBarry Smith 
3528fee21e36SBarry Smith   if (call ==  MAT_REUSE_MATRIX) {
3529fee21e36SBarry Smith     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr);
3530e32f2f54SBarry Smith     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
3531fee21e36SBarry Smith     local = &Mreuse;
3532fee21e36SBarry Smith     ierr  = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr);
3533fee21e36SBarry Smith   } else {
3534a0ff6018SBarry Smith     ierr   = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr);
3535fee21e36SBarry Smith     Mreuse = *local;
3536606d414cSSatish Balay     ierr   = PetscFree(local);CHKERRQ(ierr);
3537fee21e36SBarry Smith   }
3538a0ff6018SBarry Smith 
3539a0ff6018SBarry Smith   /*
3540a0ff6018SBarry Smith       m - number of local rows
3541a0ff6018SBarry Smith       n - number of columns (same on all processors)
3542a0ff6018SBarry Smith       rstart - first row in new global matrix generated
3543a0ff6018SBarry Smith   */
3544fee21e36SBarry Smith   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
3545a0ff6018SBarry Smith   if (call == MAT_INITIAL_MATRIX) {
3546fee21e36SBarry Smith     aij = (Mat_SeqAIJ*)(Mreuse)->data;
354700e6dbe6SBarry Smith     ii  = aij->i;
354800e6dbe6SBarry Smith     jj  = aij->j;
354900e6dbe6SBarry Smith 
3550a0ff6018SBarry Smith     /*
355100e6dbe6SBarry Smith         Determine the number of non-zeros in the diagonal and off-diagonal
355200e6dbe6SBarry Smith         portions of the matrix in order to do correct preallocation
3553a0ff6018SBarry Smith     */
355400e6dbe6SBarry Smith 
355500e6dbe6SBarry Smith     /* first get start and end of "diagonal" columns */
35566a6a5d1dSBarry Smith     if (csize == PETSC_DECIDE) {
3557ab50ec6bSBarry Smith       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
3558ab50ec6bSBarry Smith       if (mglobal == n) { /* square matrix */
3559e2c4fddaSBarry Smith 	nlocal = m;
35606a6a5d1dSBarry Smith       } else {
3561ab50ec6bSBarry Smith         nlocal = n/size + ((n % size) > rank);
3562ab50ec6bSBarry Smith       }
3563ab50ec6bSBarry Smith     } else {
35646a6a5d1dSBarry Smith       nlocal = csize;
35656a6a5d1dSBarry Smith     }
3566b1d57f15SBarry Smith     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
356700e6dbe6SBarry Smith     rstart = rend - nlocal;
356865e19b50SBarry Smith     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
356900e6dbe6SBarry Smith 
357000e6dbe6SBarry Smith     /* next, compute all the lengths */
3571b1d57f15SBarry Smith     ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
357200e6dbe6SBarry Smith     olens = dlens + m;
357300e6dbe6SBarry Smith     for (i=0; i<m; i++) {
357400e6dbe6SBarry Smith       jend = ii[i+1] - ii[i];
357500e6dbe6SBarry Smith       olen = 0;
357600e6dbe6SBarry Smith       dlen = 0;
357700e6dbe6SBarry Smith       for (j=0; j<jend; j++) {
357800e6dbe6SBarry Smith         if (*jj < rstart || *jj >= rend) olen++;
357900e6dbe6SBarry Smith         else dlen++;
358000e6dbe6SBarry Smith         jj++;
358100e6dbe6SBarry Smith       }
358200e6dbe6SBarry Smith       olens[i] = olen;
358300e6dbe6SBarry Smith       dlens[i] = dlen;
358400e6dbe6SBarry Smith     }
3585f69a0ea3SMatthew Knepley     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
3586f69a0ea3SMatthew Knepley     ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr);
35877adad957SLisandro Dalcin     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
3588e2d9671bSKris Buschelman     ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr);
3589606d414cSSatish Balay     ierr = PetscFree(dlens);CHKERRQ(ierr);
3590a0ff6018SBarry Smith   } else {
3591b1d57f15SBarry Smith     PetscInt ml,nl;
3592a0ff6018SBarry Smith 
3593a0ff6018SBarry Smith     M = *newmat;
3594a0ff6018SBarry Smith     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
3595e32f2f54SBarry Smith     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
3596a0ff6018SBarry Smith     ierr = MatZeroEntries(M);CHKERRQ(ierr);
3597c48de900SBarry Smith     /*
3598c48de900SBarry Smith          The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
3599c48de900SBarry Smith        rather than the slower MatSetValues().
3600c48de900SBarry Smith     */
3601c48de900SBarry Smith     M->was_assembled = PETSC_TRUE;
3602c48de900SBarry Smith     M->assembled     = PETSC_FALSE;
3603a0ff6018SBarry Smith   }
3604a0ff6018SBarry Smith   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
3605fee21e36SBarry Smith   aij = (Mat_SeqAIJ*)(Mreuse)->data;
360600e6dbe6SBarry Smith   ii  = aij->i;
360700e6dbe6SBarry Smith   jj  = aij->j;
360800e6dbe6SBarry Smith   aa  = aij->a;
3609a0ff6018SBarry Smith   for (i=0; i<m; i++) {
3610a0ff6018SBarry Smith     row   = rstart + i;
361100e6dbe6SBarry Smith     nz    = ii[i+1] - ii[i];
361200e6dbe6SBarry Smith     cwork = jj;     jj += nz;
361300e6dbe6SBarry Smith     vwork = aa;     aa += nz;
36148c638d02SBarry Smith     ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
3615a0ff6018SBarry Smith   }
3616a0ff6018SBarry Smith 
3617a0ff6018SBarry Smith   ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3618a0ff6018SBarry Smith   ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3619a0ff6018SBarry Smith   *newmat = M;
3620fee21e36SBarry Smith 
3621fee21e36SBarry Smith   /* save submatrix used in processor for next request */
3622fee21e36SBarry Smith   if (call ==  MAT_INITIAL_MATRIX) {
3623fee21e36SBarry Smith     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
3624bf0cc555SLisandro Dalcin     ierr = MatDestroy(&Mreuse);CHKERRQ(ierr);
3625fee21e36SBarry Smith   }
3626fee21e36SBarry Smith 
3627a0ff6018SBarry Smith   PetscFunctionReturn(0);
3628a0ff6018SBarry Smith }
3629273d9f13SBarry Smith 
3630e2e86b8fSSatish Balay EXTERN_C_BEGIN
36314a2ae208SSatish Balay #undef __FUNCT__
3632ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ"
36337087cfbeSBarry Smith PetscErrorCode  MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[])
3634ccd8e176SBarry Smith {
3635899cda47SBarry Smith   PetscInt       m,cstart, cend,j,nnz,i,d;
3636899cda47SBarry Smith   PetscInt       *d_nnz,*o_nnz,nnz_max = 0,rstart,ii;
3637ccd8e176SBarry Smith   const PetscInt *JJ;
3638ccd8e176SBarry Smith   PetscScalar    *values;
3639ccd8e176SBarry Smith   PetscErrorCode ierr;
3640ccd8e176SBarry Smith 
3641ccd8e176SBarry Smith   PetscFunctionBegin;
3642e32f2f54SBarry Smith   if (Ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]);
3643899cda47SBarry Smith 
364426283091SBarry Smith   ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr);
364526283091SBarry Smith   ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr);
364626283091SBarry Smith   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
364726283091SBarry Smith   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
3648d0f46423SBarry Smith   m      = B->rmap->n;
3649d0f46423SBarry Smith   cstart = B->cmap->rstart;
3650d0f46423SBarry Smith   cend   = B->cmap->rend;
3651d0f46423SBarry Smith   rstart = B->rmap->rstart;
3652899cda47SBarry Smith 
36531d79065fSBarry Smith   ierr  = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr);
3654ccd8e176SBarry Smith 
3655ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING)
3656ecc77c7aSBarry Smith   for (i=0; i<m; i++) {
3657ecc77c7aSBarry Smith     nnz     = Ii[i+1]- Ii[i];
3658ecc77c7aSBarry Smith     JJ      = J + Ii[i];
3659e32f2f54SBarry Smith     if (nnz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz);
3660ecc77c7aSBarry Smith     if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j);
3661d0f46423SBarry Smith     if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N);
3662ecc77c7aSBarry Smith   }
3663ecc77c7aSBarry Smith #endif
3664ecc77c7aSBarry Smith 
3665ccd8e176SBarry Smith   for (i=0; i<m; i++) {
3666b7940d39SSatish Balay     nnz     = Ii[i+1]- Ii[i];
3667b7940d39SSatish Balay     JJ      = J + Ii[i];
3668ccd8e176SBarry Smith     nnz_max = PetscMax(nnz_max,nnz);
3669ccd8e176SBarry Smith     d       = 0;
36700daa03b5SJed Brown     for (j=0; j<nnz; j++) {
36710daa03b5SJed Brown       if (cstart <= JJ[j] && JJ[j] < cend) d++;
3672ccd8e176SBarry Smith     }
3673ccd8e176SBarry Smith     d_nnz[i] = d;
3674ccd8e176SBarry Smith     o_nnz[i] = nnz - d;
3675ccd8e176SBarry Smith   }
3676ccd8e176SBarry Smith   ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
36771d79065fSBarry Smith   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
3678ccd8e176SBarry Smith 
3679ccd8e176SBarry Smith   if (v) values = (PetscScalar*)v;
3680ccd8e176SBarry Smith   else {
3681ccd8e176SBarry Smith     ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr);
3682ccd8e176SBarry Smith     ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr);
3683ccd8e176SBarry Smith   }
3684ccd8e176SBarry Smith 
3685ccd8e176SBarry Smith   for (i=0; i<m; i++) {
3686ccd8e176SBarry Smith     ii   = i + rstart;
3687b7940d39SSatish Balay     nnz  = Ii[i+1]- Ii[i];
3688b7940d39SSatish Balay     ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr);
3689ccd8e176SBarry Smith   }
3690ccd8e176SBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3691ccd8e176SBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3692ccd8e176SBarry Smith 
3693ccd8e176SBarry Smith   if (!v) {
3694ccd8e176SBarry Smith     ierr = PetscFree(values);CHKERRQ(ierr);
3695ccd8e176SBarry Smith   }
3696ccd8e176SBarry Smith   PetscFunctionReturn(0);
3697ccd8e176SBarry Smith }
3698e2e86b8fSSatish Balay EXTERN_C_END
3699ccd8e176SBarry Smith 
3700ccd8e176SBarry Smith #undef __FUNCT__
3701ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR"
37021eea217eSSatish Balay /*@
3703ccd8e176SBarry Smith    MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format
3704ccd8e176SBarry Smith    (the default parallel PETSc format).
3705ccd8e176SBarry Smith 
3706ccd8e176SBarry Smith    Collective on MPI_Comm
3707ccd8e176SBarry Smith 
3708ccd8e176SBarry Smith    Input Parameters:
3709a1661176SMatthew Knepley +  B - the matrix
3710ccd8e176SBarry Smith .  i - the indices into j for the start of each local row (starts with zero)
37110daa03b5SJed Brown .  j - the column indices for each local row (starts with zero)
3712ccd8e176SBarry Smith -  v - optional values in the matrix
3713ccd8e176SBarry Smith 
3714ccd8e176SBarry Smith    Level: developer
3715ccd8e176SBarry Smith 
371612251496SSatish Balay    Notes:
371712251496SSatish Balay        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
371812251496SSatish Balay      thus you CANNOT change the matrix entries by changing the values of a[] after you have
371912251496SSatish Balay      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
372012251496SSatish Balay 
372112251496SSatish Balay        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
372212251496SSatish Balay 
372312251496SSatish Balay        The format which is used for the sparse matrix input, is equivalent to a
372412251496SSatish Balay     row-major ordering.. i.e for the following matrix, the input data expected is
372512251496SSatish Balay     as shown:
372612251496SSatish Balay 
372712251496SSatish Balay         1 0 0
372812251496SSatish Balay         2 0 3     P0
372912251496SSatish Balay        -------
373012251496SSatish Balay         4 5 6     P1
373112251496SSatish Balay 
373212251496SSatish Balay      Process0 [P0]: rows_owned=[0,1]
373312251496SSatish Balay         i =  {0,1,3}  [size = nrow+1  = 2+1]
373412251496SSatish Balay         j =  {0,0,2}  [size = nz = 6]
373512251496SSatish Balay         v =  {1,2,3}  [size = nz = 6]
373612251496SSatish Balay 
373712251496SSatish Balay      Process1 [P1]: rows_owned=[2]
373812251496SSatish Balay         i =  {0,3}    [size = nrow+1  = 1+1]
373912251496SSatish Balay         j =  {0,1,2}  [size = nz = 6]
374012251496SSatish Balay         v =  {4,5,6}  [size = nz = 6]
374112251496SSatish Balay 
3742ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3743ccd8e176SBarry Smith 
37442fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ,
37458d7a6e47SBarry Smith           MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays()
3746ccd8e176SBarry Smith @*/
37477087cfbeSBarry Smith PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
3748ccd8e176SBarry Smith {
37494ac538c5SBarry Smith   PetscErrorCode ierr;
3750ccd8e176SBarry Smith 
3751ccd8e176SBarry Smith   PetscFunctionBegin;
37524ac538c5SBarry Smith   ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocationCSR_C",(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,i,j,v));CHKERRQ(ierr);
3753ccd8e176SBarry Smith   PetscFunctionReturn(0);
3754ccd8e176SBarry Smith }
3755ccd8e176SBarry Smith 
3756ccd8e176SBarry Smith #undef __FUNCT__
37574a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation"
3758273d9f13SBarry Smith /*@C
3759ccd8e176SBarry Smith    MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format
3760273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3761273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3762273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3763273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3764273d9f13SBarry Smith 
3765273d9f13SBarry Smith    Collective on MPI_Comm
3766273d9f13SBarry Smith 
3767273d9f13SBarry Smith    Input Parameters:
3768273d9f13SBarry Smith +  A - the matrix
3769273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3770273d9f13SBarry Smith            (same value is used for all local rows)
3771273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3772273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3773273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3774273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3775273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3776273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
3777273d9f13SBarry Smith            submatrix (same value is used for all local rows).
3778273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
3779273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
3780273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
3781273d9f13SBarry Smith            structure. The size of this array is equal to the number
3782273d9f13SBarry Smith            of local rows, i.e 'm'.
3783273d9f13SBarry Smith 
378449a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
378549a6f317SBarry Smith 
3786273d9f13SBarry Smith    The AIJ format (also called the Yale sparse matrix format or
3787ccd8e176SBarry Smith    compressed row storage (CSR)), is fully compatible with standard Fortran 77
37880598bfebSBarry Smith    storage.  The stored row and column indices begin with zero.
37890598bfebSBarry Smith    See the <A href="../../docs/manual.pdf#nameddest=ch_mat">Mat chapter of the users manual</A> for details.
3790273d9f13SBarry Smith 
3791273d9f13SBarry Smith    The parallel matrix is partitioned such that the first m0 rows belong to
3792273d9f13SBarry Smith    process 0, the next m1 rows belong to process 1, the next m2 rows belong
3793273d9f13SBarry Smith    to process 2 etc.. where m0,m1,m2... are the input parameter 'm'.
3794273d9f13SBarry Smith 
3795273d9f13SBarry Smith    The DIAGONAL portion of the local submatrix of a processor can be defined
   as the submatrix which is obtained by extracting the part corresponding to
3797a05b864aSJed Brown    the rows r1-r2 and columns c1-c2 of the global matrix, where r1 is the
3798a05b864aSJed Brown    first row that belongs to the processor, r2 is the last row belonging to
3799a05b864aSJed Brown    the this processor, and c1-c2 is range of indices of the local part of a
3800a05b864aSJed Brown    vector suitable for applying the matrix to.  This is an mxn matrix.  In the
3801a05b864aSJed Brown    common case of a square matrix, the row and column ranges are the same and
3802a05b864aSJed Brown    the DIAGONAL part is also square. The remaining portion of the local
3803a05b864aSJed Brown    submatrix (mxN) constitute the OFF-DIAGONAL portion.
3804273d9f13SBarry Smith 
3805273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
3806273d9f13SBarry Smith 
3807aa95bbe8SBarry Smith    You can call MatGetInfo() to get information on how effective the preallocation was;
3808aa95bbe8SBarry Smith    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3809aa95bbe8SBarry Smith    You can also run with the option -info and look for messages with the string
3810aa95bbe8SBarry Smith    malloc in them to see if additional memory allocation was needed.
3811aa95bbe8SBarry Smith 
3812273d9f13SBarry Smith    Example usage:
3813273d9f13SBarry Smith 
3814273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
3815273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
3816273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
3817273d9f13SBarry Smith    as follows:
3818273d9f13SBarry Smith 
3819273d9f13SBarry Smith .vb
3820273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
3821273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
3822273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
3823273d9f13SBarry Smith     -------------------------------------
3824273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
3825273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
3826273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
3827273d9f13SBarry Smith     -------------------------------------
3828273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
3829273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
3830273d9f13SBarry Smith .ve
3831273d9f13SBarry Smith 
3832273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
3833273d9f13SBarry Smith 
3834273d9f13SBarry Smith .vb
3835273d9f13SBarry Smith       A B C
3836273d9f13SBarry Smith       D E F
3837273d9f13SBarry Smith       G H I
3838273d9f13SBarry Smith .ve
3839273d9f13SBarry Smith 
3840273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
3841273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
3842273d9f13SBarry Smith 
3843273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3844273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
3845273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
3846273d9f13SBarry Smith 
3847273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
3848273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
3849273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
3850273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
   part as SeqAIJ matrices. For example, proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.
3853273d9f13SBarry Smith 
3854273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
3855273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
3856273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
3857273d9f13SBarry Smith    One way to choose d_nz and o_nz is to use the max nonzerors per local
3858273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
3859273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
3860273d9f13SBarry Smith .vb
3861273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
3862273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
3863273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
3864273d9f13SBarry Smith .ve
3865273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
3866273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2. i.e we are using 12+15+10=37 storage locations to store
3868273d9f13SBarry Smith    34 values.
3869273d9f13SBarry Smith 
3870273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
3872273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
3873273d9f13SBarry Smith .vb
3874273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
3875273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
3876273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
3877273d9f13SBarry Smith .ve
3878273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
3879273d9f13SBarry Smith    hence pre-allocation is perfect.
3880273d9f13SBarry Smith 
3881273d9f13SBarry Smith    Level: intermediate
3882273d9f13SBarry Smith 
3883273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
3884273d9f13SBarry Smith 
3885ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(),
3886aa95bbe8SBarry Smith           MPIAIJ, MatGetInfo()
3887273d9f13SBarry Smith @*/
PetscErrorCode  MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the implementation registered by the matrix type under
     "MatMPIAIJSetPreallocation_C"; PetscTryMethod() only calls it if the
     type provides one, so this is harmless on non-MPIAIJ matrices */
  ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3898273d9f13SBarry Smith 
38994a2ae208SSatish Balay #undef __FUNCT__
39002fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays"
390158d36128SBarry Smith /*@
39022fb0ec9aSBarry Smith      MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard
39032fb0ec9aSBarry Smith          CSR format the local rows.
39042fb0ec9aSBarry Smith 
39052fb0ec9aSBarry Smith    Collective on MPI_Comm
39062fb0ec9aSBarry Smith 
39072fb0ec9aSBarry Smith    Input Parameters:
39082fb0ec9aSBarry Smith +  comm - MPI communicator
39092fb0ec9aSBarry Smith .  m - number of local rows (Cannot be PETSC_DECIDE)
39102fb0ec9aSBarry Smith .  n - This value should be the same as the local size used in creating the
39112fb0ec9aSBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
39122fb0ec9aSBarry Smith        calculated if N is given) For square matrices n is almost always m.
39132fb0ec9aSBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
39142fb0ec9aSBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
39152fb0ec9aSBarry Smith .   i - row indices
39162fb0ec9aSBarry Smith .   j - column indices
39172fb0ec9aSBarry Smith -   a - matrix values
39182fb0ec9aSBarry Smith 
39192fb0ec9aSBarry Smith    Output Parameter:
39202fb0ec9aSBarry Smith .   mat - the matrix
392103bfb495SBarry Smith 
39222fb0ec9aSBarry Smith    Level: intermediate
39232fb0ec9aSBarry Smith 
39242fb0ec9aSBarry Smith    Notes:
39252fb0ec9aSBarry Smith        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
39262fb0ec9aSBarry Smith      thus you CANNOT change the matrix entries by changing the values of a[] after you have
39278d7a6e47SBarry Smith      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
39282fb0ec9aSBarry Smith 
392912251496SSatish Balay        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
393012251496SSatish Balay 
393112251496SSatish Balay        The format which is used for the sparse matrix input, is equivalent to a
393212251496SSatish Balay     row-major ordering.. i.e for the following matrix, the input data expected is
393312251496SSatish Balay     as shown:
393412251496SSatish Balay 
393512251496SSatish Balay         1 0 0
393612251496SSatish Balay         2 0 3     P0
393712251496SSatish Balay        -------
393812251496SSatish Balay         4 5 6     P1
393912251496SSatish Balay 
394012251496SSatish Balay      Process0 [P0]: rows_owned=[0,1]
394112251496SSatish Balay         i =  {0,1,3}  [size = nrow+1  = 2+1]
394212251496SSatish Balay         j =  {0,0,2}  [size = nz = 6]
394312251496SSatish Balay         v =  {1,2,3}  [size = nz = 6]
394412251496SSatish Balay 
394512251496SSatish Balay      Process1 [P1]: rows_owned=[2]
394612251496SSatish Balay         i =  {0,3}    [size = nrow+1  = 1+1]
394712251496SSatish Balay         j =  {0,1,2}  [size = nz = 6]
394812251496SSatish Balay         v =  {4,5,6}  [size = nz = 6]
39492fb0ec9aSBarry Smith 
39502fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
39512fb0ec9aSBarry Smith 
39522fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
39538d7a6e47SBarry Smith           MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays()
39542fb0ec9aSBarry Smith @*/
39557087cfbeSBarry Smith PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
39562fb0ec9aSBarry Smith {
39572fb0ec9aSBarry Smith   PetscErrorCode ierr;
39582fb0ec9aSBarry Smith 
39592fb0ec9aSBarry Smith  PetscFunctionBegin;
39602fb0ec9aSBarry Smith   if (i[0]) {
3961e32f2f54SBarry Smith     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
39622fb0ec9aSBarry Smith   }
3963e32f2f54SBarry Smith   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
39642fb0ec9aSBarry Smith   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
3965d4146a68SBarry Smith   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
39662fb0ec9aSBarry Smith   ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
39672fb0ec9aSBarry Smith   ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr);
39682fb0ec9aSBarry Smith   PetscFunctionReturn(0);
39692fb0ec9aSBarry Smith }
39702fb0ec9aSBarry Smith 
39712fb0ec9aSBarry Smith #undef __FUNCT__
39724a2ae208SSatish Balay #define __FUNCT__ "MatCreateMPIAIJ"
3973273d9f13SBarry Smith /*@C
3974273d9f13SBarry Smith    MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format
3975273d9f13SBarry Smith    (the default parallel PETSc format).  For good matrix assembly performance
3976273d9f13SBarry Smith    the user should preallocate the matrix storage by setting the parameters
3977273d9f13SBarry Smith    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3978273d9f13SBarry Smith    performance can be increased by more than a factor of 50.
3979273d9f13SBarry Smith 
3980273d9f13SBarry Smith    Collective on MPI_Comm
3981273d9f13SBarry Smith 
3982273d9f13SBarry Smith    Input Parameters:
3983273d9f13SBarry Smith +  comm - MPI communicator
3984273d9f13SBarry Smith .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3985273d9f13SBarry Smith            This value should be the same as the local size used in creating the
3986273d9f13SBarry Smith            y vector for the matrix-vector product y = Ax.
3987273d9f13SBarry Smith .  n - This value should be the same as the local size used in creating the
3988273d9f13SBarry Smith        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3989273d9f13SBarry Smith        calculated if N is given) For square matrices n is almost always m.
3990273d9f13SBarry Smith .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3991273d9f13SBarry Smith .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3992273d9f13SBarry Smith .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
3993273d9f13SBarry Smith            (same value is used for all local rows)
3994273d9f13SBarry Smith .  d_nnz - array containing the number of nonzeros in the various rows of the
3995273d9f13SBarry Smith            DIAGONAL portion of the local submatrix (possibly different for each row)
3996273d9f13SBarry Smith            or PETSC_NULL, if d_nz is used to specify the nonzero structure.
3997273d9f13SBarry Smith            The size of this array is equal to the number of local rows, i.e 'm'.
3998273d9f13SBarry Smith            You must leave room for the diagonal entry even if it is zero.
3999273d9f13SBarry Smith .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
4000273d9f13SBarry Smith            submatrix (same value is used for all local rows).
4001273d9f13SBarry Smith -  o_nnz - array containing the number of nonzeros in the various rows of the
4002273d9f13SBarry Smith            OFF-DIAGONAL portion of the local submatrix (possibly different for
4003273d9f13SBarry Smith            each row) or PETSC_NULL, if o_nz is used to specify the nonzero
4004273d9f13SBarry Smith            structure. The size of this array is equal to the number
4005273d9f13SBarry Smith            of local rows, i.e 'm'.
4006273d9f13SBarry Smith 
4007273d9f13SBarry Smith    Output Parameter:
4008273d9f13SBarry Smith .  A - the matrix
4009273d9f13SBarry Smith 
4010175b88e8SBarry Smith    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
4011ae1d86c5SBarry Smith    MatXXXXSetPreallocation() paradgm instead of this routine directly.
4012175b88e8SBarry Smith    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
4013175b88e8SBarry Smith 
4014273d9f13SBarry Smith    Notes:
401549a6f317SBarry Smith    If the *_nnz parameter is given then the *_nz parameter is ignored
401649a6f317SBarry Smith 
4017273d9f13SBarry Smith    m,n,M,N parameters specify the size of the matrix, and its partitioning across
4018273d9f13SBarry Smith    processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate
4019273d9f13SBarry Smith    storage requirements for this matrix.
4020273d9f13SBarry Smith 
4021273d9f13SBarry Smith    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one
4022273d9f13SBarry Smith    processor than it must be used on all processors that share the object for
4023273d9f13SBarry Smith    that argument.
4024273d9f13SBarry Smith 
4025273d9f13SBarry Smith    The user MUST specify either the local or global matrix dimensions
4026273d9f13SBarry Smith    (possibly both).
4027273d9f13SBarry Smith 
402833a7c187SSatish Balay    The parallel matrix is partitioned across processors such that the
402933a7c187SSatish Balay    first m0 rows belong to process 0, the next m1 rows belong to
403033a7c187SSatish Balay    process 1, the next m2 rows belong to process 2 etc.. where
403133a7c187SSatish Balay    m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores
403233a7c187SSatish Balay    values corresponding to [m x N] submatrix.
4033273d9f13SBarry Smith 
403433a7c187SSatish Balay    The columns are logically partitioned with the n0 columns belonging
403533a7c187SSatish Balay    to 0th partition, the next n1 columns belonging to the next
   partition etc.. where n0,n1,n2... are the input parameter 'n'.
403733a7c187SSatish Balay 
403833a7c187SSatish Balay    The DIAGONAL portion of the local submatrix on any given processor
403933a7c187SSatish Balay    is the submatrix corresponding to the rows and columns m,n
404033a7c187SSatish Balay    corresponding to the given processor. i.e diagonal matrix on
404133a7c187SSatish Balay    process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1]
404233a7c187SSatish Balay    etc. The remaining portion of the local submatrix [m x (N-n)]
404333a7c187SSatish Balay    constitute the OFF-DIAGONAL portion. The example below better
404433a7c187SSatish Balay    illustrates this concept.
404533a7c187SSatish Balay 
404633a7c187SSatish Balay    For a square global matrix we define each processor's diagonal portion
404733a7c187SSatish Balay    to be its local rows and the corresponding columns (a square submatrix);
404833a7c187SSatish Balay    each processor's off-diagonal portion encompasses the remainder of the
404933a7c187SSatish Balay    local matrix (a rectangular submatrix).
4050273d9f13SBarry Smith 
4051273d9f13SBarry Smith    If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.
4052273d9f13SBarry Smith 
405397d05335SKris Buschelman    When calling this routine with a single process communicator, a matrix of
405497d05335SKris Buschelman    type SEQAIJ is returned.  If a matrix of type MPIAIJ is desired for this
405597d05335SKris Buschelman    type of communicator, use the construction mechanism:
405678102f6cSMatthew Knepley      MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...);
405797d05335SKris Buschelman 
4058273d9f13SBarry Smith    By default, this format uses inodes (identical nodes) when possible.
4059273d9f13SBarry Smith    We search for consecutive rows with the same nonzero structure, thereby
4060273d9f13SBarry Smith    reusing matrix information to achieve increased efficiency.
4061273d9f13SBarry Smith 
4062273d9f13SBarry Smith    Options Database Keys:
4063923f20ffSKris Buschelman +  -mat_no_inode  - Do not use inodes
4064923f20ffSKris Buschelman .  -mat_inode_limit <limit> - Sets inode limit (max limit=5)
4065273d9f13SBarry Smith -  -mat_aij_oneindex - Internally use indexing starting at 1
4066273d9f13SBarry Smith         rather than 0.  Note that when calling MatSetValues(),
4067273d9f13SBarry Smith         the user still MUST index entries starting at 0!
4068273d9f13SBarry Smith 
4069273d9f13SBarry Smith 
4070273d9f13SBarry Smith    Example usage:
4071273d9f13SBarry Smith 
4072273d9f13SBarry Smith    Consider the following 8x8 matrix with 34 non-zero values, that is
4073273d9f13SBarry Smith    assembled across 3 processors. Lets assume that proc0 owns 3 rows,
4074273d9f13SBarry Smith    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
4075273d9f13SBarry Smith    as follows:
4076273d9f13SBarry Smith 
4077273d9f13SBarry Smith .vb
4078273d9f13SBarry Smith             1  2  0  |  0  3  0  |  0  4
4079273d9f13SBarry Smith     Proc0   0  5  6  |  7  0  0  |  8  0
4080273d9f13SBarry Smith             9  0 10  | 11  0  0  | 12  0
4081273d9f13SBarry Smith     -------------------------------------
4082273d9f13SBarry Smith            13  0 14  | 15 16 17  |  0  0
4083273d9f13SBarry Smith     Proc1   0 18  0  | 19 20 21  |  0  0
4084273d9f13SBarry Smith             0  0  0  | 22 23  0  | 24  0
4085273d9f13SBarry Smith     -------------------------------------
4086273d9f13SBarry Smith     Proc2  25 26 27  |  0  0 28  | 29  0
4087273d9f13SBarry Smith            30  0  0  | 31 32 33  |  0 34
4088273d9f13SBarry Smith .ve
4089273d9f13SBarry Smith 
4090273d9f13SBarry Smith    This can be represented as a collection of submatrices as:
4091273d9f13SBarry Smith 
4092273d9f13SBarry Smith .vb
4093273d9f13SBarry Smith       A B C
4094273d9f13SBarry Smith       D E F
4095273d9f13SBarry Smith       G H I
4096273d9f13SBarry Smith .ve
4097273d9f13SBarry Smith 
4098273d9f13SBarry Smith    Where the submatrices A,B,C are owned by proc0, D,E,F are
4099273d9f13SBarry Smith    owned by proc1, G,H,I are owned by proc2.
4100273d9f13SBarry Smith 
4101273d9f13SBarry Smith    The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
4102273d9f13SBarry Smith    The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
4103273d9f13SBarry Smith    The 'M','N' parameters are 8,8, and have the same values on all procs.
4104273d9f13SBarry Smith 
4105273d9f13SBarry Smith    The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
4106273d9f13SBarry Smith    submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
4107273d9f13SBarry Smith    corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
4108273d9f13SBarry Smith    Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
4109273d9f13SBarry Smith    part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.
4111273d9f13SBarry Smith 
4112273d9f13SBarry Smith    When d_nz, o_nz parameters are specified, d_nz storage elements are
4113273d9f13SBarry Smith    allocated for every row of the local diagonal submatrix, and o_nz
4114273d9f13SBarry Smith    storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
4116273d9f13SBarry Smith    rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
4117273d9f13SBarry Smith    In this case, the values of d_nz,o_nz are:
4118273d9f13SBarry Smith .vb
4119273d9f13SBarry Smith      proc0 : dnz = 2, o_nz = 2
4120273d9f13SBarry Smith      proc1 : dnz = 3, o_nz = 2
4121273d9f13SBarry Smith      proc2 : dnz = 1, o_nz = 4
4122273d9f13SBarry Smith .ve
4123273d9f13SBarry Smith    We are allocating m*(d_nz+o_nz) storage locations for every proc. This
4124273d9f13SBarry Smith    translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
4125273d9f13SBarry Smith    for proc3. i.e we are using 12+15+10=37 storage locations to store
   for proc2. i.e we are using 12+15+10=37 storage locations to store
4127273d9f13SBarry Smith 
4128273d9f13SBarry Smith    When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
4130273d9f13SBarry Smith    In the above case the values for d_nnz,o_nnz are:
4131273d9f13SBarry Smith .vb
4132273d9f13SBarry Smith      proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
4133273d9f13SBarry Smith      proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
4134273d9f13SBarry Smith      proc2: d_nnz = [1,1]   and o_nnz = [4,4]
4135273d9f13SBarry Smith .ve
4136273d9f13SBarry Smith    Here the space allocated is sum of all the above values i.e 34, and
4137273d9f13SBarry Smith    hence pre-allocation is perfect.
4138273d9f13SBarry Smith 
4139273d9f13SBarry Smith    Level: intermediate
4140273d9f13SBarry Smith 
4141273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel
4142273d9f13SBarry Smith 
4143ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
41442fb0ec9aSBarry Smith           MPIAIJ, MatCreateMPIAIJWithArrays()
4145273d9f13SBarry Smith @*/
41467087cfbeSBarry Smith PetscErrorCode  MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
4147273d9f13SBarry Smith {
41486849ba73SBarry Smith   PetscErrorCode ierr;
4149b1d57f15SBarry Smith   PetscMPIInt    size;
4150273d9f13SBarry Smith 
4151273d9f13SBarry Smith   PetscFunctionBegin;
4152f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,A);CHKERRQ(ierr);
4153f69a0ea3SMatthew Knepley   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
4154273d9f13SBarry Smith   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
4155273d9f13SBarry Smith   if (size > 1) {
4156273d9f13SBarry Smith     ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr);
4157273d9f13SBarry Smith     ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
4158273d9f13SBarry Smith   } else {
4159273d9f13SBarry Smith     ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr);
4160273d9f13SBarry Smith     ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr);
4161273d9f13SBarry Smith   }
4162273d9f13SBarry Smith   PetscFunctionReturn(0);
4163273d9f13SBarry Smith }
4164195d93cdSBarry Smith 
41654a2ae208SSatish Balay #undef __FUNCT__
41664a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ"
41677087cfbeSBarry Smith PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[])
4168195d93cdSBarry Smith {
4169195d93cdSBarry Smith   Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;
4170b1d57f15SBarry Smith 
4171195d93cdSBarry Smith   PetscFunctionBegin;
4172195d93cdSBarry Smith   *Ad     = a->A;
4173195d93cdSBarry Smith   *Ao     = a->B;
4174195d93cdSBarry Smith   *colmap = a->garray;
4175195d93cdSBarry Smith   PetscFunctionReturn(0);
4176195d93cdSBarry Smith }
4177a2243be0SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatSetColoring_MPIAIJ"
/* Pushes an ISColoring down onto the two sequential blocks (diagonal a->A and
   off-diagonal a->B) of a MATMPIAIJ matrix.  Supports global colorings (colors
   indexed by global row/column) and ghosted colorings (colors indexed by local
   numbering, including ghosts); any other ISColoringType is an error. */
PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;

  PetscFunctionBegin;
  if (coloring->ctype == IS_COLORING_GLOBAL) {
    ISColoringValue *allcolors,*colors;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion */
    /* gather the full color array from all processes so that any global
       column referenced by the off-diagonal block can be looked up */
    ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      /* garray maps local off-diagonal columns to global column numbers */
      colors[i] = allcolors[a->garray[i]];
    }
    ierr = PetscFree(allcolors);CHKERRQ(ierr);
    /* NOTE(review): MPI_COMM_SELF here vs PETSC_COMM_SELF in the ghosted
       branch below -- the two are interchangeable in PETSc but the usage is
       inconsistent; consider unifying */
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr);
  } else if (coloring->ctype == IS_COLORING_GHOSTED) {
    ISColoringValue *colors;
    PetscInt        *larray;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    /* build the global indices of the locally owned columns, then map them
       back into the ghosted local numbering used by coloring->colors */
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      larray[i] = i + A->cmap->rstart;
    }
    ierr = ISGlobalToLocalMappingApply(A->cmap->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion */
    /* garray already holds global column numbers; map them to ghosted local */
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApply(A->cmap->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&ocoloring);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype);

  PetscFunctionReturn(0);
}
4239a2243be0SBarry Smith 
#if defined(PETSC_HAVE_ADIC)
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesAdic_MPIAIJ"
/* Forwards ADIC-computed values into both sequential pieces of the parallel
   matrix: the diagonal block first, then the off-diagonal block. */
PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetValuesAdic_SeqAIJ(aij->A,advalues);CHKERRQ(ierr);
  ierr = MatSetValuesAdic_SeqAIJ(aij->B,advalues);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
4254779c1a83SBarry Smith 
4255779c1a83SBarry Smith #undef __FUNCT__
4256779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ"
4257b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues)
4258779c1a83SBarry Smith {
4259779c1a83SBarry Smith   Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
4260dfbe8321SBarry Smith   PetscErrorCode ierr;
4261779c1a83SBarry Smith 
4262779c1a83SBarry Smith   PetscFunctionBegin;
4263779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr);
4264779c1a83SBarry Smith   ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr);
4265a2243be0SBarry Smith   PetscFunctionReturn(0);
4266a2243be0SBarry Smith }
4267c5d6d63eSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMergeSymbolic"
/* Symbolic phase of MatMerge(): from each process's sequential matrix inmat
   (rows are stacked across processes in rank order) determine the nonzero
   pattern and create a preallocated parallel matrix *outmat with n local
   columns (n may be PETSC_DECIDE).
   NOTE(review): uses MatGetRow_SeqAIJ directly, so inmat is assumed to be a
   SeqAIJ matrix -- confirm against callers. */
PetscErrorCode  MatMergeSymbolic(MPI_Comm comm,Mat inmat,PetscInt n,Mat *outmat)
{
  PetscErrorCode ierr;
  PetscInt       m,N,i,rstart,nnz,*dnz,*onz;
  PetscInt       *indx;

  PetscFunctionBegin;
  /* This routine will ONLY return MPIAIJ type matrix */
  ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
  if (n == PETSC_DECIDE){
    ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  }
  /* prefix-sum of local row counts gives this process's first global row */
  ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  rstart -= m;

  /* MatPreallocateInitialize/Finalize are macros that declare and free the
     dnz/onz count arrays; the calls below must stay paired */
  ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
  for (i=0;i<m;i++) {
    ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
    ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr);
    ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
  }

  ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
  ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
42999b8102ccSHong Zhang 
43009b8102ccSHong Zhang #undef __FUNCT__
43019b8102ccSHong Zhang #define __FUNCT__ "MatMergeNumeric"
43029b8102ccSHong Zhang PetscErrorCode  MatMergeNumeric(MPI_Comm comm,Mat inmat,PetscInt n,Mat outmat)
43039b8102ccSHong Zhang {
43049b8102ccSHong Zhang   PetscErrorCode ierr;
43059b8102ccSHong Zhang   PetscInt       m,N,i,rstart,nnz,Ii;
43069b8102ccSHong Zhang   PetscInt       *indx;
43079b8102ccSHong Zhang   PetscScalar    *values;
43089b8102ccSHong Zhang 
43099b8102ccSHong Zhang   PetscFunctionBegin;
43109b8102ccSHong Zhang   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
43119b8102ccSHong Zhang   ierr = MatGetOwnershipRange(outmat,&rstart,PETSC_NULL);CHKERRQ(ierr);
43129b8102ccSHong Zhang   for (i=0;i<m;i++) {
43139b8102ccSHong Zhang     ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
43149b8102ccSHong Zhang     Ii    = i + rstart;
43159b8102ccSHong Zhang     ierr = MatSetValues(outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
43169b8102ccSHong Zhang     ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
43179b8102ccSHong Zhang   }
43189b8102ccSHong Zhang   ierr = MatDestroy(&inmat);CHKERRQ(ierr);
43199b8102ccSHong Zhang   ierr = MatAssemblyBegin(outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
43209b8102ccSHong Zhang   ierr = MatAssemblyEnd(outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
43219b8102ccSHong Zhang   PetscFunctionReturn(0);
43229b8102ccSHong Zhang }
43239b8102ccSHong Zhang 
#undef __FUNCT__
#define __FUNCT__ "MatMerge"
/*@
      MatMerge - Creates a single large PETSc matrix by concatenating sequential
                 matrices from each processor

    Collective on MPI_Comm

   Input Parameters:
+    comm - the communicators the parallel matrix will live on
.    inmat - the input sequential matrices
.    n - number of local columns (or PETSC_DECIDE)
-    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    outmat - the parallel matrix generated

    Level: advanced

   Notes: The number of columns of the matrix in EACH processor MUST be the same.
     The input matrix inmat is destroyed by this routine (inside the numeric
     phase) once its values have been copied into outmat.

@*/
PetscErrorCode  MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Merge,inmat,0,0,0);CHKERRQ(ierr);
  /* symbolic phase (pattern + preallocation) only on first use; with
     MAT_REUSE_MATRIX the existing *outmat structure is reused */
  if (scall == MAT_INITIAL_MATRIX){
    ierr = MatMergeSymbolic(comm,inmat,n,outmat);CHKERRQ(ierr);
  }
  ierr = MatMergeNumeric(comm,inmat,n,*outmat);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Merge,inmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
4359c5d6d63eSBarry Smith 
4360c5d6d63eSBarry Smith #undef __FUNCT__
4361c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit"
4362dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile)
4363c5d6d63eSBarry Smith {
4364dfbe8321SBarry Smith   PetscErrorCode    ierr;
436532dcc486SBarry Smith   PetscMPIInt       rank;
4366b1d57f15SBarry Smith   PetscInt          m,N,i,rstart,nnz;
4367de4209c5SBarry Smith   size_t            len;
4368b1d57f15SBarry Smith   const PetscInt    *indx;
4369c5d6d63eSBarry Smith   PetscViewer       out;
4370c5d6d63eSBarry Smith   char              *name;
4371c5d6d63eSBarry Smith   Mat               B;
4372b3cc6726SBarry Smith   const PetscScalar *values;
4373c5d6d63eSBarry Smith 
4374c5d6d63eSBarry Smith   PetscFunctionBegin;
4375c5d6d63eSBarry Smith   ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr);
4376c5d6d63eSBarry Smith   ierr = MatGetSize(A,0,&N);CHKERRQ(ierr);
4377f204ca49SKris Buschelman   /* Should this be the type of the diagonal block of A? */
4378f69a0ea3SMatthew Knepley   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
4379f69a0ea3SMatthew Knepley   ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr);
4380f204ca49SKris Buschelman   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
4381f204ca49SKris Buschelman   ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
4382c5d6d63eSBarry Smith   ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr);
4383c5d6d63eSBarry Smith   for (i=0;i<m;i++) {
4384c5d6d63eSBarry Smith     ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
4385c5d6d63eSBarry Smith     ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
4386c5d6d63eSBarry Smith     ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr);
4387c5d6d63eSBarry Smith   }
4388c5d6d63eSBarry Smith   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4389c5d6d63eSBarry Smith   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4390c5d6d63eSBarry Smith 
43917adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr);
4392c5d6d63eSBarry Smith   ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr);
4393c5d6d63eSBarry Smith   ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr);
4394c5d6d63eSBarry Smith   sprintf(name,"%s.%d",outfile,rank);
4395852598b0SBarry Smith   ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr);
4396c5d6d63eSBarry Smith   ierr = PetscFree(name);
4397c5d6d63eSBarry Smith   ierr = MatView(B,out);CHKERRQ(ierr);
43986bf464f9SBarry Smith   ierr = PetscViewerDestroy(&out);CHKERRQ(ierr);
43996bf464f9SBarry Smith   ierr = MatDestroy(&B);CHKERRQ(ierr);
4400c5d6d63eSBarry Smith   PetscFunctionReturn(0);
4401c5d6d63eSBarry Smith }
4402e5f2cdd8SHong Zhang 
440309573ac7SBarry Smith extern PetscErrorCode MatDestroy_MPIAIJ(Mat);
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI"
/* Destructor for MPIAIJ matrices produced by the seqs-to-MPI merge: frees the
   Mat_Merge_SeqsToMPI workspace composed on the matrix (if present), detaches
   the container, then chains to the regular MPIAIJ destructor. */
PetscErrorCode  MatDestroy_MPIAIJ_SeqsToMPI(Mat A)
{
  PetscErrorCode       ierr;
  Mat_Merge_SeqsToMPI  *merge;
  PetscContainer       container;

  PetscFunctionBegin;
  ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
  if (container) {
    ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);
    ierr = PetscFree(merge->id_r);CHKERRQ(ierr);
    ierr = PetscFree(merge->len_s);CHKERRQ(ierr);
    ierr = PetscFree(merge->len_r);CHKERRQ(ierr);
    ierr = PetscFree(merge->bi);CHKERRQ(ierr);
    ierr = PetscFree(merge->bj);CHKERRQ(ierr);
    /* buf_ri/buf_rj are arrays of pointers whose payload was allocated as a
       single chunk anchored at element 0: free the chunk, then the array */
    ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr);
    ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr);
    ierr = PetscFree(merge->coi);CHKERRQ(ierr);
    ierr = PetscFree(merge->coj);CHKERRQ(ierr);
    ierr = PetscFree(merge->owners_co);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&merge->rowmap);CHKERRQ(ierr);
    ierr = PetscFree(merge);CHKERRQ(ierr);
    /* detach the (now stale) container from the matrix */
    ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr);
  }
  /* chain to the base MPIAIJ destructor */
  ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
443551a7d1a8SHong Zhang 
4436c6db04a5SJed Brown #include <../src/mat/utils/freespace.h>
4437c6db04a5SJed Brown #include <petscbt.h>
44384ebed01fSBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPINumeric"
/*@C
      MatMerge_SeqsToMPINumeric - Numeric phase of merging per-process sequential
                 matrices into one parallel MPIAIJ matrix: sums the values of each
                 process's seqmat into mpimat, whose nonzero structure and
                 communication pattern were built by the matching symbolic phase
                 (stored in the "MatMergeSeqsToMPI" container composed on mpimat)

    Collective on Mat

   Input Parameters:
+    seqmat - the input sequential matrix on this process
-    mpimat - the parallel matrix created by the symbolic phase; filled in here

    Level: advanced

   Notes:
     NOTE(review): the sequential matrices are presumably required to have the
     same global dimensions on every process, as for MatMerge() -- confirm
     against the symbolic phase.
@*/
PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat)
{
  PetscErrorCode       ierr;
  MPI_Comm             comm=((PetscObject)mpimat)->comm;
  Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
  PetscMPIInt          size,rank,taga,*len_s;
  PetscInt             N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j;
  PetscInt             proc,m;
  PetscInt             **buf_ri,**buf_rj;
  PetscInt             k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj;
  PetscInt             nrows,**buf_ri_k,**nextrow,**nextai;
  MPI_Request          *s_waits,*r_waits;
  MPI_Status           *status;
  MatScalar            *aa=a->a;
  MatScalar            **abuf_r,*ba_i;
  Mat_Merge_SeqsToMPI  *merge;
  PetscContainer       container;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  /* retrieve the communication data computed by the symbolic phase */
  ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr);
  ierr  = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr);

  bi     = merge->bi;
  bj     = merge->bj;
  buf_ri = merge->buf_ri;
  buf_rj = merge->buf_rj;

  ierr   = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
  owners = merge->rowmap->range;
  len_s  = merge->len_s;

  /* send and recv matrix values */
  /*-----------------------------*/
  ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr);
  ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr);

  ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr);
  for (proc=0,k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    /* send the slice of seqmat's values whose rows are owned by 'proc' */
    i = owners[proc];
    ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr);
    k++;
  }

  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);}
  ierr = PetscFree(status);CHKERRQ(ierr);

  ierr = PetscFree(s_waits);CHKERRQ(ierr);
  ierr = PetscFree(r_waits);CHKERRQ(ierr);

  /* insert mat values of mpimat */
  /*----------------------------*/
  /* NOTE(review): ba_i is MatScalar* but is sized/zeroed with
     sizeof(PetscScalar); safe only when the two types have the same size --
     confirm for mixed-precision builds */
  ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr);
  ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr);

  for (k=0; k<merge->nrecv; k++){
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows = *(buf_ri_k[k]);
    nextrow[k]  = buf_ri_k[k]+1;  /* next row number of k-th recved i-structure */
    nextai[k]   = buf_ri_k[k] + (nrows + 1);/* points to the next i-structure of k-th recved i-structure  */
  }

  /* set values of ba */
  m = merge->rowmap->n;
  for (i=0; i<m; i++) {
    arow = owners[rank] + i;  /* global row number of local row i */
    bj_i = bj+bi[i];  /* col indices of the i-th row of mpimat */
    bnzi = bi[i+1] - bi[i];
    ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr);

    /* add local non-zero vals of this proc's seqmat into ba */
    anzi = ai[arow+1] - ai[arow];
    aj   = a->j + ai[arow];
    aa   = a->a + ai[arow];
    nextaj = 0;
    /* both bj_i and aj are sorted, so a single merge pass suffices */
    for (j=0; nextaj<anzi; j++){
      if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
        ba_i[j] += aa[nextaj++];
      }
    }

    /* add received vals into ba */
    for (k=0; k<merge->nrecv; k++){ /* k-th received message */
      /* i-th row */
      if (i == *nextrow[k]) {
        anzi = *(nextai[k]+1) - *nextai[k];
        aj   = buf_rj[k] + *(nextai[k]);
        aa   = abuf_r[k] + *(nextai[k]);
        nextaj = 0;
        for (j=0; nextaj<anzi; j++){
          if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */
            ba_i[j] += aa[nextaj++];
          }
        }
        nextrow[k]++; nextai[k]++;
      }
    }
    ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = PetscFree(abuf_r[0]);CHKERRQ(ierr);
  ierr = PetscFree(abuf_r);CHKERRQ(ierr);
  ierr = PetscFree(ba_i);CHKERRQ(ierr);
  ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
457938f152feSBarry Smith 
45806bc0bbbfSBarry Smith extern PetscErrorCode  MatDestroy_MPIAIJ_SeqsToMPI(Mat);
45816bc0bbbfSBarry Smith 
458238f152feSBarry Smith #undef __FUNCT__
458338f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic"
45847087cfbeSBarry Smith PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat)
4585e5f2cdd8SHong Zhang {
4586f08fae4eSHong Zhang   PetscErrorCode       ierr;
458755a3bba9SHong Zhang   Mat                  B_mpi;
4588c2234fe3SHong Zhang   Mat_SeqAIJ           *a=(Mat_SeqAIJ*)seqmat->data;
4589b1d57f15SBarry Smith   PetscMPIInt          size,rank,tagi,tagj,*len_s,*len_si,*len_ri;
4590b1d57f15SBarry Smith   PetscInt             **buf_rj,**buf_ri,**buf_ri_k;
4591d0f46423SBarry Smith   PetscInt             M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j;
4592b1d57f15SBarry Smith   PetscInt             len,proc,*dnz,*onz;
4593b1d57f15SBarry Smith   PetscInt             k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0;
4594b1d57f15SBarry Smith   PetscInt             nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai;
459555d1abb9SHong Zhang   MPI_Request          *si_waits,*sj_waits,*ri_waits,*rj_waits;
459658cb9c82SHong Zhang   MPI_Status           *status;
4597a1a86e44SBarry Smith   PetscFreeSpaceList   free_space=PETSC_NULL,current_space=PETSC_NULL;
4598be0fcf8dSHong Zhang   PetscBT              lnkbt;
459951a7d1a8SHong Zhang   Mat_Merge_SeqsToMPI  *merge;
4600776b82aeSLisandro Dalcin   PetscContainer       container;
460102c68681SHong Zhang 
4602e5f2cdd8SHong Zhang   PetscFunctionBegin;
46034ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
46043c2c1871SHong Zhang 
460538f152feSBarry Smith   /* make sure it is a PETSc comm */
460638f152feSBarry Smith   ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr);
4607e5f2cdd8SHong Zhang   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
4608e5f2cdd8SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
460955d1abb9SHong Zhang 
461051a7d1a8SHong Zhang   ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr);
4611c2234fe3SHong Zhang   ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);
4612e5f2cdd8SHong Zhang 
46136abd8857SHong Zhang   /* determine row ownership */
4614f08fae4eSHong Zhang   /*---------------------------------------------------------*/
461526283091SBarry Smith   ierr = PetscLayoutCreate(comm,&merge->rowmap);CHKERRQ(ierr);
461626283091SBarry Smith   ierr = PetscLayoutSetLocalSize(merge->rowmap,m);CHKERRQ(ierr);
461726283091SBarry Smith   ierr = PetscLayoutSetSize(merge->rowmap,M);CHKERRQ(ierr);
461826283091SBarry Smith   ierr = PetscLayoutSetBlockSize(merge->rowmap,1);CHKERRQ(ierr);
461926283091SBarry Smith   ierr = PetscLayoutSetUp(merge->rowmap);CHKERRQ(ierr);
4620b1d57f15SBarry Smith   ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr);
4621b1d57f15SBarry Smith   ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr);
462255d1abb9SHong Zhang 
46237a2fc3feSBarry Smith   m      = merge->rowmap->n;
46247a2fc3feSBarry Smith   M      = merge->rowmap->N;
46257a2fc3feSBarry Smith   owners = merge->rowmap->range;
46266abd8857SHong Zhang 
46276abd8857SHong Zhang   /* determine the number of messages to send, their lengths */
46286abd8857SHong Zhang   /*---------------------------------------------------------*/
46293e06a4e6SHong Zhang   len_s  = merge->len_s;
463051a7d1a8SHong Zhang 
46312257cef7SHong Zhang   len = 0;  /* length of buf_si[] */
4632c2234fe3SHong Zhang   merge->nsend = 0;
4633409913e3SHong Zhang   for (proc=0; proc<size; proc++){
46342257cef7SHong Zhang     len_si[proc] = 0;
46353e06a4e6SHong Zhang     if (proc == rank){
46366abd8857SHong Zhang       len_s[proc] = 0;
46373e06a4e6SHong Zhang     } else {
463802c68681SHong Zhang       len_si[proc] = owners[proc+1] - owners[proc] + 1;
46393e06a4e6SHong Zhang       len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */
46403e06a4e6SHong Zhang     }
46413e06a4e6SHong Zhang     if (len_s[proc]) {
4642c2234fe3SHong Zhang       merge->nsend++;
46432257cef7SHong Zhang       nrows = 0;
46442257cef7SHong Zhang       for (i=owners[proc]; i<owners[proc+1]; i++){
46452257cef7SHong Zhang         if (ai[i+1] > ai[i]) nrows++;
46462257cef7SHong Zhang       }
46472257cef7SHong Zhang       len_si[proc] = 2*(nrows+1);
46482257cef7SHong Zhang       len += len_si[proc];
4649409913e3SHong Zhang     }
465058cb9c82SHong Zhang   }
4651409913e3SHong Zhang 
46522257cef7SHong Zhang   /* determine the number and length of messages to receive for ij-structure */
46532257cef7SHong Zhang   /*-------------------------------------------------------------------------*/
465451a7d1a8SHong Zhang   ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr);
465555d1abb9SHong Zhang   ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr);
4656671beff6SHong Zhang 
46573e06a4e6SHong Zhang   /* post the Irecv of j-structure */
46583e06a4e6SHong Zhang   /*-------------------------------*/
46592c72b5baSSatish Balay   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
46603e06a4e6SHong Zhang   ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr);
466102c68681SHong Zhang 
46623e06a4e6SHong Zhang   /* post the Isend of j-structure */
4663affca5deSHong Zhang   /*--------------------------------*/
46641d79065fSBarry Smith   ierr = PetscMalloc2(merge->nsend,MPI_Request,&si_waits,merge->nsend,MPI_Request,&sj_waits);CHKERRQ(ierr);
46653e06a4e6SHong Zhang 
46662257cef7SHong Zhang   for (proc=0, k=0; proc<size; proc++){
4667409913e3SHong Zhang     if (!len_s[proc]) continue;
466802c68681SHong Zhang     i = owners[proc];
4669b1d57f15SBarry Smith     ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr);
467051a7d1a8SHong Zhang     k++;
467151a7d1a8SHong Zhang   }
467251a7d1a8SHong Zhang 
46733e06a4e6SHong Zhang   /* receives and sends of j-structure are complete */
46743e06a4e6SHong Zhang   /*------------------------------------------------*/
46750c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);}
46760c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);}
467702c68681SHong Zhang 
467802c68681SHong Zhang   /* send and recv i-structure */
467902c68681SHong Zhang   /*---------------------------*/
46802c72b5baSSatish Balay   ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
468102c68681SHong Zhang   ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr);
468202c68681SHong Zhang 
4683b1d57f15SBarry Smith   ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr);
46843e06a4e6SHong Zhang   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
46852257cef7SHong Zhang   for (proc=0,k=0; proc<size; proc++){
468602c68681SHong Zhang     if (!len_s[proc]) continue;
46873e06a4e6SHong Zhang     /* form outgoing message for i-structure:
46883e06a4e6SHong Zhang          buf_si[0]:                 nrows to be sent
46893e06a4e6SHong Zhang                [1:nrows]:           row index (global)
46903e06a4e6SHong Zhang                [nrows+1:2*nrows+1]: i-structure index
46913e06a4e6SHong Zhang     */
46923e06a4e6SHong Zhang     /*-------------------------------------------*/
46932257cef7SHong Zhang     nrows = len_si[proc]/2 - 1;
46943e06a4e6SHong Zhang     buf_si_i    = buf_si + nrows+1;
46953e06a4e6SHong Zhang     buf_si[0]   = nrows;
46963e06a4e6SHong Zhang     buf_si_i[0] = 0;
46973e06a4e6SHong Zhang     nrows = 0;
46983e06a4e6SHong Zhang     for (i=owners[proc]; i<owners[proc+1]; i++){
46993e06a4e6SHong Zhang       anzi = ai[i+1] - ai[i];
47003e06a4e6SHong Zhang       if (anzi) {
47013e06a4e6SHong Zhang         buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */
47023e06a4e6SHong Zhang         buf_si[nrows+1] = i-owners[proc]; /* local row index */
47033e06a4e6SHong Zhang         nrows++;
47043e06a4e6SHong Zhang       }
47053e06a4e6SHong Zhang     }
4706b1d57f15SBarry Smith     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr);
470702c68681SHong Zhang     k++;
47082257cef7SHong Zhang     buf_si += len_si[proc];
470902c68681SHong Zhang   }
47102257cef7SHong Zhang 
47110c468ba9SBarry Smith   if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);}
47120c468ba9SBarry Smith   if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);}
471302c68681SHong Zhang 
4714ae15b995SBarry Smith   ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr);
47153e06a4e6SHong Zhang   for (i=0; i<merge->nrecv; i++){
4716ae15b995SBarry Smith     ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr);
47173e06a4e6SHong Zhang   }
47183e06a4e6SHong Zhang 
47193e06a4e6SHong Zhang   ierr = PetscFree(len_si);CHKERRQ(ierr);
472002c68681SHong Zhang   ierr = PetscFree(len_ri);CHKERRQ(ierr);
472102c68681SHong Zhang   ierr = PetscFree(rj_waits);CHKERRQ(ierr);
47221d79065fSBarry Smith   ierr = PetscFree2(si_waits,sj_waits);CHKERRQ(ierr);
47232257cef7SHong Zhang   ierr = PetscFree(ri_waits);CHKERRQ(ierr);
47243e06a4e6SHong Zhang   ierr = PetscFree(buf_s);CHKERRQ(ierr);
4725bcc1bcd5SHong Zhang   ierr = PetscFree(status);CHKERRQ(ierr);
472658cb9c82SHong Zhang 
4727bcc1bcd5SHong Zhang   /* compute a local seq matrix in each processor */
4728bcc1bcd5SHong Zhang   /*----------------------------------------------*/
472958cb9c82SHong Zhang   /* allocate bi array and free space for accumulating nonzero column info */
4730b1d57f15SBarry Smith   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr);
473158cb9c82SHong Zhang   bi[0] = 0;
473258cb9c82SHong Zhang 
4733be0fcf8dSHong Zhang   /* create and initialize a linked list */
4734be0fcf8dSHong Zhang   nlnk = N+1;
4735be0fcf8dSHong Zhang   ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
473658cb9c82SHong Zhang 
4737bcc1bcd5SHong Zhang   /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */
473858cb9c82SHong Zhang   len = 0;
4739bcc1bcd5SHong Zhang   len  = ai[owners[rank+1]] - ai[owners[rank]];
4740a1a86e44SBarry Smith   ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr);
474158cb9c82SHong Zhang   current_space = free_space;
474258cb9c82SHong Zhang 
4743bcc1bcd5SHong Zhang   /* determine symbolic info for each local row */
47440572522cSBarry Smith   ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr);
47451d79065fSBarry Smith 
47463e06a4e6SHong Zhang   for (k=0; k<merge->nrecv; k++){
47472257cef7SHong Zhang     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
47483e06a4e6SHong Zhang     nrows = *buf_ri_k[k];
47493e06a4e6SHong Zhang     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
47502257cef7SHong Zhang     nextai[k]   = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure  */
47513e06a4e6SHong Zhang   }
47522257cef7SHong Zhang 
4753bcc1bcd5SHong Zhang   ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
4754bcc1bcd5SHong Zhang   len = 0;
475558cb9c82SHong Zhang   for (i=0;i<m;i++) {
475658cb9c82SHong Zhang     bnzi   = 0;
475758cb9c82SHong Zhang     /* add local non-zero cols of this proc's seqmat into lnk */
475858cb9c82SHong Zhang     arow   = owners[rank] + i;
475958cb9c82SHong Zhang     anzi   = ai[arow+1] - ai[arow];
476058cb9c82SHong Zhang     aj     = a->j + ai[arow];
4761be0fcf8dSHong Zhang     ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
476258cb9c82SHong Zhang     bnzi += nlnk;
476358cb9c82SHong Zhang     /* add received col data into lnk */
476451a7d1a8SHong Zhang     for (k=0; k<merge->nrecv; k++){ /* k-th received message */
476555d1abb9SHong Zhang       if (i == *nextrow[k]) { /* i-th row */
47663e06a4e6SHong Zhang         anzi = *(nextai[k]+1) - *nextai[k];
47673e06a4e6SHong Zhang         aj   = buf_rj[k] + *nextai[k];
47683e06a4e6SHong Zhang         ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
47693e06a4e6SHong Zhang         bnzi += nlnk;
47703e06a4e6SHong Zhang         nextrow[k]++; nextai[k]++;
47713e06a4e6SHong Zhang       }
477258cb9c82SHong Zhang     }
4773bcc1bcd5SHong Zhang     if (len < bnzi) len = bnzi;  /* =max(bnzi) */
477458cb9c82SHong Zhang 
477558cb9c82SHong Zhang     /* if free space is not available, make more free space */
477658cb9c82SHong Zhang     if (current_space->local_remaining<bnzi) {
47774238b7adSHong Zhang       ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,&current_space);CHKERRQ(ierr);
477858cb9c82SHong Zhang       nspacedouble++;
477958cb9c82SHong Zhang     }
478058cb9c82SHong Zhang     /* copy data into free space, then initialize lnk */
4781be0fcf8dSHong Zhang     ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr);
4782bcc1bcd5SHong Zhang     ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr);
4783bcc1bcd5SHong Zhang 
478458cb9c82SHong Zhang     current_space->array           += bnzi;
478558cb9c82SHong Zhang     current_space->local_used      += bnzi;
478658cb9c82SHong Zhang     current_space->local_remaining -= bnzi;
478758cb9c82SHong Zhang 
478858cb9c82SHong Zhang     bi[i+1] = bi[i] + bnzi;
478958cb9c82SHong Zhang   }
4790bcc1bcd5SHong Zhang 
47911d79065fSBarry Smith   ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr);
4792bcc1bcd5SHong Zhang 
4793b1d57f15SBarry Smith   ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr);
4794a1a86e44SBarry Smith   ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
4795be0fcf8dSHong Zhang   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
4796409913e3SHong Zhang 
4797bcc1bcd5SHong Zhang   /* create symbolic parallel matrix B_mpi */
4798bcc1bcd5SHong Zhang   /*---------------------------------------*/
4799f69a0ea3SMatthew Knepley   ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr);
480054b84b50SHong Zhang   if (n==PETSC_DECIDE) {
4801f69a0ea3SMatthew Knepley     ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr);
480254b84b50SHong Zhang   } else {
4803f69a0ea3SMatthew Knepley     ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
480454b84b50SHong Zhang   }
4805bcc1bcd5SHong Zhang   ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr);
4806bcc1bcd5SHong Zhang   ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr);
4807bcc1bcd5SHong Zhang   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
480858cb9c82SHong Zhang 
48096abd8857SHong Zhang   /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */
48106abd8857SHong Zhang   B_mpi->assembled     = PETSC_FALSE;
4811affca5deSHong Zhang   B_mpi->ops->destroy  = MatDestroy_MPIAIJ_SeqsToMPI;
4812affca5deSHong Zhang   merge->bi            = bi;
4813affca5deSHong Zhang   merge->bj            = bj;
481402c68681SHong Zhang   merge->buf_ri        = buf_ri;
481502c68681SHong Zhang   merge->buf_rj        = buf_rj;
4816de0260b3SHong Zhang   merge->coi           = PETSC_NULL;
4817de0260b3SHong Zhang   merge->coj           = PETSC_NULL;
4818de0260b3SHong Zhang   merge->owners_co     = PETSC_NULL;
4819affca5deSHong Zhang 
4820bf0cc555SLisandro Dalcin   ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);
4821bf0cc555SLisandro Dalcin 
4822affca5deSHong Zhang   /* attach the supporting struct to B_mpi for reuse */
4823776b82aeSLisandro Dalcin   ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
4824776b82aeSLisandro Dalcin   ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
4825affca5deSHong Zhang   ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
4826bf0cc555SLisandro Dalcin   ierr = PetscContainerDestroy(&container);CHKERRQ(ierr);
4827affca5deSHong Zhang   *mpimat = B_mpi;
482838f152feSBarry Smith 
48294ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
4830e5f2cdd8SHong Zhang   PetscFunctionReturn(0);
4831e5f2cdd8SHong Zhang }
483225616d81SHong Zhang 
483338f152feSBarry Smith #undef __FUNCT__
483438f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPI"
48357087cfbeSBarry Smith PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
483655d1abb9SHong Zhang {
483755d1abb9SHong Zhang   PetscErrorCode   ierr;
483855d1abb9SHong Zhang 
483955d1abb9SHong Zhang   PetscFunctionBegin;
48404ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
484155d1abb9SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
484255d1abb9SHong Zhang     ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
484355d1abb9SHong Zhang   }
484455d1abb9SHong Zhang   ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
48454ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
484655d1abb9SHong Zhang   PetscFunctionReturn(0);
484755d1abb9SHong Zhang }
48484ebed01fSBarry Smith 
484925616d81SHong Zhang #undef __FUNCT__
48504a2b5492SBarry Smith #define __FUNCT__ "MatMPIAIJGetLocalMat"
4851bc08b0f1SBarry Smith /*@
48524a2b5492SBarry Smith      MatMPIAIJGetLocalMat - Creates a SeqAIJ from a MPIAIJ matrix by taking all its local rows and putting them into a sequential vector with
48538661ff28SBarry Smith           mlocal rows and n columns. Where mlocal is the row count obtained with MatGetLocalSize() and n is the global column count obtained
48548661ff28SBarry Smith           with MatGetSize()
485525616d81SHong Zhang 
485632fba14fSHong Zhang     Not Collective
485725616d81SHong Zhang 
485825616d81SHong Zhang    Input Parameters:
485925616d81SHong Zhang +    A - the matrix
486025616d81SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
486125616d81SHong Zhang 
486225616d81SHong Zhang    Output Parameter:
486325616d81SHong Zhang .    A_loc - the local sequential matrix generated
486425616d81SHong Zhang 
486525616d81SHong Zhang     Level: developer
486625616d81SHong Zhang 
4867ba264940SBarry Smith .seealso: MatGetOwnerShipRange(), MatMPIAIJGetLocalMatCondensed()
48688661ff28SBarry Smith 
486925616d81SHong Zhang @*/
48704a2b5492SBarry Smith PetscErrorCode  MatMPIAIJGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
487125616d81SHong Zhang {
487225616d81SHong Zhang   PetscErrorCode  ierr;
487301b7ae99SHong Zhang   Mat_MPIAIJ      *mpimat=(Mat_MPIAIJ*)A->data;
487401b7ae99SHong Zhang   Mat_SeqAIJ      *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
487501b7ae99SHong Zhang   PetscInt        *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
4876a77337e4SBarry Smith   MatScalar       *aa=a->a,*ba=b->a,*cam;
4877a77337e4SBarry Smith   PetscScalar     *ca;
4878d0f46423SBarry Smith   PetscInt        am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
48795a7d977cSHong Zhang   PetscInt        *ci,*cj,col,ncols_d,ncols_o,jo;
48808661ff28SBarry Smith   PetscBool       match;
488125616d81SHong Zhang 
488225616d81SHong Zhang   PetscFunctionBegin;
48838661ff28SBarry Smith   ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&match);CHKERRQ(ierr);
48848661ff28SBarry Smith   if (!match) SETERRQ(((PetscObject)A)->comm, PETSC_ERR_SUP,"Requires MPIAIJ matrix as input");
48854ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
488601b7ae99SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
4887dea91ad1SHong Zhang     ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
4888dea91ad1SHong Zhang     ci[0] = 0;
488901b7ae99SHong Zhang     for (i=0; i<am; i++){
4890dea91ad1SHong Zhang       ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
489101b7ae99SHong Zhang     }
4892dea91ad1SHong Zhang     ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
4893dea91ad1SHong Zhang     ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
4894dea91ad1SHong Zhang     k = 0;
489501b7ae99SHong Zhang     for (i=0; i<am; i++) {
48965a7d977cSHong Zhang       ncols_o = bi[i+1] - bi[i];
48975a7d977cSHong Zhang       ncols_d = ai[i+1] - ai[i];
489801b7ae99SHong Zhang       /* off-diagonal portion of A */
48995a7d977cSHong Zhang       for (jo=0; jo<ncols_o; jo++) {
49005a7d977cSHong Zhang         col = cmap[*bj];
49015a7d977cSHong Zhang         if (col >= cstart) break;
49025a7d977cSHong Zhang         cj[k]   = col; bj++;
49035a7d977cSHong Zhang         ca[k++] = *ba++;
49045a7d977cSHong Zhang       }
49055a7d977cSHong Zhang       /* diagonal portion of A */
49065a7d977cSHong Zhang       for (j=0; j<ncols_d; j++) {
49075a7d977cSHong Zhang         cj[k]   = cstart + *aj++;
49085a7d977cSHong Zhang         ca[k++] = *aa++;
49095a7d977cSHong Zhang       }
49105a7d977cSHong Zhang       /* off-diagonal portion of A */
49115a7d977cSHong Zhang       for (j=jo; j<ncols_o; j++) {
49125a7d977cSHong Zhang         cj[k]   = cmap[*bj++];
49135a7d977cSHong Zhang         ca[k++] = *ba++;
49145a7d977cSHong Zhang       }
491525616d81SHong Zhang     }
4916dea91ad1SHong Zhang     /* put together the new matrix */
4917d0f46423SBarry Smith     ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
4918dea91ad1SHong Zhang     /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
4919dea91ad1SHong Zhang     /* Since these are PETSc arrays, change flags to free them as necessary. */
4920dea91ad1SHong Zhang     mat          = (Mat_SeqAIJ*)(*A_loc)->data;
4921e6b907acSBarry Smith     mat->free_a  = PETSC_TRUE;
4922e6b907acSBarry Smith     mat->free_ij = PETSC_TRUE;
4923dea91ad1SHong Zhang     mat->nonew   = 0;
49245a7d977cSHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
49255a7d977cSHong Zhang     mat=(Mat_SeqAIJ*)(*A_loc)->data;
4926a77337e4SBarry Smith     ci = mat->i; cj = mat->j; cam = mat->a;
49275a7d977cSHong Zhang     for (i=0; i<am; i++) {
49285a7d977cSHong Zhang       /* off-diagonal portion of A */
49295a7d977cSHong Zhang       ncols_o = bi[i+1] - bi[i];
49305a7d977cSHong Zhang       for (jo=0; jo<ncols_o; jo++) {
49315a7d977cSHong Zhang         col = cmap[*bj];
49325a7d977cSHong Zhang         if (col >= cstart) break;
4933a77337e4SBarry Smith         *cam++ = *ba++; bj++;
49345a7d977cSHong Zhang       }
49355a7d977cSHong Zhang       /* diagonal portion of A */
4936ecc9b87dSHong Zhang       ncols_d = ai[i+1] - ai[i];
4937a77337e4SBarry Smith       for (j=0; j<ncols_d; j++) *cam++ = *aa++;
49385a7d977cSHong Zhang       /* off-diagonal portion of A */
4939f33d1a9aSHong Zhang       for (j=jo; j<ncols_o; j++) {
4940a77337e4SBarry Smith         *cam++ = *ba++; bj++;
4941f33d1a9aSHong Zhang       }
49425a7d977cSHong Zhang     }
49438661ff28SBarry Smith   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
49444ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
494525616d81SHong Zhang   PetscFunctionReturn(0);
494625616d81SHong Zhang }
494725616d81SHong Zhang 
494832fba14fSHong Zhang #undef __FUNCT__
49494a2b5492SBarry Smith #define __FUNCT__ "MatMPIAIJGetLocalMatCondensed"
495032fba14fSHong Zhang /*@C
4951ba264940SBarry Smith      MatMPIAIJGetLocalMatCondensed - Creates a SeqAIJ matrix from an MPIAIJ matrix by taking all its local rows and NON-ZERO columns
495232fba14fSHong Zhang 
495332fba14fSHong Zhang     Not Collective
495432fba14fSHong Zhang 
495532fba14fSHong Zhang    Input Parameters:
495632fba14fSHong Zhang +    A - the matrix
495732fba14fSHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
495832fba14fSHong Zhang -    row, col - index sets of rows and columns to extract (or PETSC_NULL)
495932fba14fSHong Zhang 
496032fba14fSHong Zhang    Output Parameter:
496132fba14fSHong Zhang .    A_loc - the local sequential matrix generated
496232fba14fSHong Zhang 
496332fba14fSHong Zhang     Level: developer
496432fba14fSHong Zhang 
4965ba264940SBarry Smith .seealso: MatGetOwnershipRange(), MatMPIAIJGetLocalMat()
4966ba264940SBarry Smith 
496732fba14fSHong Zhang @*/
49684a2b5492SBarry Smith PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
496932fba14fSHong Zhang {
497032fba14fSHong Zhang   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
497132fba14fSHong Zhang   PetscErrorCode    ierr;
497232fba14fSHong Zhang   PetscInt          i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
497332fba14fSHong Zhang   IS                isrowa,iscola;
497432fba14fSHong Zhang   Mat               *aloc;
49754a2b5492SBarry Smith   PetscBool       match;
497632fba14fSHong Zhang 
497732fba14fSHong Zhang   PetscFunctionBegin;
49784a2b5492SBarry Smith   ierr = PetscTypeCompare((PetscObject)A,MATMPIAIJ,&match);CHKERRQ(ierr);
49794a2b5492SBarry Smith   if (!match) SETERRQ(((PetscObject)A)->comm, PETSC_ERR_SUP,"Requires MPIAIJ matrix as input");
49804ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
498132fba14fSHong Zhang   if (!row){
4982d0f46423SBarry Smith     start = A->rmap->rstart; end = A->rmap->rend;
498332fba14fSHong Zhang     ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
498432fba14fSHong Zhang   } else {
498532fba14fSHong Zhang     isrowa = *row;
498632fba14fSHong Zhang   }
498732fba14fSHong Zhang   if (!col){
4988d0f46423SBarry Smith     start = A->cmap->rstart;
498932fba14fSHong Zhang     cmap  = a->garray;
4990d0f46423SBarry Smith     nzA   = a->A->cmap->n;
4991d0f46423SBarry Smith     nzB   = a->B->cmap->n;
499232fba14fSHong Zhang     ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
499332fba14fSHong Zhang     ncols = 0;
499432fba14fSHong Zhang     for (i=0; i<nzB; i++) {
499532fba14fSHong Zhang       if (cmap[i] < start) idx[ncols++] = cmap[i];
499632fba14fSHong Zhang       else break;
499732fba14fSHong Zhang     }
499832fba14fSHong Zhang     imark = i;
499932fba14fSHong Zhang     for (i=0; i<nzA; i++) idx[ncols++] = start + i;
500032fba14fSHong Zhang     for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
5001d67e408aSBarry Smith     ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&iscola);CHKERRQ(ierr);
500232fba14fSHong Zhang   } else {
500332fba14fSHong Zhang     iscola = *col;
500432fba14fSHong Zhang   }
500532fba14fSHong Zhang   if (scall != MAT_INITIAL_MATRIX){
500632fba14fSHong Zhang     ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
500732fba14fSHong Zhang     aloc[0] = *A_loc;
500832fba14fSHong Zhang   }
500932fba14fSHong Zhang   ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
501032fba14fSHong Zhang   *A_loc = aloc[0];
501132fba14fSHong Zhang   ierr = PetscFree(aloc);CHKERRQ(ierr);
501232fba14fSHong Zhang   if (!row){
50136bf464f9SBarry Smith     ierr = ISDestroy(&isrowa);CHKERRQ(ierr);
501432fba14fSHong Zhang   }
501532fba14fSHong Zhang   if (!col){
50166bf464f9SBarry Smith     ierr = ISDestroy(&iscola);CHKERRQ(ierr);
501732fba14fSHong Zhang   }
50184ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
501932fba14fSHong Zhang   PetscFunctionReturn(0);
502032fba14fSHong Zhang }
502132fba14fSHong Zhang 
502225616d81SHong Zhang #undef __FUNCT__
502325616d81SHong Zhang #define __FUNCT__ "MatGetBrowsOfAcols"
502425616d81SHong Zhang /*@C
502532fba14fSHong Zhang     MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A
502625616d81SHong Zhang 
502725616d81SHong Zhang     Collective on Mat
502825616d81SHong Zhang 
502925616d81SHong Zhang    Input Parameters:
5030e240928fSHong Zhang +    A,B - the matrices in mpiaij format
503125616d81SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
503225616d81SHong Zhang -    rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL)
503325616d81SHong Zhang 
503425616d81SHong Zhang    Output Parameter:
503525616d81SHong Zhang +    rowb, colb - index sets of rows and columns of B to extract
503625616d81SHong Zhang -    B_seq - the sequential matrix generated
503725616d81SHong Zhang 
503825616d81SHong Zhang     Level: developer
503925616d81SHong Zhang 
504025616d81SHong Zhang @*/
504166bfb163SHong Zhang PetscErrorCode  MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,Mat *B_seq)
504225616d81SHong Zhang {
5043899cda47SBarry Smith   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data;
504425616d81SHong Zhang   PetscErrorCode    ierr;
5045b1d57f15SBarry Smith   PetscInt          *idx,i,start,ncols,nzA,nzB,*cmap,imark;
504625616d81SHong Zhang   IS                isrowb,iscolb;
504766bfb163SHong Zhang   Mat               *bseq=PETSC_NULL;
504825616d81SHong Zhang 
504925616d81SHong Zhang   PetscFunctionBegin;
5050d0f46423SBarry Smith   if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
5051e32f2f54SBarry Smith     SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
505225616d81SHong Zhang   }
50534ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
505425616d81SHong Zhang 
505525616d81SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
5056d0f46423SBarry Smith     start = A->cmap->rstart;
505725616d81SHong Zhang     cmap  = a->garray;
5058d0f46423SBarry Smith     nzA   = a->A->cmap->n;
5059d0f46423SBarry Smith     nzB   = a->B->cmap->n;
5060b1d57f15SBarry Smith     ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
506125616d81SHong Zhang     ncols = 0;
50620390132cSHong Zhang     for (i=0; i<nzB; i++) {  /* row < local row index */
506325616d81SHong Zhang       if (cmap[i] < start) idx[ncols++] = cmap[i];
506425616d81SHong Zhang       else break;
506525616d81SHong Zhang     }
506625616d81SHong Zhang     imark = i;
50670390132cSHong Zhang     for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
50680390132cSHong Zhang     for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row index */
5069d67e408aSBarry Smith     ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&isrowb);CHKERRQ(ierr);
5070d0f46423SBarry Smith     ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr);
507125616d81SHong Zhang   } else {
5072e32f2f54SBarry Smith     if (!rowb || !colb) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX");
507325616d81SHong Zhang     isrowb = *rowb; iscolb = *colb;
507425616d81SHong Zhang     ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr);
507525616d81SHong Zhang     bseq[0] = *B_seq;
507625616d81SHong Zhang   }
507725616d81SHong Zhang   ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr);
507825616d81SHong Zhang   *B_seq = bseq[0];
507925616d81SHong Zhang   ierr = PetscFree(bseq);CHKERRQ(ierr);
508025616d81SHong Zhang   if (!rowb){
50816bf464f9SBarry Smith     ierr = ISDestroy(&isrowb);CHKERRQ(ierr);
508225616d81SHong Zhang   } else {
508325616d81SHong Zhang     *rowb = isrowb;
508425616d81SHong Zhang   }
508525616d81SHong Zhang   if (!colb){
50866bf464f9SBarry Smith     ierr = ISDestroy(&iscolb);CHKERRQ(ierr);
508725616d81SHong Zhang   } else {
508825616d81SHong Zhang     *colb = iscolb;
508925616d81SHong Zhang   }
50904ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
509125616d81SHong Zhang   PetscFunctionReturn(0);
509225616d81SHong Zhang }
5093429d309bSHong Zhang 
5094a61c8c0fSHong Zhang #undef __FUNCT__
5095a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols"
5096429d309bSHong Zhang /*@C
    MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking the rows of B that correspond to the nonzero columns
    of the OFF-DIAGONAL portion of the local part of A
5099429d309bSHong Zhang 
5100429d309bSHong Zhang     Collective on Mat
5101429d309bSHong Zhang 
5102429d309bSHong Zhang    Input Parameters:
5103429d309bSHong Zhang +    A,B - the matrices in mpiaij format
510487025532SHong Zhang .    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
.    startsj - starting point in B's sending j-arrays, saved for MAT_REUSE (or PETSC_NULL)
.    startsj_r - starting point in B's receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL)
510787025532SHong Zhang -    bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL)
5108429d309bSHong Zhang 
5109429d309bSHong Zhang    Output Parameter:
511087025532SHong Zhang +    B_oth - the sequential matrix generated
5111429d309bSHong Zhang 
5112429d309bSHong Zhang     Level: developer
5113429d309bSHong Zhang 
5114429d309bSHong Zhang @*/
51157087cfbeSBarry Smith PetscErrorCode  MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,PetscInt **startsj_r,MatScalar **bufa_ptr,Mat *B_oth)
5116429d309bSHong Zhang {
5117a6b2eed2SHong Zhang   VecScatter_MPI_General *gen_to,*gen_from;
5118429d309bSHong Zhang   PetscErrorCode         ierr;
5119899cda47SBarry Smith   Mat_MPIAIJ             *a=(Mat_MPIAIJ*)A->data;
512087025532SHong Zhang   Mat_SeqAIJ             *b_oth;
5121a6b2eed2SHong Zhang   VecScatter             ctx=a->Mvctx;
51227adad957SLisandro Dalcin   MPI_Comm               comm=((PetscObject)ctx)->comm;
51237adad957SLisandro Dalcin   PetscMPIInt            *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank;
5124d0f46423SBarry Smith   PetscInt               *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj;
5125dd6ea824SBarry Smith   PetscScalar            *rvalues,*svalues;
5126dd6ea824SBarry Smith   MatScalar              *b_otha,*bufa,*bufA;
5127e42f35eeSHong Zhang   PetscInt               i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len;
5128910ba992SMatthew Knepley   MPI_Request            *rwaits = PETSC_NULL,*swaits = PETSC_NULL;
512987025532SHong Zhang   MPI_Status             *sstatus,rstatus;
5130aa5bb8c0SSatish Balay   PetscMPIInt            jj;
5131e42f35eeSHong Zhang   PetscInt               *cols,sbs,rbs;
5132ba8c8a56SBarry Smith   PetscScalar            *vals;
5133429d309bSHong Zhang 
5134429d309bSHong Zhang   PetscFunctionBegin;
5135d0f46423SBarry Smith   if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
5136e32f2f54SBarry Smith     SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
5137429d309bSHong Zhang   }
51384ebed01fSBarry Smith   ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
5139a6b2eed2SHong Zhang   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
5140a6b2eed2SHong Zhang 
5141a6b2eed2SHong Zhang   gen_to   = (VecScatter_MPI_General*)ctx->todata;
5142a6b2eed2SHong Zhang   gen_from = (VecScatter_MPI_General*)ctx->fromdata;
5143e42f35eeSHong Zhang   rvalues  = gen_from->values; /* holds the length of receiving row */
5144e42f35eeSHong Zhang   svalues  = gen_to->values;   /* holds the length of sending row */
5145a6b2eed2SHong Zhang   nrecvs   = gen_from->n;
5146a6b2eed2SHong Zhang   nsends   = gen_to->n;
5147d7ee0231SBarry Smith 
5148d7ee0231SBarry Smith   ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr);
5149a6b2eed2SHong Zhang   srow     = gen_to->indices;   /* local row index to be sent */
5150a6b2eed2SHong Zhang   sstarts  = gen_to->starts;
5151a6b2eed2SHong Zhang   sprocs   = gen_to->procs;
5152a6b2eed2SHong Zhang   sstatus  = gen_to->sstatus;
5153e42f35eeSHong Zhang   sbs      = gen_to->bs;
5154e42f35eeSHong Zhang   rstarts  = gen_from->starts;
5155e42f35eeSHong Zhang   rprocs   = gen_from->procs;
5156e42f35eeSHong Zhang   rbs      = gen_from->bs;
5157429d309bSHong Zhang 
5158dea91ad1SHong Zhang   if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX;
5159429d309bSHong Zhang   if (scall == MAT_INITIAL_MATRIX){
5160a6b2eed2SHong Zhang     /* i-array */
5161a6b2eed2SHong Zhang     /*---------*/
5162a6b2eed2SHong Zhang     /*  post receives */
5163a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
5164e42f35eeSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
5165e42f35eeSHong Zhang       nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */
516687025532SHong Zhang       ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
5167429d309bSHong Zhang     }
5168a6b2eed2SHong Zhang 
5169a6b2eed2SHong Zhang     /* pack the outgoing message */
51701d79065fSBarry Smith     ierr = PetscMalloc2(nsends+1,PetscInt,&sstartsj,nrecvs+1,PetscInt,&rstartsj);CHKERRQ(ierr);
5171a6b2eed2SHong Zhang     sstartsj[0] = 0;  rstartsj[0] = 0;
5172a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be sent */
5173a6b2eed2SHong Zhang     k = 0;
5174a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
5175e42f35eeSHong Zhang       rowlen = (PetscInt*)svalues + sstarts[i]*sbs;
5176e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
517787025532SHong Zhang       for (j=0; j<nrows; j++) {
5178d0f46423SBarry Smith         row = srow[k] + B->rmap->range[rank]; /* global row idx */
5179e42f35eeSHong Zhang         for (l=0; l<sbs; l++){
5180e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */
5181e42f35eeSHong Zhang           rowlen[j*sbs+l] = ncols;
5182e42f35eeSHong Zhang           len += ncols;
5183e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
5184e42f35eeSHong Zhang         }
5185a6b2eed2SHong Zhang         k++;
5186429d309bSHong Zhang       }
5187e42f35eeSHong Zhang       ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
5188dea91ad1SHong Zhang       sstartsj[i+1] = len;  /* starting point of (i+1)-th outgoing msg in bufj and bufa */
5189429d309bSHong Zhang     }
519087025532SHong Zhang     /* recvs and sends of i-array are completed */
519187025532SHong Zhang     i = nrecvs;
519287025532SHong Zhang     while (i--) {
5193aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
519487025532SHong Zhang     }
51950c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
5196e42f35eeSHong Zhang 
5197a6b2eed2SHong Zhang     /* allocate buffers for sending j and a arrays */
5198a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr);
5199a6b2eed2SHong Zhang     ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr);
5200a6b2eed2SHong Zhang 
520187025532SHong Zhang     /* create i-array of B_oth */
520287025532SHong Zhang     ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr);
520387025532SHong Zhang     b_othi[0] = 0;
5204a6b2eed2SHong Zhang     len = 0; /* total length of j or a array to be received */
5205a6b2eed2SHong Zhang     k = 0;
5206a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
5207fd0ff01cSHong Zhang       rowlen = (PetscInt*)rvalues + rstarts[i]*rbs;
5208e42f35eeSHong Zhang       nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */
520987025532SHong Zhang       for (j=0; j<nrows; j++) {
521087025532SHong Zhang         b_othi[k+1] = b_othi[k] + rowlen[j];
5211a6b2eed2SHong Zhang         len += rowlen[j]; k++;
5212a6b2eed2SHong Zhang       }
5213dea91ad1SHong Zhang       rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */
5214a6b2eed2SHong Zhang     }
5215a6b2eed2SHong Zhang 
521687025532SHong Zhang     /* allocate space for j and a arrrays of B_oth */
521787025532SHong Zhang     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr);
5218dd6ea824SBarry Smith     ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr);
5219a6b2eed2SHong Zhang 
522087025532SHong Zhang     /* j-array */
522187025532SHong Zhang     /*---------*/
5222a6b2eed2SHong Zhang     /*  post receives of j-array */
5223a6b2eed2SHong Zhang     for (i=0; i<nrecvs; i++){
522487025532SHong Zhang       nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
522587025532SHong Zhang       ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
5226a6b2eed2SHong Zhang     }
5227e42f35eeSHong Zhang 
5228e42f35eeSHong Zhang     /* pack the outgoing message j-array */
5229a6b2eed2SHong Zhang     k = 0;
5230a6b2eed2SHong Zhang     for (i=0; i<nsends; i++){
5231e42f35eeSHong Zhang       nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
5232a6b2eed2SHong Zhang       bufJ = bufj+sstartsj[i];
523387025532SHong Zhang       for (j=0; j<nrows; j++) {
5234d0f46423SBarry Smith         row  = srow[k++] + B->rmap->range[rank]; /* global row idx */
5235e42f35eeSHong Zhang         for (ll=0; ll<sbs; ll++){
5236e42f35eeSHong Zhang           ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
5237a6b2eed2SHong Zhang           for (l=0; l<ncols; l++){
5238a6b2eed2SHong Zhang             *bufJ++ = cols[l];
523987025532SHong Zhang           }
5240e42f35eeSHong Zhang           ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr);
5241e42f35eeSHong Zhang         }
524287025532SHong Zhang       }
524387025532SHong Zhang       ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
524487025532SHong Zhang     }
524587025532SHong Zhang 
524687025532SHong Zhang     /* recvs and sends of j-array are completed */
524787025532SHong Zhang     i = nrecvs;
524887025532SHong Zhang     while (i--) {
5249aa5bb8c0SSatish Balay       ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
525087025532SHong Zhang     }
52510c468ba9SBarry Smith     if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
525287025532SHong Zhang   } else if (scall == MAT_REUSE_MATRIX){
525387025532SHong Zhang     sstartsj = *startsj;
52541d79065fSBarry Smith     rstartsj = *startsj_r;
525587025532SHong Zhang     bufa     = *bufa_ptr;
525687025532SHong Zhang     b_oth    = (Mat_SeqAIJ*)(*B_oth)->data;
525787025532SHong Zhang     b_otha   = b_oth->a;
525887025532SHong Zhang   } else {
5259e32f2f54SBarry Smith     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container");
526087025532SHong Zhang   }
526187025532SHong Zhang 
526287025532SHong Zhang   /* a-array */
526387025532SHong Zhang   /*---------*/
526487025532SHong Zhang   /*  post receives of a-array */
526587025532SHong Zhang   for (i=0; i<nrecvs; i++){
526687025532SHong Zhang     nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */
526787025532SHong Zhang     ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
526887025532SHong Zhang   }
5269e42f35eeSHong Zhang 
5270e42f35eeSHong Zhang   /* pack the outgoing message a-array */
527187025532SHong Zhang   k = 0;
527287025532SHong Zhang   for (i=0; i<nsends; i++){
5273e42f35eeSHong Zhang     nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */
527487025532SHong Zhang     bufA = bufa+sstartsj[i];
527587025532SHong Zhang     for (j=0; j<nrows; j++) {
5276d0f46423SBarry Smith       row  = srow[k++] + B->rmap->range[rank]; /* global row idx */
5277e42f35eeSHong Zhang       for (ll=0; ll<sbs; ll++){
5278e42f35eeSHong Zhang         ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
527987025532SHong Zhang         for (l=0; l<ncols; l++){
5280a6b2eed2SHong Zhang           *bufA++ = vals[l];
5281a6b2eed2SHong Zhang         }
5282e42f35eeSHong Zhang         ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr);
5283e42f35eeSHong Zhang       }
5284a6b2eed2SHong Zhang     }
528587025532SHong Zhang     ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
5286a6b2eed2SHong Zhang   }
528787025532SHong Zhang   /* recvs and sends of a-array are completed */
528887025532SHong Zhang   i = nrecvs;
528987025532SHong Zhang   while (i--) {
5290aa5bb8c0SSatish Balay     ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr);
529187025532SHong Zhang   }
52920c468ba9SBarry Smith   if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);}
5293d7ee0231SBarry Smith   ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr);
5294a6b2eed2SHong Zhang 
529587025532SHong Zhang   if (scall == MAT_INITIAL_MATRIX){
5296a6b2eed2SHong Zhang     /* put together the new matrix */
5297d0f46423SBarry Smith     ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr);
5298a6b2eed2SHong Zhang 
5299a6b2eed2SHong Zhang     /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
5300a6b2eed2SHong Zhang     /* Since these are PETSc arrays, change flags to free them as necessary. */
530187025532SHong Zhang     b_oth          = (Mat_SeqAIJ *)(*B_oth)->data;
5302e6b907acSBarry Smith     b_oth->free_a  = PETSC_TRUE;
5303e6b907acSBarry Smith     b_oth->free_ij = PETSC_TRUE;
530487025532SHong Zhang     b_oth->nonew   = 0;
5305a6b2eed2SHong Zhang 
5306a6b2eed2SHong Zhang     ierr = PetscFree(bufj);CHKERRQ(ierr);
5307dea91ad1SHong Zhang     if (!startsj || !bufa_ptr){
53081d79065fSBarry Smith       ierr = PetscFree2(sstartsj,rstartsj);CHKERRQ(ierr);
5309dea91ad1SHong Zhang       ierr = PetscFree(bufa_ptr);CHKERRQ(ierr);
5310dea91ad1SHong Zhang     } else {
531187025532SHong Zhang       *startsj   = sstartsj;
53121d79065fSBarry Smith       *startsj_r = rstartsj;
531387025532SHong Zhang       *bufa_ptr  = bufa;
531487025532SHong Zhang     }
5315dea91ad1SHong Zhang   }
53164ebed01fSBarry Smith   ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr);
5317429d309bSHong Zhang   PetscFunctionReturn(0);
5318429d309bSHong Zhang }
5319ccd8e176SBarry Smith 
532043eb5e2fSMatthew Knepley #undef __FUNCT__
532143eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs"
532243eb5e2fSMatthew Knepley /*@C
532343eb5e2fSMatthew Knepley   MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication.
532443eb5e2fSMatthew Knepley 
532543eb5e2fSMatthew Knepley   Not Collective
532643eb5e2fSMatthew Knepley 
532743eb5e2fSMatthew Knepley   Input Parameters:
532843eb5e2fSMatthew Knepley . A - The matrix in mpiaij format
532943eb5e2fSMatthew Knepley 
533043eb5e2fSMatthew Knepley   Output Parameter:
533143eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product
533243eb5e2fSMatthew Knepley . colmap - A map from global column index to local index into lvec
533343eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec
533443eb5e2fSMatthew Knepley 
533543eb5e2fSMatthew Knepley   Level: developer
533643eb5e2fSMatthew Knepley 
533743eb5e2fSMatthew Knepley @*/
533843eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE)
53397087cfbeSBarry Smith PetscErrorCode  MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter)
534043eb5e2fSMatthew Knepley #else
53417087cfbeSBarry Smith PetscErrorCode  MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter)
534243eb5e2fSMatthew Knepley #endif
534343eb5e2fSMatthew Knepley {
534443eb5e2fSMatthew Knepley   Mat_MPIAIJ *a;
534543eb5e2fSMatthew Knepley 
534643eb5e2fSMatthew Knepley   PetscFunctionBegin;
53470700a824SBarry Smith   PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
5348e414b56bSJed Brown   PetscValidPointer(lvec, 2);
5349e414b56bSJed Brown   PetscValidPointer(colmap, 3);
5350e414b56bSJed Brown   PetscValidPointer(multScatter, 4);
535143eb5e2fSMatthew Knepley   a = (Mat_MPIAIJ *) A->data;
535243eb5e2fSMatthew Knepley   if (lvec) *lvec = a->lvec;
535343eb5e2fSMatthew Knepley   if (colmap) *colmap = a->colmap;
535443eb5e2fSMatthew Knepley   if (multScatter) *multScatter = a->Mvctx;
535543eb5e2fSMatthew Knepley   PetscFunctionReturn(0);
535643eb5e2fSMatthew Knepley }
535743eb5e2fSMatthew Knepley 
535817667f90SBarry Smith EXTERN_C_BEGIN
53597087cfbeSBarry Smith extern PetscErrorCode  MatConvert_MPIAIJ_MPIAIJCRL(Mat,const MatType,MatReuse,Mat*);
53607087cfbeSBarry Smith extern PetscErrorCode  MatConvert_MPIAIJ_MPIAIJPERM(Mat,const MatType,MatReuse,Mat*);
53617087cfbeSBarry Smith extern PetscErrorCode  MatConvert_MPIAIJ_MPISBAIJ(Mat,const MatType,MatReuse,Mat*);
536217667f90SBarry Smith EXTERN_C_END
536317667f90SBarry Smith 
5364fc4dec0aSBarry Smith #undef __FUNCT__
5365fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ"
5366fc4dec0aSBarry Smith /*
5367fc4dec0aSBarry Smith     Computes (B'*A')' since computing B*A directly is untenable
5368fc4dec0aSBarry Smith 
5369fc4dec0aSBarry Smith                n                       p                          p
5370fc4dec0aSBarry Smith         (              )       (              )         (                  )
5371fc4dec0aSBarry Smith       m (      A       )  *  n (       B      )   =   m (         C        )
5372fc4dec0aSBarry Smith         (              )       (              )         (                  )
5373fc4dec0aSBarry Smith 
5374fc4dec0aSBarry Smith */
5375fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C)
5376fc4dec0aSBarry Smith {
5377fc4dec0aSBarry Smith   PetscErrorCode     ierr;
5378fc4dec0aSBarry Smith   Mat                At,Bt,Ct;
5379fc4dec0aSBarry Smith 
5380fc4dec0aSBarry Smith   PetscFunctionBegin;
5381fc4dec0aSBarry Smith   ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
5382fc4dec0aSBarry Smith   ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr);
5383fc4dec0aSBarry Smith   ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr);
53846bf464f9SBarry Smith   ierr = MatDestroy(&At);CHKERRQ(ierr);
53856bf464f9SBarry Smith   ierr = MatDestroy(&Bt);CHKERRQ(ierr);
5386fc4dec0aSBarry Smith   ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr);
53876bf464f9SBarry Smith   ierr = MatDestroy(&Ct);CHKERRQ(ierr);
5388fc4dec0aSBarry Smith   PetscFunctionReturn(0);
5389fc4dec0aSBarry Smith }
5390fc4dec0aSBarry Smith 
5391fc4dec0aSBarry Smith #undef __FUNCT__
5392fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ"
5393fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C)
5394fc4dec0aSBarry Smith {
5395fc4dec0aSBarry Smith   PetscErrorCode ierr;
5396d0f46423SBarry Smith   PetscInt       m=A->rmap->n,n=B->cmap->n;
5397fc4dec0aSBarry Smith   Mat            Cmat;
5398fc4dec0aSBarry Smith 
5399fc4dec0aSBarry Smith   PetscFunctionBegin;
5400e32f2f54SBarry Smith   if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n);
540139804f7cSBarry Smith   ierr = MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr);
5402fc4dec0aSBarry Smith   ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
5403fc4dec0aSBarry Smith   ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr);
5404fc4dec0aSBarry Smith   ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr);
540538556019SBarry Smith   ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
540638556019SBarry Smith   ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
5407fc4dec0aSBarry Smith   *C   = Cmat;
54088cdbd757SHong Zhang   (*C)->ops->matmult = MatMatMult_MPIDense_MPIAIJ;
5409fc4dec0aSBarry Smith   PetscFunctionReturn(0);
5410fc4dec0aSBarry Smith }
5411fc4dec0aSBarry Smith 
5412fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/
5413fc4dec0aSBarry Smith #undef __FUNCT__
5414fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ"
5415fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
5416fc4dec0aSBarry Smith {
5417fc4dec0aSBarry Smith   PetscErrorCode ierr;
5418fc4dec0aSBarry Smith 
5419fc4dec0aSBarry Smith   PetscFunctionBegin;
5420fc4dec0aSBarry Smith   if (scall == MAT_INITIAL_MATRIX){
5421fc4dec0aSBarry Smith     ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr);
5422fc4dec0aSBarry Smith   }
5423fc4dec0aSBarry Smith   ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr);
5424fc4dec0aSBarry Smith   PetscFunctionReturn(0);
5425fc4dec0aSBarry Smith }
5426fc4dec0aSBarry Smith 
54275c9eb25fSBarry Smith EXTERN_C_BEGIN
5428611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS)
5429bccb9932SShri Abhyankar extern PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*);
5430611f576cSBarry Smith #endif
54313bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX)
54323bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*);
54333bf14a46SMatthew Knepley #endif
5434611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST)
54355c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*);
5436611f576cSBarry Smith #endif
5437611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES)
54385c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*);
5439611f576cSBarry Smith #endif
54405c9eb25fSBarry Smith EXTERN_C_END
54415c9eb25fSBarry Smith 
5442ccd8e176SBarry Smith /*MC
5443ccd8e176SBarry Smith    MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices.
5444ccd8e176SBarry Smith 
5445ccd8e176SBarry Smith    Options Database Keys:
5446ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions()
5447ccd8e176SBarry Smith 
5448ccd8e176SBarry Smith   Level: beginner
5449ccd8e176SBarry Smith 
5450175b88e8SBarry Smith .seealso: MatCreateMPIAIJ()
5451ccd8e176SBarry Smith M*/
5452ccd8e176SBarry Smith 
5453ccd8e176SBarry Smith EXTERN_C_BEGIN
5454ccd8e176SBarry Smith #undef __FUNCT__
5455ccd8e176SBarry Smith #define __FUNCT__ "MatCreate_MPIAIJ"
54567087cfbeSBarry Smith PetscErrorCode  MatCreate_MPIAIJ(Mat B)
5457ccd8e176SBarry Smith {
5458ccd8e176SBarry Smith   Mat_MPIAIJ     *b;
5459ccd8e176SBarry Smith   PetscErrorCode ierr;
5460ccd8e176SBarry Smith   PetscMPIInt    size;
5461ccd8e176SBarry Smith 
5462ccd8e176SBarry Smith   PetscFunctionBegin;
54637adad957SLisandro Dalcin   ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);
5464ccd8e176SBarry Smith 
546538f2d2fdSLisandro Dalcin   ierr            = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr);
5466ccd8e176SBarry Smith   B->data         = (void*)b;
5467ccd8e176SBarry Smith   ierr            = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
5468d0f46423SBarry Smith   B->rmap->bs     = 1;
5469ccd8e176SBarry Smith   B->assembled    = PETSC_FALSE;
5470ccd8e176SBarry Smith 
5471ccd8e176SBarry Smith   B->insertmode   = NOT_SET_VALUES;
5472ccd8e176SBarry Smith   b->size         = size;
54737adad957SLisandro Dalcin   ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr);
5474ccd8e176SBarry Smith 
5475ccd8e176SBarry Smith   /* build cache for off array entries formed */
54767adad957SLisandro Dalcin   ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr);
5477ccd8e176SBarry Smith   b->donotstash  = PETSC_FALSE;
5478ccd8e176SBarry Smith   b->colmap      = 0;
5479ccd8e176SBarry Smith   b->garray      = 0;
5480ccd8e176SBarry Smith   b->roworiented = PETSC_TRUE;
5481ccd8e176SBarry Smith 
5482ccd8e176SBarry Smith   /* stuff used for matrix vector multiply */
5483ccd8e176SBarry Smith   b->lvec      = PETSC_NULL;
5484ccd8e176SBarry Smith   b->Mvctx     = PETSC_NULL;
5485ccd8e176SBarry Smith 
5486ccd8e176SBarry Smith   /* stuff for MatGetRow() */
5487ccd8e176SBarry Smith   b->rowindices   = 0;
5488ccd8e176SBarry Smith   b->rowvalues    = 0;
5489ccd8e176SBarry Smith   b->getrowactive = PETSC_FALSE;
5490ccd8e176SBarry Smith 
5491611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES)
5492ec1065edSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_spooles_C",
54935c9eb25fSBarry Smith                                      "MatGetFactor_mpiaij_spooles",
54945c9eb25fSBarry Smith                                      MatGetFactor_mpiaij_spooles);CHKERRQ(ierr);
5495611f576cSBarry Smith #endif
5496611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS)
5497ec1065edSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mumps_C",
5498bccb9932SShri Abhyankar                                      "MatGetFactor_aij_mumps",
5499bccb9932SShri Abhyankar                                      MatGetFactor_aij_mumps);CHKERRQ(ierr);
5500611f576cSBarry Smith #endif
55013bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX)
5502ec1065edSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_pastix_C",
55033bf14a46SMatthew Knepley 					   "MatGetFactor_mpiaij_pastix",
55043bf14a46SMatthew Knepley 					   MatGetFactor_mpiaij_pastix);CHKERRQ(ierr);
55053bf14a46SMatthew Knepley #endif
5506611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST)
5507ec1065edSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_superlu_dist_C",
55085c9eb25fSBarry Smith                                      "MatGetFactor_mpiaij_superlu_dist",
55095c9eb25fSBarry Smith                                      MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr);
5510611f576cSBarry Smith #endif
5511ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
5512ccd8e176SBarry Smith                                      "MatStoreValues_MPIAIJ",
5513ccd8e176SBarry Smith                                      MatStoreValues_MPIAIJ);CHKERRQ(ierr);
5514ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
5515ccd8e176SBarry Smith                                      "MatRetrieveValues_MPIAIJ",
5516ccd8e176SBarry Smith                                      MatRetrieveValues_MPIAIJ);CHKERRQ(ierr);
5517ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
5518ccd8e176SBarry Smith 				     "MatGetDiagonalBlock_MPIAIJ",
5519ccd8e176SBarry Smith                                      MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr);
5520ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C",
5521ccd8e176SBarry Smith 				     "MatIsTranspose_MPIAIJ",
5522ccd8e176SBarry Smith 				     MatIsTranspose_MPIAIJ);CHKERRQ(ierr);
5523ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C",
5524ccd8e176SBarry Smith 				     "MatMPIAIJSetPreallocation_MPIAIJ",
5525ccd8e176SBarry Smith 				     MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr);
5526ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",
5527ccd8e176SBarry Smith 				     "MatMPIAIJSetPreallocationCSR_MPIAIJ",
5528ccd8e176SBarry Smith 				     MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr);
5529ccd8e176SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C",
5530ccd8e176SBarry Smith 				     "MatDiagonalScaleLocal_MPIAIJ",
5531ccd8e176SBarry Smith 				     MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr);
55325a11e1b2SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpiaijperm_C",
55335a11e1b2SBarry Smith                                      "MatConvert_MPIAIJ_MPIAIJPERM",
55345a11e1b2SBarry Smith                                       MatConvert_MPIAIJ_MPIAIJPERM);CHKERRQ(ierr);
55355a11e1b2SBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpiaijcrl_C",
55365a11e1b2SBarry Smith                                      "MatConvert_MPIAIJ_MPIAIJCRL",
55375a11e1b2SBarry Smith                                       MatConvert_MPIAIJ_MPIAIJCRL);CHKERRQ(ierr);
5538471cc821SHong Zhang   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpisbaij_C",
5539471cc821SHong Zhang                                      "MatConvert_MPIAIJ_MPISBAIJ",
5540471cc821SHong Zhang                                       MatConvert_MPIAIJ_MPISBAIJ);CHKERRQ(ierr);
5541fc4dec0aSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C",
5542fc4dec0aSBarry Smith                                      "MatMatMult_MPIDense_MPIAIJ",
5543fc4dec0aSBarry Smith                                       MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr);
5544fc4dec0aSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C",
5545fc4dec0aSBarry Smith                                      "MatMatMultSymbolic_MPIDense_MPIAIJ",
5546fc4dec0aSBarry Smith                                      MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr);
5547fc4dec0aSBarry Smith   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C",
5548fc4dec0aSBarry Smith                                      "MatMatMultNumeric_MPIDense_MPIAIJ",
5549fc4dec0aSBarry Smith                                       MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr);
555017667f90SBarry Smith   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr);
5551ccd8e176SBarry Smith   PetscFunctionReturn(0);
5552ccd8e176SBarry Smith }
5553ccd8e176SBarry Smith EXTERN_C_END
555481824310SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJWithSplitArrays"
/*@
     MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal"
         and "off-diagonal" part of the matrix in CSR format.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows (Cannot be PETSC_DECIDE)
.  n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.   i - row indices for "diagonal" portion of matrix
.   j - column indices
.   a - matrix values
.   oi - row indices for "off-diagonal" portion of matrix
.   oj - column indices
-   oa - matrix values

   Output Parameter:
.   mat - the matrix

   Level: advanced

   Notes:
       The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc. The user
       must free the arrays once the matrix has been destroyed and not before.

       The i and j indices are 0 based

       See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix

       This sets local rows and cannot be used to set off-processor values.

       You cannot later use MatSetValues() to change values in this matrix.

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays()
@*/
PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[],
								PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *maij;

  PetscFunctionBegin;
  /* Sanity checks: local row count must be explicit, and both CSR row arrays must be 0-based */
  if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
  if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
  if (oi[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0");
  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
  maij = (Mat_MPIAIJ*) (*mat)->data;
  /* Off-processor entries cannot be accepted since the user's arrays are used verbatim */
  maij->donotstash     = PETSC_TRUE;
  /* Skip the usual MatXXXSetPreallocation() path; the sub-matrices are built directly below */
  (*mat)->preallocated = PETSC_TRUE;

  ierr = PetscLayoutSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr);
  /* Set up the layouts now so that cmap->N is available for creating the off-diagonal block */
  ierr = PetscLayoutSetUp((*mat)->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp((*mat)->cmap);CHKERRQ(ierr);

  /* "Diagonal" block: m x n local matrix wrapping the user's arrays (no copy).
     "Off-diagonal" block: created with the full global column count, so the column
     indices in oj are global; assembly below presumably compacts/maps them -- see
     MatAssemblyEnd() machinery for MPIAIJ */
  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr);
  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
563403bfb495SBarry Smith 
/*
    Special version for direct calls from Fortran
*/
#include <private/fortranimpl.h>

/* Map the C symbol to the platform's Fortran name-mangling convention */
#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define matsetvaluesmpiaij_ matsetvaluesmpiaij
#endif

/* Change these macros so can be used in void function */
/* The Fortran stub below returns void, so the usual return-an-error-code forms of
   CHKERRQ/SETERRQ cannot be used; redefine them to abort on error instead. */
#undef CHKERRQ
#define CHKERRQ(ierr) CHKERRABORT(PETSC_COMM_WORLD,ierr)
#undef SETERRQ2
#define SETERRQ2(comm,ierr,b,c,d) CHKERRABORT(comm,ierr)
#undef SETERRQ
#define SETERRQ(c,ierr,b) CHKERRABORT(c,ierr)
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "matsetvaluesmpiaij_"
/*
   matsetvaluesmpiaij_ - Fortran stub that inlines the MatSetValues() logic for MPIAIJ
   matrices, avoiding the overhead of the generic dispatch path when called directly
   from Fortran.  All arguments arrive as pointers per Fortran calling convention.

   Note: the _ierr output argument is never written by this routine; errors abort via
   the CHKERRABORT-based macro redefinitions above.
*/
void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr)
{
  Mat             mat = *mmat;
  PetscInt        m = *mm, n = *mn;
  InsertMode      addv = *maddv;
  Mat_MPIAIJ      *aij = (Mat_MPIAIJ*)mat->data;
  PetscScalar     value;
  PetscErrorCode  ierr;

  ierr = MatPreallocated(mat);CHKERRQ(ierr);
  /* Latch the insert mode on first use; mixing ADD_VALUES and INSERT_VALUES is illegal */
  if (mat->insertmode == NOT_SET_VALUES) {
    mat->insertmode = addv;
  }
#if defined(PETSC_USE_DEBUG)
  else if (mat->insertmode != addv) {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
  }
#endif
  {
  PetscInt        i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt        cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
  PetscBool       roworiented = aij->roworiented;

  /* Some Variables required in the macro */
  /* The MatSetValues_SeqAIJ_A_Private/_B_Private macros expand in this scope and
     reference these exact variable names; do not rename them. */
  Mat             A = aij->A;
  Mat_SeqAIJ      *a = (Mat_SeqAIJ*)A->data;
  PetscInt        *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
  MatScalar       *aa = a->a;
  PetscBool       ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE);
  Mat             B = aij->B;
  Mat_SeqAIJ      *b = (Mat_SeqAIJ*)B->data;
  PetscInt        *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
  MatScalar       *ba = b->a;

  PetscInt        *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
  PetscInt        nonew = a->nonew;
  MatScalar       *ap1,*ap2;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;                        /* negative row indices are silently skipped */
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* Row is locally owned: set up search state for both the diagonal (1) and
         off-diagonal (2) blocks of this row */
      row      = im[i] - rstart;
      lastcol1 = -1;
      rp1      = aj + ai[row];
      ap1      = aa + ai[row];
      rmax1    = aimax[row];
      nrow1    = ailen[row];
      low1     = 0;
      high1    = nrow1;
      lastcol2 = -1;
      rp2      = bj + bi[row];
      ap2      = ba + bi[row];
      rmax2    = bimax[row];
      nrow2    = bilen[row];
      low2     = 0;
      high2    = nrow2;

      for (j=0; j<n; j++) {
        /* v is row-major when roworiented, column-major otherwise */
        if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
        if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
        if (in[j] >= cstart && in[j] < cend){
          /* Column falls in the locally-owned range: goes into the diagonal block */
          col = in[j] - cstart;
          MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          /* Column is off-processor: goes into the off-diagonal block */
          if (mat->was_assembled) {
            /* After assembly B uses compacted column indices; translate the global
               column through the colmap (stored shifted by +1, 0 meaning "absent") */
            if (!aij->colmap) {
              ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined (PETSC_USE_CTABLE)
            ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col--;
#else
            col = aij->colmap[in[j]] - 1;
#endif
            if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
              /* New off-diagonal column not in the map: revert to unassembled form
                 (global column indices in B), then re-fetch everything the
                 MatSetValues_SeqAIJ_B_Private() macro reads since B was rebuilt */
              ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
              col =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
              B = aij->B;
              b = (Mat_SeqAIJ*)B->data;
              bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j;
              rp2      = bj + bi[row];
              ap2      = ba + bi[row];
              rmax2    = bimax[row];
              nrow2    = bilen[row];
              low2     = 0;
              high2    = nrow2;
              bm       = aij->B->rmap->n;
              ba = b->a;
            }
          } else col = in[j];
          MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
        }
      }
    } else {
      /* Row owned by another process: stash the values for communication at assembly
         time (unless stashing is disabled for this matrix) */
      if (!aij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr);
        }
      }
    }
  }}
  PetscFunctionReturnVoid();
}
EXTERN_C_END
577203bfb495SBarry Smith 
5773