xref: /petsc/src/mat/impls/baij/mpi/mpibaij.c (revision 17df9f7c95b0ef00153aa5f4903b08e2f3467d6b)

#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/

#include <petscblaslapack.h>
#include <petscsf.h>

#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat,MatType,MatReuse,Mat*);
#endif

PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc1(A->rmap->n,&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);

  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
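
/*
   Note on the index mapping above: idxb[] returned for the off-diagonal part
   a->B holds local column indices of B, so the block index idxb[i]/bs is
   translated through garray[] (local block column -> global block column)
   and the within-block offset idxb[i]%bs is added back.  A minimal sketch of
   the same mapping (illustrative only, not a PETSc routine):

     PetscInt LocalToGlobalColumn(const PetscInt *garray,PetscInt bs,PetscInt lcol)
     {
       return bs*garray[lcol/bs] + (lcol % bs);
     }
*/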

PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
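
/*
   These two simply forward to the sequential diagonal and off-diagonal
   parts.  The typical calling pattern they support, sketched from the
   MatStoreValues() manual page (user code, not part of this file):

     MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);
     MatStoreValues(mat);       save a copy of the numerical values
     ... refill the matrix with the same nonzero pattern ...
     MatRetrieveValues(mat);    restore the saved values
*/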

/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix.  This is done in a non-scalable way since the
   length of colmap equals the global matrix length.
*/
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc1(baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}
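
/*
   colmap stores local offsets shifted by +1 so that 0 can mean "global
   block column not present on this process".  A minimal lookup sketch for
   the non-CTABLE branch (illustrative only):

     PetscInt gbcol = gcol/bs;                  global block column
     PetscInt loc   = baij->colmap[gbcol] - 1;  local offset, or -1 if absent
     if (loc >= 0) lcol = loc + gcol%bs;        local (point) column index
*/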

#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }
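
/*
   Both macros above address a bs x bs block stored column-major: entry
   (ridx,cidx) of block _i lives at ap[bs2*_i + bs*cidx + ridx].  Worked
   example with bs = 2 (bs2 = 4): entry (row 1, col 0) of block 3 is
   ap[4*3 + 2*0 + 1] = ap[13].  The binary search only narrows the window
   to at most three entries; the final probe is the short linear scan.
*/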

PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some variables required by the MatSetValues_SeqBAIJ_*_Private() macros */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
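
/*
   The user-facing entry point is MatSetValues() with global (point)
   indices; a standard usage sketch (the indices and value here are made
   up):

     PetscInt    row = 7,col = 12;
     PetscScalar val = 3.0;
     ierr = MatSetValues(mat,1,&row,1,&col,&val,ADD_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

   Off-process rows fall through to the stash above and are shipped to the
   owning process during assembly.
*/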

PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  value = v;
  low = 0;
  high = nrow;
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new global block indexed nonzero block (%D, %D) in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}
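
/*
   Layout note for the copy loops above: the incoming v[] is a dense
   bs x bs block, row-oriented (C order) or column-oriented (Fortran
   order), while BAIJ blocks are stored column-major.  For bs = 2, the
   row-oriented input {a,b,c,d} representing [a b; c d] is stored as
   {a,c,b,d}; column-oriented input is copied through unchanged.
*/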

/*
    This routine could be optimized by passing additional stride information
    into MatSetValuesBlocked_SeqBAIJ_Inlined() so that the block copy marked
    ** Here a copy is required ** below is not needed
*/
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If there is only one block column (row-oriented) or one block row
           (column-oriented), the input already has the right stride and no
           copy is required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new blocked indexed nonzero block (%D, %D) into matrix",im[i],in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
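
/*
   HASH() is multiplicative hashing with the golden-ratio constant (Knuth):
   the key is scaled by 0.6180339887, the integer part is discarded, and the
   fractional part indexes the table.  Worked example with size = 100,
   key = 37: 37*0.6180339887 = 22.867..., so the slot is
   (PetscInt)(100*0.867...) = 86.  Collisions are resolved by the linear
   probes in the routines below.
*/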
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt       total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up in the hash table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A hash table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  += total_ct;
  baij->ht_insert_ct += insert_ct;
#endif
  PetscFunctionReturn(0);
}

PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =mat->rmap->rend,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt          total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up in the hash table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  += total_ct;
  baij->ht_insert_ct += insert_ct;
#endif
  PetscFunctionReturn(0);
}

PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]); */
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}

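/*
   MatNorm_MPIBAIJ below computes, over the global matrix:
     NORM_FROBENIUS:  sqrt(sum_ij |a_ij|^2)   (local sums, then Allreduce SUM)
     NORM_1:          max_j sum_i |a_ij|      (max column sum)
     NORM_INFINITY:   max_i sum_j |a_ij|      (max row sum, Allreduce MAX)
*/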
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr = MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
      ierr = PetscMalloc2(mat->cmap->N,&tmp,mat->cmap->N,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v);  v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPIU_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc1(bs,&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}

/*
  Creates and fills the hash table.
  The table is created only once; if new entries need to be added to the
  matrix then the hash table has to be destroyed and recreated.
*/
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate memory for the hash table */
  ierr = PetscCalloc2(ht_size,&baij->hd,ht_size,&baij->ht);CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }
  /* Loop over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }

  /* Print summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average Search = %5.2f, max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}

PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       temporarily clear the roworiented flags and restore the original flags
       after the MatSetValuesBlocked() calls */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled; if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if the nonzero structure of submatrix B cannot change then we know that
     no processor disassembled, thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPIU_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",(double)((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in the matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPIU_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
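
/*
   Note on the reduction over mat->was_assembled above: with MPI_PROD the
   result other_disassembled is true only if every process was still
   assembled; if any process disassembled, the product is false and the
   remaining assembled processes disassemble as well so that all can
   reassemble consistently.
*/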

extern PetscErrorCode MatView_SeqBAIJ(Mat,PetscViewer);
#include <petscdraw.h>
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto the first processor. */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so we will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqBAIJ(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get the largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(rlen/bs,&crow_lens);CHKERRQ(ierr);
  /* compute lengths of each row */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc1(rlen,&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* load up the local column indices. Include them for all rows, not just one per
     block row, since process 0 does not have the information needed to expand a
     block row into its rows. This requires more communication, but still no more
     than the communication needed for the nonzero values */
  nzmax = nz; /* space the largest processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc1(nzmax,&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc1(nzmax,&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
#else
  ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(baij->garray);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
  ierr = PetscFree(baij->barray);CHKERRQ(ierr);
  ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
  ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
#if defined(PETSC_HAVE_HYPRE)
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_hypre_C",NULL);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
1363 
1364 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1365 {
1366   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1367   PetscErrorCode ierr;
1368   PetscInt       nt;
1369 
1370   PetscFunctionBegin;
1371   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1372   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1373   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1374   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1375   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1376   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1377   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1378   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1379   PetscFunctionReturn(0);
1380 }
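
/*
   For reference, a sketch of the algebra implemented above: with the local rows of A split
   into the diagonal block a->A and the off-diagonal block a->B (whose ghost values are
   gathered into a->lvec),

       y = (a->A) * x_local + (a->B) * x_ghost

   so the ghost-value scatter (VecScatterBegin/End) overlaps with the local product.
*/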
1381 
1382 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1383 {
1384   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1385   PetscErrorCode ierr;
1386 
1387   PetscFunctionBegin;
1388   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1389   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1390   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1391   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1392   PetscFunctionReturn(0);
1393 }
1394 
1395 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1396 {
1397   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1398   PetscErrorCode ierr;
1399   PetscBool      merged;
1400 
1401   PetscFunctionBegin;
1402   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1403   /* do nondiagonal part */
1404   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1405   if (!merged) {
1406     /* send it on its way */
1407     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1408     /* do local part */
1409     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1410     /* receive remote parts: note this assumes the values are not actually */
1411     /* inserted in yy until the next line */
1412     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1413   } else {
1414     /* do local part */
1415     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1416     /* send it on its way */
1417     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1418     /* the values were actually received in the Begin(), but we still call the End() as a no-op */
1419     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1420   }
1421   PetscFunctionReturn(0);
1422 }
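
/*
   Sketch of the transpose algebra above: y = (a->A)^T * x plus the reverse scatter-add of
   (a->B)^T * x, which is accumulated in a->lvec and added into yy with ADD_VALUES. Only the
   ordering of the local product relative to VecScatterBegin() differs between the merged
   and unmerged scatter cases.
*/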
1423 
1424 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1425 {
1426   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1427   PetscErrorCode ierr;
1428 
1429   PetscFunctionBegin;
1430   /* do nondiagonal part */
1431   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1432   /* send it on its way */
1433   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1434   /* do local part */
1435   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1436   /* receive remote parts: note this assumes the values are not actually */
1437   /* inserted in zz until the next line, which is true for this implementation */
1438   /* but is perhaps not always true. */
1439   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1440   PetscFunctionReturn(0);
1441 }
1442 
1443 /*
1444   This only works correctly for square matrices where the subblock A->A is the
1445    diagonal block
1446 */
1447 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1448 {
1449   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1450   PetscErrorCode ierr;
1451 
1452   PetscFunctionBegin;
1453   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1454   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1455   PetscFunctionReturn(0);
1456 }
1457 
1458 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1459 {
1460   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1461   PetscErrorCode ierr;
1462 
1463   PetscFunctionBegin;
1464   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1465   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1466   PetscFunctionReturn(0);
1467 }
1468 
1469 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1470 {
1471   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1472   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1473   PetscErrorCode ierr;
1474   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1475   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1476   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1477 
1478   PetscFunctionBegin;
1479   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1480   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1481   mat->getrowactive = PETSC_TRUE;
1482 
1483   if (!mat->rowvalues && (idx || v)) {
1484     /*
1485         allocate enough space to hold information from the longest row.
1486     */
1487     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1488     PetscInt    max = 1,mbs = mat->mbs,tmp;
1489     for (i=0; i<mbs; i++) {
1490       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1491       if (max < tmp) max = tmp;
1492     }
1493     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1494   }
1495   lrow = row - brstart;
1496 
1497   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1498   if (!v)   {pvA = 0; pvB = 0;}
1499   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1500   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1501   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1502   nztot = nzA + nzB;
1503 
1504   cmap = mat->garray;
1505   if (v  || idx) {
1506     if (nztot) {
1507       /* Sort by increasing column numbers, assuming A and B already sorted */
1508       PetscInt imark = -1;
1509       if (v) {
1510         *v = v_p = mat->rowvalues;
1511         for (i=0; i<nzB; i++) {
1512           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1513           else break;
1514         }
1515         imark = i;
1516         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1517         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1518       }
1519       if (idx) {
1520         *idx = idx_p = mat->rowindices;
1521         if (imark > -1) {
1522           for (i=0; i<imark; i++) {
1523             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1524           }
1525         } else {
1526           for (i=0; i<nzB; i++) {
1527             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1528             else break;
1529           }
1530           imark = i;
1531         }
1532         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1533         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1534       }
1535     } else {
1536       if (idx) *idx = 0;
1537       if (v)   *v   = 0;
1538     }
1539   }
1540   *nz  = nztot;
1541   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1542   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1543   PetscFunctionReturn(0);
1544 }
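
/*
   A worked example of the index reconstruction used above (the numbers are made up): an
   off-diagonal entry stored with local compressed column c maps to the global column
   cmap[c/bs]*bs + c%bs, so with bs = 2, garray = {7,9} and c = 3 the global column is
   9*2 + 1 = 19.
*/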
1545 
1546 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1547 {
1548   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1549 
1550   PetscFunctionBegin;
1551   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1552   baij->getrowactive = PETSC_FALSE;
1553   PetscFunctionReturn(0);
1554 }
1555 
1556 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1557 {
1558   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1559   PetscErrorCode ierr;
1560 
1561   PetscFunctionBegin;
1562   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1563   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1564   PetscFunctionReturn(0);
1565 }
1566 
1567 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1568 {
1569   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1570   Mat            A  = a->A,B = a->B;
1571   PetscErrorCode ierr;
1572   PetscReal      isend[5],irecv[5];
1573 
1574   PetscFunctionBegin;
1575   info->block_size = (PetscReal)matin->rmap->bs;
1576 
1577   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1578 
1579   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1580   isend[3] = info->memory;  isend[4] = info->mallocs;
1581 
1582   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1583 
1584   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1585   isend[3] += info->memory;  isend[4] += info->mallocs;
1586 
1587   if (flag == MAT_LOCAL) {
1588     info->nz_used      = isend[0];
1589     info->nz_allocated = isend[1];
1590     info->nz_unneeded  = isend[2];
1591     info->memory       = isend[3];
1592     info->mallocs      = isend[4];
1593   } else if (flag == MAT_GLOBAL_MAX) {
1594     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1595 
1596     info->nz_used      = irecv[0];
1597     info->nz_allocated = irecv[1];
1598     info->nz_unneeded  = irecv[2];
1599     info->memory       = irecv[3];
1600     info->mallocs      = irecv[4];
1601   } else if (flag == MAT_GLOBAL_SUM) {
1602     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1603 
1604     info->nz_used      = irecv[0];
1605     info->nz_allocated = irecv[1];
1606     info->nz_unneeded  = irecv[2];
1607     info->memory       = irecv[3];
1608     info->mallocs      = irecv[4];
1609   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1610   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1611   info->fill_ratio_needed = 0;
1612   info->factor_mallocs    = 0;
1613   PetscFunctionReturn(0);
1614 }
1615 
1616 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1617 {
1618   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1619   PetscErrorCode ierr;
1620 
1621   PetscFunctionBegin;
1622   switch (op) {
1623   case MAT_NEW_NONZERO_LOCATIONS:
1624   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1625   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1626   case MAT_KEEP_NONZERO_PATTERN:
1627   case MAT_NEW_NONZERO_LOCATION_ERR:
1628     MatCheckPreallocated(A,1);
1629     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1630     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1631     break;
1632   case MAT_ROW_ORIENTED:
1633     MatCheckPreallocated(A,1);
1634     a->roworiented = flg;
1635 
1636     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1637     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1638     break;
1639   case MAT_NEW_DIAGONALS:
1640     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1641     break;
1642   case MAT_IGNORE_OFF_PROC_ENTRIES:
1643     a->donotstash = flg;
1644     break;
1645   case MAT_USE_HASH_TABLE:
1646     a->ht_flag = flg;
1647     a->ht_fact = 1.39;
1648     break;
1649   case MAT_SYMMETRIC:
1650   case MAT_STRUCTURALLY_SYMMETRIC:
1651   case MAT_HERMITIAN:
1652   case MAT_SUBMAT_SINGLEIS:
1653   case MAT_SYMMETRY_ETERNAL:
1654     MatCheckPreallocated(A,1);
1655     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1656     break;
1657   default:
1658     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1659   }
1660   PetscFunctionReturn(0);
1661 }
1662 
1663 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1664 {
1665   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1666   Mat_SeqBAIJ    *Aloc;
1667   Mat            B;
1668   PetscErrorCode ierr;
1669   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1670   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1671   MatScalar      *a;
1672 
1673   PetscFunctionBegin;
1674   if (reuse == MAT_INPLACE_MATRIX && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1675   if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
1676     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1677     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1678     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1679     /* Do not know preallocation information, but must set block size */
1680     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1681   } else {
1682     B = *matout;
1683   }
1684 
1685   /* copy over the A part */
1686   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1687   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1688   ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
1689 
1690   for (i=0; i<mbs; i++) {
1691     rvals[0] = bs*(baij->rstartbs + i);
1692     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1693     for (j=ai[i]; j<ai[i+1]; j++) {
1694       col = (baij->cstartbs+aj[j])*bs;
1695       for (k=0; k<bs; k++) {
1696         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1697 
1698         col++; a += bs;
1699       }
1700     }
1701   }
1702   /* copy over the B part */
1703   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1704   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1705   for (i=0; i<mbs; i++) {
1706     rvals[0] = bs*(baij->rstartbs + i);
1707     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1708     for (j=ai[i]; j<ai[i+1]; j++) {
1709       col = baij->garray[aj[j]]*bs;
1710       for (k=0; k<bs; k++) {
1711         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1712         col++;
1713         a += bs;
1714       }
1715     }
1716   }
1717   ierr = PetscFree(rvals);CHKERRQ(ierr);
1718   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1719   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1720 
1721   if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) *matout = B;
1722   else {
1723     ierr = MatHeaderMerge(A,&B);CHKERRQ(ierr);
1724   }
1725   PetscFunctionReturn(0);
1726 }
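
/*
   Note on the copy loops above: the bs consecutive values at `a' form one column of the
   original bs x bs block (the blocks are stored column-major), i.e. one row of the
   transpose, so each MatSetValues_MPIBAIJ() call inserts a single row of B and `a += bs'
   advances to the next block column.
*/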
1727 
1728 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1729 {
1730   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1731   Mat            a     = baij->A,b = baij->B;
1732   PetscErrorCode ierr;
1733   PetscInt       s1,s2,s3;
1734 
1735   PetscFunctionBegin;
1736   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1737   if (rr) {
1738     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1739     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1740     /* Overlap communication with computation. */
1741     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1742   }
1743   if (ll) {
1744     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1745     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1746     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1747   }
1748   /* scale the diagonal block */
1749   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1750 
1751   if (rr) {
1752     /* Do a scatter end and then right scale the off-diagonal block */
1753     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1754     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1755   }
1756   PetscFunctionReturn(0);
1757 }
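
/*
   Algebra of the scaling above: mat <- diag(ll) * mat * diag(rr). Left scaling touches only
   locally owned rows, so b can be scaled with ll immediately, while the right scaling of the
   off-diagonal block must wait until the scatter of rr's ghost values into baij->lvec has
   completed.
*/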
1758 
1759 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1760 {
1761   Mat_MPIBAIJ   *l      = (Mat_MPIBAIJ *) A->data;
1762   PetscInt      *lrows;
1763   PetscInt       r, len;
1764   PetscErrorCode ierr;
1765 
1766   PetscFunctionBegin;
1767   /* get locally owned rows */
1768   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1769   /* fix right hand side if needed */
1770   if (x && b) {
1771     const PetscScalar *xx;
1772     PetscScalar       *bb;
1773 
1774     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1775     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1776     for (r = 0; r < len; ++r) bb[lrows[r]] = diag*xx[lrows[r]];
1777     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1778     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1779   }
1780 
1781   /* actually zap the local rows */
1782   /*
1783         Zero the required rows. If the "diagonal block" of the matrix
1784      is square and the user wishes to set the diagonal, we use separate
1785      code so that MatSetValues() is not called for each diagonal entry;
1786      each such call could allocate new memory, triggering many mallocs
1787      and slowing things down.
1788   */
1789   /* must zero l->B before l->A because the (diag) case below may put values into l->B */
1790   ierr = MatZeroRows_SeqBAIJ(l->B,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1791   if (A->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
1792     PetscBool cong;
1793     ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
1794     if (cong) A->congruentlayouts = 1;
1795     else      A->congruentlayouts = 0;
1796   }
1797   if ((diag != 0.0) && A->congruentlayouts) {
1798     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,diag,NULL,NULL);CHKERRQ(ierr);
1799   } else if (diag != 0.0) {
1800     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,0,0);CHKERRQ(ierr);
1801     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1802        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1803     for (r = 0; r < len; ++r) {
1804       const PetscInt row = lrows[r] + A->rmap->rstart;
1805       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1806     }
1807     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1808     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1809   } else {
1810     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1811   }
1812   ierr = PetscFree(lrows);CHKERRQ(ierr);
1813 
1814   /* only change matrix nonzero state if pattern was allowed to be changed */
1815   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1816     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1817     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1818   }
1819   PetscFunctionReturn(0);
1820 }
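
/*
   Usage sketch (illustrative; the row list is hypothetical): zeroing Dirichlet boundary rows
   while keeping a unit diagonal, with b fixed so that the zeroed unknowns keep the values
   already stored in x (the b[row] = diag*x[row] update coded above):

     PetscInt rows[] = {0,5,9};

     ierr = MatZeroRows(A,3,rows,1.0,x,b);CHKERRQ(ierr);
*/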
1821 
1822 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1823 {
1824   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1825   PetscErrorCode    ierr;
1826   PetscMPIInt       n = A->rmap->n;
1827   PetscInt          i,j,k,r,p = 0,len = 0,row,col,count;
1828   PetscInt          *lrows,*owners = A->rmap->range;
1829   PetscSFNode       *rrows;
1830   PetscSF           sf;
1831   const PetscScalar *xx;
1832   PetscScalar       *bb,*mask;
1833   Vec               xmask,lmask;
1834   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ*)l->B->data;
1835   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1836   PetscScalar       *aa;
1837 
1838   PetscFunctionBegin;
1839   /* Create SF where leaves are input rows and roots are owned rows */
1840   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1841   for (r = 0; r < n; ++r) lrows[r] = -1;
1842   ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);
1843   for (r = 0; r < N; ++r) {
1844     const PetscInt idx   = rows[r];
1845     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1846     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1847       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1848     }
1849     rrows[r].rank  = p;
1850     rrows[r].index = rows[r] - owners[p];
1851   }
1852   ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1853   ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1854   /* Collect flags for rows to be zeroed */
1855   ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1856   ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1857   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1858   /* Compress and put in row numbers */
1859   for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1860   /* zero diagonal part of matrix */
1861   ierr = MatZeroRowsColumns(l->A,len,lrows,diag,x,b);CHKERRQ(ierr);
1862   /* handle off diagonal part of matrix */
1863   ierr = MatCreateVecs(A,&xmask,NULL);CHKERRQ(ierr);
1864   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
1865   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
1866   for (i=0; i<len; i++) bb[lrows[i]] = 1;
1867   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1868   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1869   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1870   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1871   if (x) {
1872     ierr = VecScatterBegin(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1873     ierr = VecScatterEnd(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1874     ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1875     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1876   }
1877   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1878   /* remove zeroed rows of off diagonal matrix */
1879   for (i = 0; i < len; ++i) {
1880     row   = lrows[i];
1881     count = (baij->i[row/bs +1] - baij->i[row/bs])*bs;
1882     aa    = ((MatScalar*)(baij->a)) + baij->i[row/bs]*bs2 + (row%bs);
1883     for (k = 0; k < count; ++k) {
1884       aa[0] = 0.0;
1885       aa   += bs;
1886     }
1887   }
1888   /* loop over all elements of off process part of matrix zeroing removed columns*/
1889   for (i = 0; i < l->B->rmap->N; ++i) {
1890     row = i/bs;
1891     for (j = baij->i[row]; j < baij->i[row+1]; ++j) {
1892       for (k = 0; k < bs; ++k) {
1893         col = bs*baij->j[j] + k;
1894         if (PetscAbsScalar(mask[col])) {
1895           aa = ((MatScalar*)(baij->a)) + j*bs2 + (i%bs) + bs*k;
1896           if (x) bb[i] -= aa[0]*xx[col];
1897           aa[0] = 0.0;
1898         }
1899       }
1900     }
1901   }
1902   if (x) {
1903     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1904     ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1905   }
1906   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1907   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
1908   ierr = PetscFree(lrows);CHKERRQ(ierr);
1909 
1910   /* only change matrix nonzero state if pattern was allowed to be changed */
1911   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1912     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1913     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1914   }
1915   PetscFunctionReturn(0);
1916 }
1917 
1918 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1919 {
1920   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1921   PetscErrorCode ierr;
1922 
1923   PetscFunctionBegin;
1924   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1925   PetscFunctionReturn(0);
1926 }
1927 
1928 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
1929 
1930 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
1931 {
1932   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
1933   Mat            a,b,c,d;
1934   PetscBool      flg;
1935   PetscErrorCode ierr;
1936 
1937   PetscFunctionBegin;
1938   a = matA->A; b = matA->B;
1939   c = matB->A; d = matB->B;
1940 
1941   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1942   if (flg) {
1943     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1944   }
1945   ierr = MPIU_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1946   PetscFunctionReturn(0);
1947 }
1948 
1949 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
1950 {
1951   PetscErrorCode ierr;
1952   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1953   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
1954 
1955   PetscFunctionBegin;
1956   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1957   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1958     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1959   } else {
1960     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1961     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1962   }
1963   PetscFunctionReturn(0);
1964 }
1965 
1966 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
1967 {
1968   PetscErrorCode ierr;
1969 
1970   PetscFunctionBegin;
1971   ierr = MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1972   PetscFunctionReturn(0);
1973 }
1974 
1975 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
1976 {
1977   PetscErrorCode ierr;
1978   PetscInt       bs = Y->rmap->bs,m = Y->rmap->N/bs;
1979   Mat_SeqBAIJ    *x = (Mat_SeqBAIJ*)X->data;
1980   Mat_SeqBAIJ    *y = (Mat_SeqBAIJ*)Y->data;
1981 
1982   PetscFunctionBegin;
1983   ierr = MatAXPYGetPreallocation_MPIX_private(m,x->i,x->j,xltog,y->i,y->j,yltog,nnz);CHKERRQ(ierr);
1984   PetscFunctionReturn(0);
1985 }
1986 
1987 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1988 {
1989   PetscErrorCode ierr;
1990   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
1991   PetscBLASInt   bnz,one=1;
1992   Mat_SeqBAIJ    *x,*y;
1993 
1994   PetscFunctionBegin;
1995   if (str == SAME_NONZERO_PATTERN) {
1996     PetscScalar alpha = a;
1997     x    = (Mat_SeqBAIJ*)xx->A->data;
1998     y    = (Mat_SeqBAIJ*)yy->A->data;
1999     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2000     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2001     x    = (Mat_SeqBAIJ*)xx->B->data;
2002     y    = (Mat_SeqBAIJ*)yy->B->data;
2003     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2004     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2005     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
2006   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
2007     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2008   } else {
2009     Mat      B;
2010     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
2011     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
2012     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
2013     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
2014     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
2015     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
2016     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
2017     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
2018     ierr = MatAXPYGetPreallocation_SeqBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
2019     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
2020     ierr = MatMPIBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
2021     /* MatAXPY_BasicWithPreallocation() for BAIJ matrices is much slower than for AIJ, even for bs=1! */
2022     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2023     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2024     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
2025     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2026   }
2027   PetscFunctionReturn(0);
2028 }
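
/*
   Usage sketch: Y = a*X + Y, with the MatStructure flag selecting the path above.
   SAME_NONZERO_PATTERN reduces to two BLASaxpy calls on the stored values; any other flag
   falls back to the slower general code:

     ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = MatAXPY(Y,2.0,X,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
*/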
2029 
2030 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
2031 {
2032   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2033   PetscErrorCode ierr;
2034 
2035   PetscFunctionBegin;
2036   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2037   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2038   PetscFunctionReturn(0);
2039 }
2040 
2041 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2042 {
2043   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2044   PetscErrorCode ierr;
2045 
2046   PetscFunctionBegin;
2047   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2048   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2049   PetscFunctionReturn(0);
2050 }
2051 
2052 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2053 {
2054   PetscErrorCode ierr;
2055   IS             iscol_local;
2056   PetscInt       csize;
2057 
2058   PetscFunctionBegin;
2059   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2060   if (call == MAT_REUSE_MATRIX) {
2061     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2062     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2063   } else {
2064     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2065   }
2066   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2067   if (call == MAT_INITIAL_MATRIX) {
2068     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2069     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2070   }
2071   PetscFunctionReturn(0);
2072 }
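
/*
   Usage sketch: the "ISAllGather" object composed above is what enables MAT_REUSE_MATRIX,
   so the second extraction must use the same index sets as the first:

     Mat sub;

     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&sub);CHKERRQ(ierr);
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_REUSE_MATRIX,&sub);CHKERRQ(ierr);
*/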
2073 
2074 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2075 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local_new(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2076 /*
2077   Not great since it makes two copies of the submatrix: first a SeqBAIJ
2078   is built locally, and then the end result is assembled by concatenating the local matrices.
2079   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2080   This routine is used for both BAIJ and SBAIJ matrices (an unfortunate dependency).
2081 */
2082 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2083 {
2084   PetscErrorCode ierr;
2085   PetscMPIInt    rank,size;
2086   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2087   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2088   Mat            M,Mreuse;
2089   MatScalar      *vwork,*aa;
2090   MPI_Comm       comm;
2091   IS             isrow_new, iscol_new;
2092   PetscBool      idflag,allrows, allcols;
2093   Mat_SeqBAIJ    *aij;
2094 
2095   PetscFunctionBegin;
2096   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2097   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2098   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2099   /* The compression and expansion should be avoided. It does not flag
2100      errors and might change the indices, hence it is bug-prone */
2101   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2102   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2103 
2104   /* Check for special case: each processor gets entire matrix columns */
2105   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2106   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2107   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2108   else allcols = PETSC_FALSE;
2109 
2110   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2111   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2112   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2113   else allrows = PETSC_FALSE;
2114 
2115   if (call ==  MAT_REUSE_MATRIX) {
2116     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2117     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2118     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2119   } else {
2120     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2121   }
2122   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2123   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2124   /*
2125       m - number of local rows
2126       n - number of columns (same on all processors)
2127       rstart - first row in new global matrix generated
2128   */
2129   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2130   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2131   m    = m/bs;
2132   n    = n/bs;
2133 
2134   if (call == MAT_INITIAL_MATRIX) {
2135     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2136     ii  = aij->i;
2137     jj  = aij->j;
2138 
2139     /*
2140         Determine the number of non-zeros in the diagonal and off-diagonal
2141         portions of the matrix in order to do correct preallocation
2142     */
2143 
2144     /* first get start and end of "diagonal" columns */
2145     if (csize == PETSC_DECIDE) {
2146       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2147       if (mglobal == n*bs) { /* square matrix */
2148         nlocal = m;
2149       } else {
2150         nlocal = n/size + ((n % size) > rank);
2151       }
2152     } else {
2153       nlocal = csize/bs;
2154     }
2155     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2156     rstart = rend - nlocal;
2157     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2158 
2159     /* next, compute all the lengths */
2160     ierr  = PetscMalloc2(m+1,&dlens,m+1,&olens);CHKERRQ(ierr);
2161     for (i=0; i<m; i++) {
2162       jend = ii[i+1] - ii[i];
2163       olen = 0;
2164       dlen = 0;
2165       for (j=0; j<jend; j++) {
2166         if (*jj < rstart || *jj >= rend) olen++;
2167         else dlen++;
2168         jj++;
2169       }
2170       olens[i] = olen;
2171       dlens[i] = dlen;
2172     }
2173     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2174     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2175     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2176     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2177     ierr = MatMPISBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2178     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2179   } else {
2180     PetscInt ml,nl;
2181 
2182     M    = *newmat;
2183     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2184     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2185     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2186     /*
2187          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2188        rather than the slower MatSetValues().
2189     */
2190     M->was_assembled = PETSC_TRUE;
2191     M->assembled     = PETSC_FALSE;
2192   }
2193   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2194   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2195   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2196   ii   = aij->i;
2197   jj   = aij->j;
2198   aa   = aij->a;
2199   for (i=0; i<m; i++) {
2200     row   = rstart/bs + i;
2201     nz    = ii[i+1] - ii[i];
2202     cwork = jj;     jj += nz;
2203     vwork = aa;     aa += nz*bs*bs;
2204     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2205   }
2206 
2207   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2208   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2209   *newmat = M;
2210 
2211   /* save submatrix used in processor for next request */
2212   if (call ==  MAT_INITIAL_MATRIX) {
2213     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2214     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2215   }
2216   PetscFunctionReturn(0);
2217 }
2218 
2219 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2220 {
2221   MPI_Comm       comm,pcomm;
2222   PetscInt       clocal_size,nrows;
2223   const PetscInt *rows;
2224   PetscMPIInt    size;
2225   IS             crowp,lcolp;
2226   PetscErrorCode ierr;
2227 
2228   PetscFunctionBegin;
2229   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2230   /* make a collective version of 'rowp' */
2231   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2232   if (pcomm==comm) {
2233     crowp = rowp;
2234   } else {
2235     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2236     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2237     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2238     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2239   }
2240   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2241   /* make a local version of 'colp' */
2242   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2243   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2244   if (size==1) {
2245     lcolp = colp;
2246   } else {
2247     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2248   }
2249   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2250   /* now we just get the submatrix */
2251   ierr = MatGetLocalSize(A,NULL,&clocal_size);CHKERRQ(ierr);
2252   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2253   /* clean up */
2254   if (pcomm!=comm) {
2255     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2256   }
2257   if (size>1) {
2258     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2259   }
2260   PetscFunctionReturn(0);
2261 }
2262 
2263 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2264 {
2265   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2266   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2267 
2268   PetscFunctionBegin;
2269   if (nghosts) *nghosts = B->nbs;
2270   if (ghosts) *ghosts = baij->garray;
2271   PetscFunctionReturn(0);
2272 }
2273 
2274 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2275 {
2276   Mat            B;
2277   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2278   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2279   Mat_SeqAIJ     *b;
2280   PetscErrorCode ierr;
2281   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2282   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2283   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2284 
2285   PetscFunctionBegin;
2286   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2287   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2288 
2289   /* ----------------------------------------------------------------
2290      Tell every processor the number of nonzeros per row
2291   */
2292   ierr = PetscMalloc1(A->rmap->N/bs,&lens);CHKERRQ(ierr);
2293   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2294     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2295   }
2296   ierr      = PetscMalloc1(2*size,&recvcounts);CHKERRQ(ierr);
2297   displs    = recvcounts + size;
2298   for (i=0; i<size; i++) {
2299     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2300     displs[i]     = A->rmap->range[i]/bs;
2301   }
2302 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2303   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2304 #else
2305   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2306   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2307 #endif
2308   /* ---------------------------------------------------------------
2309      Create the sequential matrix of the same type as the local block diagonal
2310   */
2311   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2312   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2313   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2314   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2315   b    = (Mat_SeqAIJ*)B->data;
2316 
2317   /*--------------------------------------------------------------------
2318     Copy my part of matrix column indices over
2319   */
2320   sendcount  = ad->nz + bd->nz;
2321   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2322   a_jsendbuf = ad->j;
2323   b_jsendbuf = bd->j;
2324   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2325   cnt        = 0;
2326   for (i=0; i<n; i++) {
2327 
2328     /* put in lower diagonal portion */
2329     m = bd->i[i+1] - bd->i[i];
2330     while (m > 0) {
2331       /* is it above diagonal (in bd (compressed) numbering) */
2332       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2333       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2334       m--;
2335     }
2336 
2337     /* put in diagonal portion */
2338     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2339       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2340     }
2341 
2342     /* put in upper diagonal portion */
2343     while (m-- > 0) {
2344       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2345     }
2346   }
2347   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2348 
2349   /*--------------------------------------------------------------------
2350     Gather all column indices to all processors
2351   */
2352   for (i=0; i<size; i++) {
2353     recvcounts[i] = 0;
2354     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2355       recvcounts[i] += lens[j];
2356     }
2357   }
2358   displs[0] = 0;
2359   for (i=1; i<size; i++) {
2360     displs[i] = displs[i-1] + recvcounts[i-1];
2361   }
2362 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2363   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2364 #else
2365   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2366 #endif
2367   /*--------------------------------------------------------------------
2368     Assemble the matrix into usable form (note the numerical values are not yet set)
2369   */
2370   /* set the b->ilen (length of each row) values */
2371   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2372   /* set the b->i indices */
2373   b->i[0] = 0;
2374   for (i=1; i<=A->rmap->N/bs; i++) {
2375     b->i[i] = b->i[i-1] + lens[i-1];
2376   }
2377   ierr = PetscFree(lens);CHKERRQ(ierr);
2378   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2379   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2380   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2381 
2382   if (A->symmetric) {
2383     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2384   } else if (A->hermitian) {
2385     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2386   } else if (A->structurally_symmetric) {
2387     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2388   }
2389   *newmat = B;
2390   PetscFunctionReturn(0);
2391 }
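
/*
   Summary of the communication above: two MPI_Allgatherv() rounds, first for the
   per-block-row lengths lens[] and then for the column indices, give every process the full
   block nonzero structure; the result is a sequential AIJ matrix of the block pattern with
   the numerical values left unset.
*/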
2392 
2393 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2394 {
2395   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2396   PetscErrorCode ierr;
2397   Vec            bb1 = 0;
2398 
2399   PetscFunctionBegin;
2400   if (flag == SOR_APPLY_UPPER) {
2401     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2402     PetscFunctionReturn(0);
2403   }
2404 
2405   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS) {
2406     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2407   }
2408 
2409   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2410     if (flag & SOR_ZERO_INITIAL_GUESS) {
2411       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2412       its--;
2413     }
2414 
2415     while (its--) {
2416       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2417       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2418 
2419       /* update rhs: bb1 = bb - B*x */
2420       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2421       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2422 
2423       /* local sweep */
2424       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2425     }
2426   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2427     if (flag & SOR_ZERO_INITIAL_GUESS) {
2428       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2429       its--;
2430     }
2431     while (its--) {
2432       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2433       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2434 
2435       /* update rhs: bb1 = bb - B*x */
2436       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2437       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2438 
2439       /* local sweep */
2440       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2441     }
2442   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2443     if (flag & SOR_ZERO_INITIAL_GUESS) {
2444       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2445       its--;
2446     }
2447     while (its--) {
2448       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2449       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2450 
2451       /* update rhs: bb1 = bb - B*x */
2452       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2453       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2454 
2455       /* local sweep */
2456       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2457     }
2458   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Parallel version of SOR requested is not supported");
2459 
2460   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2461   PetscFunctionReturn(0);
2462 }
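
/*
   Sketch of one processor-local sweep above: each iteration forms the modified right-hand
   side bb1 = bb - B*x_ghost (coded as VecScale(lvec,-1.0) followed by a multadd) and then
   runs the sequential SOR kernel on the diagonal block A, i.e. block Jacobi between
   processes combined with SOR within each process.
*/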
2463 
2464 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2465 {
2466   PetscErrorCode ierr;
2467   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2468   PetscInt       N,i,*garray = aij->garray;
2469   PetscInt       ib,jb,bs = A->rmap->bs;
2470   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2471   MatScalar      *a_val = a_aij->a;
2472   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2473   MatScalar      *b_val = b_aij->a;
2474   PetscReal      *work;
2475 
2476   PetscFunctionBegin;
2477   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2478   ierr = PetscCalloc1(N,&work);CHKERRQ(ierr);
2479   if (type == NORM_2) {
2480     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2481       for (jb=0; jb<bs; jb++) {
2482         for (ib=0; ib<bs; ib++) {
2483           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2484           a_val++;
2485         }
2486       }
2487     }
2488     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2489       for (jb=0; jb<bs; jb++) {
2490         for (ib=0; ib<bs; ib++) {
2491           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2492           b_val++;
2493         }
2494       }
2495     }
2496   } else if (type == NORM_1) {
2497     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2498       for (jb=0; jb<bs; jb++) {
2499         for (ib=0; ib<bs; ib++) {
2500           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2501           a_val++;
2502         }
2503       }
2504     }
2505     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2506       for (jb=0; jb<bs; jb++) {
2507        for (ib=0; ib<bs; ib++) {
2508           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2509           b_val++;
2510         }
2511       }
2512     }
2513   } else if (type == NORM_INFINITY) {
2514     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2515       for (jb=0; jb<bs; jb++) {
2516         for (ib=0; ib<bs; ib++) {
2517           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2518           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2519           a_val++;
2520         }
2521       }
2522     }
2523     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2524       for (jb=0; jb<bs; jb++) {
2525         for (ib=0; ib<bs; ib++) {
2526           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2527           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2528           b_val++;
2529         }
2530       }
2531     }
2532   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2533   if (type == NORM_INFINITY) {
2534     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2535   } else {
2536     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2537   }
2538   ierr = PetscFree(work);CHKERRQ(ierr);
2539   if (type == NORM_2) {
2540     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2541   }
2542   PetscFunctionReturn(0);
2543 }
2544 
2545 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2546 {
2547   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2548   PetscErrorCode ierr;
2549 
2550   PetscFunctionBegin;
2551   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2552   A->factorerrortype             = a->A->factorerrortype;
2553   A->factorerror_zeropivot_value = a->A->factorerror_zeropivot_value;
2554   A->factorerror_zeropivot_row   = a->A->factorerror_zeropivot_row;
2555   PetscFunctionReturn(0);
2556 }
2557 
2558 PetscErrorCode MatShift_MPIBAIJ(Mat Y,PetscScalar a)
2559 {
2560   PetscErrorCode ierr;
2561   Mat_MPIBAIJ    *maij = (Mat_MPIBAIJ*)Y->data;
2562   Mat_SeqBAIJ    *aij = (Mat_SeqBAIJ*)maij->A->data;
2563 
2564   PetscFunctionBegin;
2565   if (!Y->preallocated) {
2566     ierr = MatMPIBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
2567   } else if (!aij->nz) {
2568     PetscInt nonew = aij->nonew;
2569     ierr = MatSeqBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
2570     aij->nonew = nonew;
2571   }
2572   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
2573   PetscFunctionReturn(0);
2574 }
2575 
2576 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A,PetscBool  *missing,PetscInt *d)
2577 {
2578   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2579   PetscErrorCode ierr;
2580 
2581   PetscFunctionBegin;
2582   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only works for square matrices");
2583   ierr = MatMissingDiagonal(a->A,missing,d);CHKERRQ(ierr);
2584   if (d) {
2585     PetscInt rstart;
2586     ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
2587     *d += rstart/A->rmap->bs;
2588 
2589   }
2590   PetscFunctionReturn(0);
2591 }
2592 
2593 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2594 {
2595   PetscFunctionBegin;
2596   *a = ((Mat_MPIBAIJ*)A->data)->A;
2597   PetscFunctionReturn(0);
2598 }
2599 
2600 /* -------------------------------------------------------------------*/
2601 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2602                                        MatGetRow_MPIBAIJ,
2603                                        MatRestoreRow_MPIBAIJ,
2604                                        MatMult_MPIBAIJ,
2605                                 /* 4*/ MatMultAdd_MPIBAIJ,
2606                                        MatMultTranspose_MPIBAIJ,
2607                                        MatMultTransposeAdd_MPIBAIJ,
2608                                        0,
2609                                        0,
2610                                        0,
2611                                 /*10*/ 0,
2612                                        0,
2613                                        0,
2614                                        MatSOR_MPIBAIJ,
2615                                        MatTranspose_MPIBAIJ,
2616                                 /*15*/ MatGetInfo_MPIBAIJ,
2617                                        MatEqual_MPIBAIJ,
2618                                        MatGetDiagonal_MPIBAIJ,
2619                                        MatDiagonalScale_MPIBAIJ,
2620                                        MatNorm_MPIBAIJ,
2621                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2622                                        MatAssemblyEnd_MPIBAIJ,
2623                                        MatSetOption_MPIBAIJ,
2624                                        MatZeroEntries_MPIBAIJ,
2625                                 /*24*/ MatZeroRows_MPIBAIJ,
2626                                        0,
2627                                        0,
2628                                        0,
2629                                        0,
2630                                 /*29*/ MatSetUp_MPIBAIJ,
2631                                        0,
2632                                        0,
2633                                        MatGetDiagonalBlock_MPIBAIJ,
2634                                        0,
2635                                 /*34*/ MatDuplicate_MPIBAIJ,
2636                                        0,
2637                                        0,
2638                                        0,
2639                                        0,
2640                                 /*39*/ MatAXPY_MPIBAIJ,
2641                                        MatGetSubMatrices_MPIBAIJ,
2642                                        MatIncreaseOverlap_MPIBAIJ,
2643                                        MatGetValues_MPIBAIJ,
2644                                        MatCopy_MPIBAIJ,
2645                                 /*44*/ 0,
2646                                        MatScale_MPIBAIJ,
2647                                        MatShift_MPIBAIJ,
2648                                        0,
2649                                        MatZeroRowsColumns_MPIBAIJ,
2650                                 /*49*/ 0,
2651                                        0,
2652                                        0,
2653                                        0,
2654                                        0,
2655                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2656                                        0,
2657                                        MatSetUnfactored_MPIBAIJ,
2658                                        MatPermute_MPIBAIJ,
2659                                        MatSetValuesBlocked_MPIBAIJ,
2660                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2661                                        MatDestroy_MPIBAIJ,
2662                                        MatView_MPIBAIJ,
2663                                        0,
2664                                        0,
2665                                 /*64*/ 0,
2666                                        0,
2667                                        0,
2668                                        0,
2669                                        0,
2670                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2671                                        0,
2672                                        0,
2673                                        0,
2674                                        0,
2675                                 /*74*/ 0,
2676                                        MatFDColoringApply_BAIJ,
2677                                        0,
2678                                        0,
2679                                        0,
2680                                 /*79*/ 0,
2681                                        0,
2682                                        0,
2683                                        0,
2684                                        MatLoad_MPIBAIJ,
2685                                 /*84*/ 0,
2686                                        0,
2687                                        0,
2688                                        0,
2689                                        0,
2690                                 /*89*/ 0,
2691                                        0,
2692                                        0,
2693                                        0,
2694                                        0,
2695                                 /*94*/ 0,
2696                                        0,
2697                                        0,
2698                                        0,
2699                                        0,
2700                                 /*99*/ 0,
2701                                        0,
2702                                        0,
2703                                        0,
2704                                        0,
2705                                 /*104*/0,
2706                                        MatRealPart_MPIBAIJ,
2707                                        MatImaginaryPart_MPIBAIJ,
2708                                        0,
2709                                        0,
2710                                 /*109*/0,
2711                                        0,
2712                                        0,
2713                                        0,
2714                                        MatMissingDiagonal_MPIBAIJ,
2715                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2716                                        0,
2717                                        MatGetGhosts_MPIBAIJ,
2718                                        0,
2719                                        0,
2720                                 /*119*/0,
2721                                        0,
2722                                        0,
2723                                        0,
2724                                        MatGetMultiProcBlock_MPIBAIJ,
2725                                 /*124*/0,
2726                                        MatGetColumnNorms_MPIBAIJ,
2727                                        MatInvertBlockDiagonal_MPIBAIJ,
2728                                        0,
2729                                        0,
2730                                /*129*/ 0,
2731                                        0,
2732                                        0,
2733                                        0,
2734                                        0,
2735                                /*134*/ 0,
2736                                        0,
2737                                        0,
2738                                        0,
2739                                        0,
2740                                /*139*/ MatSetBlockSizes_Default,
2741                                        0,
2742                                        0,
2743                                        MatFDColoringSetUp_MPIXAIJ,
2744                                        0,
2745                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPIBAIJ
2746 };
2747 
2748 
2749 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2750 
2751 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2752 {
2753   PetscInt       m,rstart,cstart,cend;
2754   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2755   const PetscInt *JJ    =0;
2756   PetscScalar    *values=0;
2757   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2758   PetscErrorCode ierr;
2759 
2760   PetscFunctionBegin;
2761   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2762   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2763   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2764   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2765   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2766   m      = B->rmap->n/bs;
2767   rstart = B->rmap->rstart/bs;
2768   cstart = B->cmap->rstart/bs;
2769   cend   = B->cmap->rend/bs;
2770 
2771   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2772   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
2773   for (i=0; i<m; i++) {
2774     nz = ii[i+1] - ii[i];
2775     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2776     nz_max = PetscMax(nz_max,nz);
2777     JJ     = jj + ii[i];
2778     for (j=0; j<nz; j++) {
2779       if (*JJ >= cstart) break;
2780       JJ++;
2781     }
2782     d = 0;
2783     for (; j<nz; j++) {
2784       if (*JJ++ >= cend) break;
2785       d++;
2786     }
2787     d_nnz[i] = d;
2788     o_nnz[i] = nz - d;
2789   }
2790   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2791   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2792 
2793   values = (PetscScalar*)V;
2794   if (!values) {
2795     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2796     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2797   }
2798   for (i=0; i<m; i++) {
2799     PetscInt          row    = i + rstart;
2800     PetscInt          ncols  = ii[i+1] - ii[i];
2801     const PetscInt    *icols = jj + ii[i];
2802     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2803       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2804       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2805     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2806       PetscInt j;
2807       for (j=0; j<ncols; j++) {
2808         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2809         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2810       }
2811     }
2812   }
2813 
2814   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2815   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2816   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2817   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2818   PetscFunctionReturn(0);
2819 }
2820 
2821 /*@C
2822    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2823    (block compressed row).
2824 
2825    Collective on MPI_Comm
2826 
2827    Input Parameters:
2828 +  B - the matrix
2829 .  bs - the block size
2830 .  i - the indices into j for the start of each local row (starts with zero)
2831 .  j - the column indices for each local row (starts with zero); these must be sorted within each row
2832 -  v - optional values in the matrix
2833 
2834    Level: developer
2835 
2836    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2837    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index runs
2838    over the rows within a block and the last index runs over the columns within a block.  Fortran programs will likely
2839    set MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index runs over the rows
2840    within a block and the second index runs over the columns within a block.
2841 
2842 .keywords: matrix, block, aij, compressed row, sparse, parallel
2843 
2844 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateBAIJ(), MATMPIBAIJ, MatCreateMPIBAIJWithArrays()
2845 @*/
2846 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2847 {
2848   PetscErrorCode ierr;
2849 
2850   PetscFunctionBegin;
2851   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2852   PetscValidType(B,1);
2853   PetscValidLogicalCollectiveInt(B,bs,2);
2854   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2855   PetscFunctionReturn(0);
2856 }
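
/*
   Usage sketch (illustrative only; run on a single process, the arrays are made up):
   preallocate and fill a 6x6 MPIBAIJ matrix with bs = 2 from block CSR arrays.  Note
   that even with v = NULL this routine inserts (zero) blocks at the given locations,
   assembles the matrix, and then disallows new nonzero locations.

     PetscInt    bs   = 2;
     PetscInt    ii[] = {0,2,3,5};        three local block rows
     PetscInt    jj[] = {0,1, 1, 1,2};    block column indices, sorted within each row
     Mat         B;

     ierr = MatCreate(PETSC_COMM_WORLD,&B);CHKERRQ(ierr);
     ierr = MatSetSizes(B,6,6,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
     ierr = MatMPIBAIJSetPreallocationCSR(B,bs,ii,jj,NULL);CHKERRQ(ierr);
*/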
2857 
2858 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2859 {
2860   Mat_MPIBAIJ    *b;
2861   PetscErrorCode ierr;
2862   PetscInt       i;
2863 
2864   PetscFunctionBegin;
2865   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
2866   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2867   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2868   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2869 
2870   if (d_nnz) {
2871     for (i=0; i<B->rmap->n/bs; i++) {
2872       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]);
2873     }
2874   }
2875   if (o_nnz) {
2876     for (i=0; i<B->rmap->n/bs; i++) {
2877       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]);
2878     }
2879   }
2880 
2881   b      = (Mat_MPIBAIJ*)B->data;
2882   b->bs2 = bs*bs;
2883   b->mbs = B->rmap->n/bs;
2884   b->nbs = B->cmap->n/bs;
2885   b->Mbs = B->rmap->N/bs;
2886   b->Nbs = B->cmap->N/bs;
2887 
2888   for (i=0; i<=b->size; i++) {
2889     b->rangebs[i] = B->rmap->range[i]/bs;
2890   }
2891   b->rstartbs = B->rmap->rstart/bs;
2892   b->rendbs   = B->rmap->rend/bs;
2893   b->cstartbs = B->cmap->rstart/bs;
2894   b->cendbs   = B->cmap->rend/bs;
2895 
2896 #if defined(PETSC_USE_CTABLE)
2897   ierr = PetscTableDestroy(&b->colmap);CHKERRQ(ierr);
2898 #else
2899   ierr = PetscFree(b->colmap);CHKERRQ(ierr);
2900 #endif
2901   ierr = PetscFree(b->garray);CHKERRQ(ierr);
2902   ierr = VecDestroy(&b->lvec);CHKERRQ(ierr);
2903   ierr = VecScatterDestroy(&b->Mvctx);CHKERRQ(ierr);
2904 
2905   /* Because the B will have been resized we simply destroy it and create a new one each time */
2906   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
2907   ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
2908   ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
2909   ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
2910   ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
2911 
2912   if (!B->preallocated) {
2913     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
2914     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
2915     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
2916     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
2917     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
2918   }
2919 
2920   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
2921   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
2922   B->preallocated  = PETSC_TRUE;
2923   B->was_assembled = PETSC_FALSE;
2924   B->assembled     = PETSC_FALSE;
2925   PetscFunctionReturn(0);
2926 }
2927 
2928 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
2929 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
2930 
2931 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
2932 {
2933   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2934   PetscErrorCode ierr;
2935   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
2936   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
2937   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
2938 
2939   PetscFunctionBegin;
2940   ierr  = PetscMalloc1(M+1,&ii);CHKERRQ(ierr);
2941   ii[0] = 0;
2942   for (i=0; i<M; i++) {
2943     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
2944     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
2945     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
2946     /* remove one from the count if the matrix row has a diagonal entry */
2947     for (j=id[i]; j<id[i+1]; j++) {
2948       if (jd[j] == i) {ii[i+1]--;break;}
2949     }
2950   }
2951   ierr = PetscMalloc1(ii[M],&jj);CHKERRQ(ierr);
2952   cnt  = 0;
2953   for (i=0; i<M; i++) {
2954     for (j=io[i]; j<io[i+1]; j++) {
2955       if (garray[jo[j]] > rstart) break;
2956       jj[cnt++] = garray[jo[j]];
2957     }
2958     for (k=id[i]; k<id[i+1]; k++) {
2959       if (jd[k] != i) {
2960         jj[cnt++] = rstart + jd[k];
2961       }
2962     }
2963     for (; j<io[i+1]; j++) {
2964       jj[cnt++] = garray[jo[j]];
2965     }
2966   }
2967   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
2968   PetscFunctionReturn(0);
2969 }
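
/*
   Usage sketch (illustrative only; B is a hypothetical assembled MPIBAIJ matrix):
   obtain the block connectivity graph of B as a MATMPIADJ matrix, e.g. as input
   to a partitioner.  MatConvert() dispatches to the routine above through the
   composed "MatConvert_mpibaij_mpiadj_C" method.

     Mat adj;
     ierr = MatConvert(B,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/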
2970 
2971 #include <../src/mat/impls/aij/mpi/mpiaij.h>
2972 
2973 PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
2974 
2975 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
2976 {
2977   PetscErrorCode ierr;
2978   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2979   Mat            B;
2980   Mat_MPIAIJ     *b;
2981 
2982   PetscFunctionBegin;
2983   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
2984 
2985   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2986   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
2987   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2988   ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr);
2989   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
2990   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
2991   b    = (Mat_MPIAIJ*) B->data;
2992 
2993   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
2994   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
2995   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
2996   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
2997   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
2998   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2999   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3000   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3001   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3002   if (reuse == MAT_INPLACE_MATRIX) {
3003     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
3004   } else {
3005    *newmat = B;
3006   }
3007   PetscFunctionReturn(0);
3008 }
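
/*
   Usage sketch (illustrative only; A is a hypothetical assembled MPIBAIJ matrix):
   convert A to point-wise AIJ format.  MAT_INPLACE_MATRIX replaces A itself via
   MatHeaderReplace(), as in the routine above.

     Mat C;
     ierr = MatConvert(A,MATMPIAIJ,MAT_INITIAL_MATRIX,&C);CHKERRQ(ierr);
     or, in place:
     ierr = MatConvert(A,MATMPIAIJ,MAT_INPLACE_MATRIX,&A);CHKERRQ(ierr);
*/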
3009 
3010 /*MC
3011    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3012 
3013    Options Database Keys:
3014 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3015 . -mat_block_size <bs> - set the blocksize used to store the matrix
3016 - -mat_use_hash_table <fact> - use a hash table to save time when assembling the matrix
3017 
3018   Level: beginner
3019 
3020 .seealso: MatCreateBAIJ()
3021 M*/
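
/*
   Usage sketch (illustrative only; M and N are hypothetical global sizes): selecting
   this type from the options database in a program that calls MatSetFromOptions().

     ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
     ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
     ierr = MatSetFromOptions(A);CHKERRQ(ierr);
     then run with: -mat_type mpibaij -mat_block_size 2
*/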
3022 
3023 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3024 
3025 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3026 {
3027   Mat_MPIBAIJ    *b;
3028   PetscErrorCode ierr;
3029   PetscBool      flg = PETSC_FALSE;
3030 
3031   PetscFunctionBegin;
3032   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
3033   B->data = (void*)b;
3034 
3035   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3036   B->assembled = PETSC_FALSE;
3037 
3038   B->insertmode = NOT_SET_VALUES;
3039   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3040   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3041 
3042   /* build local table of row and column ownerships */
3043   ierr = PetscMalloc1(b->size+1,&b->rangebs);CHKERRQ(ierr);
3044 
3045   /* build cache for off array entries formed */
3046   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3047 
3048   b->donotstash  = PETSC_FALSE;
3049   b->colmap      = NULL;
3050   b->garray      = NULL;
3051   b->roworiented = PETSC_TRUE;
3052 
3053   /* stuff used in block assembly */
3054   b->barray = 0;
3055 
3056   /* stuff used for matrix vector multiply */
3057   b->lvec  = 0;
3058   b->Mvctx = 0;
3059 
3060   /* stuff for MatGetRow() */
3061   b->rowindices   = 0;
3062   b->rowvalues    = 0;
3063   b->getrowactive = PETSC_FALSE;
3064 
3065   /* hash table stuff */
3066   b->ht           = 0;
3067   b->hd           = 0;
3068   b->ht_size      = 0;
3069   b->ht_flag      = PETSC_FALSE;
3070   b->ht_fact      = 0;
3071   b->ht_total_ct  = 0;
3072   b->ht_insert_ct = 0;
3073 
3074   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3075   b->ijonly = PETSC_FALSE;
3076 
3077 
3078   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3079   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3080   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3081 #if defined(PETSC_HAVE_HYPRE)
3082   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_hypre_C",MatConvert_AIJ_HYPRE);CHKERRQ(ierr);
3083 #endif
3084   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3085   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3086   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3087   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3088   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3089   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3090   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3091 
3092   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3093   ierr = PetscOptionsName("-mat_use_hash_table","Use hash table to save time in constructing matrix","MatSetOption",&flg);CHKERRQ(ierr);
3094   if (flg) {
3095     PetscReal fact = 1.39;
3096     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3097     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3098     if (fact <= 1.0) fact = 1.39;
3099     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3100     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3101   }
3102   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3103   PetscFunctionReturn(0);
3104 }
3105 
3106 /*MC
3107    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3108 
3109    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3110    and MATMPIBAIJ otherwise.
3111 
3112    Options Database Keys:
3113 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3114 
3115   Level: beginner
3116 
3117 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3118 M*/
3119 
3120 /*@C
3121    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3122    (block compressed row).  For good matrix assembly performance
3123    the user should preallocate the matrix storage by setting the parameters
3124    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3125    performance can be increased by more than a factor of 50.
3126 
3127    Collective on Mat
3128 
3129    Input Parameters:
3130 +  B - the matrix
3131 .  bs   - size of block; the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3132           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3133 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3134            submatrix  (same for all local rows)
3135 .  d_nnz - array containing the number of block nonzeros in the various block rows
3136            of the diagonal portion of the local submatrix (possibly different for each block
3137            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3138            set it even if it is zero.
3139 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3140            submatrix (same for all local rows).
3141 -  o_nnz - array containing the number of block nonzeros in the various block rows of the
3142            off-diagonal portion of the local submatrix (possibly different for
3143            each block row) or NULL.
3144 
3145    If the *_nnz parameter is given then the *_nz parameter is ignored
3146 
3147    Options Database Keys:
3148 +   -mat_block_size - size of the blocks to use
3149 -   -mat_use_hash_table <fact> - use a hash table to save time when assembling the matrix
3150 
3151    Notes:
3152    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3153    then it must be used on all processors that share the object for that argument.
3154 
3155    Storage Information:
3156    For a square global matrix we define each processor's diagonal portion
3157    to be its local rows and the corresponding columns (a square submatrix);
3158    each processor's off-diagonal portion encompasses the remainder of the
3159    local matrix (a rectangular submatrix).
3160 
3161    The user can specify preallocated storage for the diagonal part of
3162    the local submatrix with either d_nz or d_nnz (not both).  Set
3163    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3164    memory allocation.  Likewise, specify preallocated storage for the
3165    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3166 
3167    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3168    the figure below we depict these three local rows and all columns (0-11).
3169 
3170 .vb
3171            0 1 2 3 4 5 6 7 8 9 10 11
3172           --------------------------
3173    row 3  |o o o d d d o o o o  o  o
3174    row 4  |o o o d d d o o o o  o  o
3175    row 5  |o o o d d d o o o o  o  o
3176           --------------------------
3177 .ve
3178 
3179    Thus, any entries in the d locations are stored in the d (diagonal)
3180    submatrix, and any entries in the o locations are stored in the
3181    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3182    stored simply in the MATSEQBAIJ format for compressed row storage.
3183 
3184    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3185    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3186    In general, for PDE problems in which most nonzeros are near the diagonal,
3187    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3188    or you will get TERRIBLE performance; see the users' manual chapter on
3189    matrices.
3190 
3191    You can call MatGetInfo() to get information on how effective the preallocation was;
3192    for example, check the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
3193    You can also run with the option -info and look for messages with the string
3194    malloc in them to see if additional memory allocation was needed.
3195 
3196    Level: intermediate
3197 
3198 .keywords: matrix, block, aij, compressed row, sparse, parallel
3199 
3200 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3201 @*/
3202 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3203 {
3204   PetscErrorCode ierr;
3205 
3206   PetscFunctionBegin;
3207   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3208   PetscValidType(B,1);
3209   PetscValidLogicalCollectiveInt(B,bs,2);
3210   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3211   PetscFunctionReturn(0);
3212 }
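
/*
   Usage sketch (illustrative only; the sizes and nonzero estimates are made up):
   the recommended MatCreate()/MatSetType()/MatXXXXSetPreallocation() sequence.
   Calling both the MPI and Seq preallocation routines keeps the code correct on
   any number of processes; the routine that does not apply to the actual type is
   simply ignored by PetscTryMethod().

     ierr = MatCreate(PETSC_COMM_WORLD,&B);CHKERRQ(ierr);
     ierr = MatSetSizes(B,PETSC_DECIDE,PETSC_DECIDE,M,M);CHKERRQ(ierr);
     ierr = MatSetType(B,MATBAIJ);CHKERRQ(ierr);
     ierr = MatSetFromOptions(B);CHKERRQ(ierr);
     ierr = MatMPIBAIJSetPreallocation(B,2,3,NULL,1,NULL);CHKERRQ(ierr);
     ierr = MatSeqBAIJSetPreallocation(B,2,3,NULL);CHKERRQ(ierr);
*/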
3213 
3214 /*@C
3215    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3216    (block compressed row).  For good matrix assembly performance
3217    the user should preallocate the matrix storage by setting the parameters
3218    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3219    performance can be increased by more than a factor of 50.
3220 
3221    Collective on MPI_Comm
3222 
3223    Input Parameters:
3224 +  comm - MPI communicator
3225 .  bs   - size of block; the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3226           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3227 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3228            This value should be the same as the local size used in creating the
3229            y vector for the matrix-vector product y = Ax.
3230 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3231            This value should be the same as the local size used in creating the
3232            x vector for the matrix-vector product y = Ax.
3233 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3234 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3235 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3236            submatrix  (same for all local rows)
3237 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3238            of the diagonal portion of the local submatrix (possibly different for each block
3239            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3240            and set it even if it is zero.
3241 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3242            submatrix (same for all local rows).
3243 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3244            off-diagonal portion of the local submatrix (possibly different for
3245            each block row) or NULL.
3246 
3247    Output Parameter:
3248 .  A - the matrix
3249 
3250    Options Database Keys:
3251 +   -mat_block_size - size of the blocks to use
3252 -   -mat_use_hash_table <fact> - use a hash table to save time when assembling the matrix
3253 
3254    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3255    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3256    [MatXXXXSetPreallocation() is, for example, MatSeqBAIJSetPreallocation()]
3257 
3258    Notes:
3259    If the *_nnz parameter is given then the *_nz parameter is ignored
3260 
3261    A nonzero block is any block that has 1 or more nonzeros in it
3262 
3263    The user MUST specify either the local or global matrix dimensions
3264    (possibly both).
3265 
3266    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3267    then it must be used on all processors that share the object for that argument.
3268 
3269    Storage Information:
3270    For a square global matrix we define each processor's diagonal portion
3271    to be its local rows and the corresponding columns (a square submatrix);
3272    each processor's off-diagonal portion encompasses the remainder of the
3273    local matrix (a rectangular submatrix).
3274 
3275    The user can specify preallocated storage for the diagonal part of
3276    the local submatrix with either d_nz or d_nnz (not both).  Set
3277    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3278    memory allocation.  Likewise, specify preallocated storage for the
3279    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3280 
3281    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3282    the figure below we depict these three local rows and all columns (0-11).
3283 
3284 .vb
3285            0 1 2 3 4 5 6 7 8 9 10 11
3286           --------------------------
3287    row 3  |o o o d d d o o o o  o  o
3288    row 4  |o o o d d d o o o o  o  o
3289    row 5  |o o o d d d o o o o  o  o
3290           --------------------------
3291 .ve
3292 
3293    Thus, any entries in the d locations are stored in the d (diagonal)
3294    submatrix, and any entries in the o locations are stored in the
3295    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3296    stored simply in the MATSEQBAIJ format for compressed row storage.
3297 
3298    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3299    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3300    In general, for PDE problems in which most nonzeros are near the diagonal,
3301    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3302    or you will get TERRIBLE performance; see the users' manual chapter on
3303    matrices.
3304 
3305    Level: intermediate
3306 
3307 .keywords: matrix, block, aij, compressed row, sparse, parallel
3308 
3309 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3310 @*/
3311 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3312 {
3313   PetscErrorCode ierr;
3314   PetscMPIInt    size;
3315 
3316   PetscFunctionBegin;
3317   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3318   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3319   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3320   if (size > 1) {
3321     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3322     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3323   } else {
3324     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3325     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3326   }
3327   PetscFunctionReturn(0);
3328 }
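
/*
   Usage sketch (illustrative only; the sizes and nonzero estimates are made up):
   create a 100x100 matrix with 2x2 blocks, estimating 3 diagonal and 1
   off-diagonal block nonzeros per block row on each process.

     Mat A;
     ierr = MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,100,100,3,NULL,1,NULL,&A);CHKERRQ(ierr);
*/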
3329 
3330 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3331 {
3332   Mat            mat;
3333   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3334   PetscErrorCode ierr;
3335   PetscInt       len=0;
3336 
3337   PetscFunctionBegin;
3338   *newmat = 0;
3339   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3340   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3341   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3342   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3343 
3344   mat->factortype   = matin->factortype;
3345   mat->preallocated = PETSC_TRUE;
3346   mat->assembled    = PETSC_TRUE;
3347   mat->insertmode   = NOT_SET_VALUES;
3348 
3349   a             = (Mat_MPIBAIJ*)mat->data;
3350   mat->rmap->bs = matin->rmap->bs;
3351   a->bs2        = oldmat->bs2;
3352   a->mbs        = oldmat->mbs;
3353   a->nbs        = oldmat->nbs;
3354   a->Mbs        = oldmat->Mbs;
3355   a->Nbs        = oldmat->Nbs;
3356 
3357   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3358   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3359 
3360   a->size         = oldmat->size;
3361   a->rank         = oldmat->rank;
3362   a->donotstash   = oldmat->donotstash;
3363   a->roworiented  = oldmat->roworiented;
3364   a->rowindices   = 0;
3365   a->rowvalues    = 0;
3366   a->getrowactive = PETSC_FALSE;
3367   a->barray       = 0;
3368   a->rstartbs     = oldmat->rstartbs;
3369   a->rendbs       = oldmat->rendbs;
3370   a->cstartbs     = oldmat->cstartbs;
3371   a->cendbs       = oldmat->cendbs;
3372 
3373   /* hash table stuff */
3374   a->ht           = 0;
3375   a->hd           = 0;
3376   a->ht_size      = 0;
3377   a->ht_flag      = oldmat->ht_flag;
3378   a->ht_fact      = oldmat->ht_fact;
3379   a->ht_total_ct  = 0;
3380   a->ht_insert_ct = 0;
3381 
3382   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3383   if (oldmat->colmap) {
3384 #if defined(PETSC_USE_CTABLE)
3385     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3386 #else
3387     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
3388     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3389     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3390 #endif
3391   } else a->colmap = 0;
3392 
3393   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3394     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
3395     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3396     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3397   } else a->garray = 0;
3398 
3399   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3400   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3401   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3402   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3403   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3404 
3405   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3406   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3407   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3408   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3409   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3410   *newmat = mat;
3411   PetscFunctionReturn(0);
3412 }
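
/*
   Usage sketch (illustrative only; A is a hypothetical assembled matrix): duplicate
   a matrix either with its numerical values or with only its nonzero structure.

     Mat Adup;
     ierr = MatDuplicate(A,MAT_COPY_VALUES,&Adup);CHKERRQ(ierr);
     or: MatDuplicate(A,MAT_DO_NOT_COPY_VALUES,&Adup) to copy only the structure
*/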
3413 
3414 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3415 {
3416   PetscErrorCode ierr;
3417   int            fd;
3418   PetscInt       i,nz,j,rstart,rend;
3419   PetscScalar    *vals,*buf;
3420   MPI_Comm       comm;
3421   MPI_Status     status;
3422   PetscMPIInt    rank,size,maxnz;
3423   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3424   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3425   PetscInt       jj,*mycols,*ibuf,bs = newmat->rmap->bs,Mbs,mbs,extra_rows,mmax;
3426   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3427   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3428   PetscInt       dcount,kmax,k,nzcount,tmp,mend;
3429 
3430   PetscFunctionBegin;
3431   /* force binary viewer to load .info file if it has not yet done so */
3432   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
3433   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3434   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3435   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3436   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3437   if (bs < 0) bs = 1;
3438 
3439   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3440   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3441   ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3442   if (!rank) {
3443     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3444     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
3445     if (header[3] < 0) SETERRQ(PetscObjectComm((PetscObject)newmat),PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format on disk, cannot load as MPIBAIJ");
3446   }
3447   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3448   M    = header[1]; N = header[2];
3449 
3450   /* If global sizes are set, check if they are consistent with that given in the file */
3451   if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows:Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
3452   if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols:Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);
3453 
3454   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3455 
3456   /*
3457      This code adds extra rows to make sure the number of rows is
3458      divisible by the blocksize
3459   */
3460   Mbs        = M/bs;
3461   extra_rows = bs - M + bs*Mbs;
3462   if (extra_rows == bs) extra_rows = 0;
3463   else                  Mbs++;
3464   if (extra_rows && !rank) {
3465     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3466   }
3467 
3468   /* determine ownership of all rows */
3469   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3470     mbs = Mbs/size + ((Mbs % size) > rank);
3471     m   = mbs*bs;
3472   } else { /* User set */
3473     m   = newmat->rmap->n;
3474     mbs = m/bs;
3475   }
3476   ierr = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
3477   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3478 
3479   /* process 0 needs enough room for process with most rows */
3480   if (!rank) {
3481     mmax = rowners[1];
3482     for (i=2; i<=size; i++) {
3483       mmax = PetscMax(mmax,rowners[i]);
3484     }
3485     mmax*=bs;
3486   } else mmax = -1;             /* unused, but compiler warns anyway */
3487 
3488   rowners[0] = 0;
3489   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3490   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3491   rstart = rowners[rank];
3492   rend   = rowners[rank+1];
3493 
3494   /* distribute row lengths to all processors */
3495   ierr = PetscMalloc1(m,&locrowlens);CHKERRQ(ierr);
3496   if (!rank) {
3497     mend = m;
3498     if (size == 1) mend = mend - extra_rows;
3499     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3500     for (j=mend; j<m; j++) locrowlens[j] = 1;
3501     ierr = PetscMalloc1(mmax,&rowlengths);CHKERRQ(ierr);
3502     ierr = PetscCalloc1(size,&procsnz);CHKERRQ(ierr);
3503     for (j=0; j<m; j++) {
3504       procsnz[0] += locrowlens[j];
3505     }
3506     for (i=1; i<size; i++) {
3507       mend = browners[i+1] - browners[i];
3508       if (i == size-1) mend = mend - extra_rows;
3509       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3510       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3511       /* calculate the number of nonzeros on each processor */
3512       for (j=0; j<browners[i+1]-browners[i]; j++) {
3513         procsnz[i] += rowlengths[j];
3514       }
3515       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3516     }
3517     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3518   } else {
3519     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3520   }
3521 
3522   if (!rank) {
3523     /* determine max buffer needed and allocate it */
3524     maxnz = procsnz[0];
3525     for (i=1; i<size; i++) {
3526       maxnz = PetscMax(maxnz,procsnz[i]);
3527     }
3528     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
3529 
3530     /* read in my part of the matrix column indices  */
3531     nz     = procsnz[0];
3532     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3533     mycols = ibuf;
3534     if (size == 1) nz -= extra_rows;
3535     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3536     if (size == 1) {
3537       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3538     }
3539 
3540     /* read in each other process's row lengths (except the last one's) and ship them off */
3541     for (i=1; i<size-1; i++) {
3542       nz   = procsnz[i];
3543       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3544       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3545     }
3546     /* read in the stuff for the last proc */
3547     if (size != 1) {
3548       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3549       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3550       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3551       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3552     }
3553     ierr = PetscFree(cols);CHKERRQ(ierr);
3554   } else {
3555     /* determine buffer space needed for message */
3556     nz = 0;
3557     for (i=0; i<m; i++) {
3558       nz += locrowlens[i];
3559     }
3560     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3561     mycols = ibuf;
3562     /* receive message of column indices */
3563     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3564     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3565     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3566   }
3567 
3568   /* loop over local rows, determining number of off diagonal entries */
3569   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
3570   ierr     = PetscCalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
3571   rowcount = 0; nzcount = 0;
3572   for (i=0; i<mbs; i++) {
3573     dcount  = 0;
3574     odcount = 0;
3575     for (j=0; j<bs; j++) {
3576       kmax = locrowlens[rowcount];
3577       for (k=0; k<kmax; k++) {
3578         tmp = mycols[nzcount++]/bs;
3579         if (!mask[tmp]) {
3580           mask[tmp] = 1;
3581           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3582           else masked1[dcount++] = tmp;
3583         }
3584       }
3585       rowcount++;
3586     }
3587 
3588     dlens[i]  = dcount;
3589     odlens[i] = odcount;
3590 
3591     /* zero out the mask elements we set */
3592     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3593     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3594   }
3595 
3596   ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3597   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3598 
3599   if (!rank) {
3600     ierr = PetscMalloc1(maxnz+1,&buf);CHKERRQ(ierr);
3601     /* read in my part of the matrix numerical values  */
3602     nz     = procsnz[0];
3603     vals   = buf;
3604     mycols = ibuf;
3605     if (size == 1) nz -= extra_rows;
3606     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3607     if (size == 1) {
3608       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3609     }
3610 
3611     /* insert into matrix */
3612     jj = rstart*bs;
3613     for (i=0; i<m; i++) {
3614       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3615       mycols += locrowlens[i];
3616       vals   += locrowlens[i];
3617       jj++;
3618     }
3619     /* read in other processors (except the last one) and ship out */
3620     for (i=1; i<size-1; i++) {
3621       nz   = procsnz[i];
3622       vals = buf;
3623       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3624       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3625     }
3626     /* the last proc */
3627     if (size != 1) {
3628       nz   = procsnz[i] - extra_rows;
3629       vals = buf;
3630       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3631       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3632       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3633     }
3634     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3635   } else {
3636     /* receive numeric values */
3637     ierr = PetscMalloc1(nz+1,&buf);CHKERRQ(ierr);
3638 
3639     /* receive message of values */
3640     vals   = buf;
3641     mycols = ibuf;
3642     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3643 
3644     /* insert into matrix */
3645     jj = rstart*bs;
3646     for (i=0; i<m; i++) {
3647       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3648       mycols += locrowlens[i];
3649       vals   += locrowlens[i];
3650       jj++;
3651     }
3652   }
3653   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3654   ierr = PetscFree(buf);CHKERRQ(ierr);
3655   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3656   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3657   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3658   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3659   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3660   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3661   PetscFunctionReturn(0);
3662 }
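
/*
   Usage sketch (illustrative only; "matrix.dat" is a hypothetical file previously
   written with MatView() on a binary viewer): load an MPIBAIJ matrix.  The block
   size can be supplied with -matload_block_size, which this routine reads itself.

     PetscViewer viewer;
     Mat         A;
     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
     ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
     ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
     ierr = MatLoad(A,viewer);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/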
3663 
3664 /*@
3665    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3666 
3667    Input Parameters:
3668 +  mat  - the matrix
3669 -  fact - factor
3670 
3671    Not Collective, each process can use a different factor
3672 
3673    Level: advanced
3674 
3675   Notes:
3676    This can also be set by the command line option: -mat_use_hash_table <fact>
3677 
3678 .keywords: matrix, hashtable, factor, HT
3679 
3680 .seealso: MatSetOption()
3681 @*/
3682 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3683 {
3684   PetscErrorCode ierr;
3685 
3686   PetscFunctionBegin;
3687   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3688   PetscFunctionReturn(0);
3689 }
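
/*
   Usage sketch (illustrative only; the factor 1.6 is an arbitrary choice): enable
   hash-table assembly and tune the table size, mirroring what the
   -mat_use_hash_table option does in MatCreate_MPIBAIJ() above.

     ierr = MatSetOption(A,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatMPIBAIJSetHashTableFactor(A,1.6);CHKERRQ(ierr);
*/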
3690 
3691 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3692 {
3693   Mat_MPIBAIJ *baij;
3694 
3695   PetscFunctionBegin;
3696   baij          = (Mat_MPIBAIJ*)mat->data;
3697   baij->ht_fact = fact;
3698   PetscFunctionReturn(0);
3699 }
3700 
3701 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3702 {
3703   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3704 
3705   PetscFunctionBegin;
3706   if (Ad)     *Ad     = a->A;
3707   if (Ao)     *Ao     = a->B;
3708   if (colmap) *colmap = a->garray;
3709   PetscFunctionReturn(0);
3710 }
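
/*
   Usage sketch (illustrative only): access the two sequential pieces of an MPIBAIJ
   matrix; colmap[k] gives the global block column of local block column k of the
   off-diagonal part Ao.  Pass NULL for any output that is not needed.

     Mat            Ad,Ao;
     const PetscInt *colmap;
     ierr = MatMPIBAIJGetSeqBAIJ(A,&Ad,&Ao,&colmap);CHKERRQ(ierr);
*/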
3711 
3712 /*
3713     Special version for direct calls from Fortran (to eliminate two function call overheads)
3714 */
3715 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3716 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3717 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3718 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3719 #endif
3720 
3721 /*@C
3722   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3723 
3724   Collective on Mat
3725 
3726   Input Parameters:
3727 + mat - the matrix
3728 . min - number of input rows
3729 . im - input rows
3730 . nin - number of input columns
3731 . in - input columns
3732 . v - numerical values input
3733 - addvin - INSERT_VALUES or ADD_VALUES
3734 
3735   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ() which is terrible code un-reuse.
3736 
3737   Level: advanced
3738 
3739 .seealso:   MatSetValuesBlocked()
3740 @*/
3741 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3742 {
3743   /* convert input arguments to C version */
3744   Mat        mat  = *matin;
3745   PetscInt   m    = *min, n = *nin;
3746   InsertMode addv = *addvin;
3747 
3748   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3749   const MatScalar *value;
3750   MatScalar       *barray     = baij->barray;
3751   PetscBool       roworiented = baij->roworiented;
3752   PetscErrorCode  ierr;
3753   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3754   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3755   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3756 
3757   PetscFunctionBegin;
3758   /* tasks normally handled by MatSetValuesBlocked() */
3759   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3760 #if defined(PETSC_USE_DEBUG)
3761   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3762   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3763 #endif
3764   if (mat->assembled) {
3765     mat->was_assembled = PETSC_TRUE;
3766     mat->assembled     = PETSC_FALSE;
3767   }
3768   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3769 
3770 
3771   if (!barray) {
3772     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
3773     baij->barray = barray;
3774   }
3775 
3776   if (roworiented) stepval = (n-1)*bs;
3777   else stepval = (m-1)*bs;
3778 
3779   for (i=0; i<m; i++) {
3780     if (im[i] < 0) continue;
3781 #if defined(PETSC_USE_DEBUG)
3782     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3783 #endif
3784     if (im[i] >= rstart && im[i] < rend) {
3785       row = im[i] - rstart;
3786       for (j=0; j<n; j++) {
3787         /* If NumCol = 1 then a copy is not required */
3788         if ((roworiented) && (n == 1)) {
3789           barray = (MatScalar*)v + i*bs2;
3790         } else if ((!roworiented) && (m == 1)) {
3791           barray = (MatScalar*)v + j*bs2;
3792         } else { /* Here a copy is required */
3793           if (roworiented) {
3794             value = v + i*(stepval+bs)*bs + j*bs;
3795           } else {
3796             value = v + j*(stepval+bs)*bs + i*bs;
3797           }
3798           for (ii=0; ii<bs; ii++,value+=stepval) {
3799             for (jj=0; jj<bs; jj++) {
3800               *barray++ = *value++;
3801             }
3802           }
3803           barray -=bs2;
3804         }
3805 
3806         if (in[j] >= cstart && in[j] < cend) {
3807           col  = in[j] - cstart;
3808           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3809         } else if (in[j] < 0) continue;
3810 #if defined(PETSC_USE_DEBUG)
3811         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3812 #endif
3813         else {
3814           if (mat->was_assembled) {
3815             if (!baij->colmap) {
3816               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3817             }
3818 
3819 #if defined(PETSC_USE_DEBUG)
3820 #if defined(PETSC_USE_CTABLE)
3821             { PetscInt data;
3822               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3823               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3824             }
3825 #else
3826             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3827 #endif
3828 #endif
3829 #if defined(PETSC_USE_CTABLE)
3830             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3831             col  = (col - 1)/bs;
3832 #else
3833             col = (baij->colmap[in[j]] - 1)/bs;
3834 #endif
3835             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
3836               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3837               col  =  in[j];
3838             }
3839           } else col = in[j];
3840           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3841         }
3842       }
3843     } else {
3844       if (!baij->donotstash) {
3845         if (roworiented) {
3846           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3847         } else {
3848           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3849         }
3850       }
3851     }
3852   }
3853 
3854   /* task normally handled by MatSetValuesBlocked() */
3855   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3856   PetscFunctionReturn(0);
3857 }
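
/*
   For reference, the equivalent call from C goes through MatSetValuesBlocked();
   a minimal sketch (illustrative only; bs = 2, row-oriented values, followed by
   the usual MatAssemblyBegin()/MatAssemblyEnd()):

     PetscScalar vblock[4] = {1.0,2.0,3.0,4.0};   one 2x2 block, stored by rows
     PetscInt    row = 0,col = 0;                 block (not point) indices
     ierr = MatSetValuesBlocked(mat,1,&row,1,&col,vblock,INSERT_VALUES);CHKERRQ(ierr);
*/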
3858 
3859 /*@
3860      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local rows
3861          in standard CSR format.
3862 
3863    Collective on MPI_Comm
3864 
3865    Input Parameters:
3866 +  comm - MPI communicator
3867 .  bs - the block size, only a block size of 1 is supported
3868 .  m - number of local rows (Cannot be PETSC_DECIDE)
3869 .  n - This value should be the same as the local size used in creating the
3870        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3871        calculated if N is given) For square matrices n is almost always m.
3872 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3873 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3874 .   i - row indices
3875 .   j - column indices
3876 -   a - matrix values
3877 
3878    Output Parameter:
3879 .   mat - the matrix
3880 
3881    Level: intermediate
3882 
3883    Notes:
3884        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
3885      thus you CANNOT change the matrix entries by changing the values of a[] after you have
3886      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
3887 
3888      The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
3889      the same as a three dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
3890      block, followed by the second column of the first block etc etc.  That is, the blocks are contiguous in memory
3891      with column-major ordering within blocks.
3892 
3893        The i and j indices are 0 based; the entries of i are offsets into the local j array.

.keywords: matrix, baij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
@*/
PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
  if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE or negative");
  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
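  /* the value array stores each block column-major (see Notes above), so temporarily insert column-oriented */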
  ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
  ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

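/*
    Stacks the sequential BAIJ matrix inmat from each process, one beneath the next, into the single
  parallel MPIBAIJ matrix *outmat. With MAT_INITIAL_MATRIX a symbolic phase first computes the block
  preallocation; the numeric phase then copies the values row by row.
*/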
PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
{
  PetscErrorCode ierr;
  PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
  PetscInt       *indx;
  PetscScalar    *values;

  PetscFunctionBegin;
  ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
    Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)inmat->data;
    PetscInt       *dnz,*onz,sum,mbs,Nbs;
    PetscInt       *bindx,rmax=a->rmax,j;

    ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
    mbs = m/bs; Nbs = N/cbs;
    if (n == PETSC_DECIDE) {
      ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
    }
    /* Check sum(n) = Nbs */
    ierr = MPIU_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (sum != Nbs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local block columns != global block columns %d",Nbs);

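    /* prefix sum of the local block-row counts gives this process's first global block row */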
    ierr    = MPI_Scan(&mbs,&rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart -= mbs;

    ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
    ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
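    /* convert each non-blocked row to block column indices and record its diagonal/off-diagonal nonzero pattern */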
    for (i=0; i<mbs; i++) {
      ierr = MatGetRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
      nnz = nnz/bs;
      for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
      ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
      ierr = MatRestoreRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
    }
    ierr = PetscFree(bindx);CHKERRQ(ierr);

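    /* n is in units of block columns, so the local scalar column size is n*bs */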
    ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
    ierr = MatSetSizes(*outmat,m,n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
    ierr = MatSetType(*outmat,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
    ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  }

  /* numeric phase: copy the values of inmat row by row into the parallel matrix */
  ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<m; i++) {
    ierr = MatGetRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
    Ii   = i + rstart;
    ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}