xref: /petsc/src/mat/impls/baij/mpi/mpibaij.c (revision f86b9fba6afbb510b5a4e5909e38406c0861ad7a)
1 
2 #include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/
3 #include <petscblaslapack.h>
4 
5 extern PetscErrorCode MatSetUpMultiply_MPIBAIJ(Mat);
6 extern PetscErrorCode MatDisAssemble_MPIBAIJ(Mat);
7 extern PetscErrorCode MatGetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt [],PetscScalar []);
8 extern PetscErrorCode MatSetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt [],const PetscScalar [],InsertMode);
9 extern PetscErrorCode MatSetValuesBlocked_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
10 extern PetscErrorCode MatGetRow_SeqBAIJ(Mat,PetscInt,PetscInt*,PetscInt*[],PetscScalar*[]);
11 extern PetscErrorCode MatRestoreRow_SeqBAIJ(Mat,PetscInt,PetscInt*,PetscInt*[],PetscScalar*[]);
12 extern PetscErrorCode MatZeroRows_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscScalar,Vec,Vec);
13 
14 #undef __FUNCT__
15 #define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
16 PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
17 {
18   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
19   PetscErrorCode ierr;
20   PetscInt       i,*idxb = 0;
21   PetscScalar    *va,*vb;
22   Vec            vtmp;
23 
24   PetscFunctionBegin;
25   ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
26   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
27   if (idx) {
28     for (i=0; i<A->rmap->n; i++) {
29       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
30     }
31   }
32 
33   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
34   if (idx) {ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);}
35   ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
36   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
37 
38   for (i=0; i<A->rmap->n; i++) {
39     if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
40       va[i] = vb[i];
41       if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
42     }
43   }
44 
45   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
46   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
47   ierr = PetscFree(idxb);CHKERRQ(ierr);
48   ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
49   PetscFunctionReturn(0);
50 }
51 
52 #undef __FUNCT__
53 #define __FUNCT__ "MatStoreValues_MPIBAIJ"
54 PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
55 {
56   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
57   PetscErrorCode ierr;
58 
59   PetscFunctionBegin;
60   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
61   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
62   PetscFunctionReturn(0);
63 }
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
67 PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
68 {
69   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
70   PetscErrorCode ierr;
71 
72   PetscFunctionBegin;
73   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
74   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
75   PetscFunctionReturn(0);
76 }
77 
78 /*
79      Local utility routine that creates a mapping from the global column
80    number to the local number in the off-diagonal part of the local
81    storage of the matrix.  This is done in a non scalable way since the
82    length of colmap equals the global matrix length.
83 */
84 #undef __FUNCT__
85 #define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
86 PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
87 {
88   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
89   Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
90   PetscErrorCode ierr;
91   PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;
92 
93   PetscFunctionBegin;
94 #if defined(PETSC_USE_CTABLE)
95   ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
96   for (i=0; i<nbs; i++) {
97     ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
98   }
99 #else
100   ierr = PetscMalloc((baij->Nbs+1)*sizeof(PetscInt),&baij->colmap);CHKERRQ(ierr);
101   ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
102   ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
103   for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
104 #endif
105   PetscFunctionReturn(0);
106 }
107 
108 #define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv) \
109   { \
110  \
111     brow = row/bs;  \
112     rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
113     rmax = aimax[brow]; nrow = ailen[brow]; \
114     bcol = col/bs; \
115     ridx = row % bs; cidx = col % bs; \
116     low  = 0; high = nrow; \
117     while (high-low > 3) { \
118       t = (low+high)/2; \
119       if (rp[t] > bcol) high = t; \
120       else              low  = t; \
121     } \
122     for (_i=low; _i<high; _i++) { \
123       if (rp[_i] > bcol) break; \
124       if (rp[_i] == bcol) { \
125         bap = ap +  bs2*_i + bs*cidx + ridx; \
126         if (addv == ADD_VALUES) *bap += value;  \
127         else                    *bap  = value;  \
128         goto a_noinsert; \
129       } \
130     } \
131     if (a->nonew == 1) goto a_noinsert; \
132     if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
133     MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
134     N = nrow++ - 1;  \
135     /* shift up all the later entries in this row */ \
136     for (ii=N; ii>=_i; ii--) { \
137       rp[ii+1] = rp[ii]; \
138       ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
139     } \
140     if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
141     rp[_i]                      = bcol;  \
142     ap[bs2*_i + bs*cidx + ridx] = value;  \
143 a_noinsert:; \
144     ailen[brow] = nrow; \
145   }
146 
147 #define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv) \
148   { \
149     brow = row/bs;  \
150     rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
151     rmax = bimax[brow]; nrow = bilen[brow]; \
152     bcol = col/bs; \
153     ridx = row % bs; cidx = col % bs; \
154     low  = 0; high = nrow; \
155     while (high-low > 3) { \
156       t = (low+high)/2; \
157       if (rp[t] > bcol) high = t; \
158       else              low  = t; \
159     } \
160     for (_i=low; _i<high; _i++) { \
161       if (rp[_i] > bcol) break; \
162       if (rp[_i] == bcol) { \
163         bap = ap +  bs2*_i + bs*cidx + ridx; \
164         if (addv == ADD_VALUES) *bap += value;  \
165         else                    *bap  = value;  \
166         goto b_noinsert; \
167       } \
168     } \
169     if (b->nonew == 1) goto b_noinsert; \
170     if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
171     MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
172     N = nrow++ - 1;  \
173     /* shift up all the later entries in this row */ \
174     for (ii=N; ii>=_i; ii--) { \
175       rp[ii+1] = rp[ii]; \
176       ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
177     } \
178     if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
179     rp[_i]                      = bcol;  \
180     ap[bs2*_i + bs*cidx + ridx] = value;  \
181 b_noinsert:; \
182     bilen[brow] = nrow; \
183   }
184 
185 #undef __FUNCT__
186 #define __FUNCT__ "MatSetValues_MPIBAIJ"
187 PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
188 {
189   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
190   MatScalar      value;
191   PetscBool      roworiented = baij->roworiented;
192   PetscErrorCode ierr;
193   PetscInt       i,j,row,col;
194   PetscInt       rstart_orig=mat->rmap->rstart;
195   PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
196   PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;
197 
198   /* Some Variables required in the macro */
199   Mat         A     = baij->A;
200   Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
201   PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
202   MatScalar   *aa   =a->a;
203 
204   Mat         B     = baij->B;
205   Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
206   PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
207   MatScalar   *ba   =b->a;
208 
209   PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
210   PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
211   MatScalar *ap,*bap;
212 
213   PetscFunctionBegin;
214   if (v) PetscValidScalarPointer(v,6);
215   for (i=0; i<m; i++) {
216     if (im[i] < 0) continue;
217 #if defined(PETSC_USE_DEBUG)
218     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
219 #endif
220     if (im[i] >= rstart_orig && im[i] < rend_orig) {
221       row = im[i] - rstart_orig;
222       for (j=0; j<n; j++) {
223         if (in[j] >= cstart_orig && in[j] < cend_orig) {
224           col = in[j] - cstart_orig;
225           if (roworiented) value = v[i*n+j];
226           else             value = v[i+j*m];
227           MatSetValues_SeqBAIJ_A_Private(row,col,value,addv);
228           /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
229         } else if (in[j] < 0) continue;
230 #if defined(PETSC_USE_DEBUG)
231         else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
232 #endif
233         else {
234           if (mat->was_assembled) {
235             if (!baij->colmap) {
236               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
237             }
238 #if defined(PETSC_USE_CTABLE)
239             ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
240             col  = col - 1;
241 #else
242             col = baij->colmap[in[j]/bs] - 1;
243 #endif
244             if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
245               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
246               col  =  in[j];
247               /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
248               B    = baij->B;
249               b    = (Mat_SeqBAIJ*)(B)->data;
250               bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
251               ba   =b->a;
252             } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
253             else col += in[j]%bs;
254           } else col = in[j];
255           if (roworiented) value = v[i*n+j];
256           else             value = v[i+j*m];
257           MatSetValues_SeqBAIJ_B_Private(row,col,value,addv);
258           /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
259         }
260       }
261     } else {
262       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
263       if (!baij->donotstash) {
264         mat->assembled = PETSC_FALSE;
265         if (roworiented) {
266           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
267         } else {
268           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
269         }
270       }
271     }
272   }
273   PetscFunctionReturn(0);
274 }
275 
276 #undef __FUNCT__
277 #define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
278 PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
279 {
280   Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
281   const PetscScalar *value;
282   MatScalar         *barray     = baij->barray;
283   PetscBool         roworiented = baij->roworiented;
284   PetscErrorCode    ierr;
285   PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
286   PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
287   PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
288 
289   PetscFunctionBegin;
290   if (!barray) {
291     ierr         = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr);
292     baij->barray = barray;
293   }
294 
295   if (roworiented) stepval = (n-1)*bs;
296   else stepval = (m-1)*bs;
297 
298   for (i=0; i<m; i++) {
299     if (im[i] < 0) continue;
300 #if defined(PETSC_USE_DEBUG)
301     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
302 #endif
303     if (im[i] >= rstart && im[i] < rend) {
304       row = im[i] - rstart;
305       for (j=0; j<n; j++) {
306         /* If NumCol = 1 then a copy is not required */
307         if ((roworiented) && (n == 1)) {
308           barray = (MatScalar*)v + i*bs2;
309         } else if ((!roworiented) && (m == 1)) {
310           barray = (MatScalar*)v + j*bs2;
311         } else { /* Here a copy is required */
312           if (roworiented) {
313             value = v + (i*(stepval+bs) + j)*bs;
314           } else {
315             value = v + (j*(stepval+bs) + i)*bs;
316           }
317           for (ii=0; ii<bs; ii++,value+=bs+stepval) {
318             for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
319             barray += bs;
320           }
321           barray -= bs2;
322         }
323 
324         if (in[j] >= cstart && in[j] < cend) {
325           col  = in[j] - cstart;
326           ierr = MatSetValuesBlocked_SeqBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
327         } else if (in[j] < 0) continue;
328 #if defined(PETSC_USE_DEBUG)
329         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
330 #endif
331         else {
332           if (mat->was_assembled) {
333             if (!baij->colmap) {
334               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
335             }
336 
337 #if defined(PETSC_USE_DEBUG)
338 #if defined(PETSC_USE_CTABLE)
339             { PetscInt data;
340               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
341               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
342             }
343 #else
344             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
345 #endif
346 #endif
347 #if defined(PETSC_USE_CTABLE)
348             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
349             col  = (col - 1)/bs;
350 #else
351             col = (baij->colmap[in[j]] - 1)/bs;
352 #endif
353             if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
354               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
355               col  =  in[j];
356             } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", bs*im[i], bs*in[j]);
357           } else col = in[j];
358           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
359         }
360       }
361     } else {
362       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
363       if (!baij->donotstash) {
364         if (roworiented) {
365           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
366         } else {
367           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
368         }
369       }
370     }
371   }
372   PetscFunctionReturn(0);
373 }
374 
375 #define HASH_KEY 0.6180339887
376 #define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
377 /* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
378 /* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
379 #undef __FUNCT__
380 #define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
381 PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
382 {
383   Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
384   PetscBool      roworiented = baij->roworiented;
385   PetscErrorCode ierr;
386   PetscInt       i,j,row,col;
387   PetscInt       rstart_orig=mat->rmap->rstart;
388   PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
389   PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
390   PetscReal      tmp;
391   MatScalar      **HD = baij->hd,value;
392 #if defined(PETSC_USE_DEBUG)
393   PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
394 #endif
395 
396   PetscFunctionBegin;
397   if (v) PetscValidScalarPointer(v,6);
398   for (i=0; i<m; i++) {
399 #if defined(PETSC_USE_DEBUG)
400     if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
401     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
402 #endif
403     row = im[i];
404     if (row >= rstart_orig && row < rend_orig) {
405       for (j=0; j<n; j++) {
406         col = in[j];
407         if (roworiented) value = v[i*n+j];
408         else             value = v[i+j*m];
409         /* Look up PetscInto the Hash Table */
410         key = (row/bs)*Nbs+(col/bs)+1;
411         h1  = HASH(size,key,tmp);
412 
413 
414         idx = h1;
415 #if defined(PETSC_USE_DEBUG)
416         insert_ct++;
417         total_ct++;
418         if (HT[idx] != key) {
419           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
420           if (idx == size) {
421             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
422             if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
423           }
424         }
425 #else
426         if (HT[idx] != key) {
427           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
428           if (idx == size) {
429             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
430             if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
431           }
432         }
433 #endif
434         /* A HASH table entry is found, so insert the values at the correct address */
435         if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
436         else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
437       }
438     } else if (!baij->donotstash) {
439       if (roworiented) {
440         ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
441       } else {
442         ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
443       }
444     }
445   }
446 #if defined(PETSC_USE_DEBUG)
447   baij->ht_total_ct  = total_ct;
448   baij->ht_insert_ct = insert_ct;
449 #endif
450   PetscFunctionReturn(0);
451 }
452 
453 #undef __FUNCT__
454 #define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
455 PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
456 {
457   Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
458   PetscBool         roworiented = baij->roworiented;
459   PetscErrorCode    ierr;
460   PetscInt          i,j,ii,jj,row,col;
461   PetscInt          rstart=baij->rstartbs;
462   PetscInt          rend  =mat->rmap->rend,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
463   PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
464   PetscReal         tmp;
465   MatScalar         **HD = baij->hd,*baij_a;
466   const PetscScalar *v_t,*value;
467 #if defined(PETSC_USE_DEBUG)
468   PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
469 #endif
470 
471   PetscFunctionBegin;
472   if (roworiented) stepval = (n-1)*bs;
473   else stepval = (m-1)*bs;
474 
475   for (i=0; i<m; i++) {
476 #if defined(PETSC_USE_DEBUG)
477     if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
478     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
479 #endif
480     row = im[i];
481     v_t = v + i*nbs2;
482     if (row >= rstart && row < rend) {
483       for (j=0; j<n; j++) {
484         col = in[j];
485 
486         /* Look up into the Hash Table */
487         key = row*Nbs+col+1;
488         h1  = HASH(size,key,tmp);
489 
490         idx = h1;
491 #if defined(PETSC_USE_DEBUG)
492         total_ct++;
493         insert_ct++;
494         if (HT[idx] != key) {
495           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
496           if (idx == size) {
497             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
498             if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
499           }
500         }
501 #else
502         if (HT[idx] != key) {
503           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
504           if (idx == size) {
505             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
506             if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
507           }
508         }
509 #endif
510         baij_a = HD[idx];
511         if (roworiented) {
512           /*value = v + i*(stepval+bs)*bs + j*bs;*/
513           /* value = v + (i*(stepval+bs)+j)*bs; */
514           value = v_t;
515           v_t  += bs;
516           if (addv == ADD_VALUES) {
517             for (ii=0; ii<bs; ii++,value+=stepval) {
518               for (jj=ii; jj<bs2; jj+=bs) {
519                 baij_a[jj] += *value++;
520               }
521             }
522           } else {
523             for (ii=0; ii<bs; ii++,value+=stepval) {
524               for (jj=ii; jj<bs2; jj+=bs) {
525                 baij_a[jj] = *value++;
526               }
527             }
528           }
529         } else {
530           value = v + j*(stepval+bs)*bs + i*bs;
531           if (addv == ADD_VALUES) {
532             for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
533               for (jj=0; jj<bs; jj++) {
534                 baij_a[jj] += *value++;
535               }
536             }
537           } else {
538             for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
539               for (jj=0; jj<bs; jj++) {
540                 baij_a[jj] = *value++;
541               }
542             }
543           }
544         }
545       }
546     } else {
547       if (!baij->donotstash) {
548         if (roworiented) {
549           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
550         } else {
551           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
552         }
553       }
554     }
555   }
556 #if defined(PETSC_USE_DEBUG)
557   baij->ht_total_ct  = total_ct;
558   baij->ht_insert_ct = insert_ct;
559 #endif
560   PetscFunctionReturn(0);
561 }
562 
563 #undef __FUNCT__
564 #define __FUNCT__ "MatGetValues_MPIBAIJ"
565 PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
566 {
567   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
568   PetscErrorCode ierr;
569   PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
570   PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;
571 
572   PetscFunctionBegin;
573   for (i=0; i<m; i++) {
574     if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
575     if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
576     if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
577       row = idxm[i] - bsrstart;
578       for (j=0; j<n; j++) {
579         if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
580         if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
581         if (idxn[j] >= bscstart && idxn[j] < bscend) {
582           col  = idxn[j] - bscstart;
583           ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
584         } else {
585           if (!baij->colmap) {
586             ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
587           }
588 #if defined(PETSC_USE_CTABLE)
589           ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
590           data--;
591 #else
592           data = baij->colmap[idxn[j]/bs]-1;
593 #endif
594           if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
595           else {
596             col  = data + idxn[j]%bs;
597             ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
598           }
599         }
600       }
601     } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
602   }
603   PetscFunctionReturn(0);
604 }
605 
606 #undef __FUNCT__
607 #define __FUNCT__ "MatNorm_MPIBAIJ"
608 PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
609 {
610   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
611   Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
612   PetscErrorCode ierr;
613   PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
614   PetscReal      sum = 0.0;
615   MatScalar      *v;
616 
617   PetscFunctionBegin;
618   if (baij->size == 1) {
619     ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
620   } else {
621     if (type == NORM_FROBENIUS) {
622       v  = amat->a;
623       nz = amat->nz*bs2;
624       for (i=0; i<nz; i++) {
625         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
626       }
627       v  = bmat->a;
628       nz = bmat->nz*bs2;
629       for (i=0; i<nz; i++) {
630         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
631       }
632       ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
633       *nrm = PetscSqrtReal(*nrm);
634     } else if (type == NORM_1) { /* max column sum */
635       PetscReal *tmp,*tmp2;
636       PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
637       ierr = PetscMalloc2(mat->cmap->N,PetscReal,&tmp,mat->cmap->N,PetscReal,&tmp2);CHKERRQ(ierr);
638       ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
639       v    = amat->a; jj = amat->j;
640       for (i=0; i<amat->nz; i++) {
641         for (j=0; j<bs; j++) {
642           col = bs*(cstart + *jj) + j; /* column index */
643           for (row=0; row<bs; row++) {
644             tmp[col] += PetscAbsScalar(*v);  v++;
645           }
646         }
647         jj++;
648       }
649       v = bmat->a; jj = bmat->j;
650       for (i=0; i<bmat->nz; i++) {
651         for (j=0; j<bs; j++) {
652           col = bs*garray[*jj] + j;
653           for (row=0; row<bs; row++) {
654             tmp[col] += PetscAbsScalar(*v); v++;
655           }
656         }
657         jj++;
658       }
659       ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
660       *nrm = 0.0;
661       for (j=0; j<mat->cmap->N; j++) {
662         if (tmp2[j] > *nrm) *nrm = tmp2[j];
663       }
664       ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
665     } else if (type == NORM_INFINITY) { /* max row sum */
666       PetscReal *sums;
667       ierr = PetscMalloc(bs*sizeof(PetscReal),&sums);CHKERRQ(ierr);
668       sum  = 0.0;
669       for (j=0; j<amat->mbs; j++) {
670         for (row=0; row<bs; row++) sums[row] = 0.0;
671         v  = amat->a + bs2*amat->i[j];
672         nz = amat->i[j+1]-amat->i[j];
673         for (i=0; i<nz; i++) {
674           for (col=0; col<bs; col++) {
675             for (row=0; row<bs; row++) {
676               sums[row] += PetscAbsScalar(*v); v++;
677             }
678           }
679         }
680         v  = bmat->a + bs2*bmat->i[j];
681         nz = bmat->i[j+1]-bmat->i[j];
682         for (i=0; i<nz; i++) {
683           for (col=0; col<bs; col++) {
684             for (row=0; row<bs; row++) {
685               sums[row] += PetscAbsScalar(*v); v++;
686             }
687           }
688         }
689         for (row=0; row<bs; row++) {
690           if (sums[row] > sum) sum = sums[row];
691         }
692       }
693       ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
694       ierr = PetscFree(sums);CHKERRQ(ierr);
695     } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
696   }
697   PetscFunctionReturn(0);
698 }
699 
700 /*
701   Creates the hash table, and sets the table
702   This table is created only once.
703   If new entried need to be added to the matrix
704   then the hash table has to be destroyed and
705   recreated.
706 */
707 #undef __FUNCT__
708 #define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
709 PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
710 {
711   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
712   Mat            A     = baij->A,B=baij->B;
713   Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
714   PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
715   PetscErrorCode ierr;
716   PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
717   PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
718   PetscInt       *HT,key;
719   MatScalar      **HD;
720   PetscReal      tmp;
721 #if defined(PETSC_USE_INFO)
722   PetscInt ct=0,max=0;
723 #endif
724 
725   PetscFunctionBegin;
726   if (baij->ht) PetscFunctionReturn(0);
727 
728   baij->ht_size = (PetscInt)(factor*nz);
729   ht_size       = baij->ht_size;
730 
731   /* Allocate Memory for Hash Table */
732   ierr = PetscMalloc2(ht_size,MatScalar*,&baij->hd,ht_size,PetscInt,&baij->ht);CHKERRQ(ierr);
733   ierr = PetscMemzero(baij->hd,ht_size*sizeof(MatScalar*));CHKERRQ(ierr);
734   ierr = PetscMemzero(baij->ht,ht_size*sizeof(PetscInt));CHKERRQ(ierr);
735   HD   = baij->hd;
736   HT   = baij->ht;
737 
738   /* Loop Over A */
739   for (i=0; i<a->mbs; i++) {
740     for (j=ai[i]; j<ai[i+1]; j++) {
741       row = i+rstart;
742       col = aj[j]+cstart;
743 
744       key = row*Nbs + col + 1;
745       h1  = HASH(ht_size,key,tmp);
746       for (k=0; k<ht_size; k++) {
747         if (!HT[(h1+k)%ht_size]) {
748           HT[(h1+k)%ht_size] = key;
749           HD[(h1+k)%ht_size] = a->a + j*bs2;
750           break;
751 #if defined(PETSC_USE_INFO)
752         } else {
753           ct++;
754 #endif
755         }
756       }
757 #if defined(PETSC_USE_INFO)
758       if (k> max) max = k;
759 #endif
760     }
761   }
762   /* Loop Over B */
763   for (i=0; i<b->mbs; i++) {
764     for (j=bi[i]; j<bi[i+1]; j++) {
765       row = i+rstart;
766       col = garray[bj[j]];
767       key = row*Nbs + col + 1;
768       h1  = HASH(ht_size,key,tmp);
769       for (k=0; k<ht_size; k++) {
770         if (!HT[(h1+k)%ht_size]) {
771           HT[(h1+k)%ht_size] = key;
772           HD[(h1+k)%ht_size] = b->a + j*bs2;
773           break;
774 #if defined(PETSC_USE_INFO)
775         } else {
776           ct++;
777 #endif
778         }
779       }
780 #if defined(PETSC_USE_INFO)
781       if (k> max) max = k;
782 #endif
783     }
784   }
785 
786   /* Print Summary */
787 #if defined(PETSC_USE_INFO)
788   for (i=0,j=0; i<ht_size; i++) {
789     if (HT[i]) j++;
790   }
791   ierr = PetscInfo2(mat,"Average Search = %5.2f,max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
792 #endif
793   PetscFunctionReturn(0);
794 }
795 
796 #undef __FUNCT__
797 #define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
798 PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
799 {
800   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
801   PetscErrorCode ierr;
802   PetscInt       nstash,reallocs;
803   InsertMode     addv;
804 
805   PetscFunctionBegin;
806   if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);
807 
808   /* make sure all processors are either in INSERTMODE or ADDMODE */
809   ierr = MPI_Allreduce((PetscEnum*)&mat->insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
810   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
811   mat->insertmode = addv; /* in case this processor had no cache */
812 
813   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
814   ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
815   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
816   ierr = PetscInfo2(mat,"Stash has %D entries,uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
817   ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
818   ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
819   PetscFunctionReturn(0);
820 }
821 
822 #undef __FUNCT__
823 #define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
824 PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
825 {
826   Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
827   Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
828   PetscErrorCode ierr;
829   PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
830   PetscInt       *row,*col;
831   PetscBool      r1,r2,r3,other_disassembled;
832   MatScalar      *val;
833   InsertMode     addv = mat->insertmode;
834   PetscMPIInt    n;
835 
836   PetscFunctionBegin;
837   /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
838   if (!baij->donotstash && !mat->nooffprocentries) {
839     while (1) {
840       ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
841       if (!flg) break;
842 
843       for (i=0; i<n;) {
844         /* Now identify the consecutive vals belonging to the same row */
845         for (j=i,rstart=row[j]; j<n; j++) {
846           if (row[j] != rstart) break;
847         }
848         if (j < n) ncols = j-i;
849         else       ncols = n-i;
850         /* Now assemble all these values with a single function call */
851         ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
852         i    = j;
853       }
854     }
855     ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
856     /* Now process the block-stash. Since the values are stashed column-oriented,
857        set the roworiented flag to column oriented, and after MatSetValues()
858        restore the original flags */
859     r1 = baij->roworiented;
860     r2 = a->roworiented;
861     r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;
862 
863     baij->roworiented = PETSC_FALSE;
864     a->roworiented    = PETSC_FALSE;
865 
866     (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
867     while (1) {
868       ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
869       if (!flg) break;
870 
871       for (i=0; i<n;) {
872         /* Now identify the consecutive vals belonging to the same row */
873         for (j=i,rstart=row[j]; j<n; j++) {
874           if (row[j] != rstart) break;
875         }
876         if (j < n) ncols = j-i;
877         else       ncols = n-i;
878         ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,addv);CHKERRQ(ierr);
879         i    = j;
880       }
881     }
882     ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);
883 
884     baij->roworiented = r1;
885     a->roworiented    = r2;
886 
887     ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
888   }
889 
890   ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
891   ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);
892 
893   /* determine if any processor has disassembled, if so we must
894      also disassemble ourselfs, in order that we may reassemble. */
895   /*
896      if nonzero structure of submatrix B cannot change then we know that
897      no processor disassembled thus we can skip this stuff
898   */
899   if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
900     ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
901     if (mat->was_assembled && !other_disassembled) {
902       ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
903     }
904   }
905 
906   if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
907     ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
908   }
909   ierr = MatSetOption(baij->B,MAT_CHECK_COMPRESSED_ROW,PETSC_FALSE);CHKERRQ(ierr);
910   ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
911   ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);
912 
913 #if defined(PETSC_USE_INFO)
914   if (baij->ht && mode== MAT_FINAL_ASSEMBLY) {
915     ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);
916 
917     baij->ht_total_ct  = 0;
918     baij->ht_insert_ct = 0;
919   }
920 #endif
921   if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
922     ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);
923 
924     mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
925     mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
926   }
927 
928   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
929 
930   baij->rowvalues = 0;
931   PetscFunctionReturn(0);
932 }
933 
934 #include <petscdraw.h>
935 #undef __FUNCT__
936 #define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
937 static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
938 {
939   Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
940   PetscErrorCode    ierr;
941   PetscMPIInt       size = baij->size,rank = baij->rank;
942   PetscInt          bs   = mat->rmap->bs;
943   PetscBool         iascii,isdraw;
944   PetscViewer       sviewer;
945   PetscViewerFormat format;
946 
947   PetscFunctionBegin;
948   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
949   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
950   if (iascii) {
951     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
952     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
953       MatInfo info;
954       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
955       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
956       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
957       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
958                                                 rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
959       ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
960       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
961       ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
962       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
963       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
964       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
965       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
966       ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
967       PetscFunctionReturn(0);
968     } else if (format == PETSC_VIEWER_ASCII_INFO) {
969       ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
970       PetscFunctionReturn(0);
971     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
972       PetscFunctionReturn(0);
973     }
974   }
975 
976   if (isdraw) {
977     PetscDraw draw;
978     PetscBool isnull;
979     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
980     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
981   }
982 
983   if (size == 1) {
984     ierr = PetscObjectSetName((PetscObject)baij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
985     ierr = MatView(baij->A,viewer);CHKERRQ(ierr);
986   } else {
987     /* assemble the entire matrix onto first processor. */
988     Mat         A;
989     Mat_SeqBAIJ *Aloc;
990     PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
991     MatScalar   *a;
992 
993     /* Here we are creating a temporary matrix, so will assume MPIBAIJ is acceptable */
994     /* Perhaps this should be the type of mat? */
995     ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
996     if (!rank) {
997       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
998     } else {
999       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
1000     }
1001     ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
1002     ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
1003     ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
1004     ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);
1005 
1006     /* copy over the A part */
1007     Aloc = (Mat_SeqBAIJ*)baij->A->data;
1008     ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1009     ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr);
1010 
1011     for (i=0; i<mbs; i++) {
1012       rvals[0] = bs*(baij->rstartbs + i);
1013       for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1014       for (j=ai[i]; j<ai[i+1]; j++) {
1015         col = (baij->cstartbs+aj[j])*bs;
1016         for (k=0; k<bs; k++) {
1017           ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
1018           col++; a += bs;
1019         }
1020       }
1021     }
1022     /* copy over the B part */
1023     Aloc = (Mat_SeqBAIJ*)baij->B->data;
1024     ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1025     for (i=0; i<mbs; i++) {
1026       rvals[0] = bs*(baij->rstartbs + i);
1027       for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1028       for (j=ai[i]; j<ai[i+1]; j++) {
1029         col = baij->garray[aj[j]]*bs;
1030         for (k=0; k<bs; k++) {
1031           ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
1032           col++; a += bs;
1033         }
1034       }
1035     }
1036     ierr = PetscFree(rvals);CHKERRQ(ierr);
1037     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1038     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1039     /*
1040        Everyone has to call to draw the matrix since the graphics waits are
1041        synchronized across all processors that share the PetscDraw object
1042     */
1043     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
1044     if (!rank) {
1045       ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
1046       /* Set the type name to MATMPIBAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqBAIJ_ASCII()*/
1047       PetscStrcpy(((PetscObject)((Mat_MPIBAIJ*)(A->data))->A)->type_name,MATMPIBAIJ);
1048       ierr = MatView(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
1049     }
1050     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
1051     ierr = MatDestroy(&A);CHKERRQ(ierr);
1052   }
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 #undef __FUNCT__
1057 #define __FUNCT__ "MatView_MPIBAIJ_Binary"
1058 static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
1059 {
1060   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
1061   Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
1062   Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
1063   PetscErrorCode ierr;
1064   PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
1065   PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
1066   int            fd;
1067   PetscScalar    *column_values;
1068   FILE           *file;
1069   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
1070   PetscInt       message_count,flowcontrolcount;
1071 
1072   PetscFunctionBegin;
1073   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
1074   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
1075   nz   = bs2*(A->nz + B->nz);
1076   rlen = mat->rmap->n;
1077   if (!rank) {
1078     header[0] = MAT_FILE_CLASSID;
1079     header[1] = mat->rmap->N;
1080     header[2] = mat->cmap->N;
1081 
1082     ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1083     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
1084     ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1085     /* get largest number of rows any processor has */
1086     range = mat->rmap->range;
1087     for (i=1; i<size; i++) {
1088       rlen = PetscMax(rlen,range[i+1] - range[i]);
1089     }
1090   } else {
1091     ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1092   }
1093 
1094   ierr = PetscMalloc((rlen/bs)*sizeof(PetscInt),&crow_lens);CHKERRQ(ierr);
1095   /* compute lengths of each row  */
1096   for (i=0; i<a->mbs; i++) {
1097     crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
1098   }
1099   /* store the row lengths to the file */
1100   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1101   if (!rank) {
1102     MPI_Status status;
1103     ierr = PetscMalloc(rlen*sizeof(PetscInt),&row_lens);CHKERRQ(ierr);
1104     rlen = (range[1] - range[0])/bs;
1105     for (i=0; i<rlen; i++) {
1106       for (j=0; j<bs; j++) {
1107         row_lens[i*bs+j] = bs*crow_lens[i];
1108       }
1109     }
1110     ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1111     for (i=1; i<size; i++) {
1112       rlen = (range[i+1] - range[i])/bs;
1113       ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
1114       ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1115       for (k=0; k<rlen; k++) {
1116         for (j=0; j<bs; j++) {
1117           row_lens[k*bs+j] = bs*crow_lens[k];
1118         }
1119       }
1120       ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1121     }
1122     ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
1123     ierr = PetscFree(row_lens);CHKERRQ(ierr);
1124   } else {
1125     ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
1126     ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1127     ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
1128   }
1129   ierr = PetscFree(crow_lens);CHKERRQ(ierr);
1130 
1131   /* load up the local column indices. Include for all rows not just one for each block row since process 0 does not have the
1132      information needed to make it for each row from a block row. This does require more communication but still not more than
1133      the communication needed for the nonzero values  */
1134   nzmax = nz; /*  space a largest processor needs */
1135   ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1136   ierr  = PetscMalloc(nzmax*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
1137   cnt   = 0;
1138   for (i=0; i<a->mbs; i++) {
1139     pcnt = cnt;
1140     for (j=B->i[i]; j<B->i[i+1]; j++) {
1141       if ((col = garray[B->j[j]]) > cstart) break;
1142       for (l=0; l<bs; l++) {
1143         column_indices[cnt++] = bs*col+l;
1144       }
1145     }
1146     for (k=A->i[i]; k<A->i[i+1]; k++) {
1147       for (l=0; l<bs; l++) {
1148         column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
1149       }
1150     }
1151     for (; j<B->i[i+1]; j++) {
1152       for (l=0; l<bs; l++) {
1153         column_indices[cnt++] = bs*garray[B->j[j]]+l;
1154       }
1155     }
1156     len = cnt - pcnt;
1157     for (k=1; k<bs; k++) {
1158       ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
1159       cnt += len;
1160     }
1161   }
1162   if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);
1163 
1164   /* store the columns to the file */
1165   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1166   if (!rank) {
1167     MPI_Status status;
1168     ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1169     for (i=1; i<size; i++) {
1170       ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
1171       ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1172       ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1173       ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1174     }
1175     ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
1176   } else {
1177     ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
1178     ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1179     ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1180     ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
1181   }
1182   ierr = PetscFree(column_indices);CHKERRQ(ierr);
1183 
1184   /* load up the numerical values */
1185   ierr = PetscMalloc(nzmax*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
1186   cnt  = 0;
1187   for (i=0; i<a->mbs; i++) {
1188     rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
1189     for (j=B->i[i]; j<B->i[i+1]; j++) {
1190       if (garray[B->j[j]] > cstart) break;
1191       for (l=0; l<bs; l++) {
1192         for (ll=0; ll<bs; ll++) {
1193           column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
1194         }
1195       }
1196       cnt += bs;
1197     }
1198     for (k=A->i[i]; k<A->i[i+1]; k++) {
1199       for (l=0; l<bs; l++) {
1200         for (ll=0; ll<bs; ll++) {
1201           column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
1202         }
1203       }
1204       cnt += bs;
1205     }
1206     for (; j<B->i[i+1]; j++) {
1207       for (l=0; l<bs; l++) {
1208         for (ll=0; ll<bs; ll++) {
1209           column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
1210         }
1211       }
1212       cnt += bs;
1213     }
1214     cnt += (bs-1)*rlen;
1215   }
1216   if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);
1217 
1218   /* store the column values to the file */
1219   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1220   if (!rank) {
1221     MPI_Status status;
1222     ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
1223     for (i=1; i<size; i++) {
1224       ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
1225       ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1226       ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1227       ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
1228     }
1229     ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
1230   } else {
1231     ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
1232     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1233     ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1234     ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
1235   }
1236   ierr = PetscFree(column_values);CHKERRQ(ierr);
1237 
1238   ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
1239   if (file) {
1240     fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
1241   }
1242   PetscFunctionReturn(0);
1243 }
1244 
1245 #undef __FUNCT__
1246 #define __FUNCT__ "MatView_MPIBAIJ"
1247 PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
1248 {
1249   PetscErrorCode ierr;
1250   PetscBool      iascii,isdraw,issocket,isbinary;
1251 
1252   PetscFunctionBegin;
1253   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1254   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
1255   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
1256   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
1257   if (iascii || isdraw || issocket) {
1258     ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
1259   } else if (isbinary) {
1260     ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
1261   }
1262   PetscFunctionReturn(0);
1263 }
1264 
1265 #undef __FUNCT__
1266 #define __FUNCT__ "MatDestroy_MPIBAIJ"
1267 PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
1268 {
1269   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1270   PetscErrorCode ierr;
1271 
1272   PetscFunctionBegin;
1273 #if defined(PETSC_USE_LOG)
1274   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
1275 #endif
1276   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
1277   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
1278   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
1279   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
1280 #if defined(PETSC_USE_CTABLE)
1281   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
1282 #else
1283   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
1284 #endif
1285   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
1286   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
1287   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
1288   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
1289   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
1290   ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
1291   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
1292   ierr = PetscFree(mat->data);CHKERRQ(ierr);
1293 
1294   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
1295   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
1296   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
1297   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
1298   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
1299   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
1300   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
1301   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
1302   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
1303   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
1304   PetscFunctionReturn(0);
1305 }
1306 
1307 #undef __FUNCT__
1308 #define __FUNCT__ "MatMult_MPIBAIJ"
1309 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1310 {
1311   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1312   PetscErrorCode ierr;
1313   PetscInt       nt;
1314 
1315   PetscFunctionBegin;
1316   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1317   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1318   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1319   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible parition of A and yy");
1320   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1321   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1322   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1323   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1324   PetscFunctionReturn(0);
1325 }
1326 
1327 #undef __FUNCT__
1328 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1329 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1330 {
1331   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1332   PetscErrorCode ierr;
1333 
1334   PetscFunctionBegin;
1335   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1336   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1337   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1338   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1339   PetscFunctionReturn(0);
1340 }
1341 
1342 #undef __FUNCT__
1343 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1344 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1345 {
1346   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1347   PetscErrorCode ierr;
1348   PetscBool      merged;
1349 
1350   PetscFunctionBegin;
1351   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1352   /* do nondiagonal part */
1353   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1354   if (!merged) {
1355     /* send it on its way */
1356     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1357     /* do local part */
1358     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1359     /* receive remote parts: note this assumes the values are not actually */
1360     /* inserted in yy until the next line */
1361     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1362   } else {
1363     /* do local part */
1364     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1365     /* send it on its way */
1366     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1367     /* values actually were received in the Begin() but we need to call this nop */
1368     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1369   }
1370   PetscFunctionReturn(0);
1371 }
1372 
1373 #undef __FUNCT__
1374 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1375 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1376 {
1377   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1378   PetscErrorCode ierr;
1379 
1380   PetscFunctionBegin;
1381   /* do nondiagonal part */
1382   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1383   /* send it on its way */
1384   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1385   /* do local part */
1386   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1387   /* receive remote parts: note this assumes the values are not actually */
1388   /* inserted in yy until the next line, which is true for my implementation*/
1389   /* but is not perhaps always true. */
1390   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1391   PetscFunctionReturn(0);
1392 }
1393 
1394 /*
1395   This only works correctly for square matrices where the subblock A->A is the
1396    diagonal block
1397 */
1398 #undef __FUNCT__
1399 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1400 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1401 {
1402   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1403   PetscErrorCode ierr;
1404 
1405   PetscFunctionBegin;
1406   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1407   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1408   PetscFunctionReturn(0);
1409 }
1410 
1411 #undef __FUNCT__
1412 #define __FUNCT__ "MatScale_MPIBAIJ"
1413 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1414 {
1415   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1416   PetscErrorCode ierr;
1417 
1418   PetscFunctionBegin;
1419   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1420   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1421   PetscFunctionReturn(0);
1422 }
1423 
1424 #undef __FUNCT__
1425 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1426 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1427 {
1428   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1429   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1430   PetscErrorCode ierr;
1431   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1432   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1433   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1434 
1435   PetscFunctionBegin;
1436   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1437   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1438   mat->getrowactive = PETSC_TRUE;
1439 
1440   if (!mat->rowvalues && (idx || v)) {
1441     /*
1442         allocate enough space to hold information from the longest row.
1443     */
1444     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1445     PetscInt    max = 1,mbs = mat->mbs,tmp;
1446     for (i=0; i<mbs; i++) {
1447       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1448       if (max < tmp) max = tmp;
1449     }
1450     ierr = PetscMalloc2(max*bs2,PetscScalar,&mat->rowvalues,max*bs2,PetscInt,&mat->rowindices);CHKERRQ(ierr);
1451   }
1452   lrow = row - brstart;
1453 
1454   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1455   if (!v)   {pvA = 0; pvB = 0;}
1456   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1457   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1458   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1459   nztot = nzA + nzB;
1460 
1461   cmap = mat->garray;
1462   if (v  || idx) {
1463     if (nztot) {
1464       /* Sort by increasing column numbers, assuming A and B already sorted */
1465       PetscInt imark = -1;
1466       if (v) {
1467         *v = v_p = mat->rowvalues;
1468         for (i=0; i<nzB; i++) {
1469           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1470           else break;
1471         }
1472         imark = i;
1473         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1474         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1475       }
1476       if (idx) {
1477         *idx = idx_p = mat->rowindices;
1478         if (imark > -1) {
1479           for (i=0; i<imark; i++) {
1480             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1481           }
1482         } else {
1483           for (i=0; i<nzB; i++) {
1484             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1485             else break;
1486           }
1487           imark = i;
1488         }
1489         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1490         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
1491       }
1492     } else {
1493       if (idx) *idx = 0;
1494       if (v)   *v   = 0;
1495     }
1496   }
1497   *nz  = nztot;
1498   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1499   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1500   PetscFunctionReturn(0);
1501 }
1502 
1503 #undef __FUNCT__
1504 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1505 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1506 {
1507   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1508 
1509   PetscFunctionBegin;
1510   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1511   baij->getrowactive = PETSC_FALSE;
1512   PetscFunctionReturn(0);
1513 }
1514 
1515 #undef __FUNCT__
1516 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1517 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1518 {
1519   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1520   PetscErrorCode ierr;
1521 
1522   PetscFunctionBegin;
1523   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1524   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1525   PetscFunctionReturn(0);
1526 }
1527 
1528 #undef __FUNCT__
1529 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1530 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1531 {
1532   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1533   Mat            A  = a->A,B = a->B;
1534   PetscErrorCode ierr;
1535   PetscReal      isend[5],irecv[5];
1536 
1537   PetscFunctionBegin;
1538   info->block_size = (PetscReal)matin->rmap->bs;
1539 
1540   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1541 
1542   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1543   isend[3] = info->memory;  isend[4] = info->mallocs;
1544 
1545   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1546 
1547   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1548   isend[3] += info->memory;  isend[4] += info->mallocs;
1549 
1550   if (flag == MAT_LOCAL) {
1551     info->nz_used      = isend[0];
1552     info->nz_allocated = isend[1];
1553     info->nz_unneeded  = isend[2];
1554     info->memory       = isend[3];
1555     info->mallocs      = isend[4];
1556   } else if (flag == MAT_GLOBAL_MAX) {
1557     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1558 
1559     info->nz_used      = irecv[0];
1560     info->nz_allocated = irecv[1];
1561     info->nz_unneeded  = irecv[2];
1562     info->memory       = irecv[3];
1563     info->mallocs      = irecv[4];
1564   } else if (flag == MAT_GLOBAL_SUM) {
1565     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1566 
1567     info->nz_used      = irecv[0];
1568     info->nz_allocated = irecv[1];
1569     info->nz_unneeded  = irecv[2];
1570     info->memory       = irecv[3];
1571     info->mallocs      = irecv[4];
1572   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1573   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1574   info->fill_ratio_needed = 0;
1575   info->factor_mallocs    = 0;
1576   PetscFunctionReturn(0);
1577 }
1578 
1579 #undef __FUNCT__
1580 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1581 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1582 {
1583   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1584   PetscErrorCode ierr;
1585 
1586   PetscFunctionBegin;
1587   switch (op) {
1588   case MAT_NEW_NONZERO_LOCATIONS:
1589   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1590   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1591   case MAT_KEEP_NONZERO_PATTERN:
1592   case MAT_NEW_NONZERO_LOCATION_ERR:
1593     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1594     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1595     break;
1596   case MAT_ROW_ORIENTED:
1597     a->roworiented = flg;
1598 
1599     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1600     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1601     break;
1602   case MAT_NEW_DIAGONALS:
1603     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1604     break;
1605   case MAT_IGNORE_OFF_PROC_ENTRIES:
1606     a->donotstash = flg;
1607     break;
1608   case MAT_USE_HASH_TABLE:
1609     a->ht_flag = flg;
1610     break;
1611   case MAT_SYMMETRIC:
1612   case MAT_STRUCTURALLY_SYMMETRIC:
1613   case MAT_HERMITIAN:
1614   case MAT_SYMMETRY_ETERNAL:
1615     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1616     break;
1617   default:
1618     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1619   }
1620   PetscFunctionReturn(0);
1621 }
1622 
1623 #undef __FUNCT__
1624 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1625 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1626 {
1627   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1628   Mat_SeqBAIJ    *Aloc;
1629   Mat            B;
1630   PetscErrorCode ierr;
1631   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1632   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1633   MatScalar      *a;
1634 
1635   PetscFunctionBegin;
1636   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1637   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1638     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1639     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1640     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1641     /* Do not know preallocation information, but must set block size */
1642     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1643   } else {
1644     B = *matout;
1645   }
1646 
1647   /* copy over the A part */
1648   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1649   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1650   ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr);
1651 
1652   for (i=0; i<mbs; i++) {
1653     rvals[0] = bs*(baij->rstartbs + i);
1654     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1655     for (j=ai[i]; j<ai[i+1]; j++) {
1656       col = (baij->cstartbs+aj[j])*bs;
1657       for (k=0; k<bs; k++) {
1658         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1659 
1660         col++; a += bs;
1661       }
1662     }
1663   }
1664   /* copy over the B part */
1665   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1666   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1667   for (i=0; i<mbs; i++) {
1668     rvals[0] = bs*(baij->rstartbs + i);
1669     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1670     for (j=ai[i]; j<ai[i+1]; j++) {
1671       col = baij->garray[aj[j]]*bs;
1672       for (k=0; k<bs; k++) {
1673         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1674         col++;
1675         a += bs;
1676       }
1677     }
1678   }
1679   ierr = PetscFree(rvals);CHKERRQ(ierr);
1680   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1681   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1682 
1683   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1684   else {
1685     ierr = MatHeaderMerge(A,B);CHKERRQ(ierr);
1686   }
1687   PetscFunctionReturn(0);
1688 }
1689 
1690 #undef __FUNCT__
1691 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1692 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1693 {
1694   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1695   Mat            a     = baij->A,b = baij->B;
1696   PetscErrorCode ierr;
1697   PetscInt       s1,s2,s3;
1698 
1699   PetscFunctionBegin;
1700   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1701   if (rr) {
1702     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1703     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1704     /* Overlap communication with computation. */
1705     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1706   }
1707   if (ll) {
1708     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1709     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1710     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1711   }
1712   /* scale  the diagonal block */
1713   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1714 
1715   if (rr) {
1716     /* Do a scatter end and then right scale the off-diagonal block */
1717     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1718     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1719   }
1720   PetscFunctionReturn(0);
1721 }
1722 
1723 #undef __FUNCT__
1724 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1725 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1726 {
1727   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1728   PetscErrorCode    ierr;
1729   PetscMPIInt       imdex,size = l->size,n,rank = l->rank;
1730   PetscInt          i,*owners = A->rmap->range;
1731   PetscInt          *nprocs,j,idx,nsends,row;
1732   PetscInt          nmax,*svalues,*starts,*owner,nrecvs;
1733   PetscInt          *rvalues,tag = ((PetscObject)A)->tag,count,base,slen,*source,lastidx = -1;
1734   PetscInt          *lens,*lrows,*values,rstart_bs=A->rmap->rstart;
1735   MPI_Comm          comm;
1736   MPI_Request       *send_waits,*recv_waits;
1737   MPI_Status        recv_status,*send_status;
1738   const PetscScalar *xx;
1739   PetscScalar       *bb;
1740 #if defined(PETSC_DEBUG)
1741   PetscBool         found = PETSC_FALSE;
1742 #endif
1743 
1744   PetscFunctionBegin;
1745   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1746   /*  first count number of contributors to each processor */
1747   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
1748   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
1749   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr);  /* see note*/
1750   j    = 0;
1751   for (i=0; i<N; i++) {
1752     if (lastidx > (idx = rows[i])) j = 0;
1753     lastidx = idx;
1754     for (; j<size; j++) {
1755       if (idx >= owners[j] && idx < owners[j+1]) {
1756         nprocs[2*j]++;
1757         nprocs[2*j+1] = 1;
1758         owner[i]      = j;
1759 #if defined(PETSC_DEBUG)
1760         found = PETSC_TRUE;
1761 #endif
1762         break;
1763       }
1764     }
1765 #if defined(PETSC_DEBUG)
1766     if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
1767     found = PETSC_FALSE;
1768 #endif
1769   }
1770   nsends = 0;  for (i=0; i<size; i++) nsends += nprocs[2*i+1];
1771 
1772   if (A->nooffproczerorows) {
1773     if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but set an off process zero row");
1774     nrecvs = nsends;
1775     nmax   = N;
1776   } else {
1777     /* inform other processors of number of messages and max length*/
1778     ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
1779   }
1780 
1781   /* post receives:   */
1782   ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
1783   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
1784   for (i=0; i<nrecvs; i++) {
1785     ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
1786   }
1787 
1788   /* do sends:
1789      1) starts[i] gives the starting index in svalues for stuff going to
1790      the ith processor
1791   */
1792   ierr      = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
1793   ierr      = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
1794   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
1795   starts[0] = 0;
1796   for (i=1; i<size; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
1797   for (i=0; i<N; i++) {
1798     svalues[starts[owner[i]]++] = rows[i];
1799   }
1800 
1801   starts[0] = 0;
1802   for (i=1; i<size+1; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
1803   count = 0;
1804   for (i=0; i<size; i++) {
1805     if (nprocs[2*i+1]) {
1806       ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
1807     }
1808   }
1809   ierr = PetscFree(starts);CHKERRQ(ierr);
1810 
1811   base = owners[rank];
1812 
1813   /*  wait on receives */
1814   ierr  = PetscMalloc2(nrecvs+1,PetscInt,&lens,nrecvs+1,PetscInt,&source);CHKERRQ(ierr);
1815   count = nrecvs;
1816   slen  = 0;
1817   while (count) {
1818     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
1819     /* unpack receives into our local space */
1820     ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
1821 
1822     source[imdex] = recv_status.MPI_SOURCE;
1823     lens[imdex]   = n;
1824     slen         += n;
1825     count--;
1826   }
1827   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1828 
1829   /* move the data into the send scatter */
1830   ierr  = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
1831   count = 0;
1832   for (i=0; i<nrecvs; i++) {
1833     values = rvalues + i*nmax;
1834     for (j=0; j<lens[i]; j++) {
1835       lrows[count++] = values[j] - base;
1836     }
1837   }
1838   ierr = PetscFree(rvalues);CHKERRQ(ierr);
1839   ierr = PetscFree2(lens,source);CHKERRQ(ierr);
1840   ierr = PetscFree(owner);CHKERRQ(ierr);
1841   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1842 
1843   /* fix right hand side if needed */
1844   if (x && b) {
1845     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1846     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1847     for (i=0; i<slen; i++) {
1848       bb[lrows[i]] = diag*xx[lrows[i]];
1849     }
1850     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1851     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1852   }
1853 
1854   /* actually zap the local rows */
1855   /*
1856         Zero the required rows. If the "diagonal block" of the matrix
1857      is square and the user wishes to set the diagonal we use separate
1858      code so that MatSetValues() is not called for each diagonal allocating
1859      new memory, thus calling lots of mallocs and slowing things down.
1860 
1861   */
1862   /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
1863   ierr = MatZeroRows_SeqBAIJ(l->B,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1864   if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
1865     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr);
1866   } else if (diag != 0.0) {
1867     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1868     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1869        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1870     for (i=0; i<slen; i++) {
1871       row  = lrows[i] + rstart_bs;
1872       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1873     }
1874     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1875     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1876   } else {
1877     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1878   }
1879 
1880   ierr = PetscFree(lrows);CHKERRQ(ierr);
1881 
1882   /* wait on sends */
1883   if (nsends) {
1884     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
1885     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
1886     ierr = PetscFree(send_status);CHKERRQ(ierr);
1887   }
1888   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1889   ierr = PetscFree(svalues);CHKERRQ(ierr);
1890   PetscFunctionReturn(0);
1891 }
1892 
1893 #undef __FUNCT__
1894 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
1895 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1896 {
1897   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1898   PetscErrorCode ierr;
1899 
1900   PetscFunctionBegin;
1901   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1902   PetscFunctionReturn(0);
1903 }
1904 
1905 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
1906 
1907 #undef __FUNCT__
1908 #define __FUNCT__ "MatEqual_MPIBAIJ"
1909 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
1910 {
1911   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
1912   Mat            a,b,c,d;
1913   PetscBool      flg;
1914   PetscErrorCode ierr;
1915 
1916   PetscFunctionBegin;
1917   a = matA->A; b = matA->B;
1918   c = matB->A; d = matB->B;
1919 
1920   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1921   if (flg) {
1922     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1923   }
1924   ierr = MPI_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1925   PetscFunctionReturn(0);
1926 }
1927 
1928 #undef __FUNCT__
1929 #define __FUNCT__ "MatCopy_MPIBAIJ"
1930 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
1931 {
1932   PetscErrorCode ierr;
1933   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1934   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
1935 
1936   PetscFunctionBegin;
1937   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1938   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1939     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1940   } else {
1941     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1942     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1943   }
1944   PetscFunctionReturn(0);
1945 }
1946 
1947 #undef __FUNCT__
1948 #define __FUNCT__ "MatSetUp_MPIBAIJ"
1949 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
1950 {
1951   PetscErrorCode ierr;
1952 
1953   PetscFunctionBegin;
1954   ierr =  MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1955   PetscFunctionReturn(0);
1956 }
1957 
1958 #undef __FUNCT__
1959 #define __FUNCT__ "MatAXPY_MPIBAIJ"
1960 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1961 {
1962   PetscErrorCode ierr;
1963   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
1964   PetscBLASInt   bnz,one=1;
1965   Mat_SeqBAIJ    *x,*y;
1966 
1967   PetscFunctionBegin;
1968   if (str == SAME_NONZERO_PATTERN) {
1969     PetscScalar alpha = a;
1970     x    = (Mat_SeqBAIJ*)xx->A->data;
1971     y    = (Mat_SeqBAIJ*)yy->A->data;
1972     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
1973     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
1974     x    = (Mat_SeqBAIJ*)xx->B->data;
1975     y    = (Mat_SeqBAIJ*)yy->B->data;
1976     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
1977     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
1978   } else {
1979     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1980   }
1981   PetscFunctionReturn(0);
1982 }
1983 
1984 #undef __FUNCT__
1985 #define __FUNCT__ "MatRealPart_MPIBAIJ"
1986 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
1987 {
1988   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1989   PetscErrorCode ierr;
1990 
1991   PetscFunctionBegin;
1992   ierr = MatRealPart(a->A);CHKERRQ(ierr);
1993   ierr = MatRealPart(a->B);CHKERRQ(ierr);
1994   PetscFunctionReturn(0);
1995 }
1996 
1997 #undef __FUNCT__
1998 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
1999 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2000 {
2001   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2002   PetscErrorCode ierr;
2003 
2004   PetscFunctionBegin;
2005   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2006   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2007   PetscFunctionReturn(0);
2008 }
2009 
2010 #undef __FUNCT__
2011 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2012 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2013 {
2014   PetscErrorCode ierr;
2015   IS             iscol_local;
2016   PetscInt       csize;
2017 
2018   PetscFunctionBegin;
2019   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2020   if (call == MAT_REUSE_MATRIX) {
2021     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2022     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2023   } else {
2024     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2025   }
2026   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2027   if (call == MAT_INITIAL_MATRIX) {
2028     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2029     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2030   }
2031   PetscFunctionReturn(0);
2032 }
2033 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2034 #undef __FUNCT__
2035 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2036 /*
2037   Not great since it makes two copies of the submatrix, first an SeqBAIJ
2038   in local and then by concatenating the local matrices the end result.
2039   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ()
2040 */
2041 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2042 {
2043   PetscErrorCode ierr;
2044   PetscMPIInt    rank,size;
2045   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2046   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2047   Mat            M,Mreuse;
2048   MatScalar      *vwork,*aa;
2049   MPI_Comm       comm;
2050   IS             isrow_new, iscol_new;
2051   PetscBool      idflag,allrows, allcols;
2052   Mat_SeqBAIJ    *aij;
2053 
2054   PetscFunctionBegin;
2055   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2056   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2057   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2058   /* The compression and expansion should be avoided. Doesn't point
2059      out errors, might change the indices, hence buggey */
2060   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2061   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2062 
2063   /* Check for special case: each processor gets entire matrix columns */
2064   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2065   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2066   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2067   else allcols = PETSC_FALSE;
2068 
2069   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2070   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2071   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2072   else allrows = PETSC_FALSE;
2073 
2074   if (call ==  MAT_REUSE_MATRIX) {
2075     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2076     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2077     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2078   } else {
2079     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2080   }
2081   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2082   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2083   /*
2084       m - number of local rows
2085       n - number of columns (same on all processors)
2086       rstart - first row in new global matrix generated
2087   */
2088   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2089   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2090   m    = m/bs;
2091   n    = n/bs;
2092 
2093   if (call == MAT_INITIAL_MATRIX) {
2094     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2095     ii  = aij->i;
2096     jj  = aij->j;
2097 
2098     /*
2099         Determine the number of non-zeros in the diagonal and off-diagonal
2100         portions of the matrix in order to do correct preallocation
2101     */
2102 
2103     /* first get start and end of "diagonal" columns */
2104     if (csize == PETSC_DECIDE) {
2105       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2106       if (mglobal == n*bs) { /* square matrix */
2107         nlocal = m;
2108       } else {
2109         nlocal = n/size + ((n % size) > rank);
2110       }
2111     } else {
2112       nlocal = csize/bs;
2113     }
2114     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2115     rstart = rend - nlocal;
2116     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2117 
2118     /* next, compute all the lengths */
2119     ierr  = PetscMalloc2(m+1,PetscInt,&dlens,m+1,PetscInt,&olens);CHKERRQ(ierr);
2120     for (i=0; i<m; i++) {
2121       jend = ii[i+1] - ii[i];
2122       olen = 0;
2123       dlen = 0;
2124       for (j=0; j<jend; j++) {
2125         if (*jj < rstart || *jj >= rend) olen++;
2126         else dlen++;
2127         jj++;
2128       }
2129       olens[i] = olen;
2130       dlens[i] = dlen;
2131     }
2132     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2133     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2134     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2135     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2136     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2137   } else {
2138     PetscInt ml,nl;
2139 
2140     M    = *newmat;
2141     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2142     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2143     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2144     /*
2145          The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
2146        rather than the slower MatSetValues().
2147     */
2148     M->was_assembled = PETSC_TRUE;
2149     M->assembled     = PETSC_FALSE;
2150   }
2151   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2152   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2153   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2154   ii   = aij->i;
2155   jj   = aij->j;
2156   aa   = aij->a;
2157   for (i=0; i<m; i++) {
2158     row   = rstart/bs + i;
2159     nz    = ii[i+1] - ii[i];
2160     cwork = jj;     jj += nz;
2161     vwork = aa;     aa += nz*bs*bs;
2162     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2163   }
2164 
2165   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2166   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2167   *newmat = M;
2168 
2169   /* save submatrix used in processor for next request */
2170   if (call ==  MAT_INITIAL_MATRIX) {
2171     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2172     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2173   }
2174   PetscFunctionReturn(0);
2175 }
2176 
2177 #undef __FUNCT__
2178 #define __FUNCT__ "MatPermute_MPIBAIJ"
2179 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2180 {
2181   MPI_Comm       comm,pcomm;
2182   PetscInt       first,rlocal_size,clocal_size,nrows;
2183   const PetscInt *rows;
2184   PetscMPIInt    size;
2185   IS             crowp,growp,irowp,lrowp,lcolp;
2186   PetscErrorCode ierr;
2187 
2188   PetscFunctionBegin;
2189   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2190   /* make a collective version of 'rowp' */
2191   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2192   if (pcomm==comm) {
2193     crowp = rowp;
2194   } else {
2195     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2196     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2197     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2198     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2199   }
2200   /* collect the global row permutation and invert it */
2201   ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr);
2202   ierr = ISSetPermutation(growp);CHKERRQ(ierr);
2203   if (pcomm!=comm) {
2204     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2205   }
2206   ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr);
2207   ierr = ISDestroy(&growp);CHKERRQ(ierr);
2208   /* get the local target indices */
2209   ierr = MatGetOwnershipRange(A,&first,NULL);CHKERRQ(ierr);
2210   ierr = MatGetLocalSize(A,&rlocal_size,&clocal_size);CHKERRQ(ierr);
2211   ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr);
2212   ierr = ISCreateGeneral(MPI_COMM_SELF,rlocal_size,rows+first,PETSC_COPY_VALUES,&lrowp);CHKERRQ(ierr);
2213   ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr);
2214   ierr = ISDestroy(&irowp);CHKERRQ(ierr);
2215   /* the column permutation is so much easier;
2216      make a local version of 'colp' and invert it */
2217   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2218   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2219   if (size==1) {
2220     lcolp = colp;
2221   } else {
2222     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2223   }
2224   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2225   /* now we just get the submatrix */
2226   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,lrowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2227   if (size>1) {
2228     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2229   }
2230   /* clean up */
2231   ierr = ISDestroy(&lrowp);CHKERRQ(ierr);
2232   PetscFunctionReturn(0);
2233 }
2234 
2235 #undef __FUNCT__
2236 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2237 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2238 {
2239   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2240   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2241 
2242   PetscFunctionBegin;
2243   if (nghosts) *nghosts = B->nbs;
2244   if (ghosts) *ghosts = baij->garray;
2245   PetscFunctionReturn(0);
2246 }
2247 
2248 #undef __FUNCT__
2249 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2250 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2251 {
2252   Mat            B;
2253   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2254   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2255   Mat_SeqAIJ     *b;
2256   PetscErrorCode ierr;
2257   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2258   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2259   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2260 
2261   PetscFunctionBegin;
2262   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2263   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2264 
2265   /* ----------------------------------------------------------------
2266      Tell every processor the number of nonzeros per row
2267   */
2268   ierr = PetscMalloc((A->rmap->N/bs)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
2269   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2270     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2271   }
2272   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2273   ierr      = PetscMalloc(2*size*sizeof(PetscMPIInt),&recvcounts);CHKERRQ(ierr);
2274   displs    = recvcounts + size;
2275   for (i=0; i<size; i++) {
2276     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2277     displs[i]     = A->rmap->range[i]/bs;
2278   }
2279 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2280   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2281 #else
2282   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2283 #endif
2284   /* ---------------------------------------------------------------
2285      Create the sequential matrix of the same type as the local block diagonal
2286   */
2287   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2288   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2289   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2290   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2291   b    = (Mat_SeqAIJ*)B->data;
2292 
2293   /*--------------------------------------------------------------------
2294     Copy my part of matrix column indices over
2295   */
2296   sendcount  = ad->nz + bd->nz;
2297   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2298   a_jsendbuf = ad->j;
2299   b_jsendbuf = bd->j;
2300   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2301   cnt        = 0;
2302   for (i=0; i<n; i++) {
2303 
2304     /* put in lower diagonal portion */
2305     m = bd->i[i+1] - bd->i[i];
2306     while (m > 0) {
2307       /* is it above diagonal (in bd (compressed) numbering) */
2308       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2309       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2310       m--;
2311     }
2312 
2313     /* put in diagonal portion */
2314     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2315       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2316     }
2317 
2318     /* put in upper diagonal portion */
2319     while (m-- > 0) {
2320       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2321     }
2322   }
2323   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2324 
2325   /*--------------------------------------------------------------------
2326     Gather all column indices to all processors
2327   */
2328   for (i=0; i<size; i++) {
2329     recvcounts[i] = 0;
2330     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2331       recvcounts[i] += lens[j];
2332     }
2333   }
2334   displs[0] = 0;
2335   for (i=1; i<size; i++) {
2336     displs[i] = displs[i-1] + recvcounts[i-1];
2337   }
2338 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2339   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2340 #else
2341   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2342 #endif
2343   /*--------------------------------------------------------------------
2344     Assemble the matrix into useable form (note numerical values not yet set)
2345   */
2346   /* set the b->ilen (length of each row) values */
2347   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2348   /* set the b->i indices */
2349   b->i[0] = 0;
2350   for (i=1; i<=A->rmap->N/bs; i++) {
2351     b->i[i] = b->i[i-1] + lens[i-1];
2352   }
2353   ierr = PetscFree(lens);CHKERRQ(ierr);
2354   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2355   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2356   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2357 
2358   if (A->symmetric) {
2359     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2360   } else if (A->hermitian) {
2361     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2362   } else if (A->structurally_symmetric) {
2363     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2364   }
2365   *newmat = B;
2366   PetscFunctionReturn(0);
2367 }
2368 
2369 #undef __FUNCT__
2370 #define __FUNCT__ "MatSOR_MPIBAIJ"
2371 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2372 {
2373   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2374   PetscErrorCode ierr;
2375   Vec            bb1 = 0;
2376 
2377   PetscFunctionBegin;
2378   if (flag == SOR_APPLY_UPPER) {
2379     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2380     PetscFunctionReturn(0);
2381   }
2382 
2383   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS) {
2384     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2385   }
2386 
2387   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2388     if (flag & SOR_ZERO_INITIAL_GUESS) {
2389       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2390       its--;
2391     }
2392 
2393     while (its--) {
2394       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2395       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2396 
2397       /* update rhs: bb1 = bb - B*x */
2398       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2399       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2400 
2401       /* local sweep */
2402       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2403     }
2404   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2405     if (flag & SOR_ZERO_INITIAL_GUESS) {
2406       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2407       its--;
2408     }
2409     while (its--) {
2410       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2411       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2412 
2413       /* update rhs: bb1 = bb - B*x */
2414       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2415       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2416 
2417       /* local sweep */
2418       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2419     }
2420   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2421     if (flag & SOR_ZERO_INITIAL_GUESS) {
2422       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2423       its--;
2424     }
2425     while (its--) {
2426       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2427       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2428 
2429       /* update rhs: bb1 = bb - B*x */
2430       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2431       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2432 
2433       /* local sweep */
2434       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2435     }
2436   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Parallel version of SOR requested not supported");
2437 
2438   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2439   PetscFunctionReturn(0);
2440 }
2441 
2442 #undef __FUNCT__
2443 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2444 PetscErrorCode  MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2445 {
2446   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2447   PetscErrorCode ierr;
2448 
2449   PetscFunctionBegin;
2450   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2451   PetscFunctionReturn(0);
2452 }
2453 
2454 
2455 /* -------------------------------------------------------------------*/
2456 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2457                                        MatGetRow_MPIBAIJ,
2458                                        MatRestoreRow_MPIBAIJ,
2459                                        MatMult_MPIBAIJ,
2460                                 /* 4*/ MatMultAdd_MPIBAIJ,
2461                                        MatMultTranspose_MPIBAIJ,
2462                                        MatMultTransposeAdd_MPIBAIJ,
2463                                        0,
2464                                        0,
2465                                        0,
2466                                 /*10*/ 0,
2467                                        0,
2468                                        0,
2469                                        MatSOR_MPIBAIJ,
2470                                        MatTranspose_MPIBAIJ,
2471                                 /*15*/ MatGetInfo_MPIBAIJ,
2472                                        MatEqual_MPIBAIJ,
2473                                        MatGetDiagonal_MPIBAIJ,
2474                                        MatDiagonalScale_MPIBAIJ,
2475                                        MatNorm_MPIBAIJ,
2476                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2477                                        MatAssemblyEnd_MPIBAIJ,
2478                                        MatSetOption_MPIBAIJ,
2479                                        MatZeroEntries_MPIBAIJ,
2480                                 /*24*/ MatZeroRows_MPIBAIJ,
2481                                        0,
2482                                        0,
2483                                        0,
2484                                        0,
2485                                 /*29*/ MatSetUp_MPIBAIJ,
2486                                        0,
2487                                        0,
2488                                        0,
2489                                        0,
2490                                 /*34*/ MatDuplicate_MPIBAIJ,
2491                                        0,
2492                                        0,
2493                                        0,
2494                                        0,
2495                                 /*39*/ MatAXPY_MPIBAIJ,
2496                                        MatGetSubMatrices_MPIBAIJ,
2497                                        MatIncreaseOverlap_MPIBAIJ,
2498                                        MatGetValues_MPIBAIJ,
2499                                        MatCopy_MPIBAIJ,
2500                                 /*44*/ 0,
2501                                        MatScale_MPIBAIJ,
2502                                        0,
2503                                        0,
2504                                        0,
2505                                 /*49*/ 0,
2506                                        0,
2507                                        0,
2508                                        0,
2509                                        0,
2510                                 /*54*/ 0,
2511                                        0,
2512                                        MatSetUnfactored_MPIBAIJ,
2513                                        MatPermute_MPIBAIJ,
2514                                        MatSetValuesBlocked_MPIBAIJ,
2515                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2516                                        MatDestroy_MPIBAIJ,
2517                                        MatView_MPIBAIJ,
2518                                        0,
2519                                        0,
2520                                 /*64*/ 0,
2521                                        0,
2522                                        0,
2523                                        0,
2524                                        0,
2525                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2526                                        0,
2527                                        0,
2528                                        0,
2529                                        0,
2530                                 /*74*/ 0,
2531                                        MatFDColoringApply_BAIJ,
2532                                        0,
2533                                        0,
2534                                        0,
2535                                 /*79*/ 0,
2536                                        0,
2537                                        0,
2538                                        0,
2539                                        MatLoad_MPIBAIJ,
2540                                 /*84*/ 0,
2541                                        0,
2542                                        0,
2543                                        0,
2544                                        0,
2545                                 /*89*/ 0,
2546                                        0,
2547                                        0,
2548                                        0,
2549                                        0,
2550                                 /*94*/ 0,
2551                                        0,
2552                                        0,
2553                                        0,
2554                                        0,
2555                                 /*99*/ 0,
2556                                        0,
2557                                        0,
2558                                        0,
2559                                        0,
2560                                 /*104*/0,
2561                                        MatRealPart_MPIBAIJ,
2562                                        MatImaginaryPart_MPIBAIJ,
2563                                        0,
2564                                        0,
2565                                 /*109*/0,
2566                                        0,
2567                                        0,
2568                                        0,
2569                                        0,
2570                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2571                                        0,
2572                                        MatGetGhosts_MPIBAIJ,
2573                                        0,
2574                                        0,
2575                                 /*119*/0,
2576                                        0,
2577                                        0,
2578                                        0,
2579                                        MatGetMultiProcBlock_MPIBAIJ,
2580                                 /*124*/0,
2581                                        0,
2582                                        MatInvertBlockDiagonal_MPIBAIJ,
2583                                        0,
2584                                        0,
2585                                /*129*/ 0,
2586                                        0,
2587                                        0,
2588                                        0,
2589                                        0,
2590                                /*134*/ 0,
2591                                        0,
2592                                        0,
2593                                        0,
2594                                        0,
2595                                /*139*/ 0,
2596                                        0,
2597                                        MatFDColoringSetUp_MPIXAIJ
2598 };
2599 
2600 #undef __FUNCT__
2601 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2602 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2603 {
2604   PetscFunctionBegin;
2605   *a = ((Mat_MPIBAIJ*)A->data)->A;
2606   PetscFunctionReturn(0);
2607 }
2608 
2609 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2610 
2611 #undef __FUNCT__
2612 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2613 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2614 {
2615   PetscInt       m,rstart,cstart,cend;
2616   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2617   const PetscInt *JJ    =0;
2618   PetscScalar    *values=0;
2619   PetscErrorCode ierr;
2620 
2621   PetscFunctionBegin;
2622   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2623   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2624   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2625   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2626   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2627   m      = B->rmap->n/bs;
2628   rstart = B->rmap->rstart/bs;
2629   cstart = B->cmap->rstart/bs;
2630   cend   = B->cmap->rend/bs;
2631 
2632   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2633   ierr = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr);
2634   for (i=0; i<m; i++) {
2635     nz = ii[i+1] - ii[i];
2636     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2637     nz_max = PetscMax(nz_max,nz);
2638     JJ     = jj + ii[i];
2639     for (j=0; j<nz; j++) {
2640       if (*JJ >= cstart) break;
2641       JJ++;
2642     }
2643     d = 0;
2644     for (; j<nz; j++) {
2645       if (*JJ++ >= cend) break;
2646       d++;
2647     }
2648     d_nnz[i] = d;
2649     o_nnz[i] = nz - d;
2650   }
2651   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2652   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2653 
2654   values = (PetscScalar*)V;
2655   if (!values) {
2656     ierr = PetscMalloc(bs*bs*nz_max*sizeof(PetscScalar),&values);CHKERRQ(ierr);
2657     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2658   }
2659   for (i=0; i<m; i++) {
2660     PetscInt          row    = i + rstart;
2661     PetscInt          ncols  = ii[i+1] - ii[i];
2662     const PetscInt    *icols = jj + ii[i];
2663     const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2664     ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2665   }
2666 
2667   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2668   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2669   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2670   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2671   PetscFunctionReturn(0);
2672 }
2673 
2674 #undef __FUNCT__
2675 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2676 /*@C
2677    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2678    (the default parallel PETSc format).
2679 
2680    Collective on MPI_Comm
2681 
2682    Input Parameters:
2683 +  A - the matrix
2684 .  bs - the block size
2685 .  i - the indices into j for the start of each local row (starts with zero)
2686 .  j - the column indices for each local row (starts with zero) these must be sorted for each row
2687 -  v - optional values in the matrix
2688 
2689    Level: developer
2690 
2691 .keywords: matrix, aij, compressed row, sparse, parallel
2692 
2693 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ
2694 @*/
2695 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2696 {
2697   PetscErrorCode ierr;
2698 
2699   PetscFunctionBegin;
2700   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2701   PetscValidType(B,1);
2702   PetscValidLogicalCollectiveInt(B,bs,2);
2703   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2704   PetscFunctionReturn(0);
2705 }
2706 
2707 #undef __FUNCT__
2708 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2709 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2710 {
2711   Mat_MPIBAIJ    *b;
2712   PetscErrorCode ierr;
2713   PetscInt       i;
2714 
2715   PetscFunctionBegin;
2716   ierr = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2717   ierr = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2718   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2719   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2720   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2721 
2722   if (d_nnz) {
2723     for (i=0; i<B->rmap->n/bs; i++) {
2724       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]);
2725     }
2726   }
2727   if (o_nnz) {
2728     for (i=0; i<B->rmap->n/bs; i++) {
2729       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]);
2730     }
2731   }
2732 
2733   b      = (Mat_MPIBAIJ*)B->data;
2734   b->bs2 = bs*bs;
2735   b->mbs = B->rmap->n/bs;
2736   b->nbs = B->cmap->n/bs;
2737   b->Mbs = B->rmap->N/bs;
2738   b->Nbs = B->cmap->N/bs;
2739 
2740   for (i=0; i<=b->size; i++) {
2741     b->rangebs[i] = B->rmap->range[i]/bs;
2742   }
2743   b->rstartbs = B->rmap->rstart/bs;
2744   b->rendbs   = B->rmap->rend/bs;
2745   b->cstartbs = B->cmap->rstart/bs;
2746   b->cendbs   = B->cmap->rend/bs;
2747 
2748   if (!B->preallocated) {
2749     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
2750     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
2751     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
2752     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
2753     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
2754     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
2755     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
2756     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
2757     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
2758   }
2759 
2760   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
2761   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
2762   B->preallocated = PETSC_TRUE;
2763   PetscFunctionReturn(0);
2764 }
2765 
2766 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
2767 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
2768 
2769 #undef __FUNCT__
2770 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
2771 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
2772 {
2773   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2774   PetscErrorCode ierr;
2775   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
2776   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
2777   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
2778 
2779   PetscFunctionBegin;
2780   ierr  = PetscMalloc((M+1)*sizeof(PetscInt),&ii);CHKERRQ(ierr);
2781   ii[0] = 0;
2782   for (i=0; i<M; i++) {
2783     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
2784     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
2785     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
2786     /* remove one from count of matrix has diagonal */
2787     for (j=id[i]; j<id[i+1]; j++) {
2788       if (jd[j] == i) {ii[i+1]--;break;}
2789     }
2790   }
2791   ierr = PetscMalloc(ii[M]*sizeof(PetscInt),&jj);CHKERRQ(ierr);
2792   cnt  = 0;
2793   for (i=0; i<M; i++) {
2794     for (j=io[i]; j<io[i+1]; j++) {
2795       if (garray[jo[j]] > rstart) break;
2796       jj[cnt++] = garray[jo[j]];
2797     }
2798     for (k=id[i]; k<id[i+1]; k++) {
2799       if (jd[k] != i) {
2800         jj[cnt++] = rstart + jd[k];
2801       }
2802     }
2803     for (; j<io[i+1]; j++) {
2804       jj[cnt++] = garray[jo[j]];
2805     }
2806   }
2807   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
2808   PetscFunctionReturn(0);
2809 }
2810 
2811 #include <../src/mat/impls/aij/mpi/mpiaij.h>
2812 
2813 PETSC_EXTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
2814 
2815 #undef __FUNCT__
2816 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
2817 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
2818 {
2819   PetscErrorCode ierr;
2820   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2821   Mat            B;
2822   Mat_MPIAIJ     *b;
2823 
2824   PetscFunctionBegin;
2825   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
2826 
2827   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2828   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2829   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
2830   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
2831   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
2832   b    = (Mat_MPIAIJ*) B->data;
2833 
2834   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
2835   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
2836   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
2837   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
2838   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
2839   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2840   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2841   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2842   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2843   if (reuse == MAT_REUSE_MATRIX) {
2844     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
2845   } else {
2846    *newmat = B;
2847   }
2848   PetscFunctionReturn(0);
2849 }
2850 
2851 #if defined(PETSC_HAVE_MUMPS)
2852 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*);
2853 #endif
2854 
2855 /*MC
2856    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
2857 
2858    Options Database Keys:
2859 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
2860 . -mat_block_size <bs> - set the blocksize used to store the matrix
2861 - -mat_use_hash_table <fact>
2862 
2863   Level: beginner
2864 
2865 .seealso: MatCreateMPIBAIJ
2866 M*/
2867 
2868 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
2869 
2870 #undef __FUNCT__
2871 #define __FUNCT__ "MatCreate_MPIBAIJ"
2872 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
2873 {
2874   Mat_MPIBAIJ    *b;
2875   PetscErrorCode ierr;
2876   PetscBool      flg;
2877 
2878   PetscFunctionBegin;
2879   ierr    = PetscNewLog(B,Mat_MPIBAIJ,&b);CHKERRQ(ierr);
2880   B->data = (void*)b;
2881 
2882   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
2883   B->assembled = PETSC_FALSE;
2884 
2885   B->insertmode = NOT_SET_VALUES;
2886   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
2887   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
2888 
2889   /* build local table of row and column ownerships */
2890   ierr = PetscMalloc((b->size+1)*sizeof(PetscInt),&b->rangebs);CHKERRQ(ierr);
2891 
2892   /* build cache for off array entries formed */
2893   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
2894 
2895   b->donotstash  = PETSC_FALSE;
2896   b->colmap      = NULL;
2897   b->garray      = NULL;
2898   b->roworiented = PETSC_TRUE;
2899 
2900   /* stuff used in block assembly */
2901   b->barray = 0;
2902 
2903   /* stuff used for matrix vector multiply */
2904   b->lvec  = 0;
2905   b->Mvctx = 0;
2906 
2907   /* stuff for MatGetRow() */
2908   b->rowindices   = 0;
2909   b->rowvalues    = 0;
2910   b->getrowactive = PETSC_FALSE;
2911 
2912   /* hash table stuff */
2913   b->ht           = 0;
2914   b->hd           = 0;
2915   b->ht_size      = 0;
2916   b->ht_flag      = PETSC_FALSE;
2917   b->ht_fact      = 0;
2918   b->ht_total_ct  = 0;
2919   b->ht_insert_ct = 0;
2920 
2921   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
2922   b->ijonly = PETSC_FALSE;
2923 
2924   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPIBAIJ matrix 1","Mat");CHKERRQ(ierr);
2925   ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",PETSC_FALSE,&flg,NULL);CHKERRQ(ierr);
2926   if (flg) {
2927     PetscReal fact = 1.39;
2928     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
2929     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
2930     if (fact <= 1.0) fact = 1.39;
2931     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
2932     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
2933   }
2934   ierr = PetscOptionsEnd();CHKERRQ(ierr);
2935 
2936 #if defined(PETSC_HAVE_MUMPS)
2937   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_mumps_C",MatGetFactor_baij_mumps);CHKERRQ(ierr);
2938 #endif
2939   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
2940   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
2941   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
2942   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
2943   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
2944   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIBAIJ);CHKERRQ(ierr);
2945   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
2946   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
2947   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
2948   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
2949   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpibstrm_C",MatConvert_MPIBAIJ_MPIBSTRM);CHKERRQ(ierr);
2950   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
2951   PetscFunctionReturn(0);
2952 }
2953 
2954 /*MC
2955    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
2956 
2957    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
2958    and MATMPIBAIJ otherwise.
2959 
2960    Options Database Keys:
2961 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
2962 
2963   Level: beginner
2964 
2965 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
2966 M*/
2967 
2968 #undef __FUNCT__
2969 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
2970 /*@C
2971    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
2972    (block compressed row).  For good matrix assembly performance
2973    the user should preallocate the matrix storage by setting the parameters
2974    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2975    performance can be increased by more than a factor of 50.
2976 
2977    Collective on Mat
2978 
2979    Input Parameters:
2980 +  A - the matrix
2981 .  bs   - size of block
2982 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
2983            submatrix  (same for all local rows)
2984 .  d_nnz - array containing the number of block nonzeros in the various block rows
2985            of the in diagonal portion of the local (possibly different for each block
2986            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
2987            set it even if it is zero.
2988 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2989            submatrix (same for all local rows).
2990 -  o_nnz - array containing the number of nonzeros in the various block rows of the
2991            off-diagonal portion of the local submatrix (possibly different for
2992            each block row) or NULL.
2993 
2994    If the *_nnz parameter is given then the *_nz parameter is ignored
2995 
2996    Options Database Keys:
2997 +   -mat_block_size - size of the blocks to use
2998 -   -mat_use_hash_table <fact>
2999 
3000    Notes:
3001    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one processor
3002    than it must be used on all processors that share the object for that argument.
3003 
3004    Storage Information:
3005    For a square global matrix we define each processor's diagonal portion
3006    to be its local rows and the corresponding columns (a square submatrix);
3007    each processor's off-diagonal portion encompasses the remainder of the
3008    local matrix (a rectangular submatrix).
3009 
3010    The user can specify preallocated storage for the diagonal part of
3011    the local submatrix with either d_nz or d_nnz (not both).  Set
3012    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3013    memory allocation.  Likewise, specify preallocated storage for the
3014    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3015 
3016    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3017    the figure below we depict these three local rows and all columns (0-11).
3018 
3019 .vb
3020            0 1 2 3 4 5 6 7 8 9 10 11
3021           --------------------------
3022    row 3  |o o o d d d o o o o  o  o
3023    row 4  |o o o d d d o o o o  o  o
3024    row 5  |o o o d d d o o o o  o  o
3025           --------------------------
3026 .ve
3027 
3028    Thus, any entries in the d locations are stored in the d (diagonal)
3029    submatrix, and any entries in the o locations are stored in the
3030    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3031    stored simply in the MATSEQBAIJ format for compressed row storage.
3032 
3033    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3034    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3035    In general, for PDE problems in which most nonzeros are near the diagonal,
3036    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3037    or you will get TERRIBLE performance; see the users' manual chapter on
3038    matrices.
3039 
3040    You can call MatGetInfo() to get information on how effective the preallocation was;
3041    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3042    You can also run with the option -info and look for messages with the string
3043    malloc in them to see if additional memory allocation was needed.
3044 
3045    Level: intermediate
3046 
3047 .keywords: matrix, block, aij, compressed row, sparse, parallel
3048 
3049 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3050 @*/
3051 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3052 {
3053   PetscErrorCode ierr;
3054 
3055   PetscFunctionBegin;
3056   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3057   PetscValidType(B,1);
3058   PetscValidLogicalCollectiveInt(B,bs,2);
3059   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3060   PetscFunctionReturn(0);
3061 }
3062 
3063 #undef __FUNCT__
3064 #define __FUNCT__ "MatCreateBAIJ"
3065 /*@C
3066    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3067    (block compressed row).  For good matrix assembly performance
3068    the user should preallocate the matrix storage by setting the parameters
3069    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3070    performance can be increased by more than a factor of 50.
3071 
3072    Collective on MPI_Comm
3073 
3074    Input Parameters:
3075 +  comm - MPI communicator
3076 .  bs   - size of blockk
3077 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3078            This value should be the same as the local size used in creating the
3079            y vector for the matrix-vector product y = Ax.
3080 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3081            This value should be the same as the local size used in creating the
3082            x vector for the matrix-vector product y = Ax.
3083 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3084 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3085 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3086            submatrix  (same for all local rows)
3087 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3088            of the in diagonal portion of the local (possibly different for each block
3089            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3090            and set it even if it is zero.
3091 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3092            submatrix (same for all local rows).
3093 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3094            off-diagonal portion of the local submatrix (possibly different for
3095            each block row) or NULL.
3096 
3097    Output Parameter:
3098 .  A - the matrix
3099 
3100    Options Database Keys:
3101 +   -mat_block_size - size of the blocks to use
3102 -   -mat_use_hash_table <fact>
3103 
3104    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3105    MatXXXXSetPreallocation() paradgm instead of this routine directly.
3106    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3107 
3108    Notes:
3109    If the *_nnz parameter is given then the *_nz parameter is ignored
3110 
3111    A nonzero block is any block that as 1 or more nonzeros in it
3112 
3113    The user MUST specify either the local or global matrix dimensions
3114    (possibly both).
3115 
3116    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one processor
3117    than it must be used on all processors that share the object for that argument.
3118 
3119    Storage Information:
3120    For a square global matrix we define each processor's diagonal portion
3121    to be its local rows and the corresponding columns (a square submatrix);
3122    each processor's off-diagonal portion encompasses the remainder of the
3123    local matrix (a rectangular submatrix).
3124 
3125    The user can specify preallocated storage for the diagonal part of
3126    the local submatrix with either d_nz or d_nnz (not both).  Set
3127    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3128    memory allocation.  Likewise, specify preallocated storage for the
3129    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3130 
3131    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3132    the figure below we depict these three local rows and all columns (0-11).
3133 
3134 .vb
3135            0 1 2 3 4 5 6 7 8 9 10 11
3136           --------------------------
3137    row 3  |o o o d d d o o o o  o  o
3138    row 4  |o o o d d d o o o o  o  o
3139    row 5  |o o o d d d o o o o  o  o
3140           --------------------------
3141 .ve
3142 
3143    Thus, any entries in the d locations are stored in the d (diagonal)
3144    submatrix, and any entries in the o locations are stored in the
3145    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3146    stored simply in the MATSEQBAIJ format for compressed row storage.
3147 
3148    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3149    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3150    In general, for PDE problems in which most nonzeros are near the diagonal,
3151    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3152    or you will get TERRIBLE performance; see the users' manual chapter on
3153    matrices.
3154 
3155    Level: intermediate
3156 
3157 .keywords: matrix, block, aij, compressed row, sparse, parallel
3158 
3159 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3160 @*/
3161 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3162 {
3163   PetscErrorCode ierr;
3164   PetscMPIInt    size;
3165 
3166   PetscFunctionBegin;
3167   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3168   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3169   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3170   if (size > 1) {
3171     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3172     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3173   } else {
3174     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3175     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3176   }
3177   PetscFunctionReturn(0);
3178 }
3179 
3180 #undef __FUNCT__
3181 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3182 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3183 {
3184   Mat            mat;
3185   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3186   PetscErrorCode ierr;
3187   PetscInt       len=0;
3188 
3189   PetscFunctionBegin;
3190   *newmat = 0;
3191   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3192   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3193   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3194   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3195 
3196   mat->factortype   = matin->factortype;
3197   mat->preallocated = PETSC_TRUE;
3198   mat->assembled    = PETSC_TRUE;
3199   mat->insertmode   = NOT_SET_VALUES;
3200 
3201   a             = (Mat_MPIBAIJ*)mat->data;
3202   mat->rmap->bs = matin->rmap->bs;
3203   a->bs2        = oldmat->bs2;
3204   a->mbs        = oldmat->mbs;
3205   a->nbs        = oldmat->nbs;
3206   a->Mbs        = oldmat->Mbs;
3207   a->Nbs        = oldmat->Nbs;
3208 
3209   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3210   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3211 
3212   a->size         = oldmat->size;
3213   a->rank         = oldmat->rank;
3214   a->donotstash   = oldmat->donotstash;
3215   a->roworiented  = oldmat->roworiented;
3216   a->rowindices   = 0;
3217   a->rowvalues    = 0;
3218   a->getrowactive = PETSC_FALSE;
3219   a->barray       = 0;
3220   a->rstartbs     = oldmat->rstartbs;
3221   a->rendbs       = oldmat->rendbs;
3222   a->cstartbs     = oldmat->cstartbs;
3223   a->cendbs       = oldmat->cendbs;
3224 
3225   /* hash table stuff */
3226   a->ht           = 0;
3227   a->hd           = 0;
3228   a->ht_size      = 0;
3229   a->ht_flag      = oldmat->ht_flag;
3230   a->ht_fact      = oldmat->ht_fact;
3231   a->ht_total_ct  = 0;
3232   a->ht_insert_ct = 0;
3233 
3234   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3235   if (oldmat->colmap) {
3236 #if defined(PETSC_USE_CTABLE)
3237     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3238 #else
3239     ierr = PetscMalloc((a->Nbs)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
3240     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3241     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3242 #endif
3243   } else a->colmap = 0;
3244 
3245   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3246     ierr = PetscMalloc(len*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
3247     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3248     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3249   } else a->garray = 0;
3250 
3251   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3252   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3253   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3254   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3255   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3256 
3257   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3258   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3259   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3260   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3261   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3262   *newmat = mat;
3263   PetscFunctionReturn(0);
3264 }
3265 
3266 #undef __FUNCT__
3267 #define __FUNCT__ "MatLoad_MPIBAIJ"
3268 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3269 {
3270   PetscErrorCode ierr;
3271   int            fd;
3272   PetscInt       i,nz,j,rstart,rend;
3273   PetscScalar    *vals,*buf;
3274   MPI_Comm       comm;
3275   MPI_Status     status;
3276   PetscMPIInt    rank,size,maxnz;
3277   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3278   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3279   PetscInt       jj,*mycols,*ibuf,bs=1,Mbs,mbs,extra_rows,mmax;
3280   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3281   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3282   PetscInt       dcount,kmax,k,nzcount,tmp,mend,sizesset=1,grows,gcols;
3283 
3284   PetscFunctionBegin;
3285   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3286   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix 2","Mat");CHKERRQ(ierr);
3287   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3288   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3289 
3290   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3291   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3292   if (!rank) {
3293     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3294     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3295     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
3296   }
3297 
3298   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) sizesset = 0;
3299 
3300   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3301   M    = header[1]; N = header[2];
3302 
3303   /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
3304   if (sizesset && newmat->rmap->N < 0) newmat->rmap->N = M;
3305   if (sizesset && newmat->cmap->N < 0) newmat->cmap->N = N;
3306 
3307   /* If global sizes are set, check if they are consistent with that given in the file */
3308   if (sizesset) {
3309     ierr = MatGetSize(newmat,&grows,&gcols);CHKERRQ(ierr);
3310   }
3311   if (sizesset && newmat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%d) and input matrix has (%d)",M,grows);
3312   if (sizesset && newmat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols:Matrix in file has (%d) and input matrix has (%d)",N,gcols);
3313 
3314   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3315 
3316   /*
3317      This code adds extra rows to make sure the number of rows is
3318      divisible by the blocksize
3319   */
3320   Mbs        = M/bs;
3321   extra_rows = bs - M + bs*Mbs;
3322   if (extra_rows == bs) extra_rows = 0;
3323   else                  Mbs++;
3324   if (extra_rows && !rank) {
3325     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3326   }
3327 
3328   /* determine ownership of all rows */
3329   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3330     mbs = Mbs/size + ((Mbs % size) > rank);
3331     m   = mbs*bs;
3332   } else { /* User set */
3333     m   = newmat->rmap->n;
3334     mbs = m/bs;
3335   }
3336   ierr = PetscMalloc2(size+1,PetscInt,&rowners,size+1,PetscInt,&browners);CHKERRQ(ierr);
3337   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3338 
3339   /* process 0 needs enough room for process with most rows */
3340   if (!rank) {
3341     mmax = rowners[1];
3342     for (i=2; i<=size; i++) {
3343       mmax = PetscMax(mmax,rowners[i]);
3344     }
3345     mmax*=bs;
3346   } else mmax = -1;             /* unused, but compiler warns anyway */
3347 
3348   rowners[0] = 0;
3349   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3350   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3351   rstart = rowners[rank];
3352   rend   = rowners[rank+1];
3353 
3354   /* distribute row lengths to all processors */
3355   ierr = PetscMalloc(m*sizeof(PetscInt),&locrowlens);CHKERRQ(ierr);
3356   if (!rank) {
3357     mend = m;
3358     if (size == 1) mend = mend - extra_rows;
3359     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3360     for (j=mend; j<m; j++) locrowlens[j] = 1;
3361     ierr = PetscMalloc(mmax*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
3362     ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
3363     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
3364     for (j=0; j<m; j++) {
3365       procsnz[0] += locrowlens[j];
3366     }
3367     for (i=1; i<size; i++) {
3368       mend = browners[i+1] - browners[i];
3369       if (i == size-1) mend = mend - extra_rows;
3370       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3371       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3372       /* calculate the number of nonzeros on each processor */
3373       for (j=0; j<browners[i+1]-browners[i]; j++) {
3374         procsnz[i] += rowlengths[j];
3375       }
3376       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3377     }
3378     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3379   } else {
3380     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3381   }
3382 
3383   if (!rank) {
3384     /* determine max buffer needed and allocate it */
3385     maxnz = procsnz[0];
3386     for (i=1; i<size; i++) {
3387       maxnz = PetscMax(maxnz,procsnz[i]);
3388     }
3389     ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);
3390 
3391     /* read in my part of the matrix column indices  */
3392     nz     = procsnz[0];
3393     ierr   = PetscMalloc((nz+1)*sizeof(PetscInt),&ibuf);CHKERRQ(ierr);
3394     mycols = ibuf;
3395     if (size == 1) nz -= extra_rows;
3396     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3397     if (size == 1) {
3398       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3399     }
3400 
3401     /* read in every ones (except the last) and ship off */
3402     for (i=1; i<size-1; i++) {
3403       nz   = procsnz[i];
3404       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3405       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3406     }
3407     /* read in the stuff for the last proc */
3408     if (size != 1) {
3409       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3410       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3411       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3412       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3413     }
3414     ierr = PetscFree(cols);CHKERRQ(ierr);
3415   } else {
3416     /* determine buffer space needed for message */
3417     nz = 0;
3418     for (i=0; i<m; i++) {
3419       nz += locrowlens[i];
3420     }
3421     ierr   = PetscMalloc((nz+1)*sizeof(PetscInt),&ibuf);CHKERRQ(ierr);
3422     mycols = ibuf;
3423     /* receive message of column indices*/
3424     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3425     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3426     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3427   }
3428 
3429   /* loop over local rows, determining number of off diagonal entries */
3430   ierr     = PetscMalloc2(rend-rstart,PetscInt,&dlens,rend-rstart,PetscInt,&odlens);CHKERRQ(ierr);
3431   ierr     = PetscMalloc3(Mbs,PetscInt,&mask,Mbs,PetscInt,&masked1,Mbs,PetscInt,&masked2);CHKERRQ(ierr);
3432   ierr     = PetscMemzero(mask,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3433   ierr     = PetscMemzero(masked1,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3434   ierr     = PetscMemzero(masked2,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3435   rowcount = 0; nzcount = 0;
3436   for (i=0; i<mbs; i++) {
3437     dcount  = 0;
3438     odcount = 0;
3439     for (j=0; j<bs; j++) {
3440       kmax = locrowlens[rowcount];
3441       for (k=0; k<kmax; k++) {
3442         tmp = mycols[nzcount++]/bs;
3443         if (!mask[tmp]) {
3444           mask[tmp] = 1;
3445           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3446           else masked1[dcount++] = tmp;
3447         }
3448       }
3449       rowcount++;
3450     }
3451 
3452     dlens[i]  = dcount;
3453     odlens[i] = odcount;
3454 
3455     /* zero out the mask elements we set */
3456     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3457     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3458   }
3459 
3460 
3461   if (!sizesset) {
3462     ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3463   }
3464   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3465 
3466   if (!rank) {
3467     ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
3468     /* read in my part of the matrix numerical values  */
3469     nz     = procsnz[0];
3470     vals   = buf;
3471     mycols = ibuf;
3472     if (size == 1) nz -= extra_rows;
3473     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3474     if (size == 1) {
3475       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3476     }
3477 
3478     /* insert into matrix */
3479     jj = rstart*bs;
3480     for (i=0; i<m; i++) {
3481       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3482       mycols += locrowlens[i];
3483       vals   += locrowlens[i];
3484       jj++;
3485     }
3486     /* read in other processors (except the last one) and ship out */
3487     for (i=1; i<size-1; i++) {
3488       nz   = procsnz[i];
3489       vals = buf;
3490       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3491       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3492     }
3493     /* the last proc */
3494     if (size != 1) {
3495       nz   = procsnz[i] - extra_rows;
3496       vals = buf;
3497       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3498       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3499       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3500     }
3501     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3502   } else {
3503     /* receive numeric values */
3504     ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
3505 
3506     /* receive message of values*/
3507     vals   = buf;
3508     mycols = ibuf;
3509     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3510 
3511     /* insert into matrix */
3512     jj = rstart*bs;
3513     for (i=0; i<m; i++) {
3514       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3515       mycols += locrowlens[i];
3516       vals   += locrowlens[i];
3517       jj++;
3518     }
3519   }
3520   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3521   ierr = PetscFree(buf);CHKERRQ(ierr);
3522   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3523   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3524   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3525   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3526   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3527   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3528   PetscFunctionReturn(0);
3529 }
3530 
3531 #undef __FUNCT__
3532 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3533 /*@
3534    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3535 
3536    Input Parameters:
3537 .  mat  - the matrix
3538 .  fact - factor
3539 
3540    Not Collective, each process can use a different factor
3541 
3542    Level: advanced
3543 
3544   Notes:
3545    This can also be set by the command line option: -mat_use_hash_table <fact>
3546 
3547 .keywords: matrix, hashtable, factor, HT
3548 
3549 .seealso: MatSetOption()
3550 @*/
3551 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3552 {
3553   PetscErrorCode ierr;
3554 
3555   PetscFunctionBegin;
3556   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3557   PetscFunctionReturn(0);
3558 }
3559 
3560 #undef __FUNCT__
3561 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3562 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3563 {
3564   Mat_MPIBAIJ *baij;
3565 
3566   PetscFunctionBegin;
3567   baij          = (Mat_MPIBAIJ*)mat->data;
3568   baij->ht_fact = fact;
3569   PetscFunctionReturn(0);
3570 }
3571 
3572 #undef __FUNCT__
3573 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3574 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3575 {
3576   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3577 
3578   PetscFunctionBegin;
3579   *Ad     = a->A;
3580   *Ao     = a->B;
3581   *colmap = a->garray;
3582   PetscFunctionReturn(0);
3583 }
3584 
3585 /*
3586     Special version for direct calls from Fortran (to eliminate two function call overheads
3587 */
3588 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3589 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3590 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3591 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3592 #endif
3593 
3594 #undef __FUNCT__
3595 #define __FUNCT__ "matmpibiajsetvaluesblocked"
3596 /*@C
3597   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3598 
3599   Collective on Mat
3600 
3601   Input Parameters:
3602 + mat - the matrix
3603 . min - number of input rows
3604 . im - input rows
3605 . nin - number of input columns
3606 . in - input columns
3607 . v - numerical values input
3608 - addvin - INSERT_VALUES or ADD_VALUES
3609 
3610   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ() which is terrible code un-reuse.
3611 
3612   Level: advanced
3613 
3614 .seealso:   MatSetValuesBlocked()
3615 @*/
3616 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3617 {
3618   /* convert input arguments to C version */
3619   Mat        mat  = *matin;
3620   PetscInt   m    = *min, n = *nin;
3621   InsertMode addv = *addvin;
3622 
3623   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3624   const MatScalar *value;
3625   MatScalar       *barray     = baij->barray;
3626   PetscBool       roworiented = baij->roworiented;
3627   PetscErrorCode  ierr;
3628   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3629   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3630   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3631 
3632   PetscFunctionBegin;
3633   /* tasks normally handled by MatSetValuesBlocked() */
3634   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3635 #if defined(PETSC_USE_DEBUG)
3636   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3637   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3638 #endif
3639   if (mat->assembled) {
3640     mat->was_assembled = PETSC_TRUE;
3641     mat->assembled     = PETSC_FALSE;
3642   }
3643   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3644 
3645 
3646   if (!barray) {
3647     ierr         = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr);
3648     baij->barray = barray;
3649   }
3650 
3651   if (roworiented) stepval = (n-1)*bs;
3652   else stepval = (m-1)*bs;
3653 
3654   for (i=0; i<m; i++) {
3655     if (im[i] < 0) continue;
3656 #if defined(PETSC_USE_DEBUG)
3657     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3658 #endif
3659     if (im[i] >= rstart && im[i] < rend) {
3660       row = im[i] - rstart;
3661       for (j=0; j<n; j++) {
3662         /* If NumCol = 1 then a copy is not required */
3663         if ((roworiented) && (n == 1)) {
3664           barray = (MatScalar*)v + i*bs2;
3665         } else if ((!roworiented) && (m == 1)) {
3666           barray = (MatScalar*)v + j*bs2;
3667         } else { /* Here a copy is required */
3668           if (roworiented) {
3669             value = v + i*(stepval+bs)*bs + j*bs;
3670           } else {
3671             value = v + j*(stepval+bs)*bs + i*bs;
3672           }
3673           for (ii=0; ii<bs; ii++,value+=stepval) {
3674             for (jj=0; jj<bs; jj++) {
3675               *barray++ = *value++;
3676             }
3677           }
3678           barray -=bs2;
3679         }
3680 
3681         if (in[j] >= cstart && in[j] < cend) {
3682           col  = in[j] - cstart;
3683           ierr = MatSetValuesBlocked_SeqBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
3684         } else if (in[j] < 0) continue;
3685 #if defined(PETSC_USE_DEBUG)
3686         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3687 #endif
3688         else {
3689           if (mat->was_assembled) {
3690             if (!baij->colmap) {
3691               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3692             }
3693 
3694 #if defined(PETSC_USE_DEBUG)
3695 #if defined(PETSC_USE_CTABLE)
3696             { PetscInt data;
3697               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3698               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3699             }
3700 #else
3701             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3702 #endif
3703 #endif
3704 #if defined(PETSC_USE_CTABLE)
3705             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3706             col  = (col - 1)/bs;
3707 #else
3708             col = (baij->colmap[in[j]] - 1)/bs;
3709 #endif
3710             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
3711               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3712               col  =  in[j];
3713             }
3714           } else col = in[j];
3715           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
3716         }
3717       }
3718     } else {
3719       if (!baij->donotstash) {
3720         if (roworiented) {
3721           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3722         } else {
3723           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3724         }
3725       }
3726     }
3727   }
3728 
3729   /* task normally handled by MatSetValuesBlocked() */
3730   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3731   PetscFunctionReturn(0);
3732 }
3733 
3734 #undef __FUNCT__
3735 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
3736 /*@
3737      MatCreateMPIBAIJWithArrays - creates a MPI BAIJ matrix using arrays that contain in standard
3738          CSR format the local rows.
3739 
3740    Collective on MPI_Comm
3741 
3742    Input Parameters:
3743 +  comm - MPI communicator
3744 .  bs - the block size, only a block size of 1 is supported
3745 .  m - number of local rows (Cannot be PETSC_DECIDE)
3746 .  n - This value should be the same as the local size used in creating the
3747        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3748        calculated if N is given) For square matrices n is almost always m.
3749 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3750 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3751 .   i - row indices
3752 .   j - column indices
3753 -   a - matrix values
3754 
3755    Output Parameter:
3756 .   mat - the matrix
3757 
3758    Level: intermediate
3759 
3760    Notes:
3761        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
3762      thus you CANNOT change the matrix entries by changing the values of a[] after you have
3763      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
3764 
3765        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
3766 
3767 .keywords: matrix, aij, compressed row, sparse, parallel
3768 
3769 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
3770           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
3771 @*/
3772 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
3773 {
3774   PetscErrorCode ierr;
3775 
3776   PetscFunctionBegin;
3777   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
3778   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
3779   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
3780   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
3781   ierr = MatSetType(*mat,MATMPISBAIJ);CHKERRQ(ierr);
3782   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
3783   PetscFunctionReturn(0);
3784 }
3785