xref: /petsc/src/mat/impls/baij/mpi/mpibaij.c (revision a76eec0e025e246efca9d3ebd46f014f5abffee5)
1 
2 #include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/
3 #include <petscblaslapack.h>
4 
5 extern PetscErrorCode MatSetUpMultiply_MPIBAIJ(Mat);
6 extern PetscErrorCode MatDisAssemble_MPIBAIJ(Mat);
7 extern PetscErrorCode MatGetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt [],PetscScalar []);
8 extern PetscErrorCode MatSetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt [],const PetscScalar [],InsertMode);
9 extern PetscErrorCode MatSetValuesBlocked_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
10 extern PetscErrorCode MatGetRow_SeqBAIJ(Mat,PetscInt,PetscInt*,PetscInt*[],PetscScalar*[]);
11 extern PetscErrorCode MatRestoreRow_SeqBAIJ(Mat,PetscInt,PetscInt*,PetscInt*[],PetscScalar*[]);
12 extern PetscErrorCode MatZeroRows_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscScalar,Vec,Vec);
13 
14 #undef __FUNCT__
15 #define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
16 PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
17 {
18   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
19   PetscErrorCode ierr;
20   PetscInt       i,*idxb = 0;
21   PetscScalar    *va,*vb;
22   Vec            vtmp;
23 
24   PetscFunctionBegin;
25   ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
26   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
27   if (idx) {
28     for (i=0; i<A->rmap->n; i++) {
29       if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
30     }
31   }
32 
33   ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
34   if (idx) {ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);}
35   ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
36   ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
37 
38   for (i=0; i<A->rmap->n; i++) {
39     if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
40       va[i] = vb[i];
41       if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
42     }
43   }
44 
45   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
46   ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
47   ierr = PetscFree(idxb);CHKERRQ(ierr);
48   ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
49   PetscFunctionReturn(0);
50 }
51 
52 #undef __FUNCT__
53 #define __FUNCT__ "MatStoreValues_MPIBAIJ"
54 PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
55 {
56   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
57   PetscErrorCode ierr;
58 
59   PetscFunctionBegin;
60   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
61   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
62   PetscFunctionReturn(0);
63 }
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
67 PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
68 {
69   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
70   PetscErrorCode ierr;
71 
72   PetscFunctionBegin;
73   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
74   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
75   PetscFunctionReturn(0);
76 }
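
/*
   Usage sketch (illustrative only, not code from this file): the store/retrieve pair above
   is intended for matrices whose nonzero pattern is fixed; the option below should be set
   before MatStoreValues() so later insertions cannot change the pattern.

     ierr = MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatStoreValues(mat);CHKERRQ(ierr);
       ... modify values with MatSetValues(), assemble, solve ...
     ierr = MatRetrieveValues(mat);CHKERRQ(ierr);    (restores the stored copy)
*/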
77 
78 /*
79      Local utility routine that creates a mapping from the global column
80    number to the local column number in the off-diagonal part of the local
81    storage of the matrix.  This is done in a non-scalable way since the
82    length of colmap equals the number of block columns of the global matrix; an illustrative lookup sketch follows the routine below.
83 */
84 #undef __FUNCT__
85 #define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
86 PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
87 {
88   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
89   Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
90   PetscErrorCode ierr;
91   PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;
92 
93   PetscFunctionBegin;
94 #if defined(PETSC_USE_CTABLE)
95   ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
96   for (i=0; i<nbs; i++) {
97     ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
98   }
99 #else
100   ierr = PetscMalloc((baij->Nbs+1)*sizeof(PetscInt),&baij->colmap);CHKERRQ(ierr);
101   ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
102   ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
103   for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
104 #endif
105   PetscFunctionReturn(0);
106 }
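
/*
   Illustrative lookup sketch (not a PETSc routine; the name and the non-CTABLE layout are
   assumptions for illustration): colmap is indexed by global block column and stores
   (local block column)*bs + 1, with 0 meaning the block column is not present in B.
*/
#if 0
static PetscInt ExampleColmapLookup(Mat mat,PetscInt gcol)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
  PetscInt    bs    = mat->rmap->bs;
  PetscInt    lcol  = baij->colmap[gcol/bs] - 1;  /* -1 when the block column is absent */

  if (lcol < 0) return -1;
  return lcol + gcol % bs;                        /* local point column within baij->B */
}
#endif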
107 
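/*
   The two macros below insert one point value into the diagonal (A) or off-diagonal (B)
   SeqBAIJ block: locate the block row, narrow the column search by bisection, scan for the
   block column, and either update the existing bs x bs block or shift the row and insert a
   new block (reallocating through MatSeqXAIJReallocateAIJ() when necessary).
*/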
108 #define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv) \
109   { \
110  \
111     brow = row/bs;  \
112     rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
113     rmax = aimax[brow]; nrow = ailen[brow]; \
114     bcol = col/bs; \
115     ridx = row % bs; cidx = col % bs; \
116     low  = 0; high = nrow; \
117     while (high-low > 3) { \
118       t = (low+high)/2; \
119       if (rp[t] > bcol) high = t; \
120       else              low  = t; \
121     } \
122     for (_i=low; _i<high; _i++) { \
123       if (rp[_i] > bcol) break; \
124       if (rp[_i] == bcol) { \
125         bap = ap +  bs2*_i + bs*cidx + ridx; \
126         if (addv == ADD_VALUES) *bap += value;  \
127         else                    *bap  = value;  \
128         goto a_noinsert; \
129       } \
130     } \
131     if (a->nonew == 1) goto a_noinsert; \
132     if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
133     MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
134     N = nrow++ - 1;  \
135     /* shift up all the later entries in this row */ \
136     for (ii=N; ii>=_i; ii--) { \
137       rp[ii+1] = rp[ii]; \
138       ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
139     } \
140     if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
141     rp[_i]                      = bcol;  \
142     ap[bs2*_i + bs*cidx + ridx] = value;  \
143 a_noinsert:; \
144     ailen[brow] = nrow; \
145   }
146 
147 #define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv) \
148   { \
149     brow = row/bs;  \
150     rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
151     rmax = bimax[brow]; nrow = bilen[brow]; \
152     bcol = col/bs; \
153     ridx = row % bs; cidx = col % bs; \
154     low  = 0; high = nrow; \
155     while (high-low > 3) { \
156       t = (low+high)/2; \
157       if (rp[t] > bcol) high = t; \
158       else              low  = t; \
159     } \
160     for (_i=low; _i<high; _i++) { \
161       if (rp[_i] > bcol) break; \
162       if (rp[_i] == bcol) { \
163         bap = ap +  bs2*_i + bs*cidx + ridx; \
164         if (addv == ADD_VALUES) *bap += value;  \
165         else                    *bap  = value;  \
166         goto b_noinsert; \
167       } \
168     } \
169     if (b->nonew == 1) goto b_noinsert; \
170     if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
171     MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
172     N = nrow++ - 1;  \
173     /* shift up all the later entries in this row */ \
174     for (ii=N; ii>=_i; ii--) { \
175       rp[ii+1] = rp[ii]; \
176       ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
177     } \
178     if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
179     rp[_i]                      = bcol;  \
180     ap[bs2*_i + bs*cidx + ridx] = value;  \
181 b_noinsert:; \
182     bilen[brow] = nrow; \
183   }
184 
185 #undef __FUNCT__
186 #define __FUNCT__ "MatSetValues_MPIBAIJ"
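/*
   Point-wise insertion: each (row,column) pair is classified as
     - locally owned row and diagonal-block column: inserted into baij->A,
     - locally owned row and off-process column: column translated through colmap and
       inserted into baij->B (disassembling first if a new off-diagonal block appears),
     - off-process row: stashed here and communicated in MatAssemblyBegin/End().
*/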
187 PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
188 {
189   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
190   MatScalar      value;
191   PetscBool      roworiented = baij->roworiented;
192   PetscErrorCode ierr;
193   PetscInt       i,j,row,col;
194   PetscInt       rstart_orig=mat->rmap->rstart;
195   PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
196   PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;
197 
198   /* Some Variables required in the macro */
199   Mat         A     = baij->A;
200   Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
201   PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
202   MatScalar   *aa   =a->a;
203 
204   Mat         B     = baij->B;
205   Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
206   PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
207   MatScalar   *ba   =b->a;
208 
209   PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
210   PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
211   MatScalar *ap,*bap;
212 
213   PetscFunctionBegin;
214   if (v) PetscValidScalarPointer(v,6);
215   for (i=0; i<m; i++) {
216     if (im[i] < 0) continue;
217 #if defined(PETSC_USE_DEBUG)
218     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
219 #endif
220     if (im[i] >= rstart_orig && im[i] < rend_orig) {
221       row = im[i] - rstart_orig;
222       for (j=0; j<n; j++) {
223         if (in[j] >= cstart_orig && in[j] < cend_orig) {
224           col = in[j] - cstart_orig;
225           if (roworiented) value = v[i*n+j];
226           else             value = v[i+j*m];
227           MatSetValues_SeqBAIJ_A_Private(row,col,value,addv);
228           /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
229         } else if (in[j] < 0) continue;
230 #if defined(PETSC_USE_DEBUG)
231         else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
232 #endif
233         else {
234           if (mat->was_assembled) {
235             if (!baij->colmap) {
236               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
237             }
238 #if defined(PETSC_USE_CTABLE)
239             ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
240             col  = col - 1;
241 #else
242             col = baij->colmap[in[j]/bs] - 1;
243 #endif
244             if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
245               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
246               col  =  in[j];
247               /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
248               B    = baij->B;
249               b    = (Mat_SeqBAIJ*)(B)->data;
250               bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
251               ba   =b->a;
252             } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
253             else col += in[j]%bs;
254           } else col = in[j];
255           if (roworiented) value = v[i*n+j];
256           else             value = v[i+j*m];
257           MatSetValues_SeqBAIJ_B_Private(row,col,value,addv);
258           /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
259         }
260       }
261     } else {
262       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
263       if (!baij->donotstash) {
264         mat->assembled = PETSC_FALSE;
265         if (roworiented) {
266           ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
267         } else {
268           ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
269         }
270       }
271     }
272   }
273   PetscFunctionReturn(0);
274 }
275 
276 #undef __FUNCT__
277 #define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
278 PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
279 {
280   Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
281   const PetscScalar *value;
282   MatScalar         *barray     = baij->barray;
283   PetscBool         roworiented = baij->roworiented;
284   PetscErrorCode    ierr;
285   PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
286   PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
287   PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
288 
289   PetscFunctionBegin;
290   if (!barray) {
291     ierr         = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr);
292     baij->barray = barray;
293   }
294 
295   if (roworiented) stepval = (n-1)*bs;
296   else stepval = (m-1)*bs;
297 
298   for (i=0; i<m; i++) {
299     if (im[i] < 0) continue;
300 #if defined(PETSC_USE_DEBUG)
301     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
302 #endif
303     if (im[i] >= rstart && im[i] < rend) {
304       row = im[i] - rstart;
305       for (j=0; j<n; j++) {
306         /* If only one block column (row-oriented) or one block row (column-oriented) is being set, a copy is not required */
307         if ((roworiented) && (n == 1)) {
308           barray = (MatScalar*)v + i*bs2;
309         } else if ((!roworiented) && (m == 1)) {
310           barray = (MatScalar*)v + j*bs2;
311         } else { /* Here a copy is required */
312           if (roworiented) {
313             value = v + (i*(stepval+bs) + j)*bs;
314           } else {
315             value = v + (j*(stepval+bs) + i)*bs;
316           }
317           for (ii=0; ii<bs; ii++,value+=bs+stepval) {
318             for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
319             barray += bs;
320           }
321           barray -= bs2;
322         }
323 
324         if (in[j] >= cstart && in[j] < cend) {
325           col  = in[j] - cstart;
326           ierr = MatSetValuesBlocked_SeqBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
327         } else if (in[j] < 0) continue;
328 #if defined(PETSC_USE_DEBUG)
329         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
330 #endif
331         else {
332           if (mat->was_assembled) {
333             if (!baij->colmap) {
334               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
335             }
336 
337 #if defined(PETSC_USE_DEBUG)
338 #if defined(PETSC_USE_CTABLE)
339             { PetscInt data;
340               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
341               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
342             }
343 #else
344             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
345 #endif
346 #endif
347 #if defined(PETSC_USE_CTABLE)
348             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
349             col  = (col - 1)/bs;
350 #else
351             col = (baij->colmap[in[j]] - 1)/bs;
352 #endif
353             if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
354               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
355               col  =  in[j];
356             } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", bs*im[i], bs*in[j]);
357           } else col = in[j];
358           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
359         }
360       }
361     } else {
362       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
363       if (!baij->donotstash) {
364         if (roworiented) {
365           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
366         } else {
367           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
368         }
369       }
370     }
371   }
372   PetscFunctionReturn(0);
373 }
374 
375 #define HASH_KEY 0.6180339887
376 #define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
377 /* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
378 /* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
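/*
   Worked example (numbers chosen here for illustration): with size=11 and key=7,
   tmp = 7*0.6180339887 = 4.3262..., the fractional part is 0.3262..., and
   HASH(11,7,tmp) = (PetscInt)(11*0.3262...) = 3.  The multiplier is the golden-ratio
   conjugate, which spreads consecutive keys fairly uniformly over [0,size).
*/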
379 #undef __FUNCT__
380 #define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
381 PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
382 {
383   Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
384   PetscBool      roworiented = baij->roworiented;
385   PetscErrorCode ierr;
386   PetscInt       i,j,row,col;
387   PetscInt       rstart_orig=mat->rmap->rstart;
388   PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
389   PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
390   PetscReal      tmp;
391   MatScalar      **HD = baij->hd,value;
392 #if defined(PETSC_USE_DEBUG)
393   PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
394 #endif
395 
396   PetscFunctionBegin;
397   if (v) PetscValidScalarPointer(v,6);
398   for (i=0; i<m; i++) {
399 #if defined(PETSC_USE_DEBUG)
400     if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
401     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
402 #endif
403     row = im[i];
404     if (row >= rstart_orig && row < rend_orig) {
405       for (j=0; j<n; j++) {
406         col = in[j];
407         if (roworiented) value = v[i*n+j];
408         else             value = v[i+j*m];
409         /* Look up into the Hash Table */
410         key = (row/bs)*Nbs+(col/bs)+1;
411         h1  = HASH(size,key,tmp);
412 
413 
414         idx = h1;
415 #if defined(PETSC_USE_DEBUG)
416         insert_ct++;
417         total_ct++;
418         if (HT[idx] != key) {
419           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
420           if (idx == size) {
421             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
422             if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
423           }
424         }
425 #else
426         if (HT[idx] != key) {
427           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
428           if (idx == size) {
429             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
430             if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
431           }
432         }
433 #endif
434         /* A HASH table entry is found, so insert the values at the correct address */
435         if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
436         else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
437       }
438     } else if (!baij->donotstash) {
439       if (roworiented) {
440         ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
441       } else {
442         ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
443       }
444     }
445   }
446 #if defined(PETSC_USE_DEBUG)
447   baij->ht_total_ct  = total_ct;
448   baij->ht_insert_ct = insert_ct;
449 #endif
450   PetscFunctionReturn(0);
451 }
452 
453 #undef __FUNCT__
454 #define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
455 PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
456 {
457   Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
458   PetscBool         roworiented = baij->roworiented;
459   PetscErrorCode    ierr;
460   PetscInt          i,j,ii,jj,row,col;
461   PetscInt          rstart=baij->rstartbs;
462   PetscInt          rend  =baij->rendbs,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
463   PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
464   PetscReal         tmp;
465   MatScalar         **HD = baij->hd,*baij_a;
466   const PetscScalar *v_t,*value;
467 #if defined(PETSC_USE_DEBUG)
468   PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
469 #endif
470 
471   PetscFunctionBegin;
472   if (roworiented) stepval = (n-1)*bs;
473   else stepval = (m-1)*bs;
474 
475   for (i=0; i<m; i++) {
476 #if defined(PETSC_USE_DEBUG)
477     if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
478     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
479 #endif
480     row = im[i];
481     v_t = v + i*nbs2;
482     if (row >= rstart && row < rend) {
483       for (j=0; j<n; j++) {
484         col = in[j];
485 
486         /* Look up into the Hash Table */
487         key = row*Nbs+col+1;
488         h1  = HASH(size,key,tmp);
489 
490         idx = h1;
491 #if defined(PETSC_USE_DEBUG)
492         total_ct++;
493         insert_ct++;
494         if (HT[idx] != key) {
495           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
496           if (idx == size) {
497             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
498             if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
499           }
500         }
501 #else
502         if (HT[idx] != key) {
503           for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
504           if (idx == size) {
505             for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
506             if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
507           }
508         }
509 #endif
510         baij_a = HD[idx];
511         if (roworiented) {
512           /*value = v + i*(stepval+bs)*bs + j*bs;*/
513           /* value = v + (i*(stepval+bs)+j)*bs; */
514           value = v_t;
515           v_t  += bs;
516           if (addv == ADD_VALUES) {
517             for (ii=0; ii<bs; ii++,value+=stepval) {
518               for (jj=ii; jj<bs2; jj+=bs) {
519                 baij_a[jj] += *value++;
520               }
521             }
522           } else {
523             for (ii=0; ii<bs; ii++,value+=stepval) {
524               for (jj=ii; jj<bs2; jj+=bs) {
525                 baij_a[jj] = *value++;
526               }
527             }
528           }
529         } else {
530           value = v + j*(stepval+bs)*bs + i*bs;
531           if (addv == ADD_VALUES) {
532             for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
533               for (jj=0; jj<bs; jj++) {
534                 baij_a[jj] += *value++;
535               }
536             }
537           } else {
538             for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
539               for (jj=0; jj<bs; jj++) {
540                 baij_a[jj] = *value++;
541               }
542             }
543           }
544         }
545       }
546     } else {
547       if (!baij->donotstash) {
548         if (roworiented) {
549           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
550         } else {
551           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
552         }
553       }
554     }
555   }
556 #if defined(PETSC_USE_DEBUG)
557   baij->ht_total_ct  = total_ct;
558   baij->ht_insert_ct = insert_ct;
559 #endif
560   PetscFunctionReturn(0);
561 }
562 
563 #undef __FUNCT__
564 #define __FUNCT__ "MatGetValues_MPIBAIJ"
565 PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
566 {
567   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
568   PetscErrorCode ierr;
569   PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
570   PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;
571 
572   PetscFunctionBegin;
573   for (i=0; i<m; i++) {
574     if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
575     if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
576     if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
577       row = idxm[i] - bsrstart;
578       for (j=0; j<n; j++) {
579         if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
580         if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
581         if (idxn[j] >= bscstart && idxn[j] < bscend) {
582           col  = idxn[j] - bscstart;
583           ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
584         } else {
585           if (!baij->colmap) {
586             ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
587           }
588 #if defined(PETSC_USE_CTABLE)
589           ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
590           data--;
591 #else
592           data = baij->colmap[idxn[j]/bs]-1;
593 #endif
594           if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
595           else {
596             col  = data + idxn[j]%bs;
597             ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
598           }
599         }
600       }
601     } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
602   }
603   PetscFunctionReturn(0);
604 }
605 
606 #undef __FUNCT__
607 #define __FUNCT__ "MatNorm_MPIBAIJ"
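/*
   Norms are assembled from the two sequential blocks: NORM_FROBENIUS sums |a_ij|^2 over A and B
   and reduces with MPIU_SUM before taking the square root; NORM_1 accumulates per-column absolute
   sums into a length-N array and reduces with MPIU_SUM, then takes the maximum; NORM_INFINITY
   forms local row sums (A and B together) and reduces the local maximum with MPIU_MAX.
*/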
608 PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
609 {
610   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
611   Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
612   PetscErrorCode ierr;
613   PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
614   PetscReal      sum = 0.0;
615   MatScalar      *v;
616 
617   PetscFunctionBegin;
618   if (baij->size == 1) {
619     ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
620   } else {
621     if (type == NORM_FROBENIUS) {
622       v  = amat->a;
623       nz = amat->nz*bs2;
624       for (i=0; i<nz; i++) {
625         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
626       }
627       v  = bmat->a;
628       nz = bmat->nz*bs2;
629       for (i=0; i<nz; i++) {
630         sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
631       }
632       ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
633       *nrm = PetscSqrtReal(*nrm);
634     } else if (type == NORM_1) { /* max column sum */
635       PetscReal *tmp,*tmp2;
636       PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
637       ierr = PetscMalloc2(mat->cmap->N,PetscReal,&tmp,mat->cmap->N,PetscReal,&tmp2);CHKERRQ(ierr);
638       ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
639       v    = amat->a; jj = amat->j;
640       for (i=0; i<amat->nz; i++) {
641         for (j=0; j<bs; j++) {
642           col = bs*(cstart + *jj) + j; /* column index */
643           for (row=0; row<bs; row++) {
644             tmp[col] += PetscAbsScalar(*v);  v++;
645           }
646         }
647         jj++;
648       }
649       v = bmat->a; jj = bmat->j;
650       for (i=0; i<bmat->nz; i++) {
651         for (j=0; j<bs; j++) {
652           col = bs*garray[*jj] + j;
653           for (row=0; row<bs; row++) {
654             tmp[col] += PetscAbsScalar(*v); v++;
655           }
656         }
657         jj++;
658       }
659       ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
660       *nrm = 0.0;
661       for (j=0; j<mat->cmap->N; j++) {
662         if (tmp2[j] > *nrm) *nrm = tmp2[j];
663       }
664       ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
665     } else if (type == NORM_INFINITY) { /* max row sum */
666       PetscReal *sums;
667       ierr = PetscMalloc(bs*sizeof(PetscReal),&sums);CHKERRQ(ierr);
668       sum  = 0.0;
669       for (j=0; j<amat->mbs; j++) {
670         for (row=0; row<bs; row++) sums[row] = 0.0;
671         v  = amat->a + bs2*amat->i[j];
672         nz = amat->i[j+1]-amat->i[j];
673         for (i=0; i<nz; i++) {
674           for (col=0; col<bs; col++) {
675             for (row=0; row<bs; row++) {
676               sums[row] += PetscAbsScalar(*v); v++;
677             }
678           }
679         }
680         v  = bmat->a + bs2*bmat->i[j];
681         nz = bmat->i[j+1]-bmat->i[j];
682         for (i=0; i<nz; i++) {
683           for (col=0; col<bs; col++) {
684             for (row=0; row<bs; row++) {
685               sums[row] += PetscAbsScalar(*v); v++;
686             }
687           }
688         }
689         for (row=0; row<bs; row++) {
690           if (sums[row] > sum) sum = sums[row];
691         }
692       }
693       ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
694       ierr = PetscFree(sums);CHKERRQ(ierr);
695     } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
696   }
697   PetscFunctionReturn(0);
698 }
699 
700 /*
701   Creates and fills the hash table.
702   This table is created only once.
703   If new entries need to be added to the matrix
704   then the hash table has to be destroyed and
705   recreated.
706 */
707 #undef __FUNCT__
708 #define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
709 PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
710 {
711   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
712   Mat            A     = baij->A,B=baij->B;
713   Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
714   PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
715   PetscErrorCode ierr;
716   PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
717   PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
718   PetscInt       *HT,key;
719   MatScalar      **HD;
720   PetscReal      tmp;
721 #if defined(PETSC_USE_INFO)
722   PetscInt ct=0,max=0;
723 #endif
724 
725   PetscFunctionBegin;
726   if (baij->ht) PetscFunctionReturn(0);
727 
728   baij->ht_size = (PetscInt)(factor*nz);
729   ht_size       = baij->ht_size;
730 
731   /* Allocate Memory for Hash Table */
732   ierr = PetscMalloc2(ht_size,MatScalar*,&baij->hd,ht_size,PetscInt,&baij->ht);CHKERRQ(ierr);
733   ierr = PetscMemzero(baij->hd,ht_size*sizeof(MatScalar*));CHKERRQ(ierr);
734   ierr = PetscMemzero(baij->ht,ht_size*sizeof(PetscInt));CHKERRQ(ierr);
735   HD   = baij->hd;
736   HT   = baij->ht;
737 
738   /* Loop Over A */
739   for (i=0; i<a->mbs; i++) {
740     for (j=ai[i]; j<ai[i+1]; j++) {
741       row = i+rstart;
742       col = aj[j]+cstart;
743 
744       key = row*Nbs + col + 1;
745       h1  = HASH(ht_size,key,tmp);
746       for (k=0; k<ht_size; k++) {
747         if (!HT[(h1+k)%ht_size]) {
748           HT[(h1+k)%ht_size] = key;
749           HD[(h1+k)%ht_size] = a->a + j*bs2;
750           break;
751 #if defined(PETSC_USE_INFO)
752         } else {
753           ct++;
754 #endif
755         }
756       }
757 #if defined(PETSC_USE_INFO)
758       if (k> max) max = k;
759 #endif
760     }
761   }
762   /* Loop Over B */
763   for (i=0; i<b->mbs; i++) {
764     for (j=bi[i]; j<bi[i+1]; j++) {
765       row = i+rstart;
766       col = garray[bj[j]];
767       key = row*Nbs + col + 1;
768       h1  = HASH(ht_size,key,tmp);
769       for (k=0; k<ht_size; k++) {
770         if (!HT[(h1+k)%ht_size]) {
771           HT[(h1+k)%ht_size] = key;
772           HD[(h1+k)%ht_size] = b->a + j*bs2;
773           break;
774 #if defined(PETSC_USE_INFO)
775         } else {
776           ct++;
777 #endif
778         }
779       }
780 #if defined(PETSC_USE_INFO)
781       if (k> max) max = k;
782 #endif
783     }
784   }
785 
786   /* Print Summary */
787 #if defined(PETSC_USE_INFO)
788   for (i=0,j=0; i<ht_size; i++) {
789     if (HT[i]) j++;
790   }
791   ierr = PetscInfo2(mat,"Average Search = %5.2f,max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
792 #endif
793   PetscFunctionReturn(0);
794 }
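
/*
   Illustrative sketch only (the function name is made up for illustration): the lookup that
   MatSetValues_MPIBAIJ_HT() performs against the table built above; probe linearly from the
   hashed slot, wrap around once, and report failure if the key is not found.
*/
#if 0
static PetscInt ExampleHashLookup(const PetscInt *HT,PetscInt ht_size,PetscInt key)
{
  PetscReal tmp;
  PetscInt  h1 = HASH(ht_size,key,tmp),idx;

  for (idx=h1; idx<ht_size; idx++) if (HT[idx] == key) return idx;
  for (idx=0; idx<h1; idx++)       if (HT[idx] == key) return idx;
  return -1;  /* key not present in the hash table */
}
#endif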
795 
796 #undef __FUNCT__
797 #define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
798 PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
799 {
800   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
801   PetscErrorCode ierr;
802   PetscInt       nstash,reallocs;
803   InsertMode     addv;
804 
805   PetscFunctionBegin;
806   if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);
807 
808   /* make sure all processors are either in INSERT_VALUES or ADD_VALUES mode */
809   ierr = MPI_Allreduce((PetscEnum*)&mat->insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
810   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
811   mat->insertmode = addv; /* in case this processor had no cache */
812 
813   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
814   ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
815   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
816   ierr = PetscInfo2(mat,"Stash has %D entries,uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
817   ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
818   ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
819   PetscFunctionReturn(0);
820 }
821 
822 #undef __FUNCT__
823 #define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
824 PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
825 {
826   Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
827   Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
828   PetscErrorCode ierr;
829   PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
830   PetscInt       *row,*col;
831   PetscBool      r1,r2,r3,other_disassembled;
832   MatScalar      *val;
833   InsertMode     addv = mat->insertmode;
834   PetscMPIInt    n;
835 
836   PetscFunctionBegin;
837   /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
838   if (!baij->donotstash && !mat->nooffprocentries) {
839     while (1) {
840       ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
841       if (!flg) break;
842 
843       for (i=0; i<n;) {
844         /* Now identify the consecutive vals belonging to the same row */
845         for (j=i,rstart=row[j]; j<n; j++) {
846           if (row[j] != rstart) break;
847         }
848         if (j < n) ncols = j-i;
849         else       ncols = n-i;
850         /* Now assemble all these values with a single function call */
851         ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
852         i    = j;
853       }
854     }
855     ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
856     /* Now process the block-stash. Since the values are stashed column-oriented,
857        clear the roworiented flags (i.e., insert column-oriented), and after MatSetValues()
858        restore the original flags */
859     r1 = baij->roworiented;
860     r2 = a->roworiented;
861     r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;
862 
863     baij->roworiented = PETSC_FALSE;
864     a->roworiented    = PETSC_FALSE;
865 
866     (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
867     while (1) {
868       ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
869       if (!flg) break;
870 
871       for (i=0; i<n;) {
872         /* Now identify the consecutive vals belonging to the same row */
873         for (j=i,rstart=row[j]; j<n; j++) {
874           if (row[j] != rstart) break;
875         }
876         if (j < n) ncols = j-i;
877         else       ncols = n-i;
878         ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,addv);CHKERRQ(ierr);
879         i    = j;
880       }
881     }
882     ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);
883 
884     baij->roworiented = r1;
885     a->roworiented    = r2;
886 
887     ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
888   }
889 
890   ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
891   ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);
892 
893   /* determine if any processor has disassembled; if so we must
894      also disassemble ourselves, in order that we may reassemble. */
895   /*
896      if the nonzero structure of submatrix B cannot change then we know that
897      no processor disassembled, thus we can skip this step (the MPI_PROD reduction below is true only if every process remained assembled)
898   */
899   if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
900     ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
901     if (mat->was_assembled && !other_disassembled) {
902       ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
903     }
904   }
905 
906   if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
907     ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
908   }
909   ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
910   ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);
911 
912 #if defined(PETSC_USE_INFO)
913   if (baij->ht && mode== MAT_FINAL_ASSEMBLY) {
914     ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);
915 
916     baij->ht_total_ct  = 0;
917     baij->ht_insert_ct = 0;
918   }
919 #endif
920   if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
921     ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);
922 
923     mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
924     mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
925   }
926 
927   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
928 
929   baij->rowvalues = 0;
930   PetscFunctionReturn(0);
931 }
932 
933 #include <petscdraw.h>
934 #undef __FUNCT__
935 #define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
936 static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
937 {
938   Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
939   PetscErrorCode    ierr;
940   PetscMPIInt       size = baij->size,rank = baij->rank;
941   PetscInt          bs   = mat->rmap->bs;
942   PetscBool         iascii,isdraw;
943   PetscViewer       sviewer;
944   PetscViewerFormat format;
945 
946   PetscFunctionBegin;
947   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
948   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
949   if (iascii) {
950     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
951     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
952       MatInfo info;
953       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
954       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
955       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
956       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
957                                                 rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
958       ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
959       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
960       ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
961       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
962       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
963       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
964       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
965       ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
966       PetscFunctionReturn(0);
967     } else if (format == PETSC_VIEWER_ASCII_INFO) {
968       ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
969       PetscFunctionReturn(0);
970     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
971       PetscFunctionReturn(0);
972     }
973   }
974 
975   if (isdraw) {
976     PetscDraw draw;
977     PetscBool isnull;
978     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
979     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
980   }
981 
982   if (size == 1) {
983     ierr = PetscObjectSetName((PetscObject)baij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
984     ierr = MatView(baij->A,viewer);CHKERRQ(ierr);
985   } else {
986     /* assemble the entire matrix onto first processor. */
987     Mat         A;
988     Mat_SeqBAIJ *Aloc;
989     PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
990     MatScalar   *a;
991 
992     /* Here we are creating a temporary matrix, so will assume MPIBAIJ is acceptable */
993     /* Perhaps this should be the type of mat? */
994     ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
995     if (!rank) {
996       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
997     } else {
998       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
999     }
1000     ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
1001     ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
1002     ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
1003     ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);
1004 
1005     /* copy over the A part */
1006     Aloc = (Mat_SeqBAIJ*)baij->A->data;
1007     ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1008     ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr);
1009 
1010     for (i=0; i<mbs; i++) {
1011       rvals[0] = bs*(baij->rstartbs + i);
1012       for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1013       for (j=ai[i]; j<ai[i+1]; j++) {
1014         col = (baij->cstartbs+aj[j])*bs;
1015         for (k=0; k<bs; k++) {
1016           ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
1017           col++; a += bs;
1018         }
1019       }
1020     }
1021     /* copy over the B part */
1022     Aloc = (Mat_SeqBAIJ*)baij->B->data;
1023     ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1024     for (i=0; i<mbs; i++) {
1025       rvals[0] = bs*(baij->rstartbs + i);
1026       for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1027       for (j=ai[i]; j<ai[i+1]; j++) {
1028         col = baij->garray[aj[j]]*bs;
1029         for (k=0; k<bs; k++) {
1030           ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
1031           col++; a += bs;
1032         }
1033       }
1034     }
1035     ierr = PetscFree(rvals);CHKERRQ(ierr);
1036     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1037     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1038     /*
1039        Everyone has to call to draw the matrix since the graphics waits are
1040        synchronized across all processors that share the PetscDraw object
1041     */
1042     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
1043     if (!rank) {
1044       ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
1045       /* Set the type name to MATMPIBAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqBAIJ_ASCII()*/
1046       ierr = PetscStrcpy(((PetscObject)((Mat_MPIBAIJ*)(A->data))->A)->type_name,MATMPIBAIJ);CHKERRQ(ierr);
1047       ierr = MatView(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
1048     }
1049     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
1050     ierr = MatDestroy(&A);CHKERRQ(ierr);
1051   }
1052   PetscFunctionReturn(0);
1053 }
1054 
1055 #undef __FUNCT__
1056 #define __FUNCT__ "MatView_MPIBAIJ_Binary"
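/*
   Writes the standard PETSc binary format: a 4-entry header (MAT_FILE_CLASSID, M, N, total nz),
   then the per-row nonzero counts, then all column indices, then all numerical values; process 0
   gathers the pieces from the other ranks under PetscViewerFlowControl to bound memory use.
*/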
1057 static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
1058 {
1059   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
1060   Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
1061   Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
1062   PetscErrorCode ierr;
1063   PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
1064   PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
1065   int            fd;
1066   PetscScalar    *column_values;
1067   FILE           *file;
1068   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
1069   PetscInt       message_count,flowcontrolcount;
1070 
1071   PetscFunctionBegin;
1072   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
1073   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
1074   nz   = bs2*(A->nz + B->nz);
1075   rlen = mat->rmap->n;
1076   if (!rank) {
1077     header[0] = MAT_FILE_CLASSID;
1078     header[1] = mat->rmap->N;
1079     header[2] = mat->cmap->N;
1080 
1081     ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1082     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
1083     ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1084     /* get largest number of rows any processor has */
1085     range = mat->rmap->range;
1086     for (i=1; i<size; i++) {
1087       rlen = PetscMax(rlen,range[i+1] - range[i]);
1088     }
1089   } else {
1090     ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1091   }
1092 
1093   ierr = PetscMalloc((rlen/bs)*sizeof(PetscInt),&crow_lens);CHKERRQ(ierr);
1094   /* compute lengths of each row  */
1095   for (i=0; i<a->mbs; i++) {
1096     crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
1097   }
1098   /* store the row lengths to the file */
1099   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1100   if (!rank) {
1101     MPI_Status status;
1102     ierr = PetscMalloc(rlen*sizeof(PetscInt),&row_lens);CHKERRQ(ierr);
1103     rlen = (range[1] - range[0])/bs;
1104     for (i=0; i<rlen; i++) {
1105       for (j=0; j<bs; j++) {
1106         row_lens[i*bs+j] = bs*crow_lens[i];
1107       }
1108     }
1109     ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1110     for (i=1; i<size; i++) {
1111       rlen = (range[i+1] - range[i])/bs;
1112       ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
1113       ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1114       for (k=0; k<rlen; k++) {
1115         for (j=0; j<bs; j++) {
1116           row_lens[k*bs+j] = bs*crow_lens[k];
1117         }
1118       }
1119       ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1120     }
1121     ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
1122     ierr = PetscFree(row_lens);CHKERRQ(ierr);
1123   } else {
1124     ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
1125     ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1126     ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
1127   }
1128   ierr = PetscFree(crow_lens);CHKERRQ(ierr);
1129 
1130   /* load up the local column indices. Include them for every row, not just once per block row, since process 0 does not have the
1131      information needed to expand a block row into its individual rows. This does require more communication, but still no more than
1132      the communication needed for the nonzero values  */
1133   nzmax = nz; /* space the largest processor needs */
1134   ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1135   ierr  = PetscMalloc(nzmax*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
1136   cnt   = 0;
1137   for (i=0; i<a->mbs; i++) {
1138     pcnt = cnt;
1139     for (j=B->i[i]; j<B->i[i+1]; j++) {
1140       if ((col = garray[B->j[j]]) > cstart) break;
1141       for (l=0; l<bs; l++) {
1142         column_indices[cnt++] = bs*col+l;
1143       }
1144     }
1145     for (k=A->i[i]; k<A->i[i+1]; k++) {
1146       for (l=0; l<bs; l++) {
1147         column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
1148       }
1149     }
1150     for (; j<B->i[i+1]; j++) {
1151       for (l=0; l<bs; l++) {
1152         column_indices[cnt++] = bs*garray[B->j[j]]+l;
1153       }
1154     }
1155     len = cnt - pcnt;
1156     for (k=1; k<bs; k++) {
1157       ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
1158       cnt += len;
1159     }
1160   }
1161   if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);
1162 
1163   /* store the columns to the file */
1164   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1165   if (!rank) {
1166     MPI_Status status;
1167     ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1168     for (i=1; i<size; i++) {
1169       ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
1170       ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1171       ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1172       ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
1173     }
1174     ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
1175   } else {
1176     ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
1177     ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1178     ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1179     ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
1180   }
1181   ierr = PetscFree(column_indices);CHKERRQ(ierr);
1182 
1183   /* load up the numerical values */
1184   ierr = PetscMalloc(nzmax*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
1185   cnt  = 0;
1186   for (i=0; i<a->mbs; i++) {
1187     rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
1188     for (j=B->i[i]; j<B->i[i+1]; j++) {
1189       if (garray[B->j[j]] > cstart) break;
1190       for (l=0; l<bs; l++) {
1191         for (ll=0; ll<bs; ll++) {
1192           column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
1193         }
1194       }
1195       cnt += bs;
1196     }
1197     for (k=A->i[i]; k<A->i[i+1]; k++) {
1198       for (l=0; l<bs; l++) {
1199         for (ll=0; ll<bs; ll++) {
1200           column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
1201         }
1202       }
1203       cnt += bs;
1204     }
1205     for (; j<B->i[i+1]; j++) {
1206       for (l=0; l<bs; l++) {
1207         for (ll=0; ll<bs; ll++) {
1208           column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
1209         }
1210       }
1211       cnt += bs;
1212     }
1213     cnt += (bs-1)*rlen;
1214   }
1215   if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);
1216 
1217   /* store the column values to the file */
1218   ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
1219   if (!rank) {
1220     MPI_Status status;
1221     ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
1222     for (i=1; i<size; i++) {
1223       ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
1224       ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1225       ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
1226       ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
1227     }
1228     ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
1229   } else {
1230     ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
1231     ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1232     ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
1233     ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
1234   }
1235   ierr = PetscFree(column_values);CHKERRQ(ierr);
1236 
1237   ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
1238   if (file) {
1239     fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
1240   }
1241   PetscFunctionReturn(0);
1242 }
1243 
1244 #undef __FUNCT__
1245 #define __FUNCT__ "MatView_MPIBAIJ"
1246 PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
1247 {
1248   PetscErrorCode ierr;
1249   PetscBool      iascii,isdraw,issocket,isbinary;
1250 
1251   PetscFunctionBegin;
1252   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
1253   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
1254   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
1255   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
1256   if (iascii || isdraw || issocket) {
1257     ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
1258   } else if (isbinary) {
1259     ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
1260   }
1261   PetscFunctionReturn(0);
1262 }
1263 
1264 #undef __FUNCT__
1265 #define __FUNCT__ "MatDestroy_MPIBAIJ"
1266 PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
1267 {
1268   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1269   PetscErrorCode ierr;
1270 
1271   PetscFunctionBegin;
1272 #if defined(PETSC_USE_LOG)
1273   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
1274 #endif
1275   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
1276   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
1277   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
1278   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
1279 #if defined(PETSC_USE_CTABLE)
1280   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
1281 #else
1282   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
1283 #endif
1284   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
1285   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
1286   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
1287   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
1288   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
1289   ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
1290   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
1291   ierr = PetscFree(mat->data);CHKERRQ(ierr);
1292 
1293   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
1294   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
1295   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
1296   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
1297   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
1298   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
1299   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
1300   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
1301   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
1302   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
1303   PetscFunctionReturn(0);
1304 }
1305 
1306 #undef __FUNCT__
1307 #define __FUNCT__ "MatMult_MPIBAIJ"
1308 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1309 {
1310   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1311   PetscErrorCode ierr;
1312   PetscInt       nt;
1313 
1314   PetscFunctionBegin;
1315   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1316   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1317   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1318   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1319   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1320   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1321   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1322   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1323   PetscFunctionReturn(0);
1324 }
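
/*
   For illustration only (hypothetical caller, not part of the original source):
   a minimal sketch of assembling an MPIBAIJ matrix and applying it with MatMult(),
   which dispatches to MatMult_MPIBAIJ() above.  Sizes, block size and nonzero
   estimates below are made up.

     Mat A;
     Vec x,y;
     ierr = MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,100,100,5,NULL,2,NULL,&A);CHKERRQ(ierr);
     ... fill A with MatSetValuesBlocked() ...
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatGetVecs(A,&x,&y);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = MatMult(A,x,y);CHKERRQ(ierr);
*/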
1325 
1326 #undef __FUNCT__
1327 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1328 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1329 {
1330   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1331   PetscErrorCode ierr;
1332 
1333   PetscFunctionBegin;
1334   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1335   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1336   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1337   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 #undef __FUNCT__
1342 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1343 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1344 {
1345   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1346   PetscErrorCode ierr;
1347   PetscBool      merged;
1348 
1349   PetscFunctionBegin;
1350   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1351   /* do nondiagonal part */
1352   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1353   if (!merged) {
1354     /* send it on its way */
1355     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1356     /* do local part */
1357     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1358     /* receive remote parts: note this assumes the values are not actually */
1359     /* inserted in yy until the next line */
1360     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1361   } else {
1362     /* do local part */
1363     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1364     /* send it on its way */
1365     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1366     /* values actually were received in the Begin() but we need to call this nop */
1367     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1368   }
1369   PetscFunctionReturn(0);
1370 }
1371 
1372 #undef __FUNCT__
1373 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1374 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1375 {
1376   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1377   PetscErrorCode ierr;
1378 
1379   PetscFunctionBegin;
1380   /* do nondiagonal part */
1381   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1382   /* send it on its way */
1383   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1384   /* do local part */
1385   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1386   /* receive remote parts: note this assumes the values are not actually */
1387   /* inserted in zz until the next line, which is true for this implementation */
1388   /* but may not always hold. */
1389   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 /*
1394   This only works correctly for square matrices where the subblock A->A is the
1395    diagonal block
1396 */
1397 #undef __FUNCT__
1398 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1399 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1400 {
1401   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1402   PetscErrorCode ierr;
1403 
1404   PetscFunctionBegin;
1405   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1406   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1407   PetscFunctionReturn(0);
1408 }
1409 
1410 #undef __FUNCT__
1411 #define __FUNCT__ "MatScale_MPIBAIJ"
1412 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1413 {
1414   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1415   PetscErrorCode ierr;
1416 
1417   PetscFunctionBegin;
1418   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1419   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1420   PetscFunctionReturn(0);
1421 }
1422 
1423 #undef __FUNCT__
1424 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1425 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1426 {
1427   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1428   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1429   PetscErrorCode ierr;
1430   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1431   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1432   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1433 
1434   PetscFunctionBegin;
1435   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1436   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1437   mat->getrowactive = PETSC_TRUE;
1438 
1439   if (!mat->rowvalues && (idx || v)) {
1440     /*
1441         allocate enough space to hold information from the longest row.
1442     */
1443     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1444     PetscInt    max = 1,mbs = mat->mbs,tmp;
1445     for (i=0; i<mbs; i++) {
1446       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1447       if (max < tmp) max = tmp;
1448     }
1449     ierr = PetscMalloc2(max*bs2,PetscScalar,&mat->rowvalues,max*bs2,PetscInt,&mat->rowindices);CHKERRQ(ierr);
1450   }
1451   lrow = row - brstart;
1452 
1453   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1454   if (!v)   {pvA = 0; pvB = 0;}
1455   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1456   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1457   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1458   nztot = nzA + nzB;
1459 
1460   cmap = mat->garray;
1461   if (v  || idx) {
1462     if (nztot) {
1463       /* Sort by increasing column numbers, assuming A and B already sorted */
1464       PetscInt imark = -1;
1465       if (v) {
1466         *v = v_p = mat->rowvalues;
1467         for (i=0; i<nzB; i++) {
1468           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1469           else break;
1470         }
1471         imark = i;
1472         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1473         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1474       }
1475       if (idx) {
1476         *idx = idx_p = mat->rowindices;
1477         if (imark > -1) {
1478           for (i=0; i<imark; i++) {
1479             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1480           }
1481         } else {
1482           for (i=0; i<nzB; i++) {
1483             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1484             else break;
1485           }
1486           imark = i;
1487         }
1488         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1489         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
1490       }
1491     } else {
1492       if (idx) *idx = 0;
1493       if (v)   *v   = 0;
1494     }
1495   }
1496   *nz  = nztot;
1497   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1498   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1499   PetscFunctionReturn(0);
1500 }
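
/*
   For illustration (hypothetical caller, not part of the original source): the
   public MatGetRow()/MatRestoreRow() pair that reaches the routine above may only
   be used on locally owned rows, and only one row may be active at a time; the
   returned column indices are global and sorted.

     PetscInt          rstart,rend,row,ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     for (row=rstart; row<rend; row++) {
       ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
       ... use cols[] and vals[] ...
       ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
     }
*/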
1501 
1502 #undef __FUNCT__
1503 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1504 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1505 {
1506   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1507 
1508   PetscFunctionBegin;
1509   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1510   baij->getrowactive = PETSC_FALSE;
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 #undef __FUNCT__
1515 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1516 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1517 {
1518   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1519   PetscErrorCode ierr;
1520 
1521   PetscFunctionBegin;
1522   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1523   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1524   PetscFunctionReturn(0);
1525 }
1526 
1527 #undef __FUNCT__
1528 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1529 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1530 {
1531   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1532   Mat            A  = a->A,B = a->B;
1533   PetscErrorCode ierr;
1534   PetscReal      isend[5],irecv[5];
1535 
1536   PetscFunctionBegin;
1537   info->block_size = (PetscReal)matin->rmap->bs;
1538 
1539   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1540 
1541   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1542   isend[3] = info->memory;  isend[4] = info->mallocs;
1543 
1544   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1545 
1546   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1547   isend[3] += info->memory;  isend[4] += info->mallocs;
1548 
1549   if (flag == MAT_LOCAL) {
1550     info->nz_used      = isend[0];
1551     info->nz_allocated = isend[1];
1552     info->nz_unneeded  = isend[2];
1553     info->memory       = isend[3];
1554     info->mallocs      = isend[4];
1555   } else if (flag == MAT_GLOBAL_MAX) {
1556     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1557 
1558     info->nz_used      = irecv[0];
1559     info->nz_allocated = irecv[1];
1560     info->nz_unneeded  = irecv[2];
1561     info->memory       = irecv[3];
1562     info->mallocs      = irecv[4];
1563   } else if (flag == MAT_GLOBAL_SUM) {
1564     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1565 
1566     info->nz_used      = irecv[0];
1567     info->nz_allocated = irecv[1];
1568     info->nz_unneeded  = irecv[2];
1569     info->memory       = irecv[3];
1570     info->mallocs      = irecv[4];
1571   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1572   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1573   info->fill_ratio_needed = 0;
1574   info->factor_mallocs    = 0;
1575   PetscFunctionReturn(0);
1576 }
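
/*
   For illustration (hypothetical caller, not part of the original source): querying
   an assembled matrix through the public MatGetInfo(), e.g. to report the global
   number of nonzeros.

     MatInfo info;
     ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"nz used %g, allocated %g, mallocs %g\n",
                        info.nz_used,info.nz_allocated,info.mallocs);CHKERRQ(ierr);
*/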
1577 
1578 #undef __FUNCT__
1579 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1580 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1581 {
1582   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1583   PetscErrorCode ierr;
1584 
1585   PetscFunctionBegin;
1586   switch (op) {
1587   case MAT_NEW_NONZERO_LOCATIONS:
1588   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1589   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1590   case MAT_KEEP_NONZERO_PATTERN:
1591   case MAT_NEW_NONZERO_LOCATION_ERR:
1592     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1593     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1594     break;
1595   case MAT_ROW_ORIENTED:
1596     a->roworiented = flg;
1597 
1598     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1599     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1600     break;
1601   case MAT_NEW_DIAGONALS:
1602     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1603     break;
1604   case MAT_IGNORE_OFF_PROC_ENTRIES:
1605     a->donotstash = flg;
1606     break;
1607   case MAT_USE_HASH_TABLE:
1608     a->ht_flag = flg;
1609     break;
1610   case MAT_SYMMETRIC:
1611   case MAT_STRUCTURALLY_SYMMETRIC:
1612   case MAT_HERMITIAN:
1613   case MAT_SYMMETRY_ETERNAL:
1614     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1615     break;
1616   default:
1617     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1618   }
1619   PetscFunctionReturn(0);
1620 }
1621 
1622 #undef __FUNCT__
1623 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1624 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1625 {
1626   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1627   Mat_SeqBAIJ    *Aloc;
1628   Mat            B;
1629   PetscErrorCode ierr;
1630   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1631   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1632   MatScalar      *a;
1633 
1634   PetscFunctionBegin;
1635   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1636   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1637     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1638     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1639     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1640     /* Do not know preallocation information, but must set block size */
1641     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1642   } else {
1643     B = *matout;
1644   }
1645 
1646   /* copy over the A part */
1647   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1648   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1649   ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr);
1650 
1651   for (i=0; i<mbs; i++) {
1652     rvals[0] = bs*(baij->rstartbs + i);
1653     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1654     for (j=ai[i]; j<ai[i+1]; j++) {
1655       col = (baij->cstartbs+aj[j])*bs;
1656       for (k=0; k<bs; k++) {
1657         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1658 
1659         col++; a += bs;
1660       }
1661     }
1662   }
1663   /* copy over the B part */
1664   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1665   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1666   for (i=0; i<mbs; i++) {
1667     rvals[0] = bs*(baij->rstartbs + i);
1668     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1669     for (j=ai[i]; j<ai[i+1]; j++) {
1670       col = baij->garray[aj[j]]*bs;
1671       for (k=0; k<bs; k++) {
1672         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1673         col++;
1674         a += bs;
1675       }
1676     }
1677   }
1678   ierr = PetscFree(rvals);CHKERRQ(ierr);
1679   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1680   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1681 
1682   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1683   else {
1684     ierr = MatHeaderMerge(A,B);CHKERRQ(ierr);
1685   }
1686   PetscFunctionReturn(0);
1687 }
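
/*
   For illustration (hypothetical caller, not part of the original source): both the
   out-of-place and the in-place (square matrices only) forms reach the routine above.

     Mat At;
     ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
     ierr = MatTranspose(A,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
*/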
1688 
1689 #undef __FUNCT__
1690 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1691 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1692 {
1693   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1694   Mat            a     = baij->A,b = baij->B;
1695   PetscErrorCode ierr;
1696   PetscInt       s1,s2,s3;
1697 
1698   PetscFunctionBegin;
1699   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1700   if (rr) {
1701     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1702     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1703     /* Overlap communication with computation. */
1704     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1705   }
1706   if (ll) {
1707     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1708     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1709     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1710   }
1711   /* scale the diagonal block */
1712   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1713 
1714   if (rr) {
1715     /* Do a scatter end and then right scale the off-diagonal block */
1716     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1717     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1718   }
1719   PetscFunctionReturn(0);
1720 }
1721 
1722 #undef __FUNCT__
1723 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1724 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1725 {
1726   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1727   PetscErrorCode    ierr;
1728   PetscMPIInt       imdex,size = l->size,n,rank = l->rank;
1729   PetscInt          i,*owners = A->rmap->range;
1730   PetscInt          *nprocs,j,idx,nsends,row;
1731   PetscInt          nmax,*svalues,*starts,*owner,nrecvs;
1732   PetscInt          *rvalues,tag = ((PetscObject)A)->tag,count,base,slen,*source,lastidx = -1;
1733   PetscInt          *lens,*lrows,*values,rstart_bs=A->rmap->rstart;
1734   MPI_Comm          comm;
1735   MPI_Request       *send_waits,*recv_waits;
1736   MPI_Status        recv_status,*send_status;
1737   const PetscScalar *xx;
1738   PetscScalar       *bb;
1739 #if defined(PETSC_DEBUG)
1740   PetscBool         found = PETSC_FALSE;
1741 #endif
1742 
1743   PetscFunctionBegin;
1744   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1745   /*  first count number of contributors to each processor */
1746   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
1747   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
1748   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr);  /* see note*/
1749   j    = 0;
1750   for (i=0; i<N; i++) {
1751     if (lastidx > (idx = rows[i])) j = 0;
1752     lastidx = idx;
1753     for (; j<size; j++) {
1754       if (idx >= owners[j] && idx < owners[j+1]) {
1755         nprocs[2*j]++;
1756         nprocs[2*j+1] = 1;
1757         owner[i]      = j;
1758 #if defined(PETSC_DEBUG)
1759         found = PETSC_TRUE;
1760 #endif
1761         break;
1762       }
1763     }
1764 #if defined(PETSC_DEBUG)
1765     if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
1766     found = PETSC_FALSE;
1767 #endif
1768   }
1769   nsends = 0;  for (i=0; i<size; i++) nsends += nprocs[2*i+1];
1770 
1771   if (A->nooffproczerorows) {
1772     if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but set an off process zero row");
1773     nrecvs = nsends;
1774     nmax   = N;
1775   } else {
1776     /* inform other processors of number of messages and max length*/
1777     ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
1778   }
1779 
1780   /* post receives:   */
1781   ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
1782   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
1783   for (i=0; i<nrecvs; i++) {
1784     ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
1785   }
1786 
1787   /* do sends:
1788      1) starts[i] gives the starting index in svalues for stuff going to
1789      the ith processor
1790   */
1791   ierr      = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
1792   ierr      = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
1793   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
1794   starts[0] = 0;
1795   for (i=1; i<size; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
1796   for (i=0; i<N; i++) {
1797     svalues[starts[owner[i]]++] = rows[i];
1798   }
1799 
1800   starts[0] = 0;
1801   for (i=1; i<size+1; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
1802   count = 0;
1803   for (i=0; i<size; i++) {
1804     if (nprocs[2*i+1]) {
1805       ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
1806     }
1807   }
1808   ierr = PetscFree(starts);CHKERRQ(ierr);
1809 
1810   base = owners[rank];
1811 
1812   /*  wait on receives */
1813   ierr  = PetscMalloc2(nrecvs+1,PetscInt,&lens,nrecvs+1,PetscInt,&source);CHKERRQ(ierr);
1814   count = nrecvs;
1815   slen  = 0;
1816   while (count) {
1817     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
1818     /* unpack receives into our local space */
1819     ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
1820 
1821     source[imdex] = recv_status.MPI_SOURCE;
1822     lens[imdex]   = n;
1823     slen         += n;
1824     count--;
1825   }
1826   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1827 
1828   /* move the data into the send scatter */
1829   ierr  = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
1830   count = 0;
1831   for (i=0; i<nrecvs; i++) {
1832     values = rvalues + i*nmax;
1833     for (j=0; j<lens[i]; j++) {
1834       lrows[count++] = values[j] - base;
1835     }
1836   }
1837   ierr = PetscFree(rvalues);CHKERRQ(ierr);
1838   ierr = PetscFree2(lens,source);CHKERRQ(ierr);
1839   ierr = PetscFree(owner);CHKERRQ(ierr);
1840   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1841 
1842   /* fix right hand side if needed */
1843   if (x && b) {
1844     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1845     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1846     for (i=0; i<slen; i++) {
1847       bb[lrows[i]] = diag*xx[lrows[i]];
1848     }
1849     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1850     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1851   }
1852 
1853   /* actually zap the local rows */
1854   /*
1855         Zero the required rows. If the "diagonal block" of the matrix
1856      is square and the user wishes to set the diagonal, we use separate
1857      code so that MatSetValues() is not called for each diagonal entry
1858      (which would allocate new memory, causing many mallocs and slowing things down).
1859 
1860   */
1861   /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
1862   ierr = MatZeroRows_SeqBAIJ(l->B,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1863   if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
1864     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr);
1865   } else if (diag != 0.0) {
1866     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1867     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1868        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1869     for (i=0; i<slen; i++) {
1870       row  = lrows[i] + rstart_bs;
1871       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1872     }
1873     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1874     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1875   } else {
1876     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1877   }
1878 
1879   ierr = PetscFree(lrows);CHKERRQ(ierr);
1880 
1881   /* wait on sends */
1882   if (nsends) {
1883     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
1884     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
1885     ierr = PetscFree(send_status);CHKERRQ(ierr);
1886   }
1887   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1888   ierr = PetscFree(svalues);CHKERRQ(ierr);
1889   PetscFunctionReturn(0);
1890 }
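
/*
   For illustration (hypothetical caller, not part of the original source): the public
   MatZeroRows() entry point that reaches the routine above.  Rows are given by global
   index and need not be owned by the calling process; when x and b are provided the
   right-hand side is also fixed so that b[row] = diag*x[row] for every zeroed row.

     PetscInt rows[3] = {0,5,42};
     ierr = MatZeroRows(A,3,rows,1.0,x,b);CHKERRQ(ierr);
*/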
1891 
1892 #undef __FUNCT__
1893 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
1894 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1895 {
1896   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1897   PetscErrorCode ierr;
1898 
1899   PetscFunctionBegin;
1900   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1901   PetscFunctionReturn(0);
1902 }
1903 
1904 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
1905 
1906 #undef __FUNCT__
1907 #define __FUNCT__ "MatEqual_MPIBAIJ"
1908 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
1909 {
1910   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
1911   Mat            a,b,c,d;
1912   PetscBool      flg;
1913   PetscErrorCode ierr;
1914 
1915   PetscFunctionBegin;
1916   a = matA->A; b = matA->B;
1917   c = matB->A; d = matB->B;
1918 
1919   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1920   if (flg) {
1921     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1922   }
1923   ierr = MPI_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1924   PetscFunctionReturn(0);
1925 }
1926 
1927 #undef __FUNCT__
1928 #define __FUNCT__ "MatCopy_MPIBAIJ"
1929 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
1930 {
1931   PetscErrorCode ierr;
1932   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1933   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
1934 
1935   PetscFunctionBegin;
1936   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1937   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1938     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1939   } else {
1940     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1941     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1942   }
1943   PetscFunctionReturn(0);
1944 }
1945 
1946 #undef __FUNCT__
1947 #define __FUNCT__ "MatSetUp_MPIBAIJ"
1948 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
1949 {
1950   PetscErrorCode ierr;
1951 
1952   PetscFunctionBegin;
1953   ierr =  MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1954   PetscFunctionReturn(0);
1955 }
1956 
1957 #undef __FUNCT__
1958 #define __FUNCT__ "MatAXPY_MPIBAIJ"
1959 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1960 {
1961   PetscErrorCode ierr;
1962   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
1963   PetscBLASInt   bnz,one=1;
1964   Mat_SeqBAIJ    *x,*y;
1965 
1966   PetscFunctionBegin;
1967   if (str == SAME_NONZERO_PATTERN) {
1968     PetscScalar alpha = a;
1969     x    = (Mat_SeqBAIJ*)xx->A->data;
1970     y    = (Mat_SeqBAIJ*)yy->A->data;
1971     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
1972     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
1973     x    = (Mat_SeqBAIJ*)xx->B->data;
1974     y    = (Mat_SeqBAIJ*)yy->B->data;
1975     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
1976     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
1977   } else {
1978     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1979   }
1980   PetscFunctionReturn(0);
1981 }
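
/*
   For illustration (hypothetical caller, not part of the original source): the fast
   BLAS axpy path above is taken only when the caller asserts that X and Y have
   identical nonzero patterns.

     ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
*/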
1982 
1983 #undef __FUNCT__
1984 #define __FUNCT__ "MatRealPart_MPIBAIJ"
1985 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
1986 {
1987   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1988   PetscErrorCode ierr;
1989 
1990   PetscFunctionBegin;
1991   ierr = MatRealPart(a->A);CHKERRQ(ierr);
1992   ierr = MatRealPart(a->B);CHKERRQ(ierr);
1993   PetscFunctionReturn(0);
1994 }
1995 
1996 #undef __FUNCT__
1997 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
1998 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
1999 {
2000   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2001   PetscErrorCode ierr;
2002 
2003   PetscFunctionBegin;
2004   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2005   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2006   PetscFunctionReturn(0);
2007 }
2008 
2009 #undef __FUNCT__
2010 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2011 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2012 {
2013   PetscErrorCode ierr;
2014   IS             iscol_local;
2015   PetscInt       csize;
2016 
2017   PetscFunctionBegin;
2018   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2019   if (call == MAT_REUSE_MATRIX) {
2020     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2021     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2022   } else {
2023     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2024   }
2025   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2026   if (call == MAT_INITIAL_MATRIX) {
2027     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2028     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2029   }
2030   PetscFunctionReturn(0);
2031 }
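
/*
   For illustration (hypothetical caller, not part of the original source): typical use
   of the MAT_INITIAL_MATRIX / MAT_REUSE_MATRIX pattern handled above.  On the first
   call the gathered column IS is composed with the new matrix under "ISAllGather", so
   a later call with MAT_REUSE_MATRIX and the same index sets can find and reuse it.

     IS  isrow,iscol;
     Mat S;
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&S);CHKERRQ(ierr);
     ... change numerical values of A, keeping the same nonzero pattern ...
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_REUSE_MATRIX,&S);CHKERRQ(ierr);
     ierr = MatDestroy(&S);CHKERRQ(ierr);
*/
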
2032 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2033 #undef __FUNCT__
2034 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2035 /*
2036   Not great since it makes two copies of the submatrix: first a sequential
2037   SeqBAIJ on each process, and then the final result by concatenating the
2038   local pieces. Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2039 */
2040 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2041 {
2042   PetscErrorCode ierr;
2043   PetscMPIInt    rank,size;
2044   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2045   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2046   Mat            M,Mreuse;
2047   MatScalar      *vwork,*aa;
2048   MPI_Comm       comm;
2049   IS             isrow_new, iscol_new;
2050   PetscBool      idflag,allrows, allcols;
2051   Mat_SeqBAIJ    *aij;
2052 
2053   PetscFunctionBegin;
2054   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2055   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2056   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2057   /* The compression and expansion should be avoided. It does not report
2058      errors and might change the indices, hence it is buggy. */
2059   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2060   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2061 
2062   /* Check for special case: each processor gets entire matrix columns */
2063   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2064   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2065   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2066   else allcols = PETSC_FALSE;
2067 
2068   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2069   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2070   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2071   else allrows = PETSC_FALSE;
2072 
2073   if (call ==  MAT_REUSE_MATRIX) {
2074     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2075     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2076     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2077   } else {
2078     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2079   }
2080   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2081   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2082   /*
2083       m - number of local rows
2084       n - number of columns (same on all processors)
2085       rstart - first row in new global matrix generated
2086   */
2087   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2088   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2089   m    = m/bs;
2090   n    = n/bs;
2091 
2092   if (call == MAT_INITIAL_MATRIX) {
2093     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2094     ii  = aij->i;
2095     jj  = aij->j;
2096 
2097     /*
2098         Determine the number of non-zeros in the diagonal and off-diagonal
2099         portions of the matrix in order to do correct preallocation
2100     */
2101 
2102     /* first get start and end of "diagonal" columns */
2103     if (csize == PETSC_DECIDE) {
2104       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2105       if (mglobal == n*bs) { /* square matrix */
2106         nlocal = m;
2107       } else {
2108         nlocal = n/size + ((n % size) > rank);
2109       }
2110     } else {
2111       nlocal = csize/bs;
2112     }
2113     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2114     rstart = rend - nlocal;
2115     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2116 
2117     /* next, compute all the lengths */
2118     ierr  = PetscMalloc2(m+1,PetscInt,&dlens,m+1,PetscInt,&olens);CHKERRQ(ierr);
2119     for (i=0; i<m; i++) {
2120       jend = ii[i+1] - ii[i];
2121       olen = 0;
2122       dlen = 0;
2123       for (j=0; j<jend; j++) {
2124         if (*jj < rstart || *jj >= rend) olen++;
2125         else dlen++;
2126         jj++;
2127       }
2128       olens[i] = olen;
2129       dlens[i] = dlen;
2130     }
2131     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2132     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2133     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2134     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2135     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2136   } else {
2137     PetscInt ml,nl;
2138 
2139     M    = *newmat;
2140     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2141     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2142     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2143     /*
2144          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2145        rather than the slower MatSetValues().
2146     */
2147     M->was_assembled = PETSC_TRUE;
2148     M->assembled     = PETSC_FALSE;
2149   }
2150   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2151   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2152   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2153   ii   = aij->i;
2154   jj   = aij->j;
2155   aa   = aij->a;
2156   for (i=0; i<m; i++) {
2157     row   = rstart/bs + i;
2158     nz    = ii[i+1] - ii[i];
2159     cwork = jj;     jj += nz;
2160     vwork = aa;     aa += nz*bs*bs;
2161     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2162   }
2163 
2164   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2165   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2166   *newmat = M;
2167 
2168   /* save submatrix used in processor for next request */
2169   if (call ==  MAT_INITIAL_MATRIX) {
2170     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2171     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2172   }
2173   PetscFunctionReturn(0);
2174 }
2175 
2176 #undef __FUNCT__
2177 #define __FUNCT__ "MatPermute_MPIBAIJ"
2178 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2179 {
2180   MPI_Comm       comm,pcomm;
2181   PetscInt       clocal_size,nrows;
2182   const PetscInt *rows;
2183   PetscMPIInt    size;
2184   IS             crowp,lcolp;
2185   PetscErrorCode ierr;
2186 
2187   PetscFunctionBegin;
2188   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2189   /* make a collective version of 'rowp' */
2190   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2191   if (pcomm==comm) {
2192     crowp = rowp;
2193   } else {
2194     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2195     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2196     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2197     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2198   }
2199   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2200   /* make a local version of 'colp' */
2201   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2202   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2203   if (size==1) {
2204     lcolp = colp;
2205   } else {
2206     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2207   }
2208   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2209   /* now we just get the submatrix */
2210   ierr = MatGetLocalSize(A,PETSC_NULL,&clocal_size);CHKERRQ(ierr);
2211   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2212   /* clean up */
2213   if (pcomm!=comm) {
2214     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2215   }
2216   if (size>1) {
2217     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2218   }
2219   PetscFunctionReturn(0);
2220 }
2221 
2222 #undef __FUNCT__
2223 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2224 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2225 {
2226   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2227   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2228 
2229   PetscFunctionBegin;
2230   if (nghosts) *nghosts = B->nbs;
2231   if (ghosts) *ghosts = baij->garray;
2232   PetscFunctionReturn(0);
2233 }
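
/*
   For illustration (hypothetical caller, not part of the original source): for BAIJ
   the ghost indices returned above are block indices (garray), so they pair naturally
   with a block-ghosted vector.

     PetscInt       bs,n,N,nghost;
     const PetscInt *ghosts;
     Vec            xg;
     ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
     ierr = MatGetLocalSize(A,NULL,&n);CHKERRQ(ierr);
     ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
     ierr = MatGetGhosts(A,&nghost,&ghosts);CHKERRQ(ierr);
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,N,nghost,ghosts,&xg);CHKERRQ(ierr);
*/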
2234 
2235 #undef __FUNCT__
2236 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2237 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2238 {
2239   Mat            B;
2240   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2241   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2242   Mat_SeqAIJ     *b;
2243   PetscErrorCode ierr;
2244   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2245   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2246   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2247 
2248   PetscFunctionBegin;
2249   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2250   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2251 
2252   /* ----------------------------------------------------------------
2253      Tell every processor the number of nonzeros per row
2254   */
2255   ierr = PetscMalloc((A->rmap->N/bs)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
2256   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2257     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2258   }
2259   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2260   ierr      = PetscMalloc(2*size*sizeof(PetscMPIInt),&recvcounts);CHKERRQ(ierr);
2261   displs    = recvcounts + size;
2262   for (i=0; i<size; i++) {
2263     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2264     displs[i]     = A->rmap->range[i]/bs;
2265   }
2266 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2267   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2268 #else
2269   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2270 #endif
2271   /* ---------------------------------------------------------------
2272      Create the sequential matrix of the same type as the local block diagonal
2273   */
2274   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2275   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2276   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2277   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2278   b    = (Mat_SeqAIJ*)B->data;
2279 
2280   /*--------------------------------------------------------------------
2281     Copy my part of matrix column indices over
2282   */
2283   sendcount  = ad->nz + bd->nz;
2284   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2285   a_jsendbuf = ad->j;
2286   b_jsendbuf = bd->j;
2287   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2288   cnt        = 0;
2289   for (i=0; i<n; i++) {
2290 
2291     /* put in lower diagonal portion */
2292     m = bd->i[i+1] - bd->i[i];
2293     while (m > 0) {
2294       /* is it above diagonal (in bd (compressed) numbering) */
2295       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2296       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2297       m--;
2298     }
2299 
2300     /* put in diagonal portion */
2301     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2302       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2303     }
2304 
2305     /* put in upper diagonal portion */
2306     while (m-- > 0) {
2307       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2308     }
2309   }
2310   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2311 
2312   /*--------------------------------------------------------------------
2313     Gather all column indices to all processors
2314   */
2315   for (i=0; i<size; i++) {
2316     recvcounts[i] = 0;
2317     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2318       recvcounts[i] += lens[j];
2319     }
2320   }
2321   displs[0] = 0;
2322   for (i=1; i<size; i++) {
2323     displs[i] = displs[i-1] + recvcounts[i-1];
2324   }
2325 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2326   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2327 #else
2328   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2329 #endif
2330   /*--------------------------------------------------------------------
2331     Assemble the matrix into usable form (note numerical values not yet set)
2332   */
2333   /* set the b->ilen (length of each row) values */
2334   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2335   /* set the b->i indices */
2336   b->i[0] = 0;
2337   for (i=1; i<=A->rmap->N/bs; i++) {
2338     b->i[i] = b->i[i-1] + lens[i-1];
2339   }
2340   ierr = PetscFree(lens);CHKERRQ(ierr);
2341   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2342   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2343   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2344 
2345   if (A->symmetric) {
2346     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2347   } else if (A->hermitian) {
2348     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2349   } else if (A->structurally_symmetric) {
2350     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2351   }
2352   *newmat = B;
2353   PetscFunctionReturn(0);
2354 }
2355 
2356 #undef __FUNCT__
2357 #define __FUNCT__ "MatSOR_MPIBAIJ"
2358 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2359 {
2360   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2361   PetscErrorCode ierr;
2362   Vec            bb1 = 0;
2363 
2364   PetscFunctionBegin;
2365   if (flag == SOR_APPLY_UPPER) {
2366     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2367     PetscFunctionReturn(0);
2368   }
2369 
2370   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS) {
2371     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2372   }
2373 
2374   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2375     if (flag & SOR_ZERO_INITIAL_GUESS) {
2376       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2377       its--;
2378     }
2379 
2380     while (its--) {
2381       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2382       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2383 
2384       /* update rhs: bb1 = bb - B*x */
2385       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2386       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2387 
2388       /* local sweep */
2389       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2390     }
2391   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2392     if (flag & SOR_ZERO_INITIAL_GUESS) {
2393       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2394       its--;
2395     }
2396     while (its--) {
2397       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2398       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2399 
2400       /* update rhs: bb1 = bb - B*x */
2401       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2402       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2403 
2404       /* local sweep */
2405       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2406     }
2407   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2408     if (flag & SOR_ZERO_INITIAL_GUESS) {
2409       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2410       its--;
2411     }
2412     while (its--) {
2413       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2414       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2415 
2416       /* update rhs: bb1 = bb - B*x */
2417       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2418       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2419 
2420       /* local sweep */
2421       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2422     }
2423   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Parallel SOR not supported for the requested sweep type");
2424 
2425   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2426   PetscFunctionReturn(0);
2427 }
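
/*
   For illustration (hypothetical caller, not part of the original source): only the
   "local" SOR variants are supported above; each outer iteration refreshes the ghost
   values and then relaxes with the on-process diagonal block, roughly
       x <- SOR(A_diag, b - B_off*x_ghost, x).
   A direct call might look like

     ierr = MatSOR(A,b,1.0,(MatSORType)(SOR_LOCAL_SYMMETRIC_SWEEP | SOR_ZERO_INITIAL_GUESS),0.0,2,1,x);CHKERRQ(ierr);

   which is roughly what PCSOR does when run with -pc_sor_local_symmetric.
*/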
2428 
2429 #undef __FUNCT__
2430 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2431 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2432 {
2433   PetscErrorCode ierr;
2434   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2435   PetscInt       N,i,*garray = aij->garray;
2436   PetscInt       ib,jb,bs = A->rmap->bs;
2437   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2438   MatScalar      *a_val = a_aij->a;
2439   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2440   MatScalar      *b_val = b_aij->a;
2441   PetscReal      *work;
2442 
2443   PetscFunctionBegin;
2444   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2445   ierr = PetscMalloc(N*sizeof(PetscReal),&work);CHKERRQ(ierr);
2446   ierr = PetscMemzero(work,N*sizeof(PetscReal));CHKERRQ(ierr);
2447   if (type == NORM_2) {
2448     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2449       for (jb=0; jb<bs; jb++) {
2450         for (ib=0; ib<bs; ib++) {
2451           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2452           a_val++;
2453         }
2454       }
2455     }
2456     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2457       for (jb=0; jb<bs; jb++) {
2458         for (ib=0; ib<bs; ib++) {
2459           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2460           b_val++;
2461         }
2462       }
2463     }
2464   } else if (type == NORM_1) {
2465     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2466       for (jb=0; jb<bs; jb++) {
2467         for (ib=0; ib<bs; ib++) {
2468           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2469           a_val++;
2470         }
2471       }
2472     }
2473     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2474       for (jb=0; jb<bs; jb++) {
2475         for (ib=0; ib<bs; ib++) {
2476           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2477           b_val++;
2478         }
2479       }
2480     }
2481   } else if (type == NORM_INFINITY) {
2482     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2483       for (jb=0; jb<bs; jb++) {
2484         for (ib=0; ib<bs; ib++) {
2485           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2486           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2487           a_val++;
2488         }
2489       }
2490     }
2491     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2492       for (jb=0; jb<bs; jb++) {
2493         for (ib=0; ib<bs; ib++) {
2494           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2495           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2496           b_val++;
2497         }
2498       }
2499     }
2500   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2501   if (type == NORM_INFINITY) {
2502     ierr = MPI_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2503   } else {
2504     ierr = MPI_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2505   }
2506   ierr = PetscFree(work);CHKERRQ(ierr);
2507   if (type == NORM_2) {
2508     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2509   }
2510   PetscFunctionReturn(0);
2511 }
2512 
2513 #undef __FUNCT__
2514 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2515 PetscErrorCode  MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2516 {
2517   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2518   PetscErrorCode ierr;
2519 
2520   PetscFunctionBegin;
2521   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2522   PetscFunctionReturn(0);
2523 }
2524 
2525 
2526 /* -------------------------------------------------------------------*/
2527 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2528                                        MatGetRow_MPIBAIJ,
2529                                        MatRestoreRow_MPIBAIJ,
2530                                        MatMult_MPIBAIJ,
2531                                 /* 4*/ MatMultAdd_MPIBAIJ,
2532                                        MatMultTranspose_MPIBAIJ,
2533                                        MatMultTransposeAdd_MPIBAIJ,
2534                                        0,
2535                                        0,
2536                                        0,
2537                                 /*10*/ 0,
2538                                        0,
2539                                        0,
2540                                        MatSOR_MPIBAIJ,
2541                                        MatTranspose_MPIBAIJ,
2542                                 /*15*/ MatGetInfo_MPIBAIJ,
2543                                        MatEqual_MPIBAIJ,
2544                                        MatGetDiagonal_MPIBAIJ,
2545                                        MatDiagonalScale_MPIBAIJ,
2546                                        MatNorm_MPIBAIJ,
2547                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2548                                        MatAssemblyEnd_MPIBAIJ,
2549                                        MatSetOption_MPIBAIJ,
2550                                        MatZeroEntries_MPIBAIJ,
2551                                 /*24*/ MatZeroRows_MPIBAIJ,
2552                                        0,
2553                                        0,
2554                                        0,
2555                                        0,
2556                                 /*29*/ MatSetUp_MPIBAIJ,
2557                                        0,
2558                                        0,
2559                                        0,
2560                                        0,
2561                                 /*34*/ MatDuplicate_MPIBAIJ,
2562                                        0,
2563                                        0,
2564                                        0,
2565                                        0,
2566                                 /*39*/ MatAXPY_MPIBAIJ,
2567                                        MatGetSubMatrices_MPIBAIJ,
2568                                        MatIncreaseOverlap_MPIBAIJ,
2569                                        MatGetValues_MPIBAIJ,
2570                                        MatCopy_MPIBAIJ,
2571                                 /*44*/ 0,
2572                                        MatScale_MPIBAIJ,
2573                                        0,
2574                                        0,
2575                                        0,
2576                                 /*49*/ 0,
2577                                        0,
2578                                        0,
2579                                        0,
2580                                        0,
2581                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2582                                        0,
2583                                        MatSetUnfactored_MPIBAIJ,
2584                                        MatPermute_MPIBAIJ,
2585                                        MatSetValuesBlocked_MPIBAIJ,
2586                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2587                                        MatDestroy_MPIBAIJ,
2588                                        MatView_MPIBAIJ,
2589                                        0,
2590                                        0,
2591                                 /*64*/ 0,
2592                                        0,
2593                                        0,
2594                                        0,
2595                                        0,
2596                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2597                                        0,
2598                                        0,
2599                                        0,
2600                                        0,
2601                                 /*74*/ 0,
2602                                        MatFDColoringApply_BAIJ,
2603                                        0,
2604                                        0,
2605                                        0,
2606                                 /*79*/ 0,
2607                                        0,
2608                                        0,
2609                                        0,
2610                                        MatLoad_MPIBAIJ,
2611                                 /*84*/ 0,
2612                                        0,
2613                                        0,
2614                                        0,
2615                                        0,
2616                                 /*89*/ 0,
2617                                        0,
2618                                        0,
2619                                        0,
2620                                        0,
2621                                 /*94*/ 0,
2622                                        0,
2623                                        0,
2624                                        0,
2625                                        0,
2626                                 /*99*/ 0,
2627                                        0,
2628                                        0,
2629                                        0,
2630                                        0,
2631                                 /*104*/0,
2632                                        MatRealPart_MPIBAIJ,
2633                                        MatImaginaryPart_MPIBAIJ,
2634                                        0,
2635                                        0,
2636                                 /*109*/0,
2637                                        0,
2638                                        0,
2639                                        0,
2640                                        0,
2641                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2642                                        0,
2643                                        MatGetGhosts_MPIBAIJ,
2644                                        0,
2645                                        0,
2646                                 /*119*/0,
2647                                        0,
2648                                        0,
2649                                        0,
2650                                        MatGetMultiProcBlock_MPIBAIJ,
2651                                 /*124*/0,
2652                                        MatGetColumnNorms_MPIBAIJ,
2653                                        MatInvertBlockDiagonal_MPIBAIJ,
2654                                        0,
2655                                        0,
2656                                /*129*/ 0,
2657                                        0,
2658                                        0,
2659                                        0,
2660                                        0,
2661                                /*134*/ 0,
2662                                        0,
2663                                        0,
2664                                        0,
2665                                        0,
2666                                /*139*/ 0,
2667                                        0,
2668                                        0,
2669                                        MatFDColoringSetUp_MPIXAIJ
2670 };
2671 
2672 #undef __FUNCT__
2673 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2674 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2675 {
2676   PetscFunctionBegin;
2677   *a = ((Mat_MPIBAIJ*)A->data)->A;
2678   PetscFunctionReturn(0);
2679 }
2680 
2681 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2682 
2683 #undef __FUNCT__
2684 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2685 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2686 {
2687   PetscInt       m,rstart,cstart,cend;
2688   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2689   const PetscInt *JJ    =0;
2690   PetscScalar    *values=0;
2691   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2692   PetscErrorCode ierr;
2693 
2694   PetscFunctionBegin;
2695   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2696   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2697   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2698   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2699   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2700   m      = B->rmap->n/bs;
2701   rstart = B->rmap->rstart/bs;
2702   cstart = B->cmap->rstart/bs;
2703   cend   = B->cmap->rend/bs;
2704 
2705   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2706   ierr = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr);
2707   for (i=0; i<m; i++) {
2708     nz = ii[i+1] - ii[i];
2709     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2710     nz_max = PetscMax(nz_max,nz);
2711     JJ     = jj + ii[i];
2712     for (j=0; j<nz; j++) {
2713       if (*JJ >= cstart) break;
2714       JJ++;
2715     }
2716     d = 0;
2717     for (; j<nz; j++) {
2718       if (*JJ++ >= cend) break;
2719       d++;
2720     }
2721     d_nnz[i] = d;
2722     o_nnz[i] = nz - d;
2723   }
2724   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2725   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2726 
2727   values = (PetscScalar*)V;
2728   if (!values) {
2729     ierr = PetscMalloc(bs*bs*nz_max*sizeof(PetscScalar),&values);CHKERRQ(ierr);
2730     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2731   }
2732   for (i=0; i<m; i++) {
2733     PetscInt          row    = i + rstart;
2734     PetscInt          ncols  = ii[i+1] - ii[i];
2735     const PetscInt    *icols = jj + ii[i];
2736     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2737       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2738       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2739     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2740       PetscInt j;
2741       for (j=0; j<ncols; j++) {
2742         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2743         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2744       }
2745     }
2746   }
2747 
2748   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2749   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2750   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2751   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2752   PetscFunctionReturn(0);
2753 }
2754 
2755 #undef __FUNCT__
2756 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2757 /*@C
2758    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2759    (the default parallel PETSc format).
2760 
2761    Collective on MPI_Comm
2762 
2763    Input Parameters:
2764 +  A - the matrix
2765 .  bs - the block size
2766 .  i - the indices into j for the start of each local row (starts with zero)
2767 .  j - the column indices for each local row (starts with zero) these must be sorted for each row
2768 -  v - optional values in the matrix
2769 
2770    Level: developer
2771 
2772    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2773    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index is
2774    over rows within a block and the last index is over columns within a block.  Fortran programs will likely set
2775    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index is over rows within a
2776    block and the second index is over columns within a block.
2777 
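   As a hypothetical sketch (the matrix A is assumed to have been created and sized elsewhere so that
   its layouts are compatible with bs=2), a single local block row with two blocks in block columns
   0 and 2, using the default MAT_ROW_ORIENTED=PETSC_TRUE ordering, could be passed as

.vb
   PetscInt    ii[2] = {0,2};
   PetscInt    jj[2] = {0,2};
   PetscScalar vv[8] = {1,2,3,4,5,6,7,8};
   ierr = MatMPIBAIJSetPreallocationCSR(A,2,ii,jj,vv);CHKERRQ(ierr);
.ve

   Here the first four values fill block (0,0) row by row and the last four fill block (0,2).
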
2778 .keywords: matrix, aij, compressed row, sparse, parallel
2779 
2780 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ, MatCreateMPIBAIJWithArrays(), MPIBAIJ
2781 @*/
2782 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2783 {
2784   PetscErrorCode ierr;
2785 
2786   PetscFunctionBegin;
2787   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2788   PetscValidType(B,1);
2789   PetscValidLogicalCollectiveInt(B,bs,2);
2790   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2791   PetscFunctionReturn(0);
2792 }
2793 
2794 #undef __FUNCT__
2795 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2796 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2797 {
2798   Mat_MPIBAIJ    *b;
2799   PetscErrorCode ierr;
2800   PetscInt       i;
2801 
2802   PetscFunctionBegin;
2803   ierr = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2804   ierr = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2805   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2806   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2807   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2808 
2809   if (d_nnz) {
2810     for (i=0; i<B->rmap->n/bs; i++) {
2811       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]);
2812     }
2813   }
2814   if (o_nnz) {
2815     for (i=0; i<B->rmap->n/bs; i++) {
2816       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]);
2817     }
2818   }
2819 
2820   b      = (Mat_MPIBAIJ*)B->data;
2821   b->bs2 = bs*bs;
2822   b->mbs = B->rmap->n/bs;
2823   b->nbs = B->cmap->n/bs;
2824   b->Mbs = B->rmap->N/bs;
2825   b->Nbs = B->cmap->N/bs;
2826 
2827   for (i=0; i<=b->size; i++) {
2828     b->rangebs[i] = B->rmap->range[i]/bs;
2829   }
2830   b->rstartbs = B->rmap->rstart/bs;
2831   b->rendbs   = B->rmap->rend/bs;
2832   b->cstartbs = B->cmap->rstart/bs;
2833   b->cendbs   = B->cmap->rend/bs;
2834 
2835   if (!B->preallocated) {
2836     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
2837     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
2838     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
2839     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
2840     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
2841     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
2842     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
2843     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
2844     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
2845   }
2846 
2847   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
2848   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
2849   B->preallocated = PETSC_TRUE;
2850   PetscFunctionReturn(0);
2851 }
2852 
2853 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
2854 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
2855 
2856 #undef __FUNCT__
2857 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
2858 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
2859 {
2860   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2861   PetscErrorCode ierr;
2862   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
2863   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
2864   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
2865 
2866   PetscFunctionBegin;
2867   ierr  = PetscMalloc((M+1)*sizeof(PetscInt),&ii);CHKERRQ(ierr);
2868   ii[0] = 0;
2869   for (i=0; i<M; i++) {
2870     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
2871     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
2872     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
2873     /* remove one from the count if the matrix has a diagonal entry */
2874     for (j=id[i]; j<id[i+1]; j++) {
2875       if (jd[j] == i) {ii[i+1]--;break;}
2876     }
2877   }
2878   ierr = PetscMalloc(ii[M]*sizeof(PetscInt),&jj);CHKERRQ(ierr);
2879   cnt  = 0;
2880   for (i=0; i<M; i++) {
2881     for (j=io[i]; j<io[i+1]; j++) {
2882       if (garray[jo[j]] > rstart) break;
2883       jj[cnt++] = garray[jo[j]];
2884     }
2885     for (k=id[i]; k<id[i+1]; k++) {
2886       if (jd[k] != i) {
2887         jj[cnt++] = rstart + jd[k];
2888       }
2889     }
2890     for (; j<io[i+1]; j++) {
2891       jj[cnt++] = garray[jo[j]];
2892     }
2893   }
2894   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
2895   PetscFunctionReturn(0);
2896 }
2897 
2898 #include <../src/mat/impls/aij/mpi/mpiaij.h>
2899 
2900 PETSC_EXTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
2901 
2902 #undef __FUNCT__
2903 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
2904 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
2905 {
2906   PetscErrorCode ierr;
2907   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2908   Mat            B;
2909   Mat_MPIAIJ     *b;
2910 
2911   PetscFunctionBegin;
2912   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
2913 
2914   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
2915   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
2916   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
2917   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
2918   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
2919   b    = (Mat_MPIAIJ*) B->data;
2920 
2921   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
2922   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
2923   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
2924   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
2925   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
2926   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2927   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2928   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2929   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2930   if (reuse == MAT_REUSE_MATRIX) {
2931     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
2932   } else {
2933    *newmat = B;
2934   }
2935   PetscFunctionReturn(0);
2936 }
2937 
2938 #if defined(PETSC_HAVE_MUMPS)
2939 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*);
2940 #endif
2941 
2942 /*MC
2943    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
2944 
2945    Options Database Keys:
2946 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
2947 . -mat_block_size <bs> - set the blocksize used to store the matrix
2948 - -mat_use_hash_table <fact> - use a hash table to save memory while constructing the matrix
2949 
2950   Level: beginner
2951 
2952 .seealso: MatCreateMPIBAIJ
2953 M*/
2954 
2955 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
2956 
2957 #undef __FUNCT__
2958 #define __FUNCT__ "MatCreate_MPIBAIJ"
2959 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
2960 {
2961   Mat_MPIBAIJ    *b;
2962   PetscErrorCode ierr;
2963   PetscBool      flg;
2964 
2965   PetscFunctionBegin;
2966   ierr    = PetscNewLog(B,Mat_MPIBAIJ,&b);CHKERRQ(ierr);
2967   B->data = (void*)b;
2968 
2969   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
2970   B->assembled = PETSC_FALSE;
2971 
2972   B->insertmode = NOT_SET_VALUES;
2973   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
2974   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
2975 
2976   /* build local table of row and column ownerships */
2977   ierr = PetscMalloc((b->size+1)*sizeof(PetscInt),&b->rangebs);CHKERRQ(ierr);
2978 
2979   /* build cache for off array entries formed */
2980   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
2981 
2982   b->donotstash  = PETSC_FALSE;
2983   b->colmap      = NULL;
2984   b->garray      = NULL;
2985   b->roworiented = PETSC_TRUE;
2986 
2987   /* stuff used in block assembly */
2988   b->barray = 0;
2989 
2990   /* stuff used for matrix vector multiply */
2991   b->lvec  = 0;
2992   b->Mvctx = 0;
2993 
2994   /* stuff for MatGetRow() */
2995   b->rowindices   = 0;
2996   b->rowvalues    = 0;
2997   b->getrowactive = PETSC_FALSE;
2998 
2999   /* hash table stuff */
3000   b->ht           = 0;
3001   b->hd           = 0;
3002   b->ht_size      = 0;
3003   b->ht_flag      = PETSC_FALSE;
3004   b->ht_fact      = 0;
3005   b->ht_total_ct  = 0;
3006   b->ht_insert_ct = 0;
3007 
3008   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3009   b->ijonly = PETSC_FALSE;
3010 
3011   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPIBAIJ matrix 1","Mat");CHKERRQ(ierr);
3012   ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",PETSC_FALSE,&flg,NULL);CHKERRQ(ierr);
3013   if (flg) {
3014     PetscReal fact = 1.39;
3015     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3016     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3017     if (fact <= 1.0) fact = 1.39;
3018     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3019     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3020   }
3021   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3022 
3023 #if defined(PETSC_HAVE_MUMPS)
3024   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_mumps_C",MatGetFactor_baij_mumps);CHKERRQ(ierr);
3025 #endif
3026   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3027   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3028   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3029   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3030   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3031   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIBAIJ);CHKERRQ(ierr);
3032   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3033   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3034   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3035   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3036   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpibstrm_C",MatConvert_MPIBAIJ_MPIBSTRM);CHKERRQ(ierr);
3037   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3038   PetscFunctionReturn(0);
3039 }
3040 
3041 /*MC
3042    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3043 
3044    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3045    and MATMPIBAIJ otherwise.
3046 
3047    Options Database Keys:
3048 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3049 
3050   Level: beginner
3051 
3052 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3053 M*/
3054 
3055 #undef __FUNCT__
3056 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3057 /*@C
3058    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3059    (block compressed row).  For good matrix assembly performance
3060    the user should preallocate the matrix storage by setting the parameters
3061    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3062    performance can be increased by more than a factor of 50.
3063 
3064    Collective on Mat
3065 
3066    Input Parameters:
3067 +  A - the matrix
3068 .  bs   - size of block
3069 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3070            submatrix  (same for all local rows)
3071 .  d_nnz - array containing the number of block nonzeros in the various block rows
3072            in the diagonal portion of the local submatrix (possibly different for each block
3073            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3074            set it even if it is zero.
3075 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3076            submatrix (same for all local rows).
3077 -  o_nnz - array containing the number of block nonzeros in the various block rows of the
3078            off-diagonal portion of the local submatrix (possibly different for
3079            each block row) or NULL.
3080 
3081    If the *_nnz parameter is given then the *_nz parameter is ignored
3082 
3083    Options Database Keys:
3084 +   -mat_block_size - size of the blocks to use
3085 -   -mat_use_hash_table <fact> - use a hash table to save memory while constructing the matrix
3086 
3087    Notes:
3088    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3089    then it must be used on all processors that share the object for that argument.
3090 
3091    Storage Information:
3092    For a square global matrix we define each processor's diagonal portion
3093    to be its local rows and the corresponding columns (a square submatrix);
3094    each processor's off-diagonal portion encompasses the remainder of the
3095    local matrix (a rectangular submatrix).
3096 
3097    The user can specify preallocated storage for the diagonal part of
3098    the local submatrix with either d_nz or d_nnz (not both).  Set
3099    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3100    memory allocation.  Likewise, specify preallocated storage for the
3101    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3102 
3103    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3104    the figure below we depict these three local rows and all columns (0-11).
3105 
3106 .vb
3107            0 1 2 3 4 5 6 7 8 9 10 11
3108           --------------------------
3109    row 3  |o o o d d d o o o o  o  o
3110    row 4  |o o o d d d o o o o  o  o
3111    row 5  |o o o d d d o o o o  o  o
3112           --------------------------
3113 .ve
3114 
3115    Thus, any entries in the d locations are stored in the d (diagonal)
3116    submatrix, and any entries in the o locations are stored in the
3117    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3118    stored simply in the MATSEQBAIJ format for compressed row storage.
3119 
3120    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3121    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3122    In general, for PDE problems in which most nonzeros are near the diagonal,
3123    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3124    or you will get TERRIBLE performance; see the users' manual chapter on
3125    matrices.
3126 
3127    You can call MatGetInfo() to get information on how effective the preallocation was;
3128    for example the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
3129    You can also run with the option -info and look for messages with the string
3130    malloc in them to see if additional memory allocation was needed.
3131 
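   A hypothetical sketch of typical use (the communicator comm and the local/global sizes m and M are
   assumed to be defined elsewhere), preallocating at most 5 diagonal and 2 off-diagonal block
   nonzeros per block row for a matrix with block size 3:

.vb
   Mat A;
   ierr = MatCreate(comm,&A);CHKERRQ(ierr);
   ierr = MatSetSizes(A,m,m,M,M);CHKERRQ(ierr);
   ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetPreallocation(A,3,5,NULL,2,NULL);CHKERRQ(ierr);
.ve
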
3132    Level: intermediate
3133 
3134 .keywords: matrix, block, aij, compressed row, sparse, parallel
3135 
3136 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3137 @*/
3138 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3139 {
3140   PetscErrorCode ierr;
3141 
3142   PetscFunctionBegin;
3143   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3144   PetscValidType(B,1);
3145   PetscValidLogicalCollectiveInt(B,bs,2);
3146   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3147   PetscFunctionReturn(0);
3148 }
3149 
3150 #undef __FUNCT__
3151 #define __FUNCT__ "MatCreateBAIJ"
3152 /*@C
3153    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3154    (block compressed row).  For good matrix assembly performance
3155    the user should preallocate the matrix storage by setting the parameters
3156    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3157    performance can be increased by more than a factor of 50.
3158 
3159    Collective on MPI_Comm
3160 
3161    Input Parameters:
3162 +  comm - MPI communicator
3163 .  bs   - size of block
3164 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3165            This value should be the same as the local size used in creating the
3166            y vector for the matrix-vector product y = Ax.
3167 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3168            This value should be the same as the local size used in creating the
3169            x vector for the matrix-vector product y = Ax.
3170 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3171 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3172 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3173            submatrix  (same for all local rows)
3174 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3175            in the diagonal portion of the local submatrix (possibly different for each block
3176            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3177            and set it even if it is zero.
3178 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3179            submatrix (same for all local rows).
3180 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3181            off-diagonal portion of the local submatrix (possibly different for
3182            each block row) or NULL.
3183 
3184    Output Parameter:
3185 .  A - the matrix
3186 
3187    Options Database Keys:
3188 +   -mat_block_size - size of the blocks to use
3189 -   -mat_use_hash_table <fact> - use a hash table to save memory while constructing the matrix
3190 
3191    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3192    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3193    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3194 
3195    Notes:
3196    If the *_nnz parameter is given then the *_nz parameter is ignored
3197 
3198    A nonzero block is any block that has 1 or more nonzeros in it
3199 
3200    The user MUST specify either the local or global matrix dimensions
3201    (possibly both).
3202 
3203    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3204    then it must be used on all processors that share the object for that argument.
3205 
3206    Storage Information:
3207    For a square global matrix we define each processor's diagonal portion
3208    to be its local rows and the corresponding columns (a square submatrix);
3209    each processor's off-diagonal portion encompasses the remainder of the
3210    local matrix (a rectangular submatrix).
3211 
3212    The user can specify preallocated storage for the diagonal part of
3213    the local submatrix with either d_nz or d_nnz (not both).  Set
3214    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3215    memory allocation.  Likewise, specify preallocated storage for the
3216    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3217 
3218    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3219    the figure below we depict these three local rows and all columns (0-11).
3220 
3221 .vb
3222            0 1 2 3 4 5 6 7 8 9 10 11
3223           --------------------------
3224    row 3  |o o o d d d o o o o  o  o
3225    row 4  |o o o d d d o o o o  o  o
3226    row 5  |o o o d d d o o o o  o  o
3227           --------------------------
3228 .ve
3229 
3230    Thus, any entries in the d locations are stored in the d (diagonal)
3231    submatrix, and any entries in the o locations are stored in the
3232    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3233    stored simply in the MATSEQBAIJ format for compressed row storage.
3234 
3235    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3236    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3237    In general, for PDE problems in which most nonzeros are near the diagonal,
3238    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3239    or you will get TERRIBLE performance; see the users' manual chapter on
3240    matrices.
3241 
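   A hypothetical sketch (global sizes chosen only for illustration): create a 100 x 100 matrix with
   block size 2, letting PETSc choose the local sizes and preallocating 5 diagonal and 2 off-diagonal
   block nonzeros per block row:

.vb
   Mat A;
   ierr = MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,100,100,5,NULL,2,NULL,&A);CHKERRQ(ierr);
.ve
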
3242    Level: intermediate
3243 
3244 .keywords: matrix, block, aij, compressed row, sparse, parallel
3245 
3246 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3247 @*/
3248 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3249 {
3250   PetscErrorCode ierr;
3251   PetscMPIInt    size;
3252 
3253   PetscFunctionBegin;
3254   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3255   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3256   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3257   if (size > 1) {
3258     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3259     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3260   } else {
3261     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3262     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3263   }
3264   PetscFunctionReturn(0);
3265 }
3266 
3267 #undef __FUNCT__
3268 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3269 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3270 {
3271   Mat            mat;
3272   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3273   PetscErrorCode ierr;
3274   PetscInt       len=0;
3275 
3276   PetscFunctionBegin;
3277   *newmat = 0;
3278   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3279   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3280   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3281   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3282 
3283   mat->factortype   = matin->factortype;
3284   mat->preallocated = PETSC_TRUE;
3285   mat->assembled    = PETSC_TRUE;
3286   mat->insertmode   = NOT_SET_VALUES;
3287 
3288   a             = (Mat_MPIBAIJ*)mat->data;
3289   mat->rmap->bs = matin->rmap->bs;
3290   a->bs2        = oldmat->bs2;
3291   a->mbs        = oldmat->mbs;
3292   a->nbs        = oldmat->nbs;
3293   a->Mbs        = oldmat->Mbs;
3294   a->Nbs        = oldmat->Nbs;
3295 
3296   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3297   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3298 
3299   a->size         = oldmat->size;
3300   a->rank         = oldmat->rank;
3301   a->donotstash   = oldmat->donotstash;
3302   a->roworiented  = oldmat->roworiented;
3303   a->rowindices   = 0;
3304   a->rowvalues    = 0;
3305   a->getrowactive = PETSC_FALSE;
3306   a->barray       = 0;
3307   a->rstartbs     = oldmat->rstartbs;
3308   a->rendbs       = oldmat->rendbs;
3309   a->cstartbs     = oldmat->cstartbs;
3310   a->cendbs       = oldmat->cendbs;
3311 
3312   /* hash table stuff */
3313   a->ht           = 0;
3314   a->hd           = 0;
3315   a->ht_size      = 0;
3316   a->ht_flag      = oldmat->ht_flag;
3317   a->ht_fact      = oldmat->ht_fact;
3318   a->ht_total_ct  = 0;
3319   a->ht_insert_ct = 0;
3320 
3321   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3322   if (oldmat->colmap) {
3323 #if defined(PETSC_USE_CTABLE)
3324     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3325 #else
3326     ierr = PetscMalloc((a->Nbs)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
3327     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3328     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3329 #endif
3330   } else a->colmap = 0;
3331 
3332   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3333     ierr = PetscMalloc(len*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
3334     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3335     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3336   } else a->garray = 0;
3337 
3338   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3339   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3340   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3341   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3342   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3343 
3344   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3345   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3346   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3347   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3348   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3349   *newmat = mat;
3350   PetscFunctionReturn(0);
3351 }
3352 
3353 #undef __FUNCT__
3354 #define __FUNCT__ "MatLoad_MPIBAIJ"
3355 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3356 {
3357   PetscErrorCode ierr;
3358   int            fd;
3359   PetscInt       i,nz,j,rstart,rend;
3360   PetscScalar    *vals,*buf;
3361   MPI_Comm       comm;
3362   MPI_Status     status;
3363   PetscMPIInt    rank,size,maxnz;
3364   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3365   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3366   PetscInt       jj,*mycols,*ibuf,bs=1,Mbs,mbs,extra_rows,mmax;
3367   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3368   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3369   PetscInt       dcount,kmax,k,nzcount,tmp,mend,sizesset=1,grows,gcols;
3370 
3371   PetscFunctionBegin;
3372   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3373   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix 2","Mat");CHKERRQ(ierr);
3374   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3375   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3376 
3377   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3378   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3379   if (!rank) {
3380     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3381     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3382     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
3383   }
3384 
3385   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) sizesset = 0;
3386 
3387   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3388   M    = header[1]; N = header[2];
3389 
3390   /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
3391   if (sizesset && newmat->rmap->N < 0) newmat->rmap->N = M;
3392   if (sizesset && newmat->cmap->N < 0) newmat->cmap->N = N;
3393 
3394   /* If global sizes are set, check if they are consistent with that given in the file */
3395   if (sizesset) {
3396     ierr = MatGetSize(newmat,&grows,&gcols);CHKERRQ(ierr);
3397   }
3398   if (sizesset && newmat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%d) and input matrix has (%d)",M,grows);
3399   if (sizesset && newmat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols:Matrix in file has (%d) and input matrix has (%d)",N,gcols);
3400 
3401   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3402 
3403   /*
3404      This code adds extra rows to make sure the number of rows is
3405      divisible by the blocksize
3406   */
3407   Mbs        = M/bs;
3408   extra_rows = bs - M + bs*Mbs;
3409   if (extra_rows == bs) extra_rows = 0;
3410   else                  Mbs++;
3411   if (extra_rows && !rank) {
3412     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3413   }
3414 
3415   /* determine ownership of all rows */
3416   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3417     mbs = Mbs/size + ((Mbs % size) > rank);
3418     m   = mbs*bs;
3419   } else { /* User set */
3420     m   = newmat->rmap->n;
3421     mbs = m/bs;
3422   }
3423   ierr = PetscMalloc2(size+1,PetscInt,&rowners,size+1,PetscInt,&browners);CHKERRQ(ierr);
3424   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3425 
3426   /* process 0 needs enough room for process with most rows */
3427   if (!rank) {
3428     mmax = rowners[1];
3429     for (i=2; i<=size; i++) {
3430       mmax = PetscMax(mmax,rowners[i]);
3431     }
3432     mmax*=bs;
3433   } else mmax = -1;             /* unused, but compiler warns anyway */
3434 
3435   rowners[0] = 0;
3436   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3437   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3438   rstart = rowners[rank];
3439   rend   = rowners[rank+1];
3440 
3441   /* distribute row lengths to all processors */
3442   ierr = PetscMalloc(m*sizeof(PetscInt),&locrowlens);CHKERRQ(ierr);
3443   if (!rank) {
3444     mend = m;
3445     if (size == 1) mend = mend - extra_rows;
3446     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3447     for (j=mend; j<m; j++) locrowlens[j] = 1;
3448     ierr = PetscMalloc(mmax*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
3449     ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
3450     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
3451     for (j=0; j<m; j++) {
3452       procsnz[0] += locrowlens[j];
3453     }
3454     for (i=1; i<size; i++) {
3455       mend = browners[i+1] - browners[i];
3456       if (i == size-1) mend = mend - extra_rows;
3457       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3458       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3459       /* calculate the number of nonzeros on each processor */
3460       for (j=0; j<browners[i+1]-browners[i]; j++) {
3461         procsnz[i] += rowlengths[j];
3462       }
3463       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3464     }
3465     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3466   } else {
3467     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3468   }
3469 
3470   if (!rank) {
3471     /* determine max buffer needed and allocate it */
3472     maxnz = procsnz[0];
3473     for (i=1; i<size; i++) {
3474       maxnz = PetscMax(maxnz,procsnz[i]);
3475     }
3476     ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);
3477 
3478     /* read in my part of the matrix column indices  */
3479     nz     = procsnz[0];
3480     ierr   = PetscMalloc((nz+1)*sizeof(PetscInt),&ibuf);CHKERRQ(ierr);
3481     mycols = ibuf;
3482     if (size == 1) nz -= extra_rows;
3483     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3484     if (size == 1) {
3485       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3486     }
3487 
3488     /* read in everyone else's rows (except the last process's) and ship them off */
3489     for (i=1; i<size-1; i++) {
3490       nz   = procsnz[i];
3491       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3492       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3493     }
3494     /* read in the stuff for the last proc */
3495     if (size != 1) {
3496       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3497       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3498       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3499       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3500     }
3501     ierr = PetscFree(cols);CHKERRQ(ierr);
3502   } else {
3503     /* determine buffer space needed for message */
3504     nz = 0;
3505     for (i=0; i<m; i++) {
3506       nz += locrowlens[i];
3507     }
3508     ierr   = PetscMalloc((nz+1)*sizeof(PetscInt),&ibuf);CHKERRQ(ierr);
3509     mycols = ibuf;
3510     /* receive message of column indices*/
3511     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3512     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3513     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3514   }
3515 
3516   /* loop over local rows, determining number of off diagonal entries */
3517   ierr     = PetscMalloc2(rend-rstart,PetscInt,&dlens,rend-rstart,PetscInt,&odlens);CHKERRQ(ierr);
3518   ierr     = PetscMalloc3(Mbs,PetscInt,&mask,Mbs,PetscInt,&masked1,Mbs,PetscInt,&masked2);CHKERRQ(ierr);
3519   ierr     = PetscMemzero(mask,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3520   ierr     = PetscMemzero(masked1,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3521   ierr     = PetscMemzero(masked2,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3522   rowcount = 0; nzcount = 0;
3523   for (i=0; i<mbs; i++) {
3524     dcount  = 0;
3525     odcount = 0;
3526     for (j=0; j<bs; j++) {
3527       kmax = locrowlens[rowcount];
3528       for (k=0; k<kmax; k++) {
3529         tmp = mycols[nzcount++]/bs;
3530         if (!mask[tmp]) {
3531           mask[tmp] = 1;
3532           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3533           else masked1[dcount++] = tmp;
3534         }
3535       }
3536       rowcount++;
3537     }
3538 
3539     dlens[i]  = dcount;
3540     odlens[i] = odcount;
3541 
3542     /* zero out the mask elements we set */
3543     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3544     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3545   }
3546 
3547 
3548   if (!sizesset) {
3549     ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3550   }
3551   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3552 
3553   if (!rank) {
3554     ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
3555     /* read in my part of the matrix numerical values  */
3556     nz     = procsnz[0];
3557     vals   = buf;
3558     mycols = ibuf;
3559     if (size == 1) nz -= extra_rows;
3560     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3561     if (size == 1) {
3562       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3563     }
3564 
3565     /* insert into matrix */
3566     jj = rstart*bs;
3567     for (i=0; i<m; i++) {
3568       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3569       mycols += locrowlens[i];
3570       vals   += locrowlens[i];
3571       jj++;
3572     }
3573     /* read in other processors (except the last one) and ship out */
3574     for (i=1; i<size-1; i++) {
3575       nz   = procsnz[i];
3576       vals = buf;
3577       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3578       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3579     }
3580     /* the last proc */
3581     if (size != 1) {
3582       nz   = procsnz[i] - extra_rows;
3583       vals = buf;
3584       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3585       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3586       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3587     }
3588     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3589   } else {
3590     /* receive numeric values */
3591     ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
3592 
3593     /* receive message of values*/
3594     vals   = buf;
3595     mycols = ibuf;
3596     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3597 
3598     /* insert into matrix */
3599     jj = rstart*bs;
3600     for (i=0; i<m; i++) {
3601       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3602       mycols += locrowlens[i];
3603       vals   += locrowlens[i];
3604       jj++;
3605     }
3606   }
3607   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3608   ierr = PetscFree(buf);CHKERRQ(ierr);
3609   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3610   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3611   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3612   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3613   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3614   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3615   PetscFunctionReturn(0);
3616 }
3617 
3618 #undef __FUNCT__
3619 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3620 /*@
3621    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3622 
3623    Not Collective, each process can use a different factor
3624 
3625    Input Parameters:
3626 +  mat  - the matrix
3627 -  fact - the hash table factor
3628 
3629    Level: advanced
3630 
3631   Notes:
3632    This can also be set by the command line option: -mat_use_hash_table <fact>
3633 
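   A minimal sketch, assuming a matrix mat of type MATMPIBAIJ that will be assembled with the hash
   table option enabled:

.vb
   ierr = MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetHashTableFactor(mat,1.39);CHKERRQ(ierr);
.ve
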
3634 .keywords: matrix, hashtable, factor, HT
3635 
3636 .seealso: MatSetOption()
3637 @*/
3638 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3639 {
3640   PetscErrorCode ierr;
3641 
3642   PetscFunctionBegin;
3643   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3644   PetscFunctionReturn(0);
3645 }
3646 
3647 #undef __FUNCT__
3648 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3649 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3650 {
3651   Mat_MPIBAIJ *baij;
3652 
3653   PetscFunctionBegin;
3654   baij          = (Mat_MPIBAIJ*)mat->data;
3655   baij->ht_fact = fact;
3656   PetscFunctionReturn(0);
3657 }
3658 
3659 #undef __FUNCT__
3660 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3661 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3662 {
3663   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3664 
3665   PetscFunctionBegin;
3666   *Ad     = a->A;
3667   *Ao     = a->B;
3668   *colmap = a->garray;
3669   PetscFunctionReturn(0);
3670 }
3671 
3672 /*
3673     Special version for direct calls from Fortran (to eliminate two function-call overheads)
3674 */
3675 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3676 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3677 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3678 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3679 #endif
3680 
3681 #undef __FUNCT__
3682 #define __FUNCT__ "matmpibaijsetvaluesblocked_"
3683 /*@C
3684   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3685 
3686   Collective on Mat
3687 
3688   Input Parameters:
3689 + mat - the matrix
3690 . min - number of input rows
3691 . im - input rows
3692 . nin - number of input columns
3693 . in - input columns
3694 . v - numerical values input
3695 - addvin - INSERT_VALUES or ADD_VALUES
3696 
3697   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ() which is terrible code un-reuse.
3698 
3699   Level: advanced
3700 
3701 .seealso:   MatSetValuesBlocked()
3702 @*/
3703 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3704 {
3705   /* convert input arguments to C version */
3706   Mat        mat  = *matin;
3707   PetscInt   m    = *min, n = *nin;
3708   InsertMode addv = *addvin;
3709 
3710   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3711   const MatScalar *value;
3712   MatScalar       *barray     = baij->barray;
3713   PetscBool       roworiented = baij->roworiented;
3714   PetscErrorCode  ierr;
3715   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3716   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3717   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3718 
3719   PetscFunctionBegin;
3720   /* tasks normally handled by MatSetValuesBlocked() */
3721   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3722 #if defined(PETSC_USE_DEBUG)
3723   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3724   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3725 #endif
3726   if (mat->assembled) {
3727     mat->was_assembled = PETSC_TRUE;
3728     mat->assembled     = PETSC_FALSE;
3729   }
3730   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3731 
3732 
3733   if (!barray) {
3734     ierr         = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr);
3735     baij->barray = barray;
3736   }
3737 
3738   if (roworiented) stepval = (n-1)*bs;
3739   else stepval = (m-1)*bs;
3740 
3741   for (i=0; i<m; i++) {
3742     if (im[i] < 0) continue;
3743 #if defined(PETSC_USE_DEBUG)
3744     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3745 #endif
3746     if (im[i] >= rstart && im[i] < rend) {
3747       row = im[i] - rstart;
3748       for (j=0; j<n; j++) {
3749         /* If NumCol = 1 then a copy is not required */
3750         if ((roworiented) && (n == 1)) {
3751           barray = (MatScalar*)v + i*bs2;
3752         } else if ((!roworiented) && (m == 1)) {
3753           barray = (MatScalar*)v + j*bs2;
3754         } else { /* Here a copy is required */
3755           if (roworiented) {
3756             value = v + i*(stepval+bs)*bs + j*bs;
3757           } else {
3758             value = v + j*(stepval+bs)*bs + i*bs;
3759           }
3760           for (ii=0; ii<bs; ii++,value+=stepval) {
3761             for (jj=0; jj<bs; jj++) {
3762               *barray++ = *value++;
3763             }
3764           }
3765           barray -=bs2;
3766         }
3767 
3768         if (in[j] >= cstart && in[j] < cend) {
3769           col  = in[j] - cstart;
3770           ierr = MatSetValuesBlocked_SeqBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
3771         } else if (in[j] < 0) continue;
3772 #if defined(PETSC_USE_DEBUG)
3773         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3774 #endif
3775         else {
3776           if (mat->was_assembled) {
3777             if (!baij->colmap) {
3778               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3779             }
3780 
3781 #if defined(PETSC_USE_DEBUG)
3782 #if defined(PETSC_USE_CTABLE)
3783             { PetscInt data;
3784               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3785               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3786             }
3787 #else
3788             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3789 #endif
3790 #endif
3791 #if defined(PETSC_USE_CTABLE)
3792             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3793             col  = (col - 1)/bs;
3794 #else
3795             col = (baij->colmap[in[j]] - 1)/bs;
3796 #endif
3797             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
3798               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3799               col  =  in[j];
3800             }
3801           } else col = in[j];
3802           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
3803         }
3804       }
3805     } else {
3806       if (!baij->donotstash) {
3807         if (roworiented) {
3808           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3809         } else {
3810           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3811         }
3812       }
3813     }
3814   }
3815 
3816   /* task normally handled by MatSetValuesBlocked() */
3817   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3818   PetscFunctionReturn(0);
3819 }
3820 
3821 #undef __FUNCT__
3822 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
3823 /*@
3824      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local rows
3825          in standard CSR format.
3826 
3827    Collective on MPI_Comm
3828 
3829    Input Parameters:
3830 +  comm - MPI communicator
3831 .  bs - the block size, only a block size of 1 is supported
3832 .  m - number of local rows (Cannot be PETSC_DECIDE)
3833 .  n - This value should be the same as the local size used in creating the
3834        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3835        calculated if N is given) For square matrices n is almost always m.
3836 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3837 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3838 .   i - row indices
3839 .   j - column indices
3840 -   a - matrix values
3841 
3842    Output Parameter:
3843 .   mat - the matrix
3844 
3845    Level: intermediate
3846 
3847    Notes:
3848        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
3849      thus you CANNOT change the matrix entries by changing the values of a[] after you have
3850      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
3851 
3852      The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
3853      the same as a three dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
3854      block, followed by the second column of the first block, and so on.  That is, the blocks are contiguous in memory
3855      with column-major ordering within blocks.
3856 
3857        The i and j indices are 0 based; for each local row, i gives the offset into the local j array.
3858 
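       A hypothetical sketch (assuming, for brevity, a run on a single process and a 2 x 4 matrix with
     bs=1 whose two rows have entries in columns 0,2 and 1,3 respectively; the values are chosen only
     for illustration):

.vb
   PetscInt    i[3] = {0,2,4};
   PetscInt    j[4] = {0,2,1,3};
   PetscScalar a[4] = {1.0,2.0,3.0,4.0};
   ierr = MatCreateMPIBAIJWithArrays(PETSC_COMM_WORLD,1,2,4,PETSC_DETERMINE,PETSC_DETERMINE,i,j,a,&mat);CHKERRQ(ierr);
.ve
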
3859 .keywords: matrix, aij, compressed row, sparse, parallel
3860 
3861 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
3862           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
3863 @*/
3864 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
3865 {
3866   PetscErrorCode ierr;
3867 
3868   PetscFunctionBegin;
3869   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
3870   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
3871   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
3872   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
3873   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
3874   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
3875   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
3876   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
3877   PetscFunctionReturn(0);
3878 }
3879