xref: /petsc/src/mat/impls/baij/mpi/mpibaij.c (revision d7ee760dd7ca5b38f28bdad847c77fc68b3a3d54)

#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/

#include <petscblaslapack.h>
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc1(A->rmap->n,&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);

  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

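/*
   Editorial note: a minimal caller-side sketch (an illustration, not part of this
   source) of the public interface implemented above. The names y and idx are
   hypothetical:

     Vec      y;                                // one entry per local row
     PetscInt *idx;
     VecCreateMPI(PETSC_COMM_WORLD,A->rmap->n,PETSC_DETERMINE,&y);
     PetscMalloc1(A->rmap->n,&idx);
     MatGetRowMaxAbs(A,y,idx);                  // y[i] = max_j |A(i,j)|, idx[i] = global column of that max
     PetscFree(idx);
     VecDestroy(&y);
*/
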
#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIBAIJ"
PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

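/*
   Editorial note: a sketch of the intended store/retrieve cycle for matrices
   whose nonzero structure is fixed (assumes MAT_NEW_NONZERO_LOCATIONS has been
   turned off, as the MatStoreValues() documentation requires):

     MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);
     MatStoreValues(mat);        // save a copy of all numerical values
     MatZeroEntries(mat);        // ... modify or rebuild the values ...
     MatRetrieveValues(mat);     // restore the saved values into the same structure
*/
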
/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix.  This is done in a non-scalable way, since the
   length of colmap is the global number of block columns.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc1(baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}

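/*
   Editorial note: a minimal sketch of how the map built above is consumed (see
   MatSetValues_MPIBAIJ() below); gcol is a hypothetical global point column
   falling in the off-diagonal part:

     PetscInt lcol = baij->colmap[gcol/bs] - 1;   // start of the local block, or -1 if the block is absent
     if (lcol >= 0) lcol += gcol % bs;            // add the offset within the block

   Entries are stored shifted by +1 so that a zero in colmap can mean
   "this global block column is not present on this process".
*/
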
#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }

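/*
   Editorial note: both macros above address an entry inside a bs x bs block with
   ap[bs2*_i + bs*cidx + ridx], i.e. blocks are stored column-major. A small
   worked example (a sketch, with bs = 2, so bs2 = 4): the block-local entry
   (ridx,cidx) = (1,0) of block _i = 3 lives at offset 4*3 + 2*0 + 1 = 13.
*/
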
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ"
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some variables required by the macros above */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

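/*
   Editorial note: a minimal caller-side sketch (illustration only) of inserting a
   2x2 dense patch with global point indices and then assembling; rows, cols and
   vals are hypothetical names:

     PetscInt    rows[2] = {0,1},cols[2] = {0,1};
     PetscScalar vals[4] = {1.0,2.0,3.0,4.0};     // row-oriented by default
     MatSetValues(mat,2,rows,2,cols,vals,INSERT_VALUES);
     MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);    // flushes stashed off-process entries
     MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
*/
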
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqBAIJ_Inlined"
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  low  = 0;
  high = nrow;
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new block-indexed nonzero block (%D, %D) into the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
/*
    This routine could be optimized by passing additional stride information into
    MatSetValuesBlocked_SeqBAIJ_Inlined(), so that the block copy at
    ** Here a copy is required ** below is no longer needed
*/
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If n == 1 (row-oriented) or m == 1 (column-oriented), the input block is already contiguous and no copy is required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new block-indexed nonzero block (%D, %D) into the matrix",im[i],in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

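/*
   Editorial note: a caller-side sketch (illustration only) of blocked insertion
   for bs = 2; one 2x2 block is set at block row 0, block column 1, with the
   block given in row-oriented order:

     PetscInt    brow = 0,bcol = 1;
     PetscScalar blk[4] = {1.0,2.0,
                           3.0,4.0};
     MatSetValuesBlocked(mat,1,&brow,1,&bcol,blk,INSERT_VALUES);
*/
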
#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
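/*
   Editorial note: HASH() is multiplicative (Fibonacci) hashing: HASH_KEY is the
   fractional part of the golden ratio, and the macro computes
   floor(size * frac(key*HASH_KEY)).  A worked example (a sketch): with
   size = 100 and key = 7, key*HASH_KEY = 4.3262..., whose fractional part is
   0.3262..., so the slot is floor(32.62...) = 32.
*/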
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up in the hash table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A hash table entry was found, so insert the value at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =baij->rendbs,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2; /* block-row bounds, consistent with rstart */
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up in the hash table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIBAIJ"
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}

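/*
   Editorial note: a minimal caller-side sketch (illustration only); as the error
   at the end of the routine above indicates, only locally owned rows may be
   queried. rstart is assumed to come from MatGetOwnershipRange():

     PetscInt    row = rstart,cols[1] = {0};
     PetscScalar val[1];
     MatGetValues(mat,1,&row,1,cols,val);
*/
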
#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIBAIJ"
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr = MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
      ierr = PetscMalloc2(mat->cmap->N,&tmp,mat->cmap->N,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v);  v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPIU_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc1(bs,&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}

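/*
   Editorial note: the three cases above compute, for the global matrix A,

     NORM_FROBENIUS:  sqrt( sum_ij |a_ij|^2 )   (local partial sums reduced with MPIU_SUM)
     NORM_1:          max_j sum_i |a_ij|        (per-column sums reduced, then maxed)
     NORM_INFINITY:   max_i sum_j |a_ij|        (per-row sums maxed locally, reduced with MPIU_MAX)
*/
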
/*
  Creates and fills the hash table. The table is created
  only once; if new entries need to be added to the matrix
  then the hash table has to be destroyed and
  recreated.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate Memory for Hash Table */
  ierr = PetscCalloc2(ht_size,&baij->hd,ht_size,&baij->ht);CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop Over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }
  /* Loop Over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }

  /* Print Summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average search = %5.2f, max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}

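/*
   Editorial note: a sketch of how this path is typically enabled; the table size
   is factor*nnz, as the assignment to ht_size above shows, and the factor value
   below is illustrative, not a recommendation:

     MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);   // or the -mat_use_hash_table option
     MatSetHashTableFactor(mat,1.99);                   // hash table size = 1.99 * nnz
*/
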
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the row-oriented flags to PETSC_FALSE, and restore the original flags
       after MatSetValuesBlocked() */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled; if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPIU_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPIU_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

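/*
   Editorial note: the assembly routines above are driven by the standard PETSc
   cycle (a sketch, illustration only):

     MatSetValues(mat,...);                       // possibly including off-process rows
     MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);    // starts communicating the stash
     MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);      // receives the stash and assembles A and B
*/
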
extern PetscErrorCode MatView_SeqBAIJ(Mat,PetscViewer);
#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto first processor. */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so we will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqBAIJ(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_Binary"
static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(rlen/bs,&crow_lens);CHKERRQ(ierr);
  /* compute lengths of each row  */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc1(rlen,&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* load up the local column indices. Indices are stored for every point row, not just
     once per block row, since process 0 cannot reconstruct the per-row indices of a
     block row on its own. This requires more communication, but still no more than the
     communication needed for the nonzero values  */
  nzmax = nz; /* space the largest processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc1(nzmax,&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc1(nzmax,&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ"
PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

1353 #undef __FUNCT__
1354 #define __FUNCT__ "MatDestroy_MPIBAIJ"
1355 PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
1356 {
1357   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1358   PetscErrorCode ierr;
1359 
1360   PetscFunctionBegin;
1361 #if defined(PETSC_USE_LOG)
1362   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
1363 #endif
1364   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
1365   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
1366   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
1367   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
1368 #if defined(PETSC_USE_CTABLE)
1369   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
1370 #else
1371   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
1372 #endif
1373   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
1374   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
1375   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
1376   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
1377   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
1378   ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
1379   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
1380   ierr = PetscFree(mat->data);CHKERRQ(ierr);
1381 
1382   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
1383   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
1384   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
1385   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
1386   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
1387   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
1388   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
1389   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
1390   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
1391   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
1392   PetscFunctionReturn(0);
1393 }
1394 
1395 #undef __FUNCT__
1396 #define __FUNCT__ "MatMult_MPIBAIJ"
1397 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1398 {
1399   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1400   PetscErrorCode ierr;
1401   PetscInt       nt;
1402 
1403   PetscFunctionBegin;
1404   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1405   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1406   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1407   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1408   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1409   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1410   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1411   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1412   PetscFunctionReturn(0);
1413 }
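
/*
   The Begin/compute/End sequence above is the standard overlap of halo
   communication with the purely local product.  A minimal caller sketch,
   assuming an assembled MATMPIBAIJ matrix A (the vector names are
   hypothetical):

     Vec x,y;
     ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = MatMult(A,x,y);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/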
1414 
1415 #undef __FUNCT__
1416 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1417 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1418 {
1419   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1420   PetscErrorCode ierr;
1421 
1422   PetscFunctionBegin;
1423   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1424   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1425   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1426   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1427   PetscFunctionReturn(0);
1428 }
1429 
1430 #undef __FUNCT__
1431 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1432 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1433 {
1434   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1435   PetscErrorCode ierr;
1436   PetscBool      merged;
1437 
1438   PetscFunctionBegin;
1439   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1440   /* do nondiagonal part */
1441   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1442   if (!merged) {
1443     /* send it on its way */
1444     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1445     /* do local part */
1446     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1447     /* receive remote parts: note this assumes the values are not actually */
1448     /* inserted in yy until the next line */
1449     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1450   } else {
1451     /* do local part */
1452     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1453     /* send it on its way */
1454     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1455     /* values were actually received in the Begin(), but we must still call the matching (no-op) End() */
1456     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1457   }
1458   PetscFunctionReturn(0);
1459 }
1460 
1461 #undef __FUNCT__
1462 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1463 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1464 {
1465   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1466   PetscErrorCode ierr;
1467 
1468   PetscFunctionBegin;
1469   /* do nondiagonal part */
1470   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1471   /* send it on its way */
1472   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1473   /* do local part */
1474   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1475   /* receive remote parts: note this assumes the values are not actually */
1476   /* inserted into zz until VecScatterEnd() below, which is true for this */
1477   /* implementation but is not guaranteed to always hold.                 */
1478   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1479   PetscFunctionReturn(0);
1480 }
1481 
1482 /*
1483   This only works correctly for square matrices where the subblock A->A is the
1484    diagonal block
1485 */
1486 #undef __FUNCT__
1487 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1488 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1489 {
1490   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1491   PetscErrorCode ierr;
1492 
1493   PetscFunctionBegin;
1494   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1495   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1496   PetscFunctionReturn(0);
1497 }
1498 
1499 #undef __FUNCT__
1500 #define __FUNCT__ "MatScale_MPIBAIJ"
1501 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1502 {
1503   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1504   PetscErrorCode ierr;
1505 
1506   PetscFunctionBegin;
1507   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1508   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1509   PetscFunctionReturn(0);
1510 }
1511 
1512 #undef __FUNCT__
1513 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1514 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1515 {
1516   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1517   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1518   PetscErrorCode ierr;
1519   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1520   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1521   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1522 
1523   PetscFunctionBegin;
1524   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1525   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1526   mat->getrowactive = PETSC_TRUE;
1527 
1528   if (!mat->rowvalues && (idx || v)) {
1529     /*
1530         allocate enough space to hold information from the longest row.
1531     */
1532     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1533     PetscInt    max = 1,mbs = mat->mbs,tmp;
1534     for (i=0; i<mbs; i++) {
1535       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1536       if (max < tmp) max = tmp;
1537     }
1538     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1539   }
1540   lrow = row - brstart;
1541 
1542   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1543   if (!v)   {pvA = 0; pvB = 0;}
1544   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1545   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1546   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1547   nztot = nzA + nzB;
1548 
1549   cmap = mat->garray;
1550   if (v  || idx) {
1551     if (nztot) {
1552       /* Sort by increasing column numbers, assuming A and B already sorted */
1553       PetscInt imark = -1;
1554       if (v) {
1555         *v = v_p = mat->rowvalues;
1556         for (i=0; i<nzB; i++) {
1557           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1558           else break;
1559         }
1560         imark = i;
1561         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1562         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1563       }
1564       if (idx) {
1565         *idx = idx_p = mat->rowindices;
1566         if (imark > -1) {
1567           for (i=0; i<imark; i++) {
1568             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1569           }
1570         } else {
1571           for (i=0; i<nzB; i++) {
1572             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1573             else break;
1574           }
1575           imark = i;
1576         }
1577         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1578         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1579       }
1580     } else {
1581       if (idx) *idx = 0;
1582       if (v)   *v   = 0;
1583     }
1584   }
1585   *nz  = nztot;
1586   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1587   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1588   PetscFunctionReturn(0);
1589 }
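
/*
   A minimal caller sketch for the row accessor above, assuming an assembled
   MATMPIBAIJ matrix A (only locally owned rows may be requested, as enforced
   above):

     PetscInt          row,rstart,rend,ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     for (row=rstart; row<rend; row++) {
       ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
       ... use ncols, cols[], and vals[] for this global row ...
       ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
     }
*/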
1590 
1591 #undef __FUNCT__
1592 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1593 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1594 {
1595   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1596 
1597   PetscFunctionBegin;
1598   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1599   baij->getrowactive = PETSC_FALSE;
1600   PetscFunctionReturn(0);
1601 }
1602 
1603 #undef __FUNCT__
1604 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1605 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1606 {
1607   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1608   PetscErrorCode ierr;
1609 
1610   PetscFunctionBegin;
1611   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1612   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1613   PetscFunctionReturn(0);
1614 }
1615 
1616 #undef __FUNCT__
1617 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1618 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1619 {
1620   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1621   Mat            A  = a->A,B = a->B;
1622   PetscErrorCode ierr;
1623   PetscReal      isend[5],irecv[5];
1624 
1625   PetscFunctionBegin;
1626   info->block_size = (PetscReal)matin->rmap->bs;
1627 
1628   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1629 
1630   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1631   isend[3] = info->memory;  isend[4] = info->mallocs;
1632 
1633   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1634 
1635   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1636   isend[3] += info->memory;  isend[4] += info->mallocs;
1637 
1638   if (flag == MAT_LOCAL) {
1639     info->nz_used      = isend[0];
1640     info->nz_allocated = isend[1];
1641     info->nz_unneeded  = isend[2];
1642     info->memory       = isend[3];
1643     info->mallocs      = isend[4];
1644   } else if (flag == MAT_GLOBAL_MAX) {
1645     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1646 
1647     info->nz_used      = irecv[0];
1648     info->nz_allocated = irecv[1];
1649     info->nz_unneeded  = irecv[2];
1650     info->memory       = irecv[3];
1651     info->mallocs      = irecv[4];
1652   } else if (flag == MAT_GLOBAL_SUM) {
1653     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1654 
1655     info->nz_used      = irecv[0];
1656     info->nz_allocated = irecv[1];
1657     info->nz_unneeded  = irecv[2];
1658     info->memory       = irecv[3];
1659     info->mallocs      = irecv[4];
1660   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1661   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1662   info->fill_ratio_needed = 0;
1663   info->factor_mallocs    = 0;
1664   PetscFunctionReturn(0);
1665 }
1666 
1667 #undef __FUNCT__
1668 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1669 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1670 {
1671   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1672   PetscErrorCode ierr;
1673 
1674   PetscFunctionBegin;
1675   switch (op) {
1676   case MAT_NEW_NONZERO_LOCATIONS:
1677   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1678   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1679   case MAT_KEEP_NONZERO_PATTERN:
1680   case MAT_NEW_NONZERO_LOCATION_ERR:
1681     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1682     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1683     break;
1684   case MAT_ROW_ORIENTED:
1685     a->roworiented = flg;
1686 
1687     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1688     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1689     break;
1690   case MAT_NEW_DIAGONALS:
1691     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1692     break;
1693   case MAT_IGNORE_OFF_PROC_ENTRIES:
1694     a->donotstash = flg;
1695     break;
1696   case MAT_USE_HASH_TABLE:
1697     a->ht_flag = flg;
1698     break;
1699   case MAT_SYMMETRIC:
1700   case MAT_STRUCTURALLY_SYMMETRIC:
1701   case MAT_HERMITIAN:
1702   case MAT_SYMMETRY_ETERNAL:
1703     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1704     break;
1705   default:
1706     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1707   }
1708   PetscFunctionReturn(0);
1709 }
1710 
1711 #undef __FUNCT__
1712 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1713 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1714 {
1715   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1716   Mat_SeqBAIJ    *Aloc;
1717   Mat            B;
1718   PetscErrorCode ierr;
1719   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1720   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1721   MatScalar      *a;
1722 
1723   PetscFunctionBegin;
1724   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1725   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1726     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1727     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1728     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1729     /* Do not know preallocation information, but must set block size */
1730     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1731   } else {
1732     B = *matout;
1733   }
1734 
1735   /* copy over the A part */
1736   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1737   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1738   ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
1739 
1740   for (i=0; i<mbs; i++) {
1741     rvals[0] = bs*(baij->rstartbs + i);
1742     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1743     for (j=ai[i]; j<ai[i+1]; j++) {
1744       col = (baij->cstartbs+aj[j])*bs;
1745       for (k=0; k<bs; k++) {
1746         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1747         col++;
1748         a += bs;
1749       }
1750     }
1751   }
1752   /* copy over the B part */
1753   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1754   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1755   for (i=0; i<mbs; i++) {
1756     rvals[0] = bs*(baij->rstartbs + i);
1757     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1758     for (j=ai[i]; j<ai[i+1]; j++) {
1759       col = baij->garray[aj[j]]*bs;
1760       for (k=0; k<bs; k++) {
1761         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1762         col++;
1763         a += bs;
1764       }
1765     }
1766   }
1767   ierr = PetscFree(rvals);CHKERRQ(ierr);
1768   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1769   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1770 
1771   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1772   else {
1773     ierr = MatHeaderMerge(A,&B);CHKERRQ(ierr);
1774   }
1775   PetscFunctionReturn(0);
1776 }
1777 
1778 #undef __FUNCT__
1779 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1780 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1781 {
1782   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1783   Mat            a     = baij->A,b = baij->B;
1784   PetscErrorCode ierr;
1785   PetscInt       s1,s2,s3;
1786 
1787   PetscFunctionBegin;
1788   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1789   if (rr) {
1790     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1791     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1792     /* Overlap communication with computation. */
1793     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1794   }
1795   if (ll) {
1796     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1797     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1798     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1799   }
1800   /* scale  the diagonal block */
1801   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1802 
1803   if (rr) {
1804     /* Do a scatter end and then right scale the off-diagonal block */
1805     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1806     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1807   }
1808   PetscFunctionReturn(0);
1809 }
1810 
1811 #undef __FUNCT__
1812 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1813 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1814 {
1815   Mat_MPIBAIJ   *l      = (Mat_MPIBAIJ *) A->data;
1816   PetscInt      *owners = A->rmap->range;
1817   PetscInt       n      = A->rmap->n;
1818   PetscSF        sf;
1819   PetscInt      *lrows;
1820   PetscSFNode   *rrows;
1821   PetscInt       r, p = 0, len = 0;
1822   PetscErrorCode ierr;
1823 
1824   PetscFunctionBegin;
1825   /* Create SF where leaves are input rows and roots are owned rows */
1826   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1827   for (r = 0; r < n; ++r) lrows[r] = -1;
1828   if (!A->nooffproczerorows) {ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);}
1829   for (r = 0; r < N; ++r) {
1830     const PetscInt idx   = rows[r];
1831     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1832     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1833       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1834     }
1835     if (A->nooffproczerorows) {
1836       if (p != l->rank) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"MAT_NO_OFF_PROC_ZERO_ROWS set, but row %D is not owned by rank %d",idx,l->rank);
1837       lrows[len++] = idx - owners[p];
1838     } else {
1839       rrows[r].rank = p;
1840       rrows[r].index = rows[r] - owners[p];
1841     }
1842   }
1843   if (!A->nooffproczerorows) {
1844     ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1845     ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1846     /* Collect flags for rows to be zeroed */
1847     ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1848     ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1849     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1850     /* Compress and put in row numbers */
1851     for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1852   }
1853   /* fix right hand side if needed */
1854   if (x && b) {
1855     const PetscScalar *xx;
1856     PetscScalar       *bb;
1857 
1858     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1859     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1860     for (r = 0; r < len; ++r) bb[lrows[r]] = diag*xx[lrows[r]];
1861     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1862     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1863   }
1864 
1865   /* actually zap the local rows */
1866   /*
1867      Zero the required rows. If the "diagonal block" of the matrix
1868      is square and the user wishes to set the diagonal, we use a separate
1869      code path so that MatSetValues() is not called for each diagonal entry,
1870      which would allocate new memory and incur many mallocs, slowing things
1871      down.
1872   */
1873   /* must zero l->B before l->A because the (diag) case below may put values into l->B */
1874   ierr = MatZeroRows_SeqBAIJ(l->B,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1875   if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
1876     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,diag,NULL,NULL);CHKERRQ(ierr);
1877   } else if (diag != 0.0) {
1878     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,0,0);CHKERRQ(ierr);
1879     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1880        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1881     for (r = 0; r < len; ++r) {
1882       const PetscInt row = lrows[r] + A->rmap->rstart;
1883       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1884     }
1885     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1886     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1887   } else {
1888     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1889   }
1890   ierr = PetscFree(lrows);CHKERRQ(ierr);
1891 
1892   /* only change matrix nonzero state if pattern was allowed to be changed */
1893   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1894     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1895     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1896   }
1897   PetscFunctionReturn(0);
1898 }
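
/*
   A minimal sketch of applying Dirichlet boundary conditions through the
   public MatZeroRows() entry point that dispatches here; the row list and
   the vectors are hypothetical:

     PetscInt bcrows[] = {0,5,7};
     ierr = MatZeroRows(A,3,bcrows,1.0,x,b);CHKERRQ(ierr);

   With a nonzero diag and x,b supplied, b is fixed up above so that
   b[i] = diag*x[i] on every zeroed row.
*/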
1899 
1900 #undef __FUNCT__
1901 #define __FUNCT__ "MatZeroRowsColumns_MPIBAIJ"
1902 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1903 {
1904   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1905   PetscErrorCode    ierr;
1906   PetscMPIInt       n = A->rmap->n;
1907   PetscInt          i,j,k,r,p = 0,len = 0,row,col,count;
1908   PetscInt          *lrows,*owners = A->rmap->range;
1909   PetscSFNode       *rrows;
1910   PetscSF           sf;
1911   const PetscScalar *xx;
1912   PetscScalar       *bb,*mask;
1913   Vec               xmask,lmask;
1914   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ*)l->B->data;
1915   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1916   PetscScalar       *aa;
1917 
1918   PetscFunctionBegin;
1919   /* Create SF where leaves are input rows and roots are owned rows */
1920   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1921   for (r = 0; r < n; ++r) lrows[r] = -1;
1922   ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);
1923   for (r = 0; r < N; ++r) {
1924     const PetscInt idx   = rows[r];
1925     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1926     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1927       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1928     }
1929     rrows[r].rank  = p;
1930     rrows[r].index = rows[r] - owners[p];
1931   }
1932   ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1933   ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1934   /* Collect flags for rows to be zeroed */
1935   ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1936   ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1937   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1938   /* Compress and put in row numbers */
1939   for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1940   /* zero diagonal part of matrix */
1941   ierr = MatZeroRowsColumns(l->A,len,lrows,diag,x,b);CHKERRQ(ierr);
1942   /* handle off diagonal part of matrix */
1943   ierr = MatCreateVecs(A,&xmask,NULL);CHKERRQ(ierr);
1944   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
1945   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
1946   for (i=0; i<len; i++) bb[lrows[i]] = 1;
1947   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1948   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1949   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1950   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1951   if (x) {
1952     ierr = VecScatterBegin(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1953     ierr = VecScatterEnd(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1954     ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1955     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1956   }
1957   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1958   /* remove zeroed rows of off diagonal matrix */
1959   for (i = 0; i < len; ++i) {
1960     row   = lrows[i];
1961     count = (baij->i[row/bs +1] - baij->i[row/bs])*bs;
1962     aa    = ((MatScalar*)(baij->a)) + baij->i[row/bs]*bs2 + (row%bs);
1963     for (k = 0; k < count; ++k) {
1964       aa[0] = 0.0;
1965       aa   += bs;
1966     }
1967   }
1968   /* loop over all elements of off process part of matrix zeroing removed columns*/
1969   for (i = 0; i < l->B->rmap->N; ++i) {
1970     row = i/bs;
1971     for (j = baij->i[row]; j < baij->i[row+1]; ++j) {
1972       for (k = 0; k < bs; ++k) {
1973         col = bs*baij->j[j] + k;
1974         if (PetscAbsScalar(mask[col])) {
1975           aa = ((MatScalar*)(baij->a)) + j*bs2 + (i%bs) + bs*k;
1976           if (b) bb[i] -= aa[0]*xx[col];
1977           aa[0] = 0.0;
1978         }
1979       }
1980     }
1981   }
1982   if (x) {
1983     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1984     ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1985   }
1986   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1987   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
1988   ierr = PetscFree(lrows);CHKERRQ(ierr);
1989 
1990   /* only change matrix nonzero state if pattern was allowed to be changed */
1991   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1992     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1993     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1994   }
1995   PetscFunctionReturn(0);
1996 }
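
/*
   A minimal sketch for the symmetric variant above, via the public
   MatZeroRowsColumns() (same hypothetical names as the MatZeroRows()
   sketch):

     ierr = MatZeroRowsColumns(A,3,bcrows,1.0,x,b);CHKERRQ(ierr);

   Unlike MatZeroRows(), this also zeros the matching columns, using the
   mask scatter above to fold the eliminated off-diagonal entries into b,
   so a symmetric matrix stays symmetric.
*/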
1997 
1998 #undef __FUNCT__
1999 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
2000 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
2001 {
2002   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2003   PetscErrorCode ierr;
2004 
2005   PetscFunctionBegin;
2006   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
2007   PetscFunctionReturn(0);
2008 }
2009 
2010 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
2011 
2012 #undef __FUNCT__
2013 #define __FUNCT__ "MatEqual_MPIBAIJ"
2014 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
2015 {
2016   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
2017   Mat            a,b,c,d;
2018   PetscBool      flg;
2019   PetscErrorCode ierr;
2020 
2021   PetscFunctionBegin;
2022   a = matA->A; b = matA->B;
2023   c = matB->A; d = matB->B;
2024 
2025   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
2026   if (flg) {
2027     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
2028   }
2029   ierr = MPIU_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2030   PetscFunctionReturn(0);
2031 }
2032 
2033 #undef __FUNCT__
2034 #define __FUNCT__ "MatCopy_MPIBAIJ"
2035 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
2036 {
2037   PetscErrorCode ierr;
2038   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2039   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2040 
2041   PetscFunctionBegin;
2042   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
2043   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
2044     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2045   } else {
2046     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
2047     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
2048   }
2049   PetscFunctionReturn(0);
2050 }
2051 
2052 #undef __FUNCT__
2053 #define __FUNCT__ "MatSetUp_MPIBAIJ"
2054 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
2055 {
2056   PetscErrorCode ierr;
2057 
2058   PetscFunctionBegin;
2059   ierr = MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
2060   PetscFunctionReturn(0);
2061 }
2062 
2063 #undef __FUNCT__
2064 #define __FUNCT__ "MatAXPYGetPreallocation_MPIBAIJ"
2065 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
2066 {
2067   PetscErrorCode ierr;
2068   PetscInt       bs = Y->rmap->bs,m = Y->rmap->N/bs;
2069   Mat_SeqBAIJ    *x = (Mat_SeqBAIJ*)X->data;
2070   Mat_SeqBAIJ    *y = (Mat_SeqBAIJ*)Y->data;
2071 
2072   PetscFunctionBegin;
2073   ierr = MatAXPYGetPreallocation_MPIX_private(m,x->i,x->j,xltog,y->i,y->j,yltog,nnz);CHKERRQ(ierr);
2074   PetscFunctionReturn(0);
2075 }
2076 
2077 #undef __FUNCT__
2078 #define __FUNCT__ "MatAXPY_MPIBAIJ"
2079 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
2080 {
2081   PetscErrorCode ierr;
2082   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
2083   PetscBLASInt   bnz,one=1;
2084   Mat_SeqBAIJ    *x,*y;
2085 
2086   PetscFunctionBegin;
2087   if (str == SAME_NONZERO_PATTERN) {
2088     PetscScalar alpha = a;
2089     x    = (Mat_SeqBAIJ*)xx->A->data;
2090     y    = (Mat_SeqBAIJ*)yy->A->data;
2091     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2092     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2093     x    = (Mat_SeqBAIJ*)xx->B->data;
2094     y    = (Mat_SeqBAIJ*)yy->B->data;
2095     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2096     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2097     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
2098   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
2099     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2100   } else {
2101     Mat      B;
2102     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
2103     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
2104     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
2105     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
2106     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
2107     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
2108     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
2109     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
2110     ierr = MatAXPYGetPreallocation_SeqBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
2111     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
2112     ierr = MatMPIBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
2113     /* MatAXPY_BasicWithPreallocation() for BAIJ matrix is much slower than AIJ, even for bs=1 ! */
2114     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2115     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2116     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
2117     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2118   }
2119   PetscFunctionReturn(0);
2120 }
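
/*
   A minimal sketch of the three MatStructure paths above (X and Y are
   hypothetical MATMPIBAIJ matrices with identical layouts):

     ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = MatAXPY(Y,2.0,X,SUBSET_NONZERO_PATTERN);CHKERRQ(ierr);
     ierr = MatAXPY(Y,2.0,X,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);

   The first uses the fast BLAS axpy on both blocks, the second falls back
   to MatAXPY_Basic(), and the third preallocates a new matrix and replaces
   Y's header.
*/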
2121 
2122 #undef __FUNCT__
2123 #define __FUNCT__ "MatRealPart_MPIBAIJ"
2124 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
2125 {
2126   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2127   PetscErrorCode ierr;
2128 
2129   PetscFunctionBegin;
2130   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2131   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2132   PetscFunctionReturn(0);
2133 }
2134 
2135 #undef __FUNCT__
2136 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
2137 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2138 {
2139   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2140   PetscErrorCode ierr;
2141 
2142   PetscFunctionBegin;
2143   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2144   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2145   PetscFunctionReturn(0);
2146 }
2147 
2148 #undef __FUNCT__
2149 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2150 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2151 {
2152   PetscErrorCode ierr;
2153   IS             iscol_local;
2154   PetscInt       csize;
2155 
2156   PetscFunctionBegin;
2157   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2158   if (call == MAT_REUSE_MATRIX) {
2159     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2160     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2161   } else {
2162     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2163   }
2164   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2165   if (call == MAT_INITIAL_MATRIX) {
2166     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2167     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2168   }
2169   PetscFunctionReturn(0);
2170 }
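
/*
   A minimal caller sketch for the routine above, through the public
   MatGetSubMatrix(); the index sets (and nloc, first) are hypothetical
   and must respect the block structure for BAIJ:

     IS  isrow,iscol;
     Mat sub;
     ierr = ISCreateStride(PETSC_COMM_WORLD,nloc,first,1,&isrow);CHKERRQ(ierr);
     ierr = ISCreateStride(PETSC_COMM_WORLD,nloc,first,1,&iscol);CHKERRQ(ierr);
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&sub);CHKERRQ(ierr);
     ... later, with the same index sets, the gathered IS cached above is reused ...
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_REUSE_MATRIX,&sub);CHKERRQ(ierr);
*/
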
2171 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2172 #undef __FUNCT__
2173 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2174 /*
2175   Not great since it makes two copies of the submatrix: first a local SeqBAIJ,
2176   then the end result assembled by concatenating the local matrices.
2177   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2178   This routine is used for both BAIJ and SBAIJ matrices (an unfortunate dependency).
2179 */
2180 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2181 {
2182   PetscErrorCode ierr;
2183   PetscMPIInt    rank,size;
2184   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2185   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2186   Mat            M,Mreuse;
2187   MatScalar      *vwork,*aa;
2188   MPI_Comm       comm;
2189   IS             isrow_new, iscol_new;
2190   PetscBool      idflag,allrows, allcols;
2191   Mat_SeqBAIJ    *aij;
2192 
2193   PetscFunctionBegin;
2194   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2195   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2196   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2197   /* The compression and expansion should be avoided. It does not flag
2198      errors and may change the indices, hence it is potentially buggy */
2199   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2200   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2201 
2202   /* Check for special case: each processor gets entire matrix columns */
2203   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2204   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2205   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2206   else allcols = PETSC_FALSE;
2207 
2208   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2209   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2210   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2211   else allrows = PETSC_FALSE;
2212 
2213   if (call ==  MAT_REUSE_MATRIX) {
2214     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2215     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2216     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2217   } else {
2218     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2219   }
2220   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2221   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2222   /*
2223       m - number of local rows
2224       n - number of columns (same on all processors)
2225       rstart - first row in new global matrix generated
2226   */
2227   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2228   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2229   m    = m/bs;
2230   n    = n/bs;
2231 
2232   if (call == MAT_INITIAL_MATRIX) {
2233     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2234     ii  = aij->i;
2235     jj  = aij->j;
2236 
2237     /*
2238         Determine the number of non-zeros in the diagonal and off-diagonal
2239         portions of the matrix in order to do correct preallocation
2240     */
2241 
2242     /* first get start and end of "diagonal" columns */
2243     if (csize == PETSC_DECIDE) {
2244       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2245       if (mglobal == n*bs) { /* square matrix */
2246         nlocal = m;
2247       } else {
2248         nlocal = n/size + ((n % size) > rank);
2249       }
2250     } else {
2251       nlocal = csize/bs;
2252     }
2253     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2254     rstart = rend - nlocal;
2255     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2256 
2257     /* next, compute all the lengths */
2258     ierr  = PetscMalloc2(m+1,&dlens,m+1,&olens);CHKERRQ(ierr);
2259     for (i=0; i<m; i++) {
2260       jend = ii[i+1] - ii[i];
2261       olen = 0;
2262       dlen = 0;
2263       for (j=0; j<jend; j++) {
2264         if (*jj < rstart || *jj >= rend) olen++;
2265         else dlen++;
2266         jj++;
2267       }
2268       olens[i] = olen;
2269       dlens[i] = dlen;
2270     }
2271     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2272     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2273     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2274     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2275     ierr = MatMPISBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2276     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2277   } else {
2278     PetscInt ml,nl;
2279 
2280     M    = *newmat;
2281     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2282     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2283     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2284     /*
2285          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2286        rather than the slower MatSetValues().
2287     */
2288     M->was_assembled = PETSC_TRUE;
2289     M->assembled     = PETSC_FALSE;
2290   }
2291   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2292   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2293   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2294   ii   = aij->i;
2295   jj   = aij->j;
2296   aa   = aij->a;
2297   for (i=0; i<m; i++) {
2298     row   = rstart/bs + i;
2299     nz    = ii[i+1] - ii[i];
2300     cwork = jj;     jj += nz;
2301     vwork = aa;     aa += nz*bs*bs;
2302     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2303   }
2304 
2305   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2306   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2307   *newmat = M;
2308 
2309   /* save submatrix used in processor for next request */
2310   if (call ==  MAT_INITIAL_MATRIX) {
2311     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2312     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2313   }
2314   PetscFunctionReturn(0);
2315 }
2316 
2317 #undef __FUNCT__
2318 #define __FUNCT__ "MatPermute_MPIBAIJ"
2319 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2320 {
2321   MPI_Comm       comm,pcomm;
2322   PetscInt       clocal_size,nrows;
2323   const PetscInt *rows;
2324   PetscMPIInt    size;
2325   IS             crowp,lcolp;
2326   PetscErrorCode ierr;
2327 
2328   PetscFunctionBegin;
2329   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2330   /* make a collective version of 'rowp' */
2331   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2332   if (pcomm==comm) {
2333     crowp = rowp;
2334   } else {
2335     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2336     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2337     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2338     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2339   }
2340   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2341   /* make a local version of 'colp' */
2342   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2343   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2344   if (size==1) {
2345     lcolp = colp;
2346   } else {
2347     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2348   }
2349   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2350   /* now we just get the submatrix */
2351   ierr = MatGetLocalSize(A,NULL,&clocal_size);CHKERRQ(ierr);
2352   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2353   /* clean up */
2354   if (pcomm!=comm) {
2355     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2356   }
2357   if (size>1) {
2358     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2359   }
2360   PetscFunctionReturn(0);
2361 }
2362 
2363 #undef __FUNCT__
2364 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2365 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2366 {
2367   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2368   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2369 
2370   PetscFunctionBegin;
2371   if (nghosts) *nghosts = B->nbs;
2372   if (ghosts) *ghosts = baij->garray;
2373   PetscFunctionReturn(0);
2374 }
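
/*
   A minimal sketch using the ghost information above to build a compatible
   ghosted work vector; the matrix A is hypothetical, and this usage is an
   assumption rather than the only option:

     PetscInt       bs,nghosts;
     const PetscInt *ghosts;
     Vec            gv;
     ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
     ierr = MatGetGhosts(A,&nghosts,&ghosts);CHKERRQ(ierr);
     ierr = VecCreateGhostBlock(PetscObjectComm((PetscObject)A),bs,A->cmap->n,PETSC_DECIDE,nghosts,ghosts,&gv);CHKERRQ(ierr);

   Note the ghost indices returned here are block indices, matching what
   VecCreateGhostBlock() expects.
*/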
2375 
2376 #undef __FUNCT__
2377 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2378 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2379 {
2380   Mat            B;
2381   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2382   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2383   Mat_SeqAIJ     *b;
2384   PetscErrorCode ierr;
2385   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2386   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2387   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2388 
2389   PetscFunctionBegin;
2390   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2391   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2392 
2393   /* ----------------------------------------------------------------
2394      Tell every processor the number of nonzeros per row
2395   */
2396   ierr = PetscMalloc1(A->rmap->N/bs,&lens);CHKERRQ(ierr);
2397   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2398     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2399   }
2400   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2401   ierr      = PetscMalloc1(2*size,&recvcounts);CHKERRQ(ierr);
2402   displs    = recvcounts + size;
2403   for (i=0; i<size; i++) {
2404     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2405     displs[i]     = A->rmap->range[i]/bs;
2406   }
2407 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2408   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2409 #else
2410   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2411 #endif
2412   /* ---------------------------------------------------------------
2413      Create the sequential matrix of the same type as the local block diagonal
2414   */
2415   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2416   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2417   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2418   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2419   b    = (Mat_SeqAIJ*)B->data;
2420 
2421   /*--------------------------------------------------------------------
2422     Copy my part of matrix column indices over
2423   */
2424   sendcount  = ad->nz + bd->nz;
2425   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2426   a_jsendbuf = ad->j;
2427   b_jsendbuf = bd->j;
2428   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2429   cnt        = 0;
2430   for (i=0; i<n; i++) {
2431 
2432     /* put in lower diagonal portion */
2433     m = bd->i[i+1] - bd->i[i];
2434     while (m > 0) {
2435       /* is it above diagonal (in bd (compressed) numbering) */
2436       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2437       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2438       m--;
2439     }
2440 
2441     /* put in diagonal portion */
2442     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2443       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2444     }
2445 
2446     /* put in upper diagonal portion */
2447     while (m-- > 0) {
2448       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2449     }
2450   }
2451   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2452 
2453   /*--------------------------------------------------------------------
2454     Gather all column indices to all processors
2455   */
2456   for (i=0; i<size; i++) {
2457     recvcounts[i] = 0;
2458     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2459       recvcounts[i] += lens[j];
2460     }
2461   }
2462   displs[0] = 0;
2463   for (i=1; i<size; i++) {
2464     displs[i] = displs[i-1] + recvcounts[i-1];
2465   }
2466 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2467   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2468 #else
2469   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2470 #endif
2471   /*--------------------------------------------------------------------
2472     Assemble the matrix into usable form (note numerical values not yet set)
2473   */
2474   /* set the b->ilen (length of each row) values */
2475   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2476   /* set the b->i indices */
2477   b->i[0] = 0;
2478   for (i=1; i<=A->rmap->N/bs; i++) {
2479     b->i[i] = b->i[i-1] + lens[i-1];
2480   }
2481   ierr = PetscFree(lens);CHKERRQ(ierr);
2482   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2483   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2484   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2485 
2486   if (A->symmetric) {
2487     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2488   } else if (A->hermitian) {
2489     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2490   } else if (A->structurally_symmetric) {
2491     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2492   }
2493   *newmat = B;
2494   PetscFunctionReturn(0);
2495 }
2496 
2497 #undef __FUNCT__
2498 #define __FUNCT__ "MatSOR_MPIBAIJ"
2499 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2500 {
2501   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2502   PetscErrorCode ierr;
2503   Vec            bb1 = 0;
2504 
2505   PetscFunctionBegin;
2506   if (flag == SOR_APPLY_UPPER) {
2507     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2508     PetscFunctionReturn(0);
2509   }
2510 
2511   if (its > 1 || !(flag & SOR_ZERO_INITIAL_GUESS)) { /* need a work vector unless this is a single sweep with a zero initial guess */
2512     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2513   }
2514 
2515   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2516     if (flag & SOR_ZERO_INITIAL_GUESS) {
2517       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2518       its--;
2519     }
2520 
2521     while (its--) {
2522       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2523       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2524 
2525       /* update rhs: bb1 = bb - B*x */
2526       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2527       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2528 
2529       /* local sweep */
2530       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2531     }
2532   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2533     if (flag & SOR_ZERO_INITIAL_GUESS) {
2534       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2535       its--;
2536     }
2537     while (its--) {
2538       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2539       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2540 
2541       /* update rhs: bb1 = bb - B*x */
2542       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2543       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2544 
2545       /* local sweep */
2546       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2547     }
2548   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2549     if (flag & SOR_ZERO_INITIAL_GUESS) {
2550       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2551       its--;
2552     }
2553     while (its--) {
2554       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2555       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2556 
2557       /* update rhs: bb1 = bb - B*x */
2558       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2559       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2560 
2561       /* local sweep */
2562       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2563     }
2564   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Requested parallel SOR variant not supported");
2565 
2566   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2567   PetscFunctionReturn(0);
2568 }
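
/*
   The local sweeps above are normally reached through PCSOR rather than
   called directly.  A minimal setup sketch (the matrix A is assumed
   assembled):

     KSP ksp;
     PC  pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCSOR);CHKERRQ(ierr);

   or, from the command line, e.g. -pc_type sor -pc_sor_local_symmetric
   -pc_sor_its 2; each outer iteration above rescatters xx so the update
   bb1 = bb - B*x sees current off-process values.
*/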
2569 
2570 #undef __FUNCT__
2571 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2572 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2573 {
2574   PetscErrorCode ierr;
2575   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2576   PetscInt       N,i,*garray = aij->garray;
2577   PetscInt       ib,jb,bs = A->rmap->bs;
2578   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2579   MatScalar      *a_val = a_aij->a;
2580   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2581   MatScalar      *b_val = b_aij->a;
2582   PetscReal      *work;
2583 
2584   PetscFunctionBegin;
2585   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2586   ierr = PetscCalloc1(N,&work);CHKERRQ(ierr);
2587   if (type == NORM_2) {
2588     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2589       for (jb=0; jb<bs; jb++) {
2590         for (ib=0; ib<bs; ib++) {
2591           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2592           a_val++;
2593         }
2594       }
2595     }
2596     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2597       for (jb=0; jb<bs; jb++) {
2598         for (ib=0; ib<bs; ib++) {
2599           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2600           b_val++;
2601         }
2602       }
2603     }
2604   } else if (type == NORM_1) {
2605     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2606       for (jb=0; jb<bs; jb++) {
2607         for (ib=0; ib<bs; ib++) {
2608           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2609           a_val++;
2610         }
2611       }
2612     }
2613     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2614       for (jb=0; jb<bs; jb++) {
2615         for (ib=0; ib<bs; ib++) {
2616           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2617           b_val++;
2618         }
2619       }
2620     }
2621   } else if (type == NORM_INFINITY) {
2622     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2623       for (jb=0; jb<bs; jb++) {
2624         for (ib=0; ib<bs; ib++) {
2625           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2626           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2627           a_val++;
2628         }
2629       }
2630     }
2631     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2632       for (jb=0; jb<bs; jb++) {
2633         for (ib=0; ib<bs; ib++) {
2634           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2635           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2636           b_val++;
2637         }
2638       }
2639     }
2640   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2641   if (type == NORM_INFINITY) {
2642     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2643   } else {
2644     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2645   }
2646   ierr = PetscFree(work);CHKERRQ(ierr);
2647   if (type == NORM_2) {
2648     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2649   }
2650   PetscFunctionReturn(0);
2651 }
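
/*
   A minimal caller sketch for the column-norm routine above; the norms
   array must hold one entry per global column:

     PetscInt  N;
     PetscReal *norms;
     ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
     ierr = PetscMalloc1(N,&norms);CHKERRQ(ierr);
     ierr = MatGetColumnNorms(A,NORM_2,norms);CHKERRQ(ierr);
     ierr = PetscFree(norms);CHKERRQ(ierr);
*/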
2652 
2653 #undef __FUNCT__
2654 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2655 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2656 {
2657   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2658   PetscErrorCode ierr;
2659 
2660   PetscFunctionBegin;
2661   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2662   A->errortype = a->A->errortype;
2663   PetscFunctionReturn(0);
2664 }
2665 
2666 #undef __FUNCT__
2667 #define __FUNCT__ "MatShift_MPIBAIJ"
2668 PetscErrorCode MatShift_MPIBAIJ(Mat Y,PetscScalar a)
2669 {
2670   PetscErrorCode ierr;
2671   Mat_MPIBAIJ    *maij = (Mat_MPIBAIJ*)Y->data;
2672   Mat_SeqBAIJ    *aij = (Mat_SeqBAIJ*)maij->A->data;
2673 
2674   PetscFunctionBegin;
2675   if (!Y->preallocated) {
2676     ierr = MatMPIBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
2677   } else if (!aij->nz) {
2678     PetscInt nonew = aij->nonew;
2679     ierr = MatSeqBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
2680     aij->nonew = nonew;
2681   }
2682   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
2683   PetscFunctionReturn(0);
2684 }
2685 
2686 #undef __FUNCT__
2687 #define __FUNCT__ "MatMissingDiagonal_MPIBAIJ"
2688 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A,PetscBool  *missing,PetscInt *d)
2689 {
2690   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2691   PetscErrorCode ierr;
2692 
2693   PetscFunctionBegin;
2694   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only works for square matrices");
2695   ierr = MatMissingDiagonal(a->A,missing,d);CHKERRQ(ierr);
2696   if (d) {
2697     PetscInt rstart;
2698     ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
2699     *d += rstart/A->rmap->bs;
2700 
2701   }
2702   PetscFunctionReturn(0);
2703 }
2704 
2705 /* -------------------------------------------------------------------*/
2706 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2707                                        MatGetRow_MPIBAIJ,
2708                                        MatRestoreRow_MPIBAIJ,
2709                                        MatMult_MPIBAIJ,
2710                                 /* 4*/ MatMultAdd_MPIBAIJ,
2711                                        MatMultTranspose_MPIBAIJ,
2712                                        MatMultTransposeAdd_MPIBAIJ,
2713                                        0,
2714                                        0,
2715                                        0,
2716                                 /*10*/ 0,
2717                                        0,
2718                                        0,
2719                                        MatSOR_MPIBAIJ,
2720                                        MatTranspose_MPIBAIJ,
2721                                 /*15*/ MatGetInfo_MPIBAIJ,
2722                                        MatEqual_MPIBAIJ,
2723                                        MatGetDiagonal_MPIBAIJ,
2724                                        MatDiagonalScale_MPIBAIJ,
2725                                        MatNorm_MPIBAIJ,
2726                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2727                                        MatAssemblyEnd_MPIBAIJ,
2728                                        MatSetOption_MPIBAIJ,
2729                                        MatZeroEntries_MPIBAIJ,
2730                                 /*24*/ MatZeroRows_MPIBAIJ,
2731                                        0,
2732                                        0,
2733                                        0,
2734                                        0,
2735                                 /*29*/ MatSetUp_MPIBAIJ,
2736                                        0,
2737                                        0,
2738                                        0,
2739                                        0,
2740                                 /*34*/ MatDuplicate_MPIBAIJ,
2741                                        0,
2742                                        0,
2743                                        0,
2744                                        0,
2745                                 /*39*/ MatAXPY_MPIBAIJ,
2746                                        MatGetSubMatrices_MPIBAIJ,
2747                                        MatIncreaseOverlap_MPIBAIJ,
2748                                        MatGetValues_MPIBAIJ,
2749                                        MatCopy_MPIBAIJ,
2750                                 /*44*/ 0,
2751                                        MatScale_MPIBAIJ,
2752                                        MatShift_MPIBAIJ,
2753                                        0,
2754                                        MatZeroRowsColumns_MPIBAIJ,
2755                                 /*49*/ 0,
2756                                        0,
2757                                        0,
2758                                        0,
2759                                        0,
2760                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2761                                        0,
2762                                        MatSetUnfactored_MPIBAIJ,
2763                                        MatPermute_MPIBAIJ,
2764                                        MatSetValuesBlocked_MPIBAIJ,
2765                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2766                                        MatDestroy_MPIBAIJ,
2767                                        MatView_MPIBAIJ,
2768                                        0,
2769                                        0,
2770                                 /*64*/ 0,
2771                                        0,
2772                                        0,
2773                                        0,
2774                                        0,
2775                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2776                                        0,
2777                                        0,
2778                                        0,
2779                                        0,
2780                                 /*74*/ 0,
2781                                        MatFDColoringApply_BAIJ,
2782                                        0,
2783                                        0,
2784                                        0,
2785                                 /*79*/ 0,
2786                                        0,
2787                                        0,
2788                                        0,
2789                                        MatLoad_MPIBAIJ,
2790                                 /*84*/ 0,
2791                                        0,
2792                                        0,
2793                                        0,
2794                                        0,
2795                                 /*89*/ 0,
2796                                        0,
2797                                        0,
2798                                        0,
2799                                        0,
2800                                 /*94*/ 0,
2801                                        0,
2802                                        0,
2803                                        0,
2804                                        0,
2805                                 /*99*/ 0,
2806                                        0,
2807                                        0,
2808                                        0,
2809                                        0,
2810                                 /*104*/0,
2811                                        MatRealPart_MPIBAIJ,
2812                                        MatImaginaryPart_MPIBAIJ,
2813                                        0,
2814                                        0,
2815                                 /*109*/0,
2816                                        0,
2817                                        0,
2818                                        0,
2819                                        MatMissingDiagonal_MPIBAIJ,
2820                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2821                                        0,
2822                                        MatGetGhosts_MPIBAIJ,
2823                                        0,
2824                                        0,
2825                                 /*119*/0,
2826                                        0,
2827                                        0,
2828                                        0,
2829                                        MatGetMultiProcBlock_MPIBAIJ,
2830                                 /*124*/0,
2831                                        MatGetColumnNorms_MPIBAIJ,
2832                                        MatInvertBlockDiagonal_MPIBAIJ,
2833                                        0,
2834                                        0,
2835                                /*129*/ 0,
2836                                        0,
2837                                        0,
2838                                        0,
2839                                        0,
2840                                /*134*/ 0,
2841                                        0,
2842                                        0,
2843                                        0,
2844                                        0,
2845                                /*139*/ 0,
2846                                        0,
2847                                        0,
2848                                        MatFDColoringSetUp_MPIXAIJ,
2849                                        0,
2850                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPIBAIJ
2851 };
2852 
2853 #undef __FUNCT__
2854 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2855 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2856 {
2857   PetscFunctionBegin;
2858   *a = ((Mat_MPIBAIJ*)A->data)->A;
2859   PetscFunctionReturn(0);
2860 }
2861 
2862 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2863 
2864 #undef __FUNCT__
2865 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2866 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2867 {
2868   PetscInt       m,rstart,cstart,cend;
2869   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2870   const PetscInt *JJ    =0;
2871   PetscScalar    *values=0;
2872   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2873   PetscErrorCode ierr;
2874 
2875   PetscFunctionBegin;
2876   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2877   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2878   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2879   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2880   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2881   m      = B->rmap->n/bs;
2882   rstart = B->rmap->rstart/bs;
2883   cstart = B->cmap->rstart/bs;
2884   cend   = B->cmap->rend/bs;
2885 
2886   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2887   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
2888   for (i=0; i<m; i++) {
2889     nz = ii[i+1] - ii[i];
2890     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2891     nz_max = PetscMax(nz_max,nz);
2892     JJ     = jj + ii[i];
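    /* the column indices of each row are sorted, so first skip the entries left of this
       process's diagonal block, then count how many fall inside [cstart,cend) */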
2893     for (j=0; j<nz; j++) {
2894       if (*JJ >= cstart) break;
2895       JJ++;
2896     }
2897     d = 0;
2898     for (; j<nz; j++) {
2899       if (*JJ++ >= cend) break;
2900       d++;
2901     }
2902     d_nnz[i] = d;
2903     o_nnz[i] = nz - d;
2904   }
2905   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2906   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2907 
2908   values = (PetscScalar*)V;
2909   if (!values) {
2910     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2911     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2912   }
2913   for (i=0; i<m; i++) {
2914     PetscInt          row    = i + rstart;
2915     PetscInt          ncols  = ii[i+1] - ii[i];
2916     const PetscInt    *icols = jj + ii[i];
2917     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2918       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2919       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2920     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2921       PetscInt j;
2922       for (j=0; j<ncols; j++) {
2923         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2924         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2925       }
2926     }
2927   }
2928 
2929   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2930   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2931   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2932   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2933   PetscFunctionReturn(0);
2934 }
2935 
2936 #undef __FUNCT__
2937 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2938 /*@C
2939    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2940    (the default parallel PETSc format).
2941 
2942    Collective on MPI_Comm
2943 
2944    Input Parameters:
2945 +  B - the matrix
2946 .  bs - the block size
2947 .  i - the indices into j for the start of each local row (starts with zero)
2948 .  j - the column indices for each local row (starts with zero); these must be sorted for each row
2949 -  v - optional values in the matrix
2950 
2951    Level: developer
2952 
2953    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2954    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index is
2955    over rows within a block and the last index is over columns within a block.  Fortran programs will likely set
2956    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index is over rows within a
2957    block and the second index is over columns within a block.
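
   A small illustrative sketch (values invented for this example): a process owning two
   local block rows whose blocks sit at global block columns {0,1} and {1} would pass
.vb
   i[] = {0, 2, 3}     block row starts; i[0] must be 0
   j[] = {0, 1, 1}     sorted global block column indices
   v   = NULL, or an array of 3*bs*bs values ordered as described above
.ve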
2958 
2959 .keywords: matrix, aij, compressed row, sparse, parallel
2960 
2961 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ, MatCreateMPIBAIJWithArrays(), MPIBAIJ
2962 @*/
2963 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2964 {
2965   PetscErrorCode ierr;
2966 
2967   PetscFunctionBegin;
2968   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2969   PetscValidType(B,1);
2970   PetscValidLogicalCollectiveInt(B,bs,2);
2971   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2972   PetscFunctionReturn(0);
2973 }
2974 
2975 #undef __FUNCT__
2976 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2977 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2978 {
2979   Mat_MPIBAIJ    *b;
2980   PetscErrorCode ierr;
2981   PetscInt       i;
2982 
2983   PetscFunctionBegin;
2984   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
2985   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2986   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2987   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2988 
2989   if (d_nnz) {
2990     for (i=0; i<B->rmap->n/bs; i++) {
2991       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]);
2992     }
2993   }
2994   if (o_nnz) {
2995     for (i=0; i<B->rmap->n/bs; i++) {
2996       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]);
2997     }
2998   }
2999 
3000   b      = (Mat_MPIBAIJ*)B->data;
3001   b->bs2 = bs*bs;
3002   b->mbs = B->rmap->n/bs;
3003   b->nbs = B->cmap->n/bs;
3004   b->Mbs = B->rmap->N/bs;
3005   b->Nbs = B->cmap->N/bs;
3006 
3007   for (i=0; i<=b->size; i++) {
3008     b->rangebs[i] = B->rmap->range[i]/bs;
3009   }
3010   b->rstartbs = B->rmap->rstart/bs;
3011   b->rendbs   = B->rmap->rend/bs;
3012   b->cstartbs = B->cmap->rstart/bs;
3013   b->cendbs   = B->cmap->rend/bs;
3014 
3015   if (!B->preallocated) {
3016     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3017     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3018     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
3019     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
3020     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3021     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3022     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3023     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
3024     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
3025   }
3026 
3027   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3028   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3029   B->preallocated = PETSC_TRUE;
3030   PetscFunctionReturn(0);
3031 }
3032 
3033 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3034 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3035 
3036 #undef __FUNCT__
3037 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3038 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3039 {
3040   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3041   PetscErrorCode ierr;
3042   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3043   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3044   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3045 
3046   PetscFunctionBegin;
3047   ierr  = PetscMalloc1(M+1,&ii);CHKERRQ(ierr);
3048   ii[0] = 0;
3049   for (i=0; i<M; i++) {
3050     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3051     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3052     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3053     /* remove one from the count if this row of the diagonal block contains the diagonal entry */
3054     for (j=id[i]; j<id[i+1]; j++) {
3055       if (jd[j] == i) {ii[i+1]--;break;}
3056     }
3057   }
3058   ierr = PetscMalloc1(ii[M],&jj);CHKERRQ(ierr);
3059   cnt  = 0;
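  /* emit each block row's global block columns in increasing order: off-diagonal
     entries left of the diagonal block, then the diagonal block itself (skipping
     the diagonal entry), then the remaining off-diagonal entries */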
3060   for (i=0; i<M; i++) {
3061     for (j=io[i]; j<io[i+1]; j++) {
3062       if (garray[jo[j]] > rstart) break;
3063       jj[cnt++] = garray[jo[j]];
3064     }
3065     for (k=id[i]; k<id[i+1]; k++) {
3066       if (jd[k] != i) {
3067         jj[cnt++] = rstart + jd[k];
3068       }
3069     }
3070     for (; j<io[i+1]; j++) {
3071       jj[cnt++] = garray[jo[j]];
3072     }
3073   }
3074   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
3075   PetscFunctionReturn(0);
3076 }
3077 
3078 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3079 
3080 PETSC_EXTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3081 
3082 #undef __FUNCT__
3083 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3084 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3085 {
3086   PetscErrorCode ierr;
3087   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3088   Mat            B;
3089   Mat_MPIAIJ     *b;
3090 
3091   PetscFunctionBegin;
3092   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
3093 
3094   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
3095   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3096   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3097   ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr);
3098   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
3099   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
3100   b    = (Mat_MPIAIJ*) B->data;
3101 
3102   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3103   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3104   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3105   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3106   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3107   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3108   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3109   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3110   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3111   if (reuse == MAT_INPLACE_MATRIX) {
3112     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
3113   } else {
3114    *newmat = B;
3115   }
3116   PetscFunctionReturn(0);
3117 }
3118 
3119 /*MC
3120    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3121 
3122    Options Database Keys:
3123 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3124 . -mat_block_size <bs> - set the blocksize used to store the matrix
3125 - -mat_use_hash_table <fact>
3126 
3127   Level: beginner
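   For example, an application might select this format and a block size of 3 at run time
   with (executable name illustrative only)
.vb
   ./app -mat_type mpibaij -mat_block_size 3
.ve
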
3128 
3129 .seealso: MatCreateBAIJ()
3130 M*/
3131 
3132 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3133 
3134 #undef __FUNCT__
3135 #define __FUNCT__ "MatCreate_MPIBAIJ"
3136 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3137 {
3138   Mat_MPIBAIJ    *b;
3139   PetscErrorCode ierr;
3140   PetscBool      flg = PETSC_FALSE;
3141 
3142   PetscFunctionBegin;
3143   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
3144   B->data = (void*)b;
3145 
3146   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3147   B->assembled = PETSC_FALSE;
3148 
3149   B->insertmode = NOT_SET_VALUES;
3150   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3151   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3152 
3153   /* build local table of row and column ownerships */
3154   ierr = PetscMalloc1(b->size+1,&b->rangebs);CHKERRQ(ierr);
3155 
3156   /* build cache for off array entries formed */
3157   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3158 
3159   b->donotstash  = PETSC_FALSE;
3160   b->colmap      = NULL;
3161   b->garray      = NULL;
3162   b->roworiented = PETSC_TRUE;
3163 
3164   /* stuff used in block assembly */
3165   b->barray = 0;
3166 
3167   /* stuff used for matrix vector multiply */
3168   b->lvec  = 0;
3169   b->Mvctx = 0;
3170 
3171   /* stuff for MatGetRow() */
3172   b->rowindices   = 0;
3173   b->rowvalues    = 0;
3174   b->getrowactive = PETSC_FALSE;
3175 
3176   /* hash table stuff */
3177   b->ht           = 0;
3178   b->hd           = 0;
3179   b->ht_size      = 0;
3180   b->ht_flag      = PETSC_FALSE;
3181   b->ht_fact      = 0;
3182   b->ht_total_ct  = 0;
3183   b->ht_insert_ct = 0;
3184 
3185   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3186   b->ijonly = PETSC_FALSE;
3187 
3188 
3189   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3190   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3191   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3192   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3193   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3194   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIBAIJ);CHKERRQ(ierr);
3195   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3196   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3197   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3198   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3199   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpibstrm_C",MatConvert_MPIBAIJ_MPIBSTRM);CHKERRQ(ierr);
3200   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3201 
3202   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPIBAIJ matrix 1","Mat");CHKERRQ(ierr);
3203   ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",flg,&flg,NULL);CHKERRQ(ierr);
3204   if (flg) {
3205     PetscReal fact = 1.39;
3206     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3207     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3208     if (fact <= 1.0) fact = 1.39;
3209     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3210     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3211   }
3212   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3213   PetscFunctionReturn(0);
3214 }
3215 
3216 /*MC
3217    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3218 
3219    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3220    and MATMPIBAIJ otherwise.
3221 
3222    Options Database Keys:
3223 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3224 
3225   Level: beginner
3226 
3227 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3228 M*/
3229 
3230 #undef __FUNCT__
3231 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3232 /*@C
3233    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3234    (block compressed row).  For good matrix assembly performance
3235    the user should preallocate the matrix storage by setting the parameters
3236    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3237    performance can be increased by more than a factor of 50.
3238 
3239    Collective on Mat
3240 
3241    Input Parameters:
3242 +  B - the matrix
3243 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3244           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3245 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3246            submatrix  (same for all local rows)
3247 .  d_nnz - array containing the number of block nonzeros in the various block rows
3248            of the in diagonal portion of the local (possibly different for each block
3249            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3250            set it even if it is zero.
3251 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3252            submatrix (same for all local rows).
3253 -  o_nnz - array containing the number of nonzeros in the various block rows of the
3254            off-diagonal portion of the local submatrix (possibly different for
3255            each block row) or NULL.
3256 
3257    If the *_nnz parameter is given then the *_nz parameter is ignored
3258 
3259    Options Database Keys:
3260 +   -mat_block_size - size of the blocks to use
3261 -   -mat_use_hash_table <fact> - use a hash table with growth factor <fact> to save memory during assembly
3262 
3263    Notes:
3264    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3265    then it must be used on all processors that share the object for that argument.
3266 
3267    Storage Information:
3268    For a square global matrix we define each processor's diagonal portion
3269    to be its local rows and the corresponding columns (a square submatrix);
3270    each processor's off-diagonal portion encompasses the remainder of the
3271    local matrix (a rectangular submatrix).
3272 
3273    The user can specify preallocated storage for the diagonal part of
3274    the local submatrix with either d_nz or d_nnz (not both).  Set
3275    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3276    memory allocation.  Likewise, specify preallocated storage for the
3277    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3278 
3279    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3280    the figure below we depict these three local rows and all columns (0-11).
3281 
3282 .vb
3283            0 1 2 3 4 5 6 7 8 9 10 11
3284           --------------------------
3285    row 3  |o o o d d d o o o o  o  o
3286    row 4  |o o o d d d o o o o  o  o
3287    row 5  |o o o d d d o o o o  o  o
3288           --------------------------
3289 .ve
3290 
3291    Thus, any entries in the d locations are stored in the d (diagonal)
3292    submatrix, and any entries in the o locations are stored in the
3293    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3294    stored simply in the MATSEQBAIJ format for compressed row storage.
3295 
3296    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3297    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3298    In general, for PDE problems in which most nonzeros are near the diagonal,
3299    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3300    or you will get TERRIBLE performance; see the users' manual chapter on
3301    matrices.
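
   As a concrete reading of the figure above (with bs=1): each of the three local rows
   has 3 entries in d locations and 9 in o locations, so either d_nz=3 with o_nz=9, or
   d_nnz={3,3,3} with o_nnz={9,9,9}, preallocates this layout exactly.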
3302 
3303    You can call MatGetInfo() to get information on how effective the preallocation was;
3304    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3305    You can also run with the option -info and look for messages with the string
3306    malloc in them to see if additional memory allocation was needed.
3307 
3308    Level: intermediate
3309 
3310 .keywords: matrix, block, aij, compressed row, sparse, parallel
3311 
3312 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3313 @*/
3314 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3315 {
3316   PetscErrorCode ierr;
3317 
3318   PetscFunctionBegin;
3319   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3320   PetscValidType(B,1);
3321   PetscValidLogicalCollectiveInt(B,bs,2);
3322   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3323   PetscFunctionReturn(0);
3324 }
3325 
3326 #undef __FUNCT__
3327 #define __FUNCT__ "MatCreateBAIJ"
3328 /*@C
3329    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3330    (block compressed row).  For good matrix assembly performance
3331    the user should preallocate the matrix storage by setting the parameters
3332    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3333    performance can be increased by more than a factor of 50.
3334 
3335    Collective on MPI_Comm
3336 
3337    Input Parameters:
3338 +  comm - MPI communicator
3339 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3340           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3341 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3342            This value should be the same as the local size used in creating the
3343            y vector for the matrix-vector product y = Ax.
3344 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3345            This value should be the same as the local size used in creating the
3346            x vector for the matrix-vector product y = Ax.
3347 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3348 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3349 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3350            submatrix  (same for all local rows)
3351 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3352            in the diagonal portion of the local submatrix (possibly different for each block
3353            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3354            and set it even if it is zero.
3355 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3356            submatrix (same for all local rows).
3357 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3358            off-diagonal portion of the local submatrix (possibly different for
3359            each block row) or NULL.
3360 
3361    Output Parameter:
3362 .  A - the matrix
3363 
3364    Options Database Keys:
3365 +   -mat_block_size - size of the blocks to use
3366 -   -mat_use_hash_table <fact> - use a hash table with growth factor <fact> to save memory during assembly
3367 
3368    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3369    MatXXXXSetPreallocation() paradigm instead of calling this routine directly.
3370    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
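
   A minimal sketch of that paradigm for this type (error checking omitted):
.vb
   MatCreate(comm,&A);
   MatSetSizes(A,m,n,M,N);
   MatSetType(A,MATBAIJ);
   MatMPIBAIJSetPreallocation(A,bs,d_nz,d_nnz,o_nz,o_nnz);
   MatSeqBAIJSetPreallocation(A,bs,d_nz,d_nnz);
.ve
   Each preallocation call is a no-op when the actual type does not match, so both may
   be issued safely.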
3371 
3372    Notes:
3373    If the *_nnz parameter is given then the *_nz parameter is ignored
3374 
3375    A nonzero block is any block that has 1 or more nonzeros in it
3376 
3377    The user MUST specify either the local or global matrix dimensions
3378    (possibly both).
3379 
3380    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3381    then it must be used on all processors that share the object for that argument.
3382 
3383    Storage Information:
3384    For a square global matrix we define each processor's diagonal portion
3385    to be its local rows and the corresponding columns (a square submatrix);
3386    each processor's off-diagonal portion encompasses the remainder of the
3387    local matrix (a rectangular submatrix).
3388 
3389    The user can specify preallocated storage for the diagonal part of
3390    the local submatrix with either d_nz or d_nnz (not both).  Set
3391    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3392    memory allocation.  Likewise, specify preallocated storage for the
3393    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3394 
3395    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3396    the figure below we depict these three local rows and all columns (0-11).
3397 
3398 .vb
3399            0 1 2 3 4 5 6 7 8 9 10 11
3400           --------------------------
3401    row 3  |o o o d d d o o o o  o  o
3402    row 4  |o o o d d d o o o o  o  o
3403    row 5  |o o o d d d o o o o  o  o
3404           --------------------------
3405 .ve
3406 
3407    Thus, any entries in the d locations are stored in the d (diagonal)
3408    submatrix, and any entries in the o locations are stored in the
3409    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3410    stored simply in the MATSEQBAIJ format for compressed row storage.
3411 
3412    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3413    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3414    In general, for PDE problems in which most nonzeros are near the diagonal,
3415    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3416    or you will get TERRIBLE performance; see the users' manual chapter on
3417    matrices.
3418 
3419    Level: intermediate
3420 
3421 .keywords: matrix, block, aij, compressed row, sparse, parallel
3422 
3423 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3424 @*/
3425 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3426 {
3427   PetscErrorCode ierr;
3428   PetscMPIInt    size;
3429 
3430   PetscFunctionBegin;
3431   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3432   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3433   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3434   if (size > 1) {
3435     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3436     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3437   } else {
3438     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3439     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3440   }
3441   PetscFunctionReturn(0);
3442 }
3443 
3444 #undef __FUNCT__
3445 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3446 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3447 {
3448   Mat            mat;
3449   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3450   PetscErrorCode ierr;
3451   PetscInt       len=0;
3452 
3453   PetscFunctionBegin;
3454   *newmat = 0;
3455   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3456   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3457   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3458   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3459 
3460   mat->factortype   = matin->factortype;
3461   mat->preallocated = PETSC_TRUE;
3462   mat->assembled    = PETSC_TRUE;
3463   mat->insertmode   = NOT_SET_VALUES;
3464 
3465   a             = (Mat_MPIBAIJ*)mat->data;
3466   mat->rmap->bs = matin->rmap->bs;
3467   a->bs2        = oldmat->bs2;
3468   a->mbs        = oldmat->mbs;
3469   a->nbs        = oldmat->nbs;
3470   a->Mbs        = oldmat->Mbs;
3471   a->Nbs        = oldmat->Nbs;
3472 
3473   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3474   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3475 
3476   a->size         = oldmat->size;
3477   a->rank         = oldmat->rank;
3478   a->donotstash   = oldmat->donotstash;
3479   a->roworiented  = oldmat->roworiented;
3480   a->rowindices   = 0;
3481   a->rowvalues    = 0;
3482   a->getrowactive = PETSC_FALSE;
3483   a->barray       = 0;
3484   a->rstartbs     = oldmat->rstartbs;
3485   a->rendbs       = oldmat->rendbs;
3486   a->cstartbs     = oldmat->cstartbs;
3487   a->cendbs       = oldmat->cendbs;
3488 
3489   /* hash table stuff */
3490   a->ht           = 0;
3491   a->hd           = 0;
3492   a->ht_size      = 0;
3493   a->ht_flag      = oldmat->ht_flag;
3494   a->ht_fact      = oldmat->ht_fact;
3495   a->ht_total_ct  = 0;
3496   a->ht_insert_ct = 0;
3497 
3498   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3499   if (oldmat->colmap) {
3500 #if defined(PETSC_USE_CTABLE)
3501     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3502 #else
3503     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
3504     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3505     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3506 #endif
3507   } else a->colmap = 0;
3508 
3509   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3510     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
3511     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3512     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3513   } else a->garray = 0;
3514 
3515   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3516   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3517   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3518   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3519   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3520 
3521   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3522   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3523   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3524   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3525   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3526   *newmat = mat;
3527   PetscFunctionReturn(0);
3528 }
3529 
3530 #undef __FUNCT__
3531 #define __FUNCT__ "MatLoad_MPIBAIJ"
3532 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3533 {
3534   PetscErrorCode ierr;
3535   int            fd;
3536   PetscInt       i,nz,j,rstart,rend;
3537   PetscScalar    *vals,*buf;
3538   MPI_Comm       comm;
3539   MPI_Status     status;
3540   PetscMPIInt    rank,size,maxnz;
3541   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3542   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3543   PetscInt       jj,*mycols,*ibuf,bs = newmat->rmap->bs,Mbs,mbs,extra_rows,mmax;
3544   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3545   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3546   PetscInt       dcount,kmax,k,nzcount,tmp,mend;
3547 
3548   PetscFunctionBegin;
3549   /* force binary viewer to load .info file if it has not yet done so */
3550   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
3551   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3552   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix 2","Mat");CHKERRQ(ierr);
3553   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3554   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3555   if (bs < 0) bs = 1;
3556 
3557   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3558   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3559   ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3560   if (!rank) {
3561     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3562     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
3563   }
3564   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3565   M    = header[1]; N = header[2];
3566 
3567   /* If global sizes are set, check if they are consistent with that given in the file */
3568   if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows:Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
3569   if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols:Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);
3570 
3571   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3572 
3573   /*
3574      This code adds extra rows to make sure the number of rows is
3575      divisible by the blocksize
3576   */
3577   Mbs        = M/bs;
3578   extra_rows = bs - M + bs*Mbs;
3579   if (extra_rows == bs) extra_rows = 0;
3580   else                  Mbs++;
3581   if (extra_rows && !rank) {
3582     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3583   }
3584 
3585   /* determine ownership of all rows */
3586   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
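    /* split the Mbs block rows as evenly as possible; the first Mbs%size ranks get one extra */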
3587     mbs = Mbs/size + ((Mbs % size) > rank);
3588     m   = mbs*bs;
3589   } else { /* User set */
3590     m   = newmat->rmap->n;
3591     mbs = m/bs;
3592   }
3593   ierr = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
3594   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3595 
3596   /* process 0 needs enough room for process with most rows */
3597   if (!rank) {
3598     mmax = rowners[1];
3599     for (i=2; i<=size; i++) {
3600       mmax = PetscMax(mmax,rowners[i]);
3601     }
3602     mmax*=bs;
3603   } else mmax = -1;             /* unused, but compiler warns anyway */
3604 
3605   rowners[0] = 0;
3606   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3607   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3608   rstart = rowners[rank];
3609   rend   = rowners[rank+1];
3610 
3611   /* distribute row lengths to all processors */
3612   ierr = PetscMalloc1(m,&locrowlens);CHKERRQ(ierr);
3613   if (!rank) {
3614     mend = m;
3615     if (size == 1) mend = mend - extra_rows;
3616     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3617     for (j=mend; j<m; j++) locrowlens[j] = 1;
3618     ierr = PetscMalloc1(mmax,&rowlengths);CHKERRQ(ierr);
3619     ierr = PetscCalloc1(size,&procsnz);CHKERRQ(ierr);
3620     for (j=0; j<m; j++) {
3621       procsnz[0] += locrowlens[j];
3622     }
3623     for (i=1; i<size; i++) {
3624       mend = browners[i+1] - browners[i];
3625       if (i == size-1) mend = mend - extra_rows;
3626       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3627       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3628       /* calculate the number of nonzeros on each processor */
3629       for (j=0; j<browners[i+1]-browners[i]; j++) {
3630         procsnz[i] += rowlengths[j];
3631       }
3632       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3633     }
3634     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3635   } else {
3636     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3637   }
3638 
3639   if (!rank) {
3640     /* determine max buffer needed and allocate it */
3641     maxnz = procsnz[0];
3642     for (i=1; i<size; i++) {
3643       maxnz = PetscMax(maxnz,procsnz[i]);
3644     }
3645     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
3646 
3647     /* read in my part of the matrix column indices  */
3648     nz     = procsnz[0];
3649     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3650     mycols = ibuf;
3651     if (size == 1) nz -= extra_rows;
3652     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3653     if (size == 1) {
3654       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3655     }
3656 
3657     /* read in everyone else's part (except the last process's) and ship it off */
3658     for (i=1; i<size-1; i++) {
3659       nz   = procsnz[i];
3660       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3661       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3662     }
3663     /* read in the stuff for the last proc */
3664     if (size != 1) {
3665       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3666       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3667       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3668       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3669     }
3670     ierr = PetscFree(cols);CHKERRQ(ierr);
3671   } else {
3672     /* determine buffer space needed for message */
3673     nz = 0;
3674     for (i=0; i<m; i++) {
3675       nz += locrowlens[i];
3676     }
3677     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3678     mycols = ibuf;
3679     /* receive message of column indices */
3680     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3681     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3682     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3683   }
3684 
3685   /* loop over local rows, determining number of off diagonal entries */
3686   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
3687   ierr     = PetscCalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
3688   rowcount = 0; nzcount = 0;
3689   for (i=0; i<mbs; i++) {
3690     dcount  = 0;
3691     odcount = 0;
3692     for (j=0; j<bs; j++) {
3693       kmax = locrowlens[rowcount];
3694       for (k=0; k<kmax; k++) {
3695         tmp = mycols[nzcount++]/bs;
3696         if (!mask[tmp]) {
3697           mask[tmp] = 1;
3698           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3699           else masked1[dcount++] = tmp;
3700         }
3701       }
3702       rowcount++;
3703     }
3704 
3705     dlens[i]  = dcount;
3706     odlens[i] = odcount;
3707 
3708     /* zero out the mask elements we set */
3709     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3710     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3711   }
3712 
3713   ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3714   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3715 
3716   if (!rank) {
3717     ierr = PetscMalloc1(maxnz+1,&buf);CHKERRQ(ierr);
3718     /* read in my part of the matrix numerical values  */
3719     nz     = procsnz[0];
3720     vals   = buf;
3721     mycols = ibuf;
3722     if (size == 1) nz -= extra_rows;
3723     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3724     if (size == 1) {
3725       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3726     }
3727 
3728     /* insert into matrix */
3729     jj = rstart*bs;
3730     for (i=0; i<m; i++) {
3731       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3732       mycols += locrowlens[i];
3733       vals   += locrowlens[i];
3734       jj++;
3735     }
3736     /* read in other processors (except the last one) and ship out */
3737     for (i=1; i<size-1; i++) {
3738       nz   = procsnz[i];
3739       vals = buf;
3740       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3741       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3742     }
3743     /* the last proc */
3744     if (size != 1) {
3745       nz   = procsnz[i] - extra_rows;
3746       vals = buf;
3747       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3748       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3749       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3750     }
3751     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3752   } else {
3753     /* receive numeric values */
3754     ierr = PetscMalloc1(nz+1,&buf);CHKERRQ(ierr);
3755 
3756     /* receive message of values */
3757     vals   = buf;
3758     mycols = ibuf;
3759     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3760 
3761     /* insert into matrix */
3762     jj = rstart*bs;
3763     for (i=0; i<m; i++) {
3764       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3765       mycols += locrowlens[i];
3766       vals   += locrowlens[i];
3767       jj++;
3768     }
3769   }
3770   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3771   ierr = PetscFree(buf);CHKERRQ(ierr);
3772   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3773   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3774   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3775   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3776   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3777   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3778   PetscFunctionReturn(0);
3779 }
3780 
3781 #undef __FUNCT__
3782 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3783 /*@
3784    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3785 
3786    Input Parameters:
3787 +  mat  - the matrix
3788 -  fact - factor
3789 
3790    Not Collective, each process can use a different factor
3791 
3792    Level: advanced
3793 
3794   Notes:
3795    This can also be set by the command line option: -mat_use_hash_table <fact>
3796 
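   A minimal call sketch; 1.39 is the default factor that MatCreate_MPIBAIJ() falls back
   to when the option is enabled without a usable value.
.vb
   ierr = MatMPIBAIJSetHashTableFactor(mat,1.39);CHKERRQ(ierr);
.ve
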
3797 .keywords: matrix, hashtable, factor, HT
3798 
3799 .seealso: MatSetOption()
3800 @*/
3801 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3802 {
3803   PetscErrorCode ierr;
3804 
3805   PetscFunctionBegin;
3806   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3807   PetscFunctionReturn(0);
3808 }
3809 
3810 #undef __FUNCT__
3811 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3812 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3813 {
3814   Mat_MPIBAIJ *baij;
3815 
3816   PetscFunctionBegin;
3817   baij          = (Mat_MPIBAIJ*)mat->data;
3818   baij->ht_fact = fact;
3819   PetscFunctionReturn(0);
3820 }
3821 
3822 #undef __FUNCT__
3823 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3824 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3825 {
3826   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3827 
3828   PetscFunctionBegin;
3829   if (Ad)     *Ad     = a->A;
3830   if (Ao)     *Ao     = a->B;
3831   if (colmap) *colmap = a->garray;
3832   PetscFunctionReturn(0);
3833 }
3834 
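/*
   Usage sketch (illustrative): obtain the two sequential pieces of an MPIBAIJ matrix.
   The returned matrices are borrowed references and must not be destroyed by the caller.

     Mat            Ad,Ao;
     const PetscInt *garray;
     ierr = MatMPIBAIJGetSeqBAIJ(A,&Ad,&Ao,&garray);CHKERRQ(ierr);
     ... Ad is the diagonal block; Ao holds the off-diagonal block whose local
     ... block columns map to global block columns through garray.
*/
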
3835 /*
3836     Special version for direct calls from Fortran (to eliminate two function call overheads)
3837 */
3838 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3839 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3840 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3841 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3842 #endif
3843 
3844 #undef __FUNCT__
3845 #define __FUNCT__ "matmpibaijsetvaluesblocked"
3846 /*@C
3847   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3848 
3849   Collective on Mat
3850 
3851   Input Parameters:
3852 + mat - the matrix
3853 . min - number of input rows
3854 . im - input rows
3855 . nin - number of input columns
3856 . in - input columns
3857 . v - numerical values input
3858 - addvin - INSERT_VALUES or ADD_VALUES
3859 
3860   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ() which is terrible code un-reuse.
3861 
3862   Level: advanced
3863 
3864 .seealso:   MatSetValuesBlocked()
3865 @*/
3866 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3867 {
3868   /* convert input arguments to C version */
3869   Mat        mat  = *matin;
3870   PetscInt   m    = *min, n = *nin;
3871   InsertMode addv = *addvin;
3872 
3873   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3874   const MatScalar *value;
3875   MatScalar       *barray     = baij->barray;
3876   PetscBool       roworiented = baij->roworiented;
3877   PetscErrorCode  ierr;
3878   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3879   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3880   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3881 
3882   PetscFunctionBegin;
3883   /* tasks normally handled by MatSetValuesBlocked() */
3884   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3885 #if defined(PETSC_USE_DEBUG)
3886   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3887   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3888 #endif
3889   if (mat->assembled) {
3890     mat->was_assembled = PETSC_TRUE;
3891     mat->assembled     = PETSC_FALSE;
3892   }
3893   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3894 
3895 
3896   if (!barray) {
3897     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
3898     baij->barray = barray;
3899   }
3900 
3901   if (roworiented) stepval = (n-1)*bs;
3902   else stepval = (m-1)*bs;
3903 
3904   for (i=0; i<m; i++) {
3905     if (im[i] < 0) continue;
3906 #if defined(PETSC_USE_DEBUG)
3907     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3908 #endif
3909     if (im[i] >= rstart && im[i] < rend) {
3910       row = im[i] - rstart;
3911       for (j=0; j<n; j++) {
3912         /* If there is a single block column (row oriented) or a single block row (column oriented), the input block is already contiguous and no copy is required */
3913         if ((roworiented) && (n == 1)) {
3914           barray = (MatScalar*)v + i*bs2;
3915         } else if ((!roworiented) && (m == 1)) {
3916           barray = (MatScalar*)v + j*bs2;
3917         } else { /* Here a copy is required */
3918           if (roworiented) {
3919             value = v + i*(stepval+bs)*bs + j*bs;
3920           } else {
3921             value = v + j*(stepval+bs)*bs + i*bs;
3922           }
3923           for (ii=0; ii<bs; ii++,value+=stepval) {
3924             for (jj=0; jj<bs; jj++) {
3925               *barray++ = *value++;
3926             }
3927           }
3928           barray -= bs2;
3929         }
3930 
3931         if (in[j] >= cstart && in[j] < cend) {
3932           col  = in[j] - cstart;
3933           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3934         } else if (in[j] < 0) continue;
3935 #if defined(PETSC_USE_DEBUG)
3936         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3937 #endif
3938         else {
3939           if (mat->was_assembled) {
3940             if (!baij->colmap) {
3941               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3942             }
3943 
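            /* colmap maps global block column in[j] to bs*(local block column
               of B)+1, with 0 meaning the block column is not yet present; any
               stored value minus 1 is therefore divisible by bs, which the
               debug check below verifies. */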
3944 #if defined(PETSC_USE_DEBUG)
3945 #if defined(PETSC_USE_CTABLE)
3946             { PetscInt data;
3947               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3948               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3949             }
3950 #else
3951             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3952 #endif
3953 #endif
3954 #if defined(PETSC_USE_CTABLE)
3955             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3956             col  = (col - 1)/bs;
3957 #else
3958             col = (baij->colmap[in[j]] - 1)/bs;
3959 #endif
3960             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
3961               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3962               col  =  in[j];
3963             }
3964           } else col = in[j];
3965           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3966         }
3967       }
3968     } else {
3969       if (!baij->donotstash) {
3970         if (roworiented) {
3971           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3972         } else {
3973           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3974         }
3975       }
3976     }
3977   }
3978 
3979   /* task normally handled by MatSetValuesBlocked() */
3980   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3981   PetscFunctionReturn(0);
3982 }
3983 
3984 #undef __FUNCT__
3985 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
3986 /*@
3987      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain
3988          the local rows in standard CSR format.
3989 
3990    Collective on MPI_Comm
3991 
3992    Input Parameters:
3993 +  comm - MPI communicator
3994 .  bs - the block size, only a block size of 1 is supported
3995 .  m - number of local rows (Cannot be PETSC_DECIDE)
3996 .  n - number of local columns; this must match the local size used in creating
3997        the x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to
3998        have it calculated if N is given). For square matrices n is almost always m.
3999 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
4000 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
4001 .   i - row indices
4002 .   j - column indices
4003 -   a - matrix values
4004 
4005    Output Parameter:
4006 .   mat - the matrix
4007 
4008    Level: intermediate
4009 
4010    Notes:
4011        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
4012      thus you CANNOT change the matrix entries by changing the values of a[] after you have
4013      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
4014 
4015      The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
4016      the same as a three dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
4017      block, followed by the second column of the first block, and so on.  That is, the blocks are contiguous in memory
4018      with column-major ordering within blocks.
4019 
4020        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
4021 
4022 .keywords: matrix, aij, compressed row, sparse, parallel
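   Example usage (an illustrative sketch with bs=1, in which case the layout is
   ordinary CSR): for the two local rows
.vb
        1 0 0
        2 0 3
.ve
   one would pass m=2 and
.vb
        i = {0,1,3}    (row r owns entries i[r] through i[r+1]-1 of j[] and a[])
        j = {0,0,2}
        a = {1,2,3}
.ve
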
4023 
4024 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
4025           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
4026 @*/
4027 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
4028 {
4029   PetscErrorCode ierr;
4030 
4031   PetscFunctionBegin;
4032   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
4033   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
4034   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
4035   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
4036   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
4037   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
4038   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
4039   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
4040   PetscFunctionReturn(0);
4041 }
4042 
4043 #undef __FUNCT__
4044 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPIBAIJ"
4045 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
4046 {
4047   PetscErrorCode ierr;
4048   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
4049   PetscInt       *indx;
4050   PetscScalar    *values;
4051 
4052   PetscFunctionBegin;
4053   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
4054   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
4055     Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)inmat->data;
4056     PetscInt       *dnz,*onz,sum,mbs,Nbs;
4057     PetscInt       *bindx,rmax=a->rmax,j;
4058 
4059     ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
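    /* m and N are point sizes; convert them to counts of block rows and block columns */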
4060     mbs = m/bs; Nbs = N/cbs;
4061     if (n == PETSC_DECIDE) {
4062       ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
4063     }
4064     /* Check sum(n) = Nbs */
4065     ierr = MPIU_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4066     if (sum != Nbs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local block columns != global block columns %D",Nbs);
4067 
4068     ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4069     rstart -= mbs;
4070 
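    /* Preallocation pass (comment added for clarity): MatGetRow_SeqBAIJ()
       returns point (unblocked) counts and column indices, so nnz/bs is the
       number of nonzero blocks in the block row and every bs-th point index
       divided by bs yields a block column index. */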
4071     ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
4072     ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
4073     for (i=0; i<mbs; i++) {
4074       ierr = MatGetRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
4075       nnz = nnz/bs;
4076       for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
4077       ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
4078       ierr = MatRestoreRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
4079     }
4080     ierr = PetscFree(bindx);CHKERRQ(ierr);
4081 
4082     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
4083     ierr = MatSetSizes(*outmat,m,n*cbs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4084     ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
4085     ierr = MatSetType(*outmat,MATMPIBAIJ);CHKERRQ(ierr);
4086     ierr = MatMPIBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
4087     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
4088   }
4089 
4090   /* numeric phase */
4091   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4092   ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);
4093 
4094   for (i=0; i<m; i++) {
4095     ierr = MatGetRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4096     Ii   = i + rstart;
4097     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
4098     ierr = MatRestoreRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4099   }
4100   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4101   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4102   PetscFunctionReturn(0);
4103 }
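
/*
   A usage sketch (hypothetical; the names seqB and parB are illustrative):
   each rank supplies a sequential BAIJ matrix with the same global number of
   block columns, and the local rows are stacked in rank order.

     Mat seqB,parB;
     ... assemble seqB as a MATSEQBAIJ on PETSC_COMM_SELF ...
     ierr = MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(comm,seqB,PETSC_DECIDE,MAT_INITIAL_MATRIX,&parB);CHKERRQ(ierr);
*/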
4104