
#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/

#include <petscblaslapack.h>
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc1(A->rmap->n,&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);
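
  /* Keep the larger of the diagonal-part and off-diagonal-part maxima; idxb[]
     holds block-local column indices of B, which are mapped back to global
     columns through garray[] below */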
  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIBAIJ"
PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix.  This is done in a non-scalable way, since the
   length of colmap equals the number of block columns in the global matrix.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc1(baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}
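
/*
   Illustrative sketch (not part of the library): how colmap is consulted once the
   matrix has been assembled.  Entries are stored shifted by +1 so that a stored 0
   can signal "this global block column has no local counterpart in B".  The names
   gbcol and lcol below are hypothetical.
*/
#if 0
  PetscInt lcol;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableFind(baij->colmap,gbcol+1,&lcol);CHKERRQ(ierr);
  lcol = lcol - 1;   /* -1 now means "no local copy in B" */
#else
  lcol = baij->colmap[gbcol] - 1;
#endif
#endif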

#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }
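
/*
   Worked example (illustration only): with bs = 2 (so bs2 = 4) a scalar entry at
   global (row,col) = (3,5) lands in block row brow = 3/2 = 1 and block column
   bcol = 5/2 = 2, at position ridx = 3%2 = 1, cidx = 5%2 = 1 within the block.
   Blocks are stored column-major, so once the block is found at slot _i the
   entry lives at ap[bs2*_i + bs*cidx + ridx] = ap[4*_i + 3].
*/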

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ"
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some variables required by the MatSetValues_SeqBAIJ_{A,B}_Private() macros */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
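
/*
   Usage sketch (illustrative): with the default row-oriented ordering the routine
   reads v[i*n+j] as the entry for global row im[i], global column in[j], e.g. a
   2 x 2 insert:

     PetscInt    rows[2] = {0,1},cols[2] = {4,5};
     PetscScalar vals[4] = {1.0,2.0,     first row:  entries (0,4) and (0,5)
                            3.0,4.0};    second row: entries (1,4) and (1,5)
     ierr = MatSetValues(mat,2,rows,2,cols,vals,ADD_VALUES);CHKERRQ(ierr);
*/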

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqBAIJ_Inlined"
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  low  = 0;
  high = nrow;
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new global block indexed nonzero block (%D, %D) in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}
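
/*
   Note on storage order (illustration): each bs x bs block is stored column-major.
   The jj = ii; jj < bs2; jj += bs traversal above therefore transposes a
   row-oriented input block on the fly; e.g. for bs = 2 the input scalars
   v[0],v[1],v[2],v[3] land in bap[0],bap[2],bap[1],bap[3] respectively.
*/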

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
/*
    This routine should be optimized so that the block copy at the "Here a copy is
    required" branch below is not needed; that could be done by passing additional
    stride information into the MatSetValuesBlocked_SeqBAIJ_Inlined() routine
*/
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If n == 1 (row-oriented) or m == 1 (column-oriented) the block in v is already contiguous and no copy is required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new blocked indexed nonzero block (%D, %D) into matrix",im[i],in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
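
/*
   Indexing sketch (illustrative): for row-oriented input v is an (m*bs) x (n*bs)
   dense array stored by rows, so stepval = (n-1)*bs is the gap from the end of one
   block's row segment to the start of its next one.  The copy above starts block
   (i,j) at v + (i*(stepval+bs) + j)*bs and advances value by bs+stepval = n*bs,
   i.e. one full scalar row, after copying each of the block's bs rows.
*/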

#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
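
/*
   Worked example (illustration only): HASH() is multiplicative hashing with the
   golden-ratio constant.  For size = 11 and key = 7:
       tmp  = 7*0.6180339887 = 4.3262...
       frac = tmp - (PetscInt)tmp = 0.3262...
       h1   = (PetscInt)(11*0.3262...) = 3
   Collisions are resolved by the linear probes in the routines below.
*/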
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt       total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up the block in the hash table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A HASH table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  /* total_ct and insert_ct started from the previous totals, so assign rather than add */
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =baij->rendbs,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt          total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up the block in the hash table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  /* total_ct and insert_ct started from the previous totals, so assign rather than add */
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIBAIJ"
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}

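/*
   MatNorm_MPIBAIJ() computes, with a_ij denoting the scalar entries of the
   global matrix:
     NORM_FROBENIUS : sqrt( sum_ij |a_ij|^2 )   via local sums and an MPI sum reduction
     NORM_1         : max_j sum_i |a_ij|        (largest column sum)
     NORM_INFINITY  : max_i sum_j |a_ij|        (largest row sum)
*/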
#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIBAIJ"
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
      ierr = PetscMalloc2(mat->cmap->N,&tmp,mat->cmap->N,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v);  v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPIU_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc1(bs,&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}

/*
  Creates and fills the hash table.
  This table is created only once.
  If new entries need to be added to the matrix
  then the hash table has to be destroyed and
  recreated.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate Memory for Hash Table */
  ierr = PetscCalloc2(ht_size,&baij->hd,ht_size,&baij->ht);CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop Over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }
  /* Loop Over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }

  /* Print Summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average search = %5.2f, max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
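
/*
   Lookup sketch (mirrors the probes in MatSetValues_MPIBAIJ_HT() below; variable
   names are illustrative): search linearly from the hashed slot, wrapping around
   to the front of the table; HD[idx] then points at the bs x bs block of (row,col).

     key = (row/bs)*Nbs + (col/bs) + 1;
     h1  = HASH(size,key,tmp);
     for (idx=h1; idx<size && HT[idx]!=key; idx++) ;
     if (idx == size) for (idx=0; idx<h1 && HT[idx]!=key; idx++) ;
*/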

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       temporarily mark everything column-oriented, and restore the original
       roworiented flags after the MatSetValuesBlocked() calls */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled; if so we must
     also disassemble ourselves, in order that we may reassemble;
     the MPI_PROD reduction of was_assembled below acts as a logical AND,
     so the result is false exactly when some process has disassembled */
  /*
     if the nonzero structure of submatrix B cannot change then we know that
     no processor disassembled, thus we can skip this step
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPIU_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",(double)((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPIU_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
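
/*
   Typical assembly sequence from user code (standard PETSc usage, for context);
   off-process entries set with MatSetValues() are stashed above and delivered to
   their owners inside MatAssemblyBegin()/MatAssemblyEnd():

     ierr = MatSetValues(mat,m,rows,n,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/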

extern PetscErrorCode MatView_SeqBAIJ(Mat,PetscViewer);
#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto the first processor */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so we will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqBAIJ(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_Binary"
static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get the largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(rlen/bs,&crow_lens);CHKERRQ(ierr);
  /* compute the number of nonzero blocks in each block row */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc1(rlen,&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* load up the local column indices. Include them for all rows, not just one set
     per block row, since process 0 does not have the information needed to expand
     them for each row from a block row. This does require more communication, but
     still no more than the communication needed for the nonzero values */
  nzmax = nz; /* space the largest processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc1(nzmax,&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc1(nzmax,&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ"
PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIBAIJ"
PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
#else
  ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(baij->garray);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
  ierr = PetscFree(baij->barray);CHKERRQ(ierr);
  ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
  ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

1394 #undef __FUNCT__
1395 #define __FUNCT__ "MatMult_MPIBAIJ"
1396 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1397 {
1398   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1399   PetscErrorCode ierr;
1400   PetscInt       nt;
1401 
1402   PetscFunctionBegin;
1403   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1404   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1405   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1406   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1407   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1408   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1409   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1410   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1411   PetscFunctionReturn(0);
1412 }
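
/*
   Illustrative sketch (added commentary, not original source): the caller-side
   pattern that exercises MatMult_MPIBAIJ() above through the public API.  The
   helper name ExampleApplyMPIBAIJ is hypothetical; MatCreateVecs() guarantees
   the vectors conform to the layouts checked at the top of the routine.
*/
static PetscErrorCode ExampleApplyMPIBAIJ(Mat A)
{
  PetscErrorCode ierr;
  Vec            x,y;

  PetscFunctionBegin;
  ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);  /* x: column layout, y: row layout */
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = MatMult(A,x,y);CHKERRQ(ierr);          /* dispatches to MatMult_MPIBAIJ for MATMPIBAIJ */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}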
1413 
1414 #undef __FUNCT__
1415 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1416 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1417 {
1418   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1419   PetscErrorCode ierr;
1420 
1421   PetscFunctionBegin;
1422   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1423   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1424   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1425   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1426   PetscFunctionReturn(0);
1427 }
1428 
1429 #undef __FUNCT__
1430 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1431 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1432 {
1433   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1434   PetscErrorCode ierr;
1435   PetscBool      merged;
1436 
1437   PetscFunctionBegin;
1438   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1439   /* do nondiagonal part */
1440   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1441   if (!merged) {
1442     /* send it on its way */
1443     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1444     /* do local part */
1445     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1446     /* receive remote parts: note this assumes the values are not actually */
1447     /* inserted in yy until the next line */
1448     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1449   } else {
1450     /* do local part */
1451     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1452     /* send it on its way */
1453     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1454     /* values actually were received in the Begin() but we need to call this nop */
1455     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1456   }
1457   PetscFunctionReturn(0);
1458 }
1459 
1460 #undef __FUNCT__
1461 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1462 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1463 {
1464   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1465   PetscErrorCode ierr;
1466 
1467   PetscFunctionBegin;
1468   /* do nondiagonal part */
1469   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1470   /* send it on its way */
1471   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1472   /* do local part */
1473   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1474   /* receive remote parts: note this assumes the values are not actually */
1475   /* inserted in zz until the VecScatterEnd() below, which is true for this */
1476   /* implementation but may not hold for every scatter implementation. */
1477   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1478   PetscFunctionReturn(0);
1479 }
1480 
1481 /*
1482   This only works correctly for square matrices where the subblock A->A is the
1483    diagonal block
1484 */
1485 #undef __FUNCT__
1486 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1487 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1488 {
1489   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1490   PetscErrorCode ierr;
1491 
1492   PetscFunctionBegin;
1493   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1494   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1495   PetscFunctionReturn(0);
1496 }
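
/*
   Illustrative sketch (added commentary, not original source): extracting the
   diagonal through the public interface; valid only for square matrices, per
   the restriction enforced above.  The helper name is hypothetical.
*/
static PetscErrorCode ExampleGetDiagonal(Mat A)
{
  PetscErrorCode ierr;
  Vec            d;

  PetscFunctionBegin;
  ierr = MatCreateVecs(A,NULL,&d);CHKERRQ(ierr); /* d conforms to the row layout */
  ierr = MatGetDiagonal(A,d);CHKERRQ(ierr);
  ierr = VecDestroy(&d);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}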
1497 
1498 #undef __FUNCT__
1499 #define __FUNCT__ "MatScale_MPIBAIJ"
1500 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1501 {
1502   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1503   PetscErrorCode ierr;
1504 
1505   PetscFunctionBegin;
1506   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1507   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1508   PetscFunctionReturn(0);
1509 }
1510 
1511 #undef __FUNCT__
1512 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1513 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1514 {
1515   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1516   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1517   PetscErrorCode ierr;
1518   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1519   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1520   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1521 
1522   PetscFunctionBegin;
1523   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1524   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1525   mat->getrowactive = PETSC_TRUE;
1526 
1527   if (!mat->rowvalues && (idx || v)) {
1528     /*
1529         allocate enough space to hold information from the longest row.
1530     */
1531     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1532     PetscInt    max = 1,mbs = mat->mbs,tmp;
1533     for (i=0; i<mbs; i++) {
1534       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1535       if (max < tmp) max = tmp;
1536     }
1537     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1538   }
1539   lrow = row - brstart;
1540 
1541   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1542   if (!v)   {pvA = 0; pvB = 0;}
1543   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1544   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1545   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1546   nztot = nzA + nzB;
1547 
1548   cmap = mat->garray;
1549   if (v  || idx) {
1550     if (nztot) {
1551       /* Sort by increasing column numbers, assuming A and B already sorted */
1552       PetscInt imark = -1;
1553       if (v) {
1554         *v = v_p = mat->rowvalues;
1555         for (i=0; i<nzB; i++) {
1556           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1557           else break;
1558         }
1559         imark = i;
1560         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1561         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1562       }
1563       if (idx) {
1564         *idx = idx_p = mat->rowindices;
1565         if (imark > -1) {
1566           for (i=0; i<imark; i++) {
1567             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1568           }
1569         } else {
1570           for (i=0; i<nzB; i++) {
1571             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1572             else break;
1573           }
1574           imark = i;
1575         }
1576         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1577         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1578       }
1579     } else {
1580       if (idx) *idx = 0;
1581       if (v)   *v   = 0;
1582     }
1583   }
1584   *nz  = nztot;
1585   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1586   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1587   PetscFunctionReturn(0);
1588 }
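
/*
   Illustrative sketch (added commentary, not original source): MatGetRow() may
   only be called for locally owned rows, one row at a time, and each call must
   be paired with MatRestoreRow().  The helper name is hypothetical.
*/
static PetscErrorCode ExampleSumLocalRows(Mat A,PetscScalar *sum)
{
  PetscErrorCode    ierr;
  PetscInt          row,rstart,rend,nz,i;
  const PetscScalar *vals;

  PetscFunctionBegin;
  *sum = 0.0;
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (row=rstart; row<rend; row++) {
    ierr = MatGetRow(A,row,&nz,NULL,&vals);CHKERRQ(ierr); /* NULL: column indices not needed */
    for (i=0; i<nz; i++) *sum += vals[i];
    ierr = MatRestoreRow(A,row,&nz,NULL,&vals);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}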
1589 
1590 #undef __FUNCT__
1591 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1592 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1593 {
1594   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1595 
1596   PetscFunctionBegin;
1597   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1598   baij->getrowactive = PETSC_FALSE;
1599   PetscFunctionReturn(0);
1600 }
1601 
1602 #undef __FUNCT__
1603 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1604 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1605 {
1606   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1607   PetscErrorCode ierr;
1608 
1609   PetscFunctionBegin;
1610   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1611   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1612   PetscFunctionReturn(0);
1613 }
1614 
1615 #undef __FUNCT__
1616 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1617 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1618 {
1619   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1620   Mat            A  = a->A,B = a->B;
1621   PetscErrorCode ierr;
1622   PetscReal      isend[5],irecv[5];
1623 
1624   PetscFunctionBegin;
1625   info->block_size = (PetscReal)matin->rmap->bs;
1626 
1627   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1628 
1629   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1630   isend[3] = info->memory;  isend[4] = info->mallocs;
1631 
1632   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1633 
1634   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1635   isend[3] += info->memory;  isend[4] += info->mallocs;
1636 
1637   if (flag == MAT_LOCAL) {
1638     info->nz_used      = isend[0];
1639     info->nz_allocated = isend[1];
1640     info->nz_unneeded  = isend[2];
1641     info->memory       = isend[3];
1642     info->mallocs      = isend[4];
1643   } else if (flag == MAT_GLOBAL_MAX) {
1644     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1645 
1646     info->nz_used      = irecv[0];
1647     info->nz_allocated = irecv[1];
1648     info->nz_unneeded  = irecv[2];
1649     info->memory       = irecv[3];
1650     info->mallocs      = irecv[4];
1651   } else if (flag == MAT_GLOBAL_SUM) {
1652     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1653 
1654     info->nz_used      = irecv[0];
1655     info->nz_allocated = irecv[1];
1656     info->nz_unneeded  = irecv[2];
1657     info->memory       = irecv[3];
1658     info->mallocs      = irecv[4];
1659   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1660   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1661   info->fill_ratio_needed = 0;
1662   info->factor_mallocs    = 0;
1663   PetscFunctionReturn(0);
1664 }
1665 
1666 #undef __FUNCT__
1667 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1668 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1669 {
1670   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1671   PetscErrorCode ierr;
1672 
1673   PetscFunctionBegin;
1674   switch (op) {
1675   case MAT_NEW_NONZERO_LOCATIONS:
1676   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1677   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1678   case MAT_KEEP_NONZERO_PATTERN:
1679   case MAT_NEW_NONZERO_LOCATION_ERR:
1680     MatCheckPreallocated(A,1);
1681     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1682     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1683     break;
1684   case MAT_ROW_ORIENTED:
1685     MatCheckPreallocated(A,1);
1686     a->roworiented = flg;
1687 
1688     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1689     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1690     break;
1691   case MAT_NEW_DIAGONALS:
1692     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1693     break;
1694   case MAT_IGNORE_OFF_PROC_ENTRIES:
1695     a->donotstash = flg;
1696     break;
1697   case MAT_USE_HASH_TABLE:
1698     a->ht_flag = flg;
1699     a->ht_fact = 1.39;
1700     break;
1701   case MAT_SYMMETRIC:
1702   case MAT_STRUCTURALLY_SYMMETRIC:
1703   case MAT_HERMITIAN:
1704   case MAT_SYMMETRY_ETERNAL:
1705     MatCheckPreallocated(A,1);
1706     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1707     break;
1708   default:
1709     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1710   }
1711   PetscFunctionReturn(0);
1712 }
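
/*
   Illustrative sketch (added commentary, not original source): the two options
   with BAIJ-specific handling above.  MAT_USE_HASH_TABLE switches assembly to
   the hash-table path (toggling a->ht_flag), and MAT_IGNORE_OFF_PROC_ENTRIES
   drops entries destined for other ranks instead of stashing them.
*/
static PetscErrorCode ExampleBAIJOptions(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetOption(A,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
  ierr = MatSetOption(A,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}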
1713 
1714 #undef __FUNCT__
1715 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1716 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1717 {
1718   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1719   Mat_SeqBAIJ    *Aloc;
1720   Mat            B;
1721   PetscErrorCode ierr;
1722   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1723   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1724   MatScalar      *a;
1725 
1726   PetscFunctionBegin;
1727   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1728   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1729     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1730     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1731     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1732     /* Do not know preallocation information, but must set block size */
1733     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1734   } else {
1735     B = *matout;
1736   }
1737 
1738   /* copy over the A part */
1739   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1740   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1741   ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
1742 
1743   for (i=0; i<mbs; i++) {
1744     rvals[0] = bs*(baij->rstartbs + i);
1745     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1746     for (j=ai[i]; j<ai[i+1]; j++) {
1747       col = (baij->cstartbs+aj[j])*bs;
1748       for (k=0; k<bs; k++) {
1749         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1750 
1751         col++; a += bs;
1752       }
1753     }
1754   }
1755   /* copy over the B part */
1756   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1757   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1758   for (i=0; i<mbs; i++) {
1759     rvals[0] = bs*(baij->rstartbs + i);
1760     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1761     for (j=ai[i]; j<ai[i+1]; j++) {
1762       col = baij->garray[aj[j]]*bs;
1763       for (k=0; k<bs; k++) {
1764         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1765         col++;
1766         a += bs;
1767       }
1768     }
1769   }
1770   ierr = PetscFree(rvals);CHKERRQ(ierr);
1771   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1772   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1773 
1774   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1775   else {
1776     ierr = MatHeaderMerge(A,&B);CHKERRQ(ierr);
1777   }
1778   PetscFunctionReturn(0);
1779 }
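
/*
   Illustrative sketch (added commentary, not original source): both reuse modes
   handled above.  In-place transposition (MAT_REUSE_MATRIX with *matout == A)
   requires a square matrix, per the check at the top of the routine.
*/
static PetscErrorCode ExampleTranspose(Mat A)
{
  PetscErrorCode ierr;
  Mat            At;

  PetscFunctionBegin;
  ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); /* creates a new matrix */
  ierr = MatDestroy(&At);CHKERRQ(ierr);
  ierr = MatTranspose(A,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);    /* in place; A must be square */
  PetscFunctionReturn(0);
}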
1780 
1781 #undef __FUNCT__
1782 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1783 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1784 {
1785   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1786   Mat            a     = baij->A,b = baij->B;
1787   PetscErrorCode ierr;
1788   PetscInt       s1,s2,s3;
1789 
1790   PetscFunctionBegin;
1791   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1792   if (rr) {
1793     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1794     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1795     /* Overlap communication with computation. */
1796     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1797   }
1798   if (ll) {
1799     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1800     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1801     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1802   }
1803   /* scale the diagonal block */
1804   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1805 
1806   if (rr) {
1807     /* Do a scatter end and then right scale the off-diagonal block */
1808     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1809     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1810   }
1811   PetscFunctionReturn(0);
1812 }
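
/*
   Illustrative sketch (added commentary, not original source): A <- diag(l) A diag(r)
   through the public interface; the local sizes of l and r must match the row
   and column layouts, as checked above.  The helper name is hypothetical.
*/
static PetscErrorCode ExampleDiagonalScale(Mat A)
{
  PetscErrorCode ierr;
  Vec            l,r;

  PetscFunctionBegin;
  ierr = MatCreateVecs(A,&r,&l);CHKERRQ(ierr); /* r conforms to columns, l to rows */
  ierr = VecSet(l,2.0);CHKERRQ(ierr);
  ierr = VecSet(r,0.5);CHKERRQ(ierr);
  ierr = MatDiagonalScale(A,l,r);CHKERRQ(ierr);
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = VecDestroy(&r);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}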
1813 
1814 #undef __FUNCT__
1815 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1816 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1817 {
1818   Mat_MPIBAIJ   *l      = (Mat_MPIBAIJ *) A->data;
1819   PetscInt      *owners = A->rmap->range;
1820   PetscInt       n      = A->rmap->n;
1821   PetscSF        sf;
1822   PetscInt      *lrows;
1823   PetscSFNode   *rrows;
1824   PetscInt       r, p = 0, len = 0;
1825   PetscErrorCode ierr;
1826 
1827   PetscFunctionBegin;
1828   /* Create SF where leaves are input rows and roots are owned rows */
1829   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1830   for (r = 0; r < n; ++r) lrows[r] = -1;
1831   if (!A->nooffproczerorows) {ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);}
1832   for (r = 0; r < N; ++r) {
1833     const PetscInt idx   = rows[r];
1834     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1835     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1836       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1837     }
1838     if (A->nooffproczerorows) {
1839       if (p != l->rank) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"MAT_NO_OFF_PROC_ZERO_ROWS set, but row %D is not owned by rank %d",idx,l->rank);
1840       lrows[len++] = idx - owners[p];
1841     } else {
1842       rrows[r].rank = p;
1843       rrows[r].index = rows[r] - owners[p];
1844     }
1845   }
1846   if (!A->nooffproczerorows) {
1847     ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1848     ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1849     /* Collect flags for rows to be zeroed */
1850     ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1851     ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1852     ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1853     /* Compress and put in row numbers */
1854     for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1855   }
1856   /* fix right hand side if needed */
1857   if (x && b) {
1858     const PetscScalar *xx;
1859     PetscScalar       *bb;
1860 
1861     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1862     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1863     for (r = 0; r < len; ++r) bb[lrows[r]] = diag*xx[lrows[r]];
1864     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1865     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1866   }
1867 
1868   /* actually zap the local rows */
1869   /*
1870         Zero the required rows. If the "diagonal block" of the matrix
1871      is square and the user wishes to set the diagonal, we use separate
1872      code so that MatSetValues() is not called for each diagonal entry,
1873      which would allocate new memory with many mallocs and slow things down.
1874   */
1875   /* must zero l->B before l->A because the (diag) case below may put values into l->B */
1877   ierr = MatZeroRows_SeqBAIJ(l->B,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1878   if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
1879     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,diag,NULL,NULL);CHKERRQ(ierr);
1880   } else if (diag != 0.0) {
1881     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,0,0);CHKERRQ(ierr);
1882     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1883        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1884     for (r = 0; r < len; ++r) {
1885       const PetscInt row = lrows[r] + A->rmap->rstart;
1886       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1887     }
1888     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1889     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1890   } else {
1891     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1892   }
1893   ierr = PetscFree(lrows);CHKERRQ(ierr);
1894 
1895   /* only change matrix nonzero state if pattern was allowed to be changed */
1896   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1897     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1898     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1899   }
1900   PetscFunctionReturn(0);
1901 }
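
/*
   Illustrative sketch (added commentary, not original source): the typical
   Dirichlet boundary-condition use of MatZeroRows(); rows may be owned by any
   rank unless MAT_NO_OFF_PROC_ZERO_ROWS was set.  The helper is hypothetical.
*/
static PetscErrorCode ExampleZeroDirichletRow(Mat A,Vec x,Vec b)
{
  PetscErrorCode ierr;
  PetscInt       row;

  PetscFunctionBegin;
  ierr = MatGetOwnershipRange(A,&row,NULL);CHKERRQ(ierr); /* zero the first locally owned row */
  ierr = MatZeroRows(A,1,&row,1.0,x,b);CHKERRQ(ierr);     /* 1.0 on the diagonal; b fixed from x */
  PetscFunctionReturn(0);
}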
1902 
1903 #undef __FUNCT__
1904 #define __FUNCT__ "MatZeroRowsColumns_MPIBAIJ"
1905 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1906 {
1907   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1908   PetscErrorCode    ierr;
1909   PetscMPIInt       n = A->rmap->n;
1910   PetscInt          i,j,k,r,p = 0,len = 0,row,col,count;
1911   PetscInt          *lrows,*owners = A->rmap->range;
1912   PetscSFNode       *rrows;
1913   PetscSF           sf;
1914   const PetscScalar *xx;
1915   PetscScalar       *bb,*mask;
1916   Vec               xmask,lmask;
1917   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ*)l->B->data;
1918   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1919   PetscScalar       *aa;
1920 
1921   PetscFunctionBegin;
1922   /* Create SF where leaves are input rows and roots are owned rows */
1923   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1924   for (r = 0; r < n; ++r) lrows[r] = -1;
1925   ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);
1926   for (r = 0; r < N; ++r) {
1927     const PetscInt idx   = rows[r];
1928     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1929     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1930       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1931     }
1932     rrows[r].rank  = p;
1933     rrows[r].index = rows[r] - owners[p];
1934   }
1935   ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1936   ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1937   /* Collect flags for rows to be zeroed */
1938   ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1939   ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1940   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1941   /* Compress and put in row numbers */
1942   for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1943   /* zero diagonal part of matrix */
1944   ierr = MatZeroRowsColumns(l->A,len,lrows,diag,x,b);CHKERRQ(ierr);
1945   /* handle off diagonal part of matrix */
1946   ierr = MatCreateVecs(A,&xmask,NULL);CHKERRQ(ierr);
1947   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
1948   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
1949   for (i=0; i<len; i++) bb[lrows[i]] = 1;
1950   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1951   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1952   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1953   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1954   if (x) {
1955     ierr = VecScatterBegin(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1956     ierr = VecScatterEnd(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1957     ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1958     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1959   }
1960   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1961   /* remove zeroed rows of off diagonal matrix */
1962   for (i = 0; i < len; ++i) {
1963     row   = lrows[i];
1964     count = (baij->i[row/bs +1] - baij->i[row/bs])*bs;
1965     aa    = ((MatScalar*)(baij->a)) + baij->i[row/bs]*bs2 + (row%bs);
1966     for (k = 0; k < count; ++k) {
1967       aa[0] = 0.0;
1968       aa   += bs;
1969     }
1970   }
1971   /* loop over all elements of the off-process part of the matrix, zeroing removed columns */
1972   for (i = 0; i < l->B->rmap->N; ++i) {
1973     row = i/bs;
1974     for (j = baij->i[row]; j < baij->i[row+1]; ++j) {
1975       for (k = 0; k < bs; ++k) {
1976         col = bs*baij->j[j] + k;
1977         if (PetscAbsScalar(mask[col])) {
1978           aa = ((MatScalar*)(baij->a)) + j*bs2 + (i%bs) + bs*k;
1979           if (x) bb[i] -= aa[0]*xx[col];
1980           aa[0] = 0.0;
1981         }
1982       }
1983     }
1984   }
1985   if (x) {
1986     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1987     ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1988   }
1989   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1990   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
1991   ierr = PetscFree(lrows);CHKERRQ(ierr);
1992 
1993   /* only change matrix nonzero state if pattern was allowed to be changed */
1994   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1995     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1996     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1997   }
1998   PetscFunctionReturn(0);
1999 }
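
/*
   Added note (not original source): unlike MatZeroRows_MPIBAIJ() above, this
   routine also eliminates the corresponding columns, which preserves symmetry;
   the lmask scatter is what communicates which ghost columns must be zeroed
   in the off-diagonal block.
*/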
2000 
2001 #undef __FUNCT__
2002 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
2003 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
2004 {
2005   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2006   PetscErrorCode ierr;
2007 
2008   PetscFunctionBegin;
2009   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
2010   PetscFunctionReturn(0);
2011 }
2012 
2013 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
2014 
2015 #undef __FUNCT__
2016 #define __FUNCT__ "MatEqual_MPIBAIJ"
2017 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
2018 {
2019   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
2020   Mat            a,b,c,d;
2021   PetscBool      flg;
2022   PetscErrorCode ierr;
2023 
2024   PetscFunctionBegin;
2025   a = matA->A; b = matA->B;
2026   c = matB->A; d = matB->B;
2027 
2028   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
2029   if (flg) {
2030     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
2031   }
2032   ierr = MPIU_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2033   PetscFunctionReturn(0);
2034 }
2035 
2036 #undef __FUNCT__
2037 #define __FUNCT__ "MatCopy_MPIBAIJ"
2038 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
2039 {
2040   PetscErrorCode ierr;
2041   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2042   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2043 
2044   PetscFunctionBegin;
2045   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
2046   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
2047     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2048   } else {
2049     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
2050     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
2051   }
2052   PetscFunctionReturn(0);
2053 }
2054 
2055 #undef __FUNCT__
2056 #define __FUNCT__ "MatSetUp_MPIBAIJ"
2057 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
2058 {
2059   PetscErrorCode ierr;
2060 
2061   PetscFunctionBegin;
2062   ierr = MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
2063   PetscFunctionReturn(0);
2064 }
2065 
2066 #undef __FUNCT__
2067 #define __FUNCT__ "MatAXPYGetPreallocation_MPIBAIJ"
2068 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
2069 {
2070   PetscErrorCode ierr;
2071   PetscInt       bs = Y->rmap->bs,m = Y->rmap->N/bs;
2072   Mat_SeqBAIJ    *x = (Mat_SeqBAIJ*)X->data;
2073   Mat_SeqBAIJ    *y = (Mat_SeqBAIJ*)Y->data;
2074 
2075   PetscFunctionBegin;
2076   ierr = MatAXPYGetPreallocation_MPIX_private(m,x->i,x->j,xltog,y->i,y->j,yltog,nnz);CHKERRQ(ierr);
2077   PetscFunctionReturn(0);
2078 }
2079 
2080 #undef __FUNCT__
2081 #define __FUNCT__ "MatAXPY_MPIBAIJ"
2082 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
2083 {
2084   PetscErrorCode ierr;
2085   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
2086   PetscBLASInt   bnz,one=1;
2087   Mat_SeqBAIJ    *x,*y;
2088 
2089   PetscFunctionBegin;
2090   if (str == SAME_NONZERO_PATTERN) {
2091     PetscScalar alpha = a;
2092     x    = (Mat_SeqBAIJ*)xx->A->data;
2093     y    = (Mat_SeqBAIJ*)yy->A->data;
2094     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2095     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2096     x    = (Mat_SeqBAIJ*)xx->B->data;
2097     y    = (Mat_SeqBAIJ*)yy->B->data;
2098     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2099     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2100     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
2101   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
2102     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2103   } else {
2104     Mat      B;
2105     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
2106     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
2107     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
2108     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
2109     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
2110     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
2111     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
2112     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
2113     ierr = MatAXPYGetPreallocation_SeqBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
2114     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
2115     ierr = MatMPIBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
2116     /* MatAXPY_BasicWithPreallocation() for BAIJ matrices is much slower than for AIJ, even for bs=1! */
2117     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2118     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2119     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
2120     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2121   }
2122   PetscFunctionReturn(0);
2123 }
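
/*
   Illustrative sketch (added commentary, not original source): the structure
   flags dispatch to the branches above.  The helper name is hypothetical.
*/
static PetscErrorCode ExampleAXPY(Mat Y,Mat X)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);       /* fast BLAS axpy on the values */
  ierr = MatAXPY(Y,-1.0,X,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr); /* Y is rebuilt with the union pattern */
  PetscFunctionReturn(0);
}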
2124 
2125 #undef __FUNCT__
2126 #define __FUNCT__ "MatRealPart_MPIBAIJ"
2127 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
2128 {
2129   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2130   PetscErrorCode ierr;
2131 
2132   PetscFunctionBegin;
2133   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2134   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2135   PetscFunctionReturn(0);
2136 }
2137 
2138 #undef __FUNCT__
2139 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
2140 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2141 {
2142   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2143   PetscErrorCode ierr;
2144 
2145   PetscFunctionBegin;
2146   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2147   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2148   PetscFunctionReturn(0);
2149 }
2150 
2151 #undef __FUNCT__
2152 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2153 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2154 {
2155   PetscErrorCode ierr;
2156   IS             iscol_local;
2157   PetscInt       csize;
2158 
2159   PetscFunctionBegin;
2160   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2161   if (call == MAT_REUSE_MATRIX) {
2162     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2163     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2164   } else {
2165     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2166   }
2167   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2168   if (call == MAT_INITIAL_MATRIX) {
2169     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2170     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2171   }
2172   PetscFunctionReturn(0);
2173 }
2174 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2175 #undef __FUNCT__
2176 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2177 /*
2178   Not great since it makes two copies of the submatrix: first a local SeqBAIJ,
2179   then the end result built by concatenating the local matrices.
2180   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2181   This routine is used for BAIJ and SBAIJ matrices (unfortunate dependency).
2182 */
2183 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2184 {
2185   PetscErrorCode ierr;
2186   PetscMPIInt    rank,size;
2187   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2188   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2189   Mat            M,Mreuse;
2190   MatScalar      *vwork,*aa;
2191   MPI_Comm       comm;
2192   IS             isrow_new, iscol_new;
2193   PetscBool      idflag,allrows, allcols;
2194   Mat_SeqBAIJ    *aij;
2195 
2196   PetscFunctionBegin;
2197   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2198   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2199   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2200   /* The compression and expansion should be avoided. It does not report
2201      errors and might change the indices, hence it is buggy. */
2202   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2203   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2204 
2205   /* Check for special case: each processor gets entire matrix columns */
2206   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2207   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2208   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2209   else allcols = PETSC_FALSE;
2210 
2211   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2212   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2213   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2214   else allrows = PETSC_FALSE;
2215 
2216   if (call ==  MAT_REUSE_MATRIX) {
2217     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2218     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2219     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2220   } else {
2221     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2222   }
2223   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2224   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2225   /*
2226       m - number of local rows
2227       n - number of columns (same on all processors)
2228       rstart - first row in new global matrix generated
2229   */
2230   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2231   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2232   m    = m/bs;
2233   n    = n/bs;
2234 
2235   if (call == MAT_INITIAL_MATRIX) {
2236     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2237     ii  = aij->i;
2238     jj  = aij->j;
2239 
2240     /*
2241         Determine the number of non-zeros in the diagonal and off-diagonal
2242         portions of the matrix in order to do correct preallocation
2243     */
2244 
2245     /* first get start and end of "diagonal" columns */
2246     if (csize == PETSC_DECIDE) {
2247       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2248       if (mglobal == n*bs) { /* square matrix */
2249         nlocal = m;
2250       } else {
2251         nlocal = n/size + ((n % size) > rank);
2252       }
2253     } else {
2254       nlocal = csize/bs;
2255     }
2256     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2257     rstart = rend - nlocal;
2258     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2259 
2260     /* next, compute all the lengths */
2261     ierr  = PetscMalloc2(m+1,&dlens,m+1,&olens);CHKERRQ(ierr);
2262     for (i=0; i<m; i++) {
2263       jend = ii[i+1] - ii[i];
2264       olen = 0;
2265       dlen = 0;
2266       for (j=0; j<jend; j++) {
2267         if (*jj < rstart || *jj >= rend) olen++;
2268         else dlen++;
2269         jj++;
2270       }
2271       olens[i] = olen;
2272       dlens[i] = dlen;
2273     }
2274     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2275     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2276     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2277     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2278     ierr = MatMPISBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2279     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2280   } else {
2281     PetscInt ml,nl;
2282 
2283     M    = *newmat;
2284     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2285     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2286     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2287     /*
2288          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2289        rather than the slower MatSetValues().
2290     */
2291     M->was_assembled = PETSC_TRUE;
2292     M->assembled     = PETSC_FALSE;
2293   }
2294   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2295   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2296   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2297   ii   = aij->i;
2298   jj   = aij->j;
2299   aa   = aij->a;
2300   for (i=0; i<m; i++) {
2301     row   = rstart/bs + i;
2302     nz    = ii[i+1] - ii[i];
2303     cwork = jj;     jj += nz;
2304     vwork = aa;     aa += nz*bs*bs;
2305     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2306   }
2307 
2308   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2309   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2310   *newmat = M;
2311 
2312   /* save submatrix used in processor for next request */
2313   if (call ==  MAT_INITIAL_MATRIX) {
2314     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2315     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2316   }
2317   PetscFunctionReturn(0);
2318 }
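
/*
   Illustrative sketch (added commentary, not original source): a block-aligned
   submatrix extraction through the public interface, which reaches
   MatGetSubMatrix_MPIBAIJ() and the private routine above.  The helper is
   hypothetical; the index sets must respect the block size.
*/
static PetscErrorCode ExampleSubMatrix(Mat A,Mat *S)
{
  PetscErrorCode ierr;
  PetscInt       rstart,bs;
  IS             is;

  PetscFunctionBegin;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
  /* take the first local block row/column on every process */
  ierr = ISCreateStride(PetscObjectComm((PetscObject)A),bs,rstart,1,&is);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(A,is,is,MAT_INITIAL_MATRIX,S);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}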
2319 
2320 #undef __FUNCT__
2321 #define __FUNCT__ "MatPermute_MPIBAIJ"
2322 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2323 {
2324   MPI_Comm       comm,pcomm;
2325   PetscInt       clocal_size,nrows;
2326   const PetscInt *rows;
2327   PetscMPIInt    size;
2328   IS             crowp,lcolp;
2329   PetscErrorCode ierr;
2330 
2331   PetscFunctionBegin;
2332   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2333   /* make a collective version of 'rowp' */
2334   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2335   if (pcomm==comm) {
2336     crowp = rowp;
2337   } else {
2338     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2339     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2340     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2341     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2342   }
2343   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2344   /* make a local version of 'colp' */
2345   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2346   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2347   if (size==1) {
2348     lcolp = colp;
2349   } else {
2350     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2351   }
2352   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2353   /* now we just get the submatrix */
2354   ierr = MatGetLocalSize(A,NULL,&clocal_size);CHKERRQ(ierr);
2355   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2356   /* clean up */
2357   if (pcomm!=comm) {
2358     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2359   }
2360   if (size>1) {
2361     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2362   }
2363   PetscFunctionReturn(0);
2364 }
2365 
2366 #undef __FUNCT__
2367 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2368 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2369 {
2370   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2371   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2372 
2373   PetscFunctionBegin;
2374   if (nghosts) *nghosts = B->nbs;
2375   if (ghosts) *ghosts = baij->garray;
2376   PetscFunctionReturn(0);
2377 }
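
/*
   Illustrative sketch (added commentary, not original source): the returned
   array is the matrix's own garray (global block column indices of the
   off-diagonal part) and must not be freed by the caller.
*/
static PetscErrorCode ExampleListGhosts(Mat A)
{
  PetscErrorCode ierr;
  PetscInt       nghosts,i;
  const PetscInt *ghosts;

  PetscFunctionBegin;
  ierr = MatGetGhosts(A,&nghosts,&ghosts);CHKERRQ(ierr);
  for (i=0; i<nghosts; i++) {
    ierr = PetscPrintf(PETSC_COMM_SELF,"ghost block column %D\n",ghosts[i]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}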
2378 
2379 #undef __FUNCT__
2380 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2381 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2382 {
2383   Mat            B;
2384   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2385   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2386   Mat_SeqAIJ     *b;
2387   PetscErrorCode ierr;
2388   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2389   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2390   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2391 
2392   PetscFunctionBegin;
2393   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2394   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2395 
2396   /* ----------------------------------------------------------------
2397      Tell every processor the number of nonzeros per row
2398   */
2399   ierr = PetscMalloc1(A->rmap->N/bs,&lens);CHKERRQ(ierr);
2400   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2401     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2402   }
2403   ierr      = PetscMalloc1(2*size,&recvcounts);CHKERRQ(ierr);
2404   displs    = recvcounts + size;
2405   for (i=0; i<size; i++) {
2406     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2407     displs[i]     = A->rmap->range[i]/bs;
2408   }
2409 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2410   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2411 #else
2412   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2413   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2414 #endif
2415   /* ---------------------------------------------------------------
2416      Create the sequential matrix of the same type as the local block diagonal
2417   */
2418   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2419   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2420   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2421   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2422   b    = (Mat_SeqAIJ*)B->data;
2423 
2424   /*--------------------------------------------------------------------
2425     Copy my part of matrix column indices over
2426   */
2427   sendcount  = ad->nz + bd->nz;
2428   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2429   a_jsendbuf = ad->j;
2430   b_jsendbuf = bd->j;
2431   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2432   cnt        = 0;
2433   for (i=0; i<n; i++) {
2434 
2435     /* put in lower diagonal portion */
2436     m = bd->i[i+1] - bd->i[i];
2437     while (m > 0) {
2438       /* is it above diagonal (in bd (compressed) numbering) */
2439       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2440       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2441       m--;
2442     }
2443 
2444     /* put in diagonal portion */
2445     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2446       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2447     }
2448 
2449     /* put in upper diagonal portion */
2450     while (m-- > 0) {
2451       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2452     }
2453   }
2454   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2455 
2456   /*--------------------------------------------------------------------
2457     Gather all column indices to all processors
2458   */
2459   for (i=0; i<size; i++) {
2460     recvcounts[i] = 0;
2461     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2462       recvcounts[i] += lens[j];
2463     }
2464   }
2465   displs[0] = 0;
2466   for (i=1; i<size; i++) {
2467     displs[i] = displs[i-1] + recvcounts[i-1];
2468   }
2469 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2470   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2471 #else
2472   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2473 #endif
2474   /*--------------------------------------------------------------------
2475     Assemble the matrix into usable form (note that the numerical values are not yet set)
2476   */
2477   /* set the b->ilen (length of each row) values */
2478   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2479   /* set the b->i indices */
2480   b->i[0] = 0;
2481   for (i=1; i<=A->rmap->N/bs; i++) {
2482     b->i[i] = b->i[i-1] + lens[i-1];
2483   }
2484   ierr = PetscFree(lens);CHKERRQ(ierr);
2485   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2486   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2487   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2488 
2489   if (A->symmetric) {
2490     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2491   } else if (A->hermitian) {
2492     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2493   } else if (A->structurally_symmetric) {
2494     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2495   }
2496   *newmat = B;
2497   PetscFunctionReturn(0);
2498 }
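
/*
   Illustrative sketch (added commentary, not original source): every rank ends
   up with the complete block nonzero pattern as a sequential AIJ matrix whose
   numerical values are left unset.
*/
static PetscErrorCode ExampleGatherStructure(Mat A)
{
  PetscErrorCode ierr;
  Mat            S;

  PetscFunctionBegin;
  ierr = MatGetSeqNonzeroStructure(A,&S);CHKERRQ(ierr); /* collective on A */
  ierr = MatDestroy(&S);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}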
2499 
2500 #undef __FUNCT__
2501 #define __FUNCT__ "MatSOR_MPIBAIJ"
2502 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2503 {
2504   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2505   PetscErrorCode ierr;
2506   Vec            bb1 = 0;
2507 
2508   PetscFunctionBegin;
2509   if (flag == SOR_APPLY_UPPER) {
2510     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2511     PetscFunctionReturn(0);
2512   }
2513 
2514   if (its > 1 || !(flag & SOR_ZERO_INITIAL_GUESS)) {
2515     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2516   }
2517 
2518   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2519     if (flag & SOR_ZERO_INITIAL_GUESS) {
2520       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2521       its--;
2522     }
2523 
2524     while (its--) {
2525       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2526       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2527 
2528       /* update rhs: bb1 = bb - B*x */
2529       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2530       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2531 
2532       /* local sweep */
2533       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2534     }
2535   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2536     if (flag & SOR_ZERO_INITIAL_GUESS) {
2537       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2538       its--;
2539     }
2540     while (its--) {
2541       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2542       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2543 
2544       /* update rhs: bb1 = bb - B*x */
2545       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2546       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2547 
2548       /* local sweep */
2549       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2550     }
2551   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2552     if (flag & SOR_ZERO_INITIAL_GUESS) {
2553       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2554       its--;
2555     }
2556     while (its--) {
2557       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2558       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2559 
2560       /* update rhs: bb1 = bb - B*x */
2561       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2562       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2563 
2564       /* local sweep */
2565       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2566     }
2567   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Requested parallel SOR variant is not supported");
2568 
2569   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2570   PetscFunctionReturn(0);
2571 }
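
/*
   Illustrative sketch (added commentary, not original source): one local
   symmetric SOR sweep through the public interface; this flag combination
   selects the first branch above with the zero-initial-guess shortcut.
*/
static PetscErrorCode ExampleLocalSOR(Mat A,Vec b,Vec x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSOR(A,b,1.0,(MatSORType)(SOR_LOCAL_SYMMETRIC_SWEEP | SOR_ZERO_INITIAL_GUESS),0.0,1,1,x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}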
2572 
2573 #undef __FUNCT__
2574 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2575 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2576 {
2577   PetscErrorCode ierr;
2578   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2579   PetscInt       N,i,*garray = aij->garray;
2580   PetscInt       ib,jb,bs = A->rmap->bs;
2581   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2582   MatScalar      *a_val = a_aij->a;
2583   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2584   MatScalar      *b_val = b_aij->a;
2585   PetscReal      *work;
2586 
2587   PetscFunctionBegin;
2588   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2589   ierr = PetscCalloc1(N,&work);CHKERRQ(ierr);
2590   if (type == NORM_2) {
2591     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2592       for (jb=0; jb<bs; jb++) {
2593         for (ib=0; ib<bs; ib++) {
2594           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2595           a_val++;
2596         }
2597       }
2598     }
2599     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2600       for (jb=0; jb<bs; jb++) {
2601         for (ib=0; ib<bs; ib++) {
2602           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2603           b_val++;
2604         }
2605       }
2606     }
2607   } else if (type == NORM_1) {
2608     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2609       for (jb=0; jb<bs; jb++) {
2610         for (ib=0; ib<bs; ib++) {
2611           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2612           a_val++;
2613         }
2614       }
2615     }
2616     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2617       for (jb=0; jb<bs; jb++) {
2618        for (ib=0; ib<bs; ib++) {
2619           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2620           b_val++;
2621         }
2622       }
2623     }
2624   } else if (type == NORM_INFINITY) {
2625     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2626       for (jb=0; jb<bs; jb++) {
2627         for (ib=0; ib<bs; ib++) {
2628           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2629           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2630           a_val++;
2631         }
2632       }
2633     }
2634     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2635       for (jb=0; jb<bs; jb++) {
2636         for (ib=0; ib<bs; ib++) {
2637           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2638           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2639           b_val++;
2640         }
2641       }
2642     }
2643   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2644   if (type == NORM_INFINITY) {
2645     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2646   } else {
2647     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2648   }
2649   ierr = PetscFree(work);CHKERRQ(ierr);
2650   if (type == NORM_2) {
2651     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2652   }
2653   PetscFunctionReturn(0);
2654 }
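
/*
   Illustrative sketch (added commentary, not original source): the routine is
   collective and fills all N global column norms on every rank, so the output
   array must have global length.  The helper name is hypothetical.
*/
static PetscErrorCode ExampleColumnNorms(Mat A)
{
  PetscErrorCode ierr;
  PetscInt       N;
  PetscReal      *norms;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = PetscMalloc1(N,&norms);CHKERRQ(ierr);
  ierr = MatGetColumnNorms(A,NORM_2,norms);CHKERRQ(ierr);
  ierr = PetscFree(norms);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}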
2655 
2656 #undef __FUNCT__
2657 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2658 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2659 {
2660   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2661   PetscErrorCode ierr;
2662 
2663   PetscFunctionBegin;
2664   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2665   A->errortype = a->A->errortype;
2666   PetscFunctionReturn(0);
2667 }
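
/*
   Illustrative sketch (added commentary, not original source): the returned
   pointer references storage owned by the matrix (bs*bs entries per block row)
   and must not be freed by the caller.  The helper name is hypothetical.
*/
static PetscErrorCode ExampleInvertBlockDiagonal(Mat A)
{
  PetscErrorCode    ierr;
  const PetscScalar *idiag;

  PetscFunctionBegin;
  ierr = MatInvertBlockDiagonal(A,&idiag);CHKERRQ(ierr); /* A->errortype records any zero pivot */
  PetscFunctionReturn(0);
}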
2668 
2669 #undef __FUNCT__
2670 #define __FUNCT__ "MatShift_MPIBAIJ"
2671 PetscErrorCode MatShift_MPIBAIJ(Mat Y,PetscScalar a)
2672 {
2673   PetscErrorCode ierr;
2674   Mat_MPIBAIJ    *maij = (Mat_MPIBAIJ*)Y->data;
2675   Mat_SeqBAIJ    *aij = (Mat_SeqBAIJ*)maij->A->data;
2676 
2677   PetscFunctionBegin;
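  /* Make sure the diagonal blocks exist before shifting: if Y was never preallocated,
     set up minimal storage (one block per block row); if it was preallocated but holds
     no nonzeros yet, re-preallocate the diagonal part while preserving the nonew flag
     so any error-on-new-nonzero option the user has set is not lost. */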
2678   if (!Y->preallocated) {
2679     ierr = MatMPIBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
2680   } else if (!aij->nz) {
2681     PetscInt nonew = aij->nonew;
2682     ierr = MatSeqBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
2683     aij->nonew = nonew;
2684   }
2685   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
2686   PetscFunctionReturn(0);
2687 }
2688 
2689 #undef __FUNCT__
2690 #define __FUNCT__ "MatMissingDiagonal_MPIBAIJ"
2691 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A,PetscBool  *missing,PetscInt *d)
2692 {
2693   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2694   PetscErrorCode ierr;
2695 
2696   PetscFunctionBegin;
2697   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only works for square matrices");
2698   ierr = MatMissingDiagonal(a->A,missing,d);CHKERRQ(ierr);
2699   if (d) {
2700     PetscInt rstart;
2701     ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
2702     *d += rstart/A->rmap->bs;
2703 
2704   }
2705   PetscFunctionReturn(0);
2706 }
2707 
2708 /* -------------------------------------------------------------------*/
2709 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2710                                        MatGetRow_MPIBAIJ,
2711                                        MatRestoreRow_MPIBAIJ,
2712                                        MatMult_MPIBAIJ,
2713                                 /* 4*/ MatMultAdd_MPIBAIJ,
2714                                        MatMultTranspose_MPIBAIJ,
2715                                        MatMultTransposeAdd_MPIBAIJ,
2716                                        0,
2717                                        0,
2718                                        0,
2719                                 /*10*/ 0,
2720                                        0,
2721                                        0,
2722                                        MatSOR_MPIBAIJ,
2723                                        MatTranspose_MPIBAIJ,
2724                                 /*15*/ MatGetInfo_MPIBAIJ,
2725                                        MatEqual_MPIBAIJ,
2726                                        MatGetDiagonal_MPIBAIJ,
2727                                        MatDiagonalScale_MPIBAIJ,
2728                                        MatNorm_MPIBAIJ,
2729                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2730                                        MatAssemblyEnd_MPIBAIJ,
2731                                        MatSetOption_MPIBAIJ,
2732                                        MatZeroEntries_MPIBAIJ,
2733                                 /*24*/ MatZeroRows_MPIBAIJ,
2734                                        0,
2735                                        0,
2736                                        0,
2737                                        0,
2738                                 /*29*/ MatSetUp_MPIBAIJ,
2739                                        0,
2740                                        0,
2741                                        0,
2742                                        0,
2743                                 /*34*/ MatDuplicate_MPIBAIJ,
2744                                        0,
2745                                        0,
2746                                        0,
2747                                        0,
2748                                 /*39*/ MatAXPY_MPIBAIJ,
2749                                        MatGetSubMatrices_MPIBAIJ,
2750                                        MatIncreaseOverlap_MPIBAIJ,
2751                                        MatGetValues_MPIBAIJ,
2752                                        MatCopy_MPIBAIJ,
2753                                 /*44*/ 0,
2754                                        MatScale_MPIBAIJ,
2755                                        MatShift_MPIBAIJ,
2756                                        0,
2757                                        MatZeroRowsColumns_MPIBAIJ,
2758                                 /*49*/ 0,
2759                                        0,
2760                                        0,
2761                                        0,
2762                                        0,
2763                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2764                                        0,
2765                                        MatSetUnfactored_MPIBAIJ,
2766                                        MatPermute_MPIBAIJ,
2767                                        MatSetValuesBlocked_MPIBAIJ,
2768                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2769                                        MatDestroy_MPIBAIJ,
2770                                        MatView_MPIBAIJ,
2771                                        0,
2772                                        0,
2773                                 /*64*/ 0,
2774                                        0,
2775                                        0,
2776                                        0,
2777                                        0,
2778                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2779                                        0,
2780                                        0,
2781                                        0,
2782                                        0,
2783                                 /*74*/ 0,
2784                                        MatFDColoringApply_BAIJ,
2785                                        0,
2786                                        0,
2787                                        0,
2788                                 /*79*/ 0,
2789                                        0,
2790                                        0,
2791                                        0,
2792                                        MatLoad_MPIBAIJ,
2793                                 /*84*/ 0,
2794                                        0,
2795                                        0,
2796                                        0,
2797                                        0,
2798                                 /*89*/ 0,
2799                                        0,
2800                                        0,
2801                                        0,
2802                                        0,
2803                                 /*94*/ 0,
2804                                        0,
2805                                        0,
2806                                        0,
2807                                        0,
2808                                 /*99*/ 0,
2809                                        0,
2810                                        0,
2811                                        0,
2812                                        0,
2813                                 /*104*/0,
2814                                        MatRealPart_MPIBAIJ,
2815                                        MatImaginaryPart_MPIBAIJ,
2816                                        0,
2817                                        0,
2818                                 /*109*/0,
2819                                        0,
2820                                        0,
2821                                        0,
2822                                        MatMissingDiagonal_MPIBAIJ,
2823                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2824                                        0,
2825                                        MatGetGhosts_MPIBAIJ,
2826                                        0,
2827                                        0,
2828                                 /*119*/0,
2829                                        0,
2830                                        0,
2831                                        0,
2832                                        MatGetMultiProcBlock_MPIBAIJ,
2833                                 /*124*/0,
2834                                        MatGetColumnNorms_MPIBAIJ,
2835                                        MatInvertBlockDiagonal_MPIBAIJ,
2836                                        0,
2837                                        0,
2838                                /*129*/ 0,
2839                                        0,
2840                                        0,
2841                                        0,
2842                                        0,
2843                                /*134*/ 0,
2844                                        0,
2845                                        0,
2846                                        0,
2847                                        0,
2848                                /*139*/ 0,
2849                                        0,
2850                                        0,
2851                                        MatFDColoringSetUp_MPIXAIJ,
2852                                        0,
2853                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPIBAIJ
2854 };
2855 
2856 #undef __FUNCT__
2857 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2858 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2859 {
2860   PetscFunctionBegin;
2861   *a = ((Mat_MPIBAIJ*)A->data)->A;
2862   PetscFunctionReturn(0);
2863 }
2864 
2865 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2866 
2867 #undef __FUNCT__
2868 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2869 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2870 {
2871   PetscInt       m,rstart,cstart,cend;
2872   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2873   const PetscInt *JJ    =0;
2874   PetscScalar    *values=0;
2875   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2876   PetscErrorCode ierr;
2877 
2878   PetscFunctionBegin;
2879   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2880   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2881   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2882   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2883   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2884   m      = B->rmap->n/bs;
2885   rstart = B->rmap->rstart/bs;
2886   cstart = B->cmap->rstart/bs;
2887   cend   = B->cmap->rend/bs;
2888 
2889   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2890   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
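  /* Count, for each local block row, how many blocks fall in the diagonal block-column
     range [cstart,cend); the rest are off-diagonal.  This relies on the column indices
     of each row being sorted, as the interface requires. */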
2891   for (i=0; i<m; i++) {
2892     nz = ii[i+1] - ii[i];
2893     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2894     nz_max = PetscMax(nz_max,nz);
2895     JJ     = jj + ii[i];
2896     for (j=0; j<nz; j++) {
2897       if (*JJ >= cstart) break;
2898       JJ++;
2899     }
2900     d = 0;
2901     for (; j<nz; j++) {
2902       if (*JJ++ >= cend) break;
2903       d++;
2904     }
2905     d_nnz[i] = d;
2906     o_nnz[i] = nz - d;
2907   }
2908   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2909   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2910 
2911   values = (PetscScalar*)V;
2912   if (!values) {
2913     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2914     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2915   }
2916   for (i=0; i<m; i++) {
2917     PetscInt          row    = i + rstart;
2918     PetscInt          ncols  = ii[i+1] - ii[i];
2919     const PetscInt    *icols = jj + ii[i];
2920     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2921       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2922       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2923     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2924       PetscInt j;
2925       for (j=0; j<ncols; j++) {
2926         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2927         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2928       }
2929     }
2930   }
2931 
2932   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2933   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2934   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2935   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2936   PetscFunctionReturn(0);
2937 }
2938 
2939 #undef __FUNCT__
2940 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2941 /*@C
2942    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2943    (block compressed row).
2944 
2945    Collective on MPI_Comm
2946 
2947    Input Parameters:
2948 +  B - the matrix
2949 .  bs - the block size
2950 .  i - the indices into j for the start of each local block row (starts with zero)
2951 .  j - the global block column indices for each local block row (starts with zero); these must be sorted within each row
2952 -  v - optional values in the matrix
2953 
2954    Level: developer
2955 
2956    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2957    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index is
2958    over rows within a block and the last index is over columns within a block.  Fortran programs will likely set
2959    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index is over rows within a
2960    block and the second index is over columns within a block.
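
   A sketch of typical usage on each process (a minimal illustration; the sizes and values
   are made up, not taken from a PETSc example):
.vb
      Mat         B;
      PetscInt    bs  = 2;
      PetscInt    i[] = {0,2,3};
      PetscInt    j[] = {0,1,1};
      PetscScalar v[3*2*2] = {1,2,3,4, 5,6,7,8, 9,10,11,12};

      MatCreate(PETSC_COMM_WORLD,&B);
      MatSetSizes(B,2*bs,2*bs,PETSC_DETERMINE,PETSC_DETERMINE);
      MatSetType(B,MATMPIBAIJ);
      MatMPIBAIJSetPreallocationCSR(B,bs,i,j,v);
.ve
   Here each process contributes two block rows: the first holds blocks in global block
   columns 0 and 1, the second a single block in block column 1.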
2961 
2962 .keywords: matrix, aij, compressed row, sparse, parallel
2963 
2964 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ, MatCreateMPIBAIJWithArrays(), MPIBAIJ
2965 @*/
2966 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2967 {
2968   PetscErrorCode ierr;
2969 
2970   PetscFunctionBegin;
2971   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2972   PetscValidType(B,1);
2973   PetscValidLogicalCollectiveInt(B,bs,2);
2974   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2975   PetscFunctionReturn(0);
2976 }
2977 
2978 #undef __FUNCT__
2979 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2980 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2981 {
2982   Mat_MPIBAIJ    *b;
2983   PetscErrorCode ierr;
2984   PetscInt       i;
2985 
2986   PetscFunctionBegin;
2987   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
2988   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2989   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2990   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2991 
2992   if (d_nnz) {
2993     for (i=0; i<B->rmap->n/bs; i++) {
2994       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]);
2995     }
2996   }
2997   if (o_nnz) {
2998     for (i=0; i<B->rmap->n/bs; i++) {
2999       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]);
3000     }
3001   }
3002 
3003   b      = (Mat_MPIBAIJ*)B->data;
3004   b->bs2 = bs*bs;
3005   b->mbs = B->rmap->n/bs;
3006   b->nbs = B->cmap->n/bs;
3007   b->Mbs = B->rmap->N/bs;
3008   b->Nbs = B->cmap->N/bs;
3009 
3010   for (i=0; i<=b->size; i++) {
3011     b->rangebs[i] = B->rmap->range[i]/bs;
3012   }
3013   b->rstartbs = B->rmap->rstart/bs;
3014   b->rendbs   = B->rmap->rend/bs;
3015   b->cstartbs = B->cmap->rstart/bs;
3016   b->cendbs   = B->cmap->rend/bs;
3017 
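  /* Create the two sequential pieces: A holds the diagonal block (local rows by local
     columns) and B the off-diagonal block.  B is created with the full global column
     space; assembly later compresses its columns and records the mapping in garray. */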
3018   if (!B->preallocated) {
3019     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3020     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3021     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
3022     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
3023     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3024     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3025     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3026     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
3027     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
3028   }
3029 
3030   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3031   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3032   B->preallocated = PETSC_TRUE;
3033   PetscFunctionReturn(0);
3034 }
3035 
3036 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3037 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3038 
3039 #undef __FUNCT__
3040 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3041 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3042 {
3043   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3044   PetscErrorCode ierr;
3045   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3046   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3047   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3048 
3049   PetscFunctionBegin;
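  /* Count the entries of each block row: the diagonal-part and off-diagonal-part counts
     are added, and the diagonal entry itself is subtracted when present, since MPIAdj
     stores no diagonal entries. */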
3050   ierr  = PetscMalloc1(M+1,&ii);CHKERRQ(ierr);
3051   ii[0] = 0;
3052   for (i=0; i<M; i++) {
3053     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3054     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3055     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3056     /* remove one from the count if this row of the matrix has a diagonal entry */
3057     for (j=id[i]; j<id[i+1]; j++) {
3058       if (jd[j] == i) {ii[i+1]--;break;}
3059     }
3060   }
3061   ierr = PetscMalloc1(ii[M],&jj);CHKERRQ(ierr);
3062   cnt  = 0;
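  /* Fill each row with global block columns in ascending order: off-diagonal columns
     below this process's range first, then the local (diagonal-part) columns without
     the diagonal itself, then the remaining off-diagonal columns; garray translates
     local off-diagonal block columns to global ones. */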
3063   for (i=0; i<M; i++) {
3064     for (j=io[i]; j<io[i+1]; j++) {
3065       if (garray[jo[j]] > rstart) break;
3066       jj[cnt++] = garray[jo[j]];
3067     }
3068     for (k=id[i]; k<id[i+1]; k++) {
3069       if (jd[k] != i) {
3070         jj[cnt++] = rstart + jd[k];
3071       }
3072     }
3073     for (; j<io[i+1]; j++) {
3074       jj[cnt++] = garray[jo[j]];
3075     }
3076   }
3077   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
3078   PetscFunctionReturn(0);
3079 }
3080 
3081 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3082 
3083 PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3084 
3085 #undef __FUNCT__
3086 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3087 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3088 {
3089   PetscErrorCode ierr;
3090   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3091   Mat            B;
3092   Mat_MPIAIJ     *b;
3093 
3094   PetscFunctionBegin;
3095   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
3096 
3097   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
3098   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3099   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3100   ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr);
3101   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
3102   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
3103   b    = (Mat_MPIAIJ*) B->data;
3104 
3105   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3106   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3107   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3108   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3109   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3110   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3111   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3112   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3113   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3114   if (reuse == MAT_INPLACE_MATRIX) {
3115     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
3116   } else {
3117    *newmat = B;
3118   }
3119   PetscFunctionReturn(0);
3120 }
3121 
3122 /*MC
3123    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3124 
3125    Options Database Keys:
3126 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3127 . -mat_block_size <bs> - set the blocksize used to store the matrix
3128 - -mat_use_hash_table <fact> - use a hash table with size factor <fact> to speed up matrix assembly
3129 
3130   Level: beginner
3131 
3132 .seealso: MatCreateBAIJ()
3133 M*/
3134 
3135 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3136 
3137 #undef __FUNCT__
3138 #define __FUNCT__ "MatCreate_MPIBAIJ"
3139 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3140 {
3141   Mat_MPIBAIJ    *b;
3142   PetscErrorCode ierr;
3143   PetscBool      flg = PETSC_FALSE;
3144 
3145   PetscFunctionBegin;
3146   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
3147   B->data = (void*)b;
3148 
3149   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3150   B->assembled = PETSC_FALSE;
3151 
3152   B->insertmode = NOT_SET_VALUES;
3153   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3154   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3155 
3156   /* build local table of row and column ownerships */
3157   ierr = PetscMalloc1(b->size+1,&b->rangebs);CHKERRQ(ierr);
3158 
3159   /* build cache for off array entries formed */
3160   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3161 
3162   b->donotstash  = PETSC_FALSE;
3163   b->colmap      = NULL;
3164   b->garray      = NULL;
3165   b->roworiented = PETSC_TRUE;
3166 
3167   /* stuff used in block assembly */
3168   b->barray = 0;
3169 
3170   /* stuff used for matrix vector multiply */
3171   b->lvec  = 0;
3172   b->Mvctx = 0;
3173 
3174   /* stuff for MatGetRow() */
3175   b->rowindices   = 0;
3176   b->rowvalues    = 0;
3177   b->getrowactive = PETSC_FALSE;
3178 
3179   /* hash table stuff */
3180   b->ht           = 0;
3181   b->hd           = 0;
3182   b->ht_size      = 0;
3183   b->ht_flag      = PETSC_FALSE;
3184   b->ht_fact      = 0;
3185   b->ht_total_ct  = 0;
3186   b->ht_insert_ct = 0;
3187 
3188   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3189   b->ijonly = PETSC_FALSE;
3190 
3191 
3192   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3193   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3194   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3195   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3196   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3197   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIBAIJ);CHKERRQ(ierr);
3198   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3199   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3200   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3201   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3202   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpibstrm_C",MatConvert_MPIBAIJ_MPIBSTRM);CHKERRQ(ierr);
3203   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3204 
3205   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3206   ierr = PetscOptionsName("-mat_use_hash_table","Use hash table to save time in constructing matrix","MatSetOption",&flg);CHKERRQ(ierr);
3207   if (flg) {
3208     PetscReal fact = 1.39;
3209     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3210     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3211     if (fact <= 1.0) fact = 1.39;
3212     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3213     ierr = PetscInfo1(B,"Hash table factor used %5.2f\n",fact);CHKERRQ(ierr);
3214   }
3215   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3216   PetscFunctionReturn(0);
3217 }
3218 
3219 /*MC
3220    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3221 
3222    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3223    and MATMPIBAIJ otherwise.
3224 
3225    Options Database Keys:
3226 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3227 
3228   Level: beginner
3229 
3230 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3231 M*/
3232 
3233 #undef __FUNCT__
3234 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3235 /*@C
3236    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3237    (block compressed row).  For good matrix assembly performance
3238    the user should preallocate the matrix storage by setting the parameters
3239    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3240    performance can be increased by more than a factor of 50.
3241 
3242    Collective on Mat
3243 
3244    Input Parameters:
3245 +  B - the matrix
3246 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3247           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3248 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3249            submatrix  (same for all local rows)
3250 .  d_nnz - array containing the number of block nonzeros in the various block rows
3251            of the diagonal portion of the local submatrix (possibly different for each block
3252            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3253            set it even if it is zero.
3254 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3255            submatrix (same for all local rows).
3256 -  o_nnz - array containing the number of nonzeros in the various block rows of the
3257            off-diagonal portion of the local submatrix (possibly different for
3258            each block row) or NULL.
3259 
3260    If the *_nnz parameter is given then the *_nz parameter is ignored
3261 
3262    Options Database Keys:
3263 +   -mat_block_size - size of the blocks to use
3264 -   -mat_use_hash_table <fact> - use a hash table with size factor <fact> to speed up matrix assembly
3265 
3266    Notes:
3267    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3268    then it must be used on all processors that share the object for that argument.
3269 
3270    Storage Information:
3271    For a square global matrix we define each processor's diagonal portion
3272    to be its local rows and the corresponding columns (a square submatrix);
3273    each processor's off-diagonal portion encompasses the remainder of the
3274    local matrix (a rectangular submatrix).
3275 
3276    The user can specify preallocated storage for the diagonal part of
3277    the local submatrix with either d_nz or d_nnz (not both).  Set
3278    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3279    memory allocation.  Likewise, specify preallocated storage for the
3280    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3281 
3282    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3283    the figure below we depict these three local rows and all columns (0-11).
3284 
3285 .vb
3286            0 1 2 3 4 5 6 7 8 9 10 11
3287           --------------------------
3288    row 3  |o o o d d d o o o o  o  o
3289    row 4  |o o o d d d o o o o  o  o
3290    row 5  |o o o d d d o o o o  o  o
3291           --------------------------
3292 .ve
3293 
3294    Thus, any entries in the d locations are stored in the d (diagonal)
3295    submatrix, and any entries in the o locations are stored in the
3296    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3297    stored simply in the MATSEQBAIJ format for compressed row storage.
3298 
3299    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3300    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3301    In general, for PDE problems in which most nonzeros are near the diagonal,
3302    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3303    or you will get TERRIBLE performance; see the users' manual chapter on
3304    matrices.
3305 
3306    You can call MatGetInfo() to get information on how effective the preallocation was;
3307    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3308    You can also run with the option -info and look for messages with the string
3309    malloc in them to see if additional memory allocation was needed.
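
   A sketch of typical usage (a minimal illustration; the block size and counts are made up):
.vb
      MatCreate(PETSC_COMM_WORLD,&B);
      MatSetSizes(B,m,n,M,N);
      MatSetType(B,MATMPIBAIJ);
      MatMPIBAIJSetPreallocation(B,2,3,NULL,2,NULL);
.ve
   which reserves room for at most 3 diagonal and 2 off-diagonal block nonzeros per block row.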
3310 
3311    Level: intermediate
3312 
3313 .keywords: matrix, block, aij, compressed row, sparse, parallel
3314 
3315 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3316 @*/
3317 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3318 {
3319   PetscErrorCode ierr;
3320 
3321   PetscFunctionBegin;
3322   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3323   PetscValidType(B,1);
3324   PetscValidLogicalCollectiveInt(B,bs,2);
3325   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3326   PetscFunctionReturn(0);
3327 }
3328 
3329 #undef __FUNCT__
3330 #define __FUNCT__ "MatCreateBAIJ"
3331 /*@C
3332    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3333    (block compressed row).  For good matrix assembly performance
3334    the user should preallocate the matrix storage by setting the parameters
3335    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3336    performance can be increased by more than a factor of 50.
3337 
3338    Collective on MPI_Comm
3339 
3340    Input Parameters:
3341 +  comm - MPI communicator
3342 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3343           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3344 .  m - number of local rows (or PETSC_DECIDE to have it calculated if M is given)
3345            This value should be the same as the local size used in creating the
3346            y vector for the matrix-vector product y = Ax.
3347 .  n - number of local columns (or PETSC_DECIDE to have it calculated if N is given)
3348            This value should be the same as the local size used in creating the
3349            x vector for the matrix-vector product y = Ax.
3350 .  M - number of global rows (or PETSC_DETERMINE to have it calculated if m is given)
3351 .  N - number of global columns (or PETSC_DETERMINE to have it calculated if n is given)
3352 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3353            submatrix  (same for all local rows)
3354 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3355            of the diagonal portion of the local submatrix (possibly different for each block
3356            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3357            and set it even if it is zero.
3358 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3359            submatrix (same for all local rows).
3360 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3361            off-diagonal portion of the local submatrix (possibly different for
3362            each block row) or NULL.
3363 
3364    Output Parameter:
3365 .  A - the matrix
3366 
3367    Options Database Keys:
3368 +   -mat_block_size - size of the blocks to use
3369 -   -mat_use_hash_table <fact> - use a hash table with size factor <fact> to speed up matrix assembly
3370 
3371    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3372    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3373    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3374 
3375    Notes:
3376    If the *_nnz parameter is given then the *_nz parameter is ignored
3377 
3378    A nonzero block is any block that has 1 or more nonzeros in it
3379 
3380    The user MUST specify either the local or global matrix dimensions
3381    (possibly both).
3382 
3383    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3384    then it must be used on all processors that share the object for that argument.
3385 
3386    Storage Information:
3387    For a square global matrix we define each processor's diagonal portion
3388    to be its local rows and the corresponding columns (a square submatrix);
3389    each processor's off-diagonal portion encompasses the remainder of the
3390    local matrix (a rectangular submatrix).
3391 
3392    The user can specify preallocated storage for the diagonal part of
3393    the local submatrix with either d_nz or d_nnz (not both).  Set
3394    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3395    memory allocation.  Likewise, specify preallocated storage for the
3396    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3397 
3398    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3399    the figure below we depict these three local rows and all columns (0-11).
3400 
3401 .vb
3402            0 1 2 3 4 5 6 7 8 9 10 11
3403           --------------------------
3404    row 3  |o o o d d d o o o o  o  o
3405    row 4  |o o o d d d o o o o  o  o
3406    row 5  |o o o d d d o o o o  o  o
3407           --------------------------
3408 .ve
3409 
3410    Thus, any entries in the d locations are stored in the d (diagonal)
3411    submatrix, and any entries in the o locations are stored in the
3412    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3413    stored simply in the MATSEQBAIJ format for compressed row storage.
3414 
3415    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3416    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3417    In general, for PDE problems in which most nonzeros are near the diagonal,
3418    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3419    or you will get TERRIBLE performance; see the users' manual chapter on
3420    matrices.
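
   A sketch of typical usage (a minimal illustration; the sizes and counts are made up):
.vb
      Mat A;
      MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,100,100,5,NULL,2,NULL,&A);
.ve
   which creates a 100 by 100 matrix with 2 by 2 blocks, reserving room for at most 5 diagonal
   and 2 off-diagonal blocks per block row; the values are then set with MatSetValuesBlocked()
   followed by MatAssemblyBegin()/MatAssemblyEnd().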
3421 
3422    Level: intermediate
3423 
3424 .keywords: matrix, block, aij, compressed row, sparse, parallel
3425 
3426 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3427 @*/
3428 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3429 {
3430   PetscErrorCode ierr;
3431   PetscMPIInt    size;
3432 
3433   PetscFunctionBegin;
3434   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3435   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3436   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3437   if (size > 1) {
3438     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3439     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3440   } else {
3441     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3442     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3443   }
3444   PetscFunctionReturn(0);
3445 }
3446 
3447 #undef __FUNCT__
3448 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3449 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3450 {
3451   Mat            mat;
3452   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3453   PetscErrorCode ierr;
3454   PetscInt       len=0;
3455 
3456   PetscFunctionBegin;
3457   *newmat = 0;
3458   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3459   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3460   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3461   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3462 
3463   mat->factortype   = matin->factortype;
3464   mat->preallocated = PETSC_TRUE;
3465   mat->assembled    = PETSC_TRUE;
3466   mat->insertmode   = NOT_SET_VALUES;
3467 
3468   a             = (Mat_MPIBAIJ*)mat->data;
3469   mat->rmap->bs = matin->rmap->bs;
3470   a->bs2        = oldmat->bs2;
3471   a->mbs        = oldmat->mbs;
3472   a->nbs        = oldmat->nbs;
3473   a->Mbs        = oldmat->Mbs;
3474   a->Nbs        = oldmat->Nbs;
3475 
3476   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3477   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3478 
3479   a->size         = oldmat->size;
3480   a->rank         = oldmat->rank;
3481   a->donotstash   = oldmat->donotstash;
3482   a->roworiented  = oldmat->roworiented;
3483   a->rowindices   = 0;
3484   a->rowvalues    = 0;
3485   a->getrowactive = PETSC_FALSE;
3486   a->barray       = 0;
3487   a->rstartbs     = oldmat->rstartbs;
3488   a->rendbs       = oldmat->rendbs;
3489   a->cstartbs     = oldmat->cstartbs;
3490   a->cendbs       = oldmat->cendbs;
3491 
3492   /* hash table stuff */
3493   a->ht           = 0;
3494   a->hd           = 0;
3495   a->ht_size      = 0;
3496   a->ht_flag      = oldmat->ht_flag;
3497   a->ht_fact      = oldmat->ht_fact;
3498   a->ht_total_ct  = 0;
3499   a->ht_insert_ct = 0;
3500 
3501   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3502   if (oldmat->colmap) {
3503 #if defined(PETSC_USE_CTABLE)
3504     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3505 #else
3506     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
3507     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3508     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3509 #endif
3510   } else a->colmap = 0;
3511 
3512   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3513     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
3514     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3515     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3516   } else a->garray = 0;
3517 
3518   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3519   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3520   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3521   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3522   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3523 
3524   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3525   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3526   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3527   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3528   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3529   *newmat = mat;
3530   PetscFunctionReturn(0);
3531 }
3532 
3533 #undef __FUNCT__
3534 #define __FUNCT__ "MatLoad_MPIBAIJ"
3535 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3536 {
3537   PetscErrorCode ierr;
3538   int            fd;
3539   PetscInt       i,nz,j,rstart,rend;
3540   PetscScalar    *vals,*buf;
3541   MPI_Comm       comm;
3542   MPI_Status     status;
3543   PetscMPIInt    rank,size,maxnz;
3544   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3545   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3546   PetscInt       jj,*mycols,*ibuf,bs = newmat->rmap->bs,Mbs,mbs,extra_rows,mmax;
3547   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3548   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3549   PetscInt       dcount,kmax,k,nzcount,tmp,mend;
3550 
3551   PetscFunctionBegin;
3552   /* force binary viewer to load .info file if it has not yet done so */
3553   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
3554   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3555   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3556   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3557   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3558   if (bs < 0) bs = 1;
3559 
3560   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3561   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3562   ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3563   if (!rank) {
3564     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3565     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not a matrix object");
3566     if (header[3] < 0) SETERRQ(PetscObjectComm((PetscObject)newmat),PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format on disk, cannot load as MPIBAIJ");
3567   }
3568   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3569   M    = header[1]; N = header[2];
3570 
3571   /* If global sizes are set, check if they are consistent with that given in the file */
3572   if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows: Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
3573   if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols: Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);
3574 
3575   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3576 
3577   /*
3578      This code adds extra rows to make sure the number of rows is
3579      divisible by the blocksize
3580   */
3581   Mbs        = M/bs;
3582   extra_rows = bs - M + bs*Mbs;
3583   if (extra_rows == bs) extra_rows = 0;
3584   else                  Mbs++;
3585   if (extra_rows && !rank) {
3586     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3587   }
3588 
3589   /* determine ownership of all rows */
3590   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3591     mbs = Mbs/size + ((Mbs % size) > rank);
3592     m   = mbs*bs;
3593   } else { /* User set */
3594     m   = newmat->rmap->n;
3595     mbs = m/bs;
3596   }
3597   ierr = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
3598   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3599 
3600   /* process 0 needs enough room for process with most rows */
3601   if (!rank) {
3602     mmax = rowners[1];
3603     for (i=2; i<=size; i++) {
3604       mmax = PetscMax(mmax,rowners[i]);
3605     }
3606     mmax*=bs;
3607   } else mmax = -1;             /* unused, but compiler warns anyway */
3608 
3609   rowners[0] = 0;
3610   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3611   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3612   rstart = rowners[rank];
3613   rend   = rowners[rank+1];
3614 
3615   /* distribute row lengths to all processors */
3616   ierr = PetscMalloc1(m,&locrowlens);CHKERRQ(ierr);
3617   if (!rank) {
3618     mend = m;
3619     if (size == 1) mend = mend - extra_rows;
3620     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3621     for (j=mend; j<m; j++) locrowlens[j] = 1;
3622     ierr = PetscMalloc1(mmax,&rowlengths);CHKERRQ(ierr);
3623     ierr = PetscCalloc1(size,&procsnz);CHKERRQ(ierr);
3624     for (j=0; j<m; j++) {
3625       procsnz[0] += locrowlens[j];
3626     }
3627     for (i=1; i<size; i++) {
3628       mend = browners[i+1] - browners[i];
3629       if (i == size-1) mend = mend - extra_rows;
3630       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3631       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3632       /* calculate the number of nonzeros on each processor */
3633       for (j=0; j<browners[i+1]-browners[i]; j++) {
3634         procsnz[i] += rowlengths[j];
3635       }
3636       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3637     }
3638     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3639   } else {
3640     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3641   }
3642 
3643   if (!rank) {
3644     /* determine max buffer needed and allocate it */
3645     maxnz = procsnz[0];
3646     for (i=1; i<size; i++) {
3647       maxnz = PetscMax(maxnz,procsnz[i]);
3648     }
3649     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
3650 
3651     /* read in my part of the matrix column indices  */
3652     nz     = procsnz[0];
3653     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3654     mycols = ibuf;
3655     if (size == 1) nz -= extra_rows;
3656     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3657     if (size == 1) {
3658       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3659     }
3660 
3661     /* read in everyone else's (except the last process's) and ship off */
3662     for (i=1; i<size-1; i++) {
3663       nz   = procsnz[i];
3664       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3665       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3666     }
3667     /* read in the stuff for the last proc */
3668     if (size != 1) {
3669       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3670       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3671       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3672       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3673     }
3674     ierr = PetscFree(cols);CHKERRQ(ierr);
3675   } else {
3676     /* determine buffer space needed for message */
3677     nz = 0;
3678     for (i=0; i<m; i++) {
3679       nz += locrowlens[i];
3680     }
3681     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3682     mycols = ibuf;
3683     /* receive message of column indices */
3684     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3685     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3686     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3687   }
3688 
3689   /* loop over local rows, determining number of off diagonal entries */
3690   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
3691   ierr     = PetscCalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
3692   rowcount = 0; nzcount = 0;
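  /* In the loop below mask[] flags the block columns already seen in the current block
     row, masked1[] records the distinct diagonal-part block columns and masked2[] the
     off-diagonal ones, so dlens[]/odlens[] count distinct blocks, not scalar entries. */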
3693   for (i=0; i<mbs; i++) {
3694     dcount  = 0;
3695     odcount = 0;
3696     for (j=0; j<bs; j++) {
3697       kmax = locrowlens[rowcount];
3698       for (k=0; k<kmax; k++) {
3699         tmp = mycols[nzcount++]/bs;
3700         if (!mask[tmp]) {
3701           mask[tmp] = 1;
3702           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3703           else masked1[dcount++] = tmp;
3704         }
3705       }
3706       rowcount++;
3707     }
3708 
3709     dlens[i]  = dcount;
3710     odlens[i] = odcount;
3711 
3712     /* zero out the mask elements we set */
3713     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3714     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3715   }
3716 
3717   ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3718   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3719 
3720   if (!rank) {
3721     ierr = PetscMalloc1(maxnz+1,&buf);CHKERRQ(ierr);
3722     /* read in my part of the matrix numerical values  */
3723     nz     = procsnz[0];
3724     vals   = buf;
3725     mycols = ibuf;
3726     if (size == 1) nz -= extra_rows;
3727     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3728     if (size == 1) {
3729       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3730     }
3731 
3732     /* insert into matrix */
3733     jj = rstart*bs;
3734     for (i=0; i<m; i++) {
3735       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3736       mycols += locrowlens[i];
3737       vals   += locrowlens[i];
3738       jj++;
3739     }
3740     /* read in other processors (except the last one) and ship out */
3741     for (i=1; i<size-1; i++) {
3742       nz   = procsnz[i];
3743       vals = buf;
3744       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3745       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3746     }
3747     /* the last proc */
3748     if (size != 1) {
3749       nz   = procsnz[i] - extra_rows; /* i == size-1 after the loop above; the extra rows are not on the disk */
3750       vals = buf;
3751       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3752       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3753       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3754     }
3755     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3756   } else {
3757     /* receive numeric values */
3758     ierr = PetscMalloc1(nz+1,&buf);CHKERRQ(ierr);
3759 
3760     /* receive message of values*/
3761     vals   = buf;
3762     mycols = ibuf;
3763     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3764 
3765     /* insert into matrix */
3766     jj = rstart*bs;
3767     for (i=0; i<m; i++) {
3768       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3769       mycols += locrowlens[i];
3770       vals   += locrowlens[i];
3771       jj++;
3772     }
3773   }
3774   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3775   ierr = PetscFree(buf);CHKERRQ(ierr);
3776   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3777   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3778   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3779   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3780   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3781   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3782   PetscFunctionReturn(0);
3783 }
3784 
3785 #undef __FUNCT__
3786 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3787 /*@
3788    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3789 
3790    Not Collective, each process can use a different factor
3791 
3792    Input Parameters:
3793 +  mat  - the matrix
3794 -  fact - factor used to compute the size of the hash table
3795 
3796    Level: advanced
3797 
3798   Notes:
3799    This can also be set by the command line option: -mat_use_hash_table <fact>
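
   A sketch of usage (1.39 is the default factor applied by the option handling in this file):
.vb
      MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);
      MatMPIBAIJSetHashTableFactor(mat,1.39);
.ve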
3800 
3801 .keywords: matrix, hashtable, factor, HT
3802 
3803 .seealso: MatSetOption()
3804 @*/
3805 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3806 {
3807   PetscErrorCode ierr;
3808 
3809   PetscFunctionBegin;
3810   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3811   PetscFunctionReturn(0);
3812 }
3813 
3814 #undef __FUNCT__
3815 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3816 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3817 {
3818   Mat_MPIBAIJ *baij;
3819 
3820   PetscFunctionBegin;
3821   baij          = (Mat_MPIBAIJ*)mat->data;
3822   baij->ht_fact = fact;
3823   PetscFunctionReturn(0);
3824 }
3825 
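/*
   MatMPIBAIJGetSeqBAIJ - gives access to the pieces of a MATMPIBAIJ matrix: Ad is the
   sequential diagonal block, Ao the sequential off-diagonal block, and colmap the array
   mapping the local block columns of Ao to global block columns.  Any output argument
   may be NULL if it is not needed.
*/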
3826 #undef __FUNCT__
3827 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3828 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3829 {
3830   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3831 
3832   PetscFunctionBegin;
3833   if (Ad)     *Ad     = a->A;
3834   if (Ao)     *Ao     = a->B;
3835   if (colmap) *colmap = a->garray;
3836   PetscFunctionReturn(0);
3837 }
3838 
3839 /*
3840     Special version for direct calls from Fortran (to eliminate two function call overheads)
3841 */
3842 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3843 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3844 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3845 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3846 #endif
3847 
3848 #undef __FUNCT__
3849 #define __FUNCT__ "matmpibiajsetvaluesblocked"
3850 /*@C
3851   MatMPIBAIJSetValuesBlocked - Direct Fortran call that replaces a call to MatSetValuesBlocked()
3852 
3853   Collective on Mat
3854 
3855   Input Parameters:
3856 + mat - the matrix
3857 . min - number of input rows
3858 . im - input rows
3859 . nin - number of input columns
3860 . in - input columns
3861 . v - numerical values input
3862 - addvin - INSERT_VALUES or ADD_VALUES
3863 
3864   Notes: This contains a complete copy of MatSetValuesBlocked_MPIBAIJ(); the duplication is unfortunate, but it eliminates the two extra function call overheads.
3865 
3866   Level: advanced
3867 
3868 .seealso:   MatSetValuesBlocked()
3869 @*/
3870 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3871 {
3872   /* convert input arguments to C version */
3873   Mat        mat  = *matin;
3874   PetscInt   m    = *min, n = *nin;
3875   InsertMode addv = *addvin;
3876 
3877   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3878   const MatScalar *value;
3879   MatScalar       *barray     = baij->barray;
3880   PetscBool       roworiented = baij->roworiented;
3881   PetscErrorCode  ierr;
3882   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3883   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3884   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3885 
3886   PetscFunctionBegin;
3887   /* tasks normally handled by MatSetValuesBlocked() */
3888   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3889 #if defined(PETSC_USE_DEBUG)
3890   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3891   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3892 #endif
3893   if (mat->assembled) {
3894     mat->was_assembled = PETSC_TRUE;
3895     mat->assembled     = PETSC_FALSE;
3896   }
3897   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3898 
3899 
3900   if (!barray) {
3901     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
3902     baij->barray = barray;
3903   }
3904 
3905   if (roworiented) stepval = (n-1)*bs;
3906   else stepval = (m-1)*bs;
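  /* stepval is the number of scalars to skip, after copying one bs-long piece of a
     block, to reach the next piece: v is an (m*bs) by (n*bs) array, row-major when
     row-oriented and column-major otherwise, so the stride between pieces is bs+stepval. */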
3907 
3908   for (i=0; i<m; i++) {
3909     if (im[i] < 0) continue;
3910 #if defined(PETSC_USE_DEBUG)
3911     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3912 #endif
3913     if (im[i] >= rstart && im[i] < rend) {
3914       row = im[i] - rstart;
3915       for (j=0; j<n; j++) {
3916         /* if the input holds a single block per row (roworiented, n == 1) or per column (!roworiented, m == 1), the block is already contiguous in v and no copy is required */
3917         if ((roworiented) && (n == 1)) {
3918           barray = (MatScalar*)v + i*bs2;
3919         } else if ((!roworiented) && (m == 1)) {
3920           barray = (MatScalar*)v + j*bs2;
3921         } else { /* Here a copy is required */
3922           if (roworiented) {
3923             value = v + i*(stepval+bs)*bs + j*bs;
3924           } else {
3925             value = v + j*(stepval+bs)*bs + i*bs;
3926           }
3927           for (ii=0; ii<bs; ii++,value+=stepval) {
3928             for (jj=0; jj<bs; jj++) {
3929               *barray++ = *value++;
3930             }
3931           }
3932           barray -= bs2; /* rewind to the start of the block just copied */
3933         }
3934 
3935         if (in[j] >= cstart && in[j] < cend) {
3936           col  = in[j] - cstart;
3937           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3938         } else if (in[j] < 0) continue;
3939 #if defined(PETSC_USE_DEBUG)
3940         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3941 #endif
3942         else {
3943           if (mat->was_assembled) {
3944             if (!baij->colmap) {
3945               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3946             }
3947 
3948 #if defined(PETSC_USE_DEBUG)
3949 #if defined(PETSC_USE_CTABLE)
3950             { PetscInt data;
3951               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3952               if (data && (data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3953             }
3954 #else
3955             if (baij->colmap[in[j]] && (baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3956 #endif
3957 #endif
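            /* colmap entries are stored as (local block column)*bs + 1 so that a
               lookup result of 0 means the global block column is not yet present
               in the off-diagonal part B */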
3958 #if defined(PETSC_USE_CTABLE)
3959             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3960             col  = (col < 1) ? -1 : (col - 1)/bs;
3961 #else
3962             col = (baij->colmap[in[j]] < 1) ? -1 : (baij->colmap[in[j]] - 1)/bs;
3963 #endif
3964             if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
3965               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3966               col  =  in[j];
3967             }
3968           } else col = in[j];
3969           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3970         }
3971       }
3972     } else {
3973       if (!baij->donotstash) {
3974         if (roworiented) {
3975           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3976         } else {
3977           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3978         }
3979       }
3980     }
3981   }
3982 
3983   /* task normally handled by MatSetValuesBlocked() */
3984   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3985   PetscFunctionReturn(0);
3986 }
3987 
3988 #undef __FUNCT__
3989 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
3990 /*@
3991      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local rows
3992          in standard block compressed row (CSR) format.
3993 
3994    Collective on MPI_Comm
3995 
3996    Input Parameters:
3997 +  comm - MPI communicator
3998 .  bs - the block size, only a block size of 1 is supported
3999 .  m - number of local rows (Cannot be PETSC_DECIDE)
4000 .  n - number of local columns; this should equal the local size used in creating the
4001        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
4002        calculated if N is given). For square matrices n is almost always m.
4003 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
4004 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
4005 .  i - row offsets; i[r] gives the offset in j and a of the first entry of local row r (i[0] must be 0)
4006 .  j - column indices
4007 -  a - matrix values
4008 
4009    Output Parameter:
4010 .   mat - the matrix
4011 
4012    Level: intermediate
4013 
4014    Notes:
4015        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
4016      thus you CANNOT change the matrix entries by changing the values of a[] after you have
4017      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
4018 
4019      The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
4020      the same as a three dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
4021      block, followed by the second column of the first block, and so on; the blocks are contiguous in memory
4022      with column-major ordering within each block. For example, with bs = 2 a single block contributes its four values in the order a11, a21, a12, a22.
4023 
4024        The i and j indices are 0 based; the entries of i are offsets into the local j array.
4025 
4026 .keywords: matrix, baij, block, compressed row, sparse, parallel
4027 
4028 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
4029           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
4030 @*/
4031 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
4032 {
4033   PetscErrorCode ierr;
4034 
4035   PetscFunctionBegin;
4036   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
4037   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE or negative");
4038   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
4039   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
4040   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
4041   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
4042   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
4043   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
4044   PetscFunctionReturn(0);
4045 }
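
/*
   A minimal usage sketch (user code), assuming bs = 1 and two MPI ranks that
   each own two rows of a 4 x 4 matrix; the arrays shown are rank 0's local
   rows (rank 1 passes its own i, j, and a). Values are illustrative only.

     PetscInt    i[] = {0,2,4};               row offsets into j and a
     PetscInt    j[] = {0,1,1,2};             global column indices
     PetscScalar a[] = {2.0,-1.0,2.0,-1.0};
     Mat         A;
     ierr = MatCreateMPIBAIJWithArrays(PETSC_COMM_WORLD,1,2,PETSC_DECIDE,
                                       PETSC_DETERMINE,4,i,j,a,&A);CHKERRQ(ierr);
*/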
4046 
4047 #undef __FUNCT__
4048 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPIBAIJ"
4049 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
4050 {
4051   PetscErrorCode ierr;
4052   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
4053   PetscInt       *indx;
4054   PetscScalar    *values;
4055 
4056   PetscFunctionBegin;
4057   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
4058   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
4059     Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)inmat->data;
4060     PetscInt       *dnz,*onz,sum,mbs,Nbs;
4061     PetscInt       *bindx,rmax=a->rmax,j;
4062 
4063     ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4064     mbs = m/bs; Nbs = N/cbs;
4065     if (n == PETSC_DECIDE) {
4066       ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
4067     }
4068     /* Check sum(n) = Nbs */
4069     ierr = MPIU_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4070     if (sum != Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local block columns %D != global block columns %D",sum,Nbs);
4071 
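    /* exclusive prefix sum: rstart becomes the global block row index of this
       rank's first local block row */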
4072     ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4073     rstart -= mbs;
4074 
4075     ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
4076     ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
4077     for (i=0; i<mbs; i++) {
4078       ierr = MatGetRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
4079       nnz = nnz/bs;
4080       for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
4081       ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
4082       ierr = MatRestoreRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
4083     }
4084     ierr = PetscFree(bindx);CHKERRQ(ierr);
4085 
4086     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
4087     ierr = MatSetSizes(*outmat,m,n*cbs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4088     ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
4089     ierr = MatSetType(*outmat,MATMPIBAIJ);CHKERRQ(ierr);
4090     ierr = MatMPIBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
4091     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
4092   }
4093 
4094   /* numeric phase */
4095   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4096   ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);
4097 
4098   for (i=0; i<m; i++) {
4099     ierr = MatGetRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4100     Ii   = i + rstart;
4101     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
4102     ierr = MatRestoreRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4103   }
4104   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4105   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4106   PetscFunctionReturn(0);
4107 }
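
/*
   A minimal usage sketch (user code): this routine is normally reached through
   the type-independent front end MatCreateMPIMatConcatenateSeqMat(), which
   stacks one sequential matrix per rank, in rank order, into one parallel
   matrix. With n = PETSC_DECIDE the column layout is chosen by PETSc.

     Mat seqmat,pmat;                        seqmat: a MATSEQBAIJ on PETSC_COMM_SELF
     ierr = MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD,seqmat,
                                             PETSC_DECIDE,MAT_INITIAL_MATRIX,&pmat);CHKERRQ(ierr);
*/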
4108