xref: /petsc/src/mat/impls/sbaij/mpi/mpisbaij.c (revision 110bb6e1beef8eff0dbcef7b490bbe0d0a77c8ac)
1 
2 #include <../src/mat/impls/baij/mpi/mpibaij.h>    /*I "petscmat.h" I*/
3 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
4 #include <../src/mat/impls/sbaij/seq/sbaij.h>
5 #include <petscblaslapack.h>
6 
7 extern PetscErrorCode MatSetUpMultiply_MPISBAIJ(Mat);
8 extern PetscErrorCode MatSetUpMultiply_MPISBAIJ_2comm(Mat);
9 extern PetscErrorCode MatDisAssemble_MPISBAIJ(Mat);
10 extern PetscErrorCode MatIncreaseOverlap_MPISBAIJ(Mat,PetscInt,IS[],PetscInt);
11 extern PetscErrorCode MatGetValues_SeqSBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar []);
12 extern PetscErrorCode MatGetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar []);
13 extern PetscErrorCode MatSetValues_SeqSBAIJ(Mat,PetscInt,const PetscInt [],PetscInt,const PetscInt [],const PetscScalar [],InsertMode);
14 extern PetscErrorCode MatSetValuesBlocked_SeqSBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
15 extern PetscErrorCode MatSetValuesBlocked_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
16 extern PetscErrorCode MatGetRow_SeqSBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
17 extern PetscErrorCode MatRestoreRow_SeqSBAIJ(Mat,PetscInt,PetscInt*,PetscInt**,PetscScalar**);
18 extern PetscErrorCode MatZeroRows_SeqSBAIJ(Mat,IS,PetscScalar*,Vec,Vec);
19 extern PetscErrorCode MatZeroRows_SeqBAIJ(Mat,IS,PetscScalar*,Vec,Vec);
20 extern PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat,Vec,PetscInt[]);
21 extern PetscErrorCode MatSOR_MPISBAIJ(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
22 
23 #undef __FUNCT__
24 #define __FUNCT__ "MatStoreValues_MPISBAIJ"
25 PetscErrorCode  MatStoreValues_MPISBAIJ(Mat mat)
26 {
27   Mat_MPISBAIJ   *aij = (Mat_MPISBAIJ*)mat->data;
28   PetscErrorCode ierr;
29 
30   PetscFunctionBegin;
31   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
32   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
33   PetscFunctionReturn(0);
34 }
35 
36 #undef __FUNCT__
37 #define __FUNCT__ "MatRetrieveValues_MPISBAIJ"
38 PetscErrorCode  MatRetrieveValues_MPISBAIJ(Mat mat)
39 {
40   Mat_MPISBAIJ   *aij = (Mat_MPISBAIJ*)mat->data;
41   PetscErrorCode ierr;
42 
43   PetscFunctionBegin;
44   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
45   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
46   PetscFunctionReturn(0);
47 }
48 
49 #define  MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv) \
50   { \
51  \
52     brow = row/bs;  \
53     rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
54     rmax = aimax[brow]; nrow = ailen[brow]; \
55     bcol = col/bs; \
56     ridx = row % bs; cidx = col % bs; \
57     low  = 0; high = nrow; \
58     while (high-low > 3) { \
59       t = (low+high)/2; \
60       if (rp[t] > bcol) high = t; \
61       else              low  = t; \
62     } \
63     for (_i=low; _i<high; _i++) { \
64       if (rp[_i] > bcol) break; \
65       if (rp[_i] == bcol) { \
66         bap = ap + bs2*_i + bs*cidx + ridx; \
67         if (addv == ADD_VALUES) *bap += value;  \
68         else                    *bap  = value;  \
69         goto a_noinsert; \
70       } \
71     } \
72     if (a->nonew == 1) goto a_noinsert; \
73     if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
74     MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
75     N = nrow++ - 1;  \
76     /* shift up all the later entries in this row */ \
77     for (ii=N; ii>=_i; ii--) { \
78       rp[ii+1] = rp[ii]; \
79       ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
80     } \
81     if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
82     rp[_i]                      = bcol;  \
83     ap[bs2*_i + bs*cidx + ridx] = value;  \
84     A->nonzerostate++;\
85 a_noinsert:; \
86     ailen[brow] = nrow; \
87   }
88 
89 #define  MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv) \
90   { \
91     brow = row/bs;  \
92     rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
93     rmax = bimax[brow]; nrow = bilen[brow]; \
94     bcol = col/bs; \
95     ridx = row % bs; cidx = col % bs; \
96     low  = 0; high = nrow; \
97     while (high-low > 3) { \
98       t = (low+high)/2; \
99       if (rp[t] > bcol) high = t; \
100       else              low  = t; \
101     } \
102     for (_i=low; _i<high; _i++) { \
103       if (rp[_i] > bcol) break; \
104       if (rp[_i] == bcol) { \
105         bap = ap + bs2*_i + bs*cidx + ridx; \
106         if (addv == ADD_VALUES) *bap += value;  \
107         else                    *bap  = value;  \
108         goto b_noinsert; \
109       } \
110     } \
111     if (b->nonew == 1) goto b_noinsert; \
112     if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
113     MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
114     N = nrow++ - 1;  \
115     /* shift up all the later entries in this row */ \
116     for (ii=N; ii>=_i; ii--) { \
117       rp[ii+1] = rp[ii]; \
118       ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
119     } \
120     if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
121     rp[_i]                      = bcol;  \
122     ap[bs2*_i + bs*cidx + ridx] = value;  \
123     B->nonzerostate++;\
124 b_noinsert:; \
125     bilen[brow] = nrow; \
126   }
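/*
   The two macros above insert a single scalar entry into the diagonal (A) or
   off-diagonal (B) local block matrix: the block row is located from ai[]/bi[],
   a short bisection followed by a linear scan finds the block column, and the
   scalar lands at offset bs2*_i + bs*cidx + ridx, i.e. the bs-by-bs blocks are
   stored column-major.  If the block column is not yet present and new nonzeros
   are allowed, MatSeqXAIJReallocateAIJ() grows the row storage if needed, the
   later entries of the row are shifted up, and the new block is zeroed before
   the value is written.
*/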
127 
128 /* Only add/insert a(i,j) with i<=j (blocks).
129    Any a(i,j) with i>j input by the user is ignored.
130 */
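/*
   A minimal calling sketch (illustrative only, not part of this file; the
   matrix sizes, block size, and preallocation below are placeholders, and
   error checking is omitted).  Whether an entry with i>j is silently dropped
   or triggers an error is controlled by MAT_IGNORE_LOWER_TRIANGULAR; the
   sketch enables it explicitly.

     Mat         sbmat;
     PetscInt    i = 1, j = 3;
     PetscScalar v = 2.0;

     MatCreateSBAIJ(PETSC_COMM_WORLD,1,PETSC_DECIDE,PETSC_DECIDE,10,10,2,NULL,2,NULL,&sbmat);
     MatSetOption(sbmat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE);
     MatSetValues(sbmat,1,&i,1,&j,&v,INSERT_VALUES);   // stored:  i <= j
     MatSetValues(sbmat,1,&j,1,&i,&v,INSERT_VALUES);   // dropped: i >  j
     MatAssemblyBegin(sbmat,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(sbmat,MAT_FINAL_ASSEMBLY);
     MatDestroy(&sbmat);
*/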
131 #undef __FUNCT__
132 #define __FUNCT__ "MatSetValues_MPISBAIJ"
133 PetscErrorCode MatSetValues_MPISBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
134 {
135   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
136   MatScalar      value;
137   PetscBool      roworiented = baij->roworiented;
138   PetscErrorCode ierr;
139   PetscInt       i,j,row,col;
140   PetscInt       rstart_orig=mat->rmap->rstart;
141   PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
142   PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;
143 
144   /* Some variables required by the macros */
145   Mat          A     = baij->A;
146   Mat_SeqSBAIJ *a    = (Mat_SeqSBAIJ*)(A)->data;
147   PetscInt     *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
148   MatScalar    *aa   =a->a;
149 
150   Mat         B     = baij->B;
151   Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
152   PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
153   MatScalar   *ba   =b->a;
154 
155   PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
156   PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
157   MatScalar *ap,*bap;
158 
159   /* buffers used to stash the upper-triangular part of off-process rows */
160   PetscInt  n_loc, *in_loc = NULL;
161   MatScalar *v_loc = NULL;
162 
163   PetscFunctionBegin;
164   if (!baij->donotstash) {
165     if (n > baij->n_loc) {
166       ierr = PetscFree(baij->in_loc);CHKERRQ(ierr);
167       ierr = PetscFree(baij->v_loc);CHKERRQ(ierr);
168       ierr = PetscMalloc1(n,&baij->in_loc);CHKERRQ(ierr);
169       ierr = PetscMalloc1(n,&baij->v_loc);CHKERRQ(ierr);
170 
171       baij->n_loc = n;
172     }
173     in_loc = baij->in_loc;
174     v_loc  = baij->v_loc;
175   }
176 
177   for (i=0; i<m; i++) {
178     if (im[i] < 0) continue;
179 #if defined(PETSC_USE_DEBUG)
180     if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
181 #endif
182     if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
183       row = im[i] - rstart_orig;              /* local row index */
184       for (j=0; j<n; j++) {
185         if (im[i]/bs > in[j]/bs) {
186           if (a->ignore_ltriangular) {
187             continue;    /* ignore lower triangular blocks */
188         } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Lower triangular value cannot be set for sbaij format. To ignore these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
189         }
190         if (in[j] >= cstart_orig && in[j] < cend_orig) {  /* diag entry (A) */
191           col  = in[j] - cstart_orig;         /* local col index */
192           brow = row/bs; bcol = col/bs;
193           if (brow > bcol) continue;  /* ignore lower triangular blocks of A */
194           if (roworiented) value = v[i*n+j];
195           else             value = v[i+j*m];
196           MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv);
197           /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
198         } else if (in[j] < 0) continue;
199 #if defined(PETSC_USE_DEBUG)
200         else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
201 #endif
202         else {  /* off-diag entry (B) */
203           if (mat->was_assembled) {
204             if (!baij->colmap) {
205               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
206             }
207 #if defined(PETSC_USE_CTABLE)
208             ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
209             col  = col - 1;
210 #else
211             col = baij->colmap[in[j]/bs] - 1;
212 #endif
213             if (col < 0 && !((Mat_SeqSBAIJ*)(baij->A->data))->nonew) {
214               ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
215               col  =  in[j];
216               /* Reinitialize the variables required by MatSetValues_SeqSBAIJ_B_Private() */
217               B    = baij->B;
218               b    = (Mat_SeqBAIJ*)(B)->data;
219               bimax= b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
220               ba   = b->a;
221             } else col += in[j]%bs;
222           } else col = in[j];
223           if (roworiented) value = v[i*n+j];
224           else             value = v[i+j*m];
225           MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv);
226           /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
227         }
228       }
229     } else {  /* off processor entry */
230       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
231       if (!baij->donotstash) {
232         mat->assembled = PETSC_FALSE;
233         n_loc          = 0;
234         for (j=0; j<n; j++) {
235           if (im[i]/bs > in[j]/bs) continue; /* ignore lower triangular blocks */
236           in_loc[n_loc] = in[j];
237           if (roworiented) {
238             v_loc[n_loc] = v[i*n+j];
239           } else {
240             v_loc[n_loc] = v[j*m+i];
241           }
242           n_loc++;
243         }
244         ierr = MatStashValuesRow_Private(&mat->stash,im[i],n_loc,in_loc,v_loc,PETSC_FALSE);CHKERRQ(ierr);
245       }
246     }
247   }
248   PetscFunctionReturn(0);
249 }
250 
251 #undef __FUNCT__
252 #define __FUNCT__ "MatSetValuesBlocked_MPISBAIJ"
253 PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const MatScalar v[],InsertMode addv)
254 {
255   Mat_MPISBAIJ    *baij = (Mat_MPISBAIJ*)mat->data;
256   const MatScalar *value;
257   MatScalar       *barray     =baij->barray;
258   PetscBool       roworiented = baij->roworiented,ignore_ltriangular = ((Mat_SeqSBAIJ*)baij->A->data)->ignore_ltriangular;
259   PetscErrorCode  ierr;
260   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
261   PetscInt        rend=baij->rendbs,cstart=baij->rstartbs,stepval;
262   PetscInt        cend=baij->rendbs,bs=mat->rmap->bs,bs2=baij->bs2;
263 
264   PetscFunctionBegin;
265   if (!barray) {
266     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
267     baij->barray = barray;
268   }
269 
270   if (roworiented) {
271     stepval = (n-1)*bs;
272   } else {
273     stepval = (m-1)*bs;
274   }
275   for (i=0; i<m; i++) {
276     if (im[i] < 0) continue;
277 #if defined(PETSC_USE_DEBUG)
278     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
279 #endif
280     if (im[i] >= rstart && im[i] < rend) {
281       row = im[i] - rstart;
282       for (j=0; j<n; j++) {
283         if (im[i] > in[j]) {
284           if (ignore_ltriangular) continue; /* ignore lower triangular blocks */
285           else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Lower triangular value cannot be set for sbaij format. To ignore these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
286         }
287         /* If n == 1 (row-oriented) or m == 1 (column-oriented), the block in v is already contiguous and no copy is required */
288         if ((roworiented) && (n == 1)) {
289           barray = (MatScalar*) v + i*bs2;
290         } else if ((!roworiented) && (m == 1)) {
291           barray = (MatScalar*) v + j*bs2;
292         } else { /* Here a copy is required */
293           if (roworiented) {
294             value = v + i*(stepval+bs)*bs + j*bs;
295           } else {
296             value = v + j*(stepval+bs)*bs + i*bs;
297           }
298           for (ii=0; ii<bs; ii++,value+=stepval) {
299             for (jj=0; jj<bs; jj++) {
300               *barray++ = *value++;
301             }
302           }
303           barray -=bs2;
304         }
305 
306         if (in[j] >= cstart && in[j] < cend) {
307           col  = in[j] - cstart;
308           ierr = MatSetValuesBlocked_SeqSBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
309         } else if (in[j] < 0) continue;
310 #if defined(PETSC_USE_DEBUG)
311         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
312 #endif
313         else {
314           if (mat->was_assembled) {
315             if (!baij->colmap) {
316               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
317             }
318 
319 #if defined(PETSC_USE_DEBUG)
320 #if defined(PETSC_USE_CTABLE)
321             { PetscInt data;
322               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
323               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
324             }
325 #else
326             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
327 #endif
328 #endif
329 #if defined(PETSC_USE_CTABLE)
330             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
331             col  = (col - 1)/bs;
332 #else
333             col = (baij->colmap[in[j]] - 1)/bs;
334 #endif
335             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
336               ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
337               col  = in[j];
338             }
339           } else col = in[j];
340           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
341         }
342       }
343     } else {
344       if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
345       if (!baij->donotstash) {
346         if (roworiented) {
347           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
348         } else {
349           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
350         }
351       }
352     }
353   }
354   PetscFunctionReturn(0);
355 }
356 
357 #undef __FUNCT__
358 #define __FUNCT__ "MatGetValues_MPISBAIJ"
359 PetscErrorCode MatGetValues_MPISBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
360 {
361   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
362   PetscErrorCode ierr;
363   PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
364   PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;
365 
366   PetscFunctionBegin;
367   for (i=0; i<m; i++) {
368     if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]); */
369     if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
370     if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
371       row = idxm[i] - bsrstart;
372       for (j=0; j<n; j++) {
373         if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column %D",idxn[j]); */
374         if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
375         if (idxn[j] >= bscstart && idxn[j] < bscend) {
376           col  = idxn[j] - bscstart;
377           ierr = MatGetValues_SeqSBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
378         } else {
379           if (!baij->colmap) {
380             ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
381           }
382 #if defined(PETSC_USE_CTABLE)
383           ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
384           data--;
385 #else
386           data = baij->colmap[idxn[j]/bs]-1;
387 #endif
388           if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
389           else {
390             col  = data + idxn[j]%bs;
391             ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
392           }
393         }
394       }
395     } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
396   }
397   PetscFunctionReturn(0);
398 }
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "MatNorm_MPISBAIJ"
402 PetscErrorCode MatNorm_MPISBAIJ(Mat mat,NormType type,PetscReal *norm)
403 {
404   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
405   PetscErrorCode ierr;
406   PetscReal      sum[2],*lnorm2;
407 
408   PetscFunctionBegin;
409   if (baij->size == 1) {
410     ierr =  MatNorm(baij->A,type,norm);CHKERRQ(ierr);
411   } else {
412     if (type == NORM_FROBENIUS) {
413       ierr    = PetscMalloc1(2,&lnorm2);CHKERRQ(ierr);
414       ierr    =  MatNorm(baij->A,type,lnorm2);CHKERRQ(ierr);
415       *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2++;            /* square of norm(A) */
416       ierr    =  MatNorm(baij->B,type,lnorm2);CHKERRQ(ierr);
417       *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2--;             /* square of norm(B) */
418       ierr    = MPI_Allreduce(lnorm2,sum,2,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
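      /* sum[0] = sum over ranks of ||A_p||_F^2 and sum[1] = sum over ranks of
         ||B_p||_F^2.  Each stored entry of B represents an unstored symmetric
         counterpart below the diagonal, hence the factor of 2 below; the
         SeqSBAIJ Frobenius norm of A_p is expected to account for the symmetry
         of the diagonal block itself. */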
419       *norm   = PetscSqrtReal(sum[0] + 2*sum[1]);
420       ierr    = PetscFree(lnorm2);CHKERRQ(ierr);
421     } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */
422       Mat_SeqSBAIJ *amat=(Mat_SeqSBAIJ*)baij->A->data;
423       Mat_SeqBAIJ  *bmat=(Mat_SeqBAIJ*)baij->B->data;
424       PetscReal    *rsum,*rsum2,vabs;
425       PetscInt     *jj,*garray=baij->garray,rstart=baij->rstartbs,nz;
426       PetscInt     brow,bcol,col,bs=baij->A->rmap->bs,row,grow,gcol,mbs=amat->mbs;
427       MatScalar    *v;
428 
429       ierr = PetscMalloc2(mat->cmap->N,&rsum,mat->cmap->N,&rsum2);CHKERRQ(ierr);
430       ierr = PetscMemzero(rsum,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
431       /* Amat */
432       v = amat->a; jj = amat->j;
433       for (brow=0; brow<mbs; brow++) {
434         grow = bs*(rstart + brow);
435         nz   = amat->i[brow+1] - amat->i[brow];
436         for (bcol=0; bcol<nz; bcol++) {
437           gcol = bs*(rstart + *jj); jj++;
438           for (col=0; col<bs; col++) {
439             for (row=0; row<bs; row++) {
440               vabs            = PetscAbsScalar(*v); v++;
441               rsum[gcol+col] += vabs;
442               /* stored off-diagonal block: its symmetric counterpart contributes to the row sum */
443               if (bcol > 0 && vabs > 0.0) rsum[grow+row] += vabs;
444             }
445           }
446         }
447       }
448       /* Bmat */
449       v = bmat->a; jj = bmat->j;
450       for (brow=0; brow<mbs; brow++) {
451         grow = bs*(rstart + brow);
452         nz = bmat->i[brow+1] - bmat->i[brow];
453         for (bcol=0; bcol<nz; bcol++) {
454           gcol = bs*garray[*jj]; jj++;
455           for (col=0; col<bs; col++) {
456             for (row=0; row<bs; row++) {
457               vabs            = PetscAbsScalar(*v); v++;
458               rsum[gcol+col] += vabs;
459               rsum[grow+row] += vabs;
460             }
461           }
462         }
463       }
464       ierr  = MPI_Allreduce(rsum,rsum2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
465       *norm = 0.0;
466       for (col=0; col<mat->cmap->N; col++) {
467         if (rsum2[col] > *norm) *norm = rsum2[col];
468       }
469       ierr = PetscFree2(rsum,rsum2);CHKERRQ(ierr);
470     } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support for this norm yet");
471   }
472   PetscFunctionReturn(0);
473 }
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "MatAssemblyBegin_MPISBAIJ"
477 PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat,MatAssemblyType mode)
478 {
479   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
480   PetscErrorCode ierr;
481   PetscInt       nstash,reallocs;
482   InsertMode     addv;
483 
484   PetscFunctionBegin;
485   if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);
486 
487   /* make sure all processors are either in INSERT_VALUES or ADD_VALUES mode */
488   ierr = MPI_Allreduce((PetscEnum*)&mat->insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
489   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted values while others added values");
490   mat->insertmode = addv; /* in case this processor had no cache */
491 
492   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
493   ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
494   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
495   ierr = PetscInfo2(mat,"Stash has %D entries,uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
496   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
497   ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
498   PetscFunctionReturn(0);
499 }
500 
501 #undef __FUNCT__
502 #define __FUNCT__ "MatAssemblyEnd_MPISBAIJ"
503 PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat,MatAssemblyType mode)
504 {
505   Mat_MPISBAIJ   *baij=(Mat_MPISBAIJ*)mat->data;
506   Mat_SeqSBAIJ   *a   =(Mat_SeqSBAIJ*)baij->A->data;
507   PetscErrorCode ierr;
508   PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
509   PetscInt       *row,*col;
510   PetscBool      other_disassembled;
511   PetscMPIInt    n;
512   PetscBool      r1,r2,r3;
513   MatScalar      *val;
514   InsertMode     addv = mat->insertmode;
515 
516   /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
517   PetscFunctionBegin;
518   if (!baij->donotstash &&  !mat->nooffprocentries) {
519     while (1) {
520       ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
521       if (!flg) break;
522 
523       for (i=0; i<n;) {
524         /* Now identify the consecutive vals belonging to the same row */
525         for (j=i,rstart=row[j]; j<n; j++) {
526           if (row[j] != rstart) break;
527         }
528         if (j < n) ncols = j-i;
529         else       ncols = n-i;
530         /* Now assemble all these values with a single function call */
531         ierr = MatSetValues_MPISBAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
532         i    = j;
533       }
534     }
535     ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
536     /* Now process the block-stash. Since the values are stashed column-oriented,
537        temporarily set the row-oriented flags to PETSC_FALSE, and restore the
538        original flags after the MatSetValuesBlocked() calls below */
539     r1 = baij->roworiented;
540     r2 = a->roworiented;
541     r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;
542 
543     baij->roworiented = PETSC_FALSE;
544     a->roworiented    = PETSC_FALSE;
545 
546     ((Mat_SeqBAIJ*)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
547     while (1) {
548       ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
549       if (!flg) break;
550 
551       for (i=0; i<n;) {
552         /* Now identify the consecutive vals belonging to the same row */
553         for (j=i,rstart=row[j]; j<n; j++) {
554           if (row[j] != rstart) break;
555         }
556         if (j < n) ncols = j-i;
557         else       ncols = n-i;
558         ierr = MatSetValuesBlocked_MPISBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,addv);CHKERRQ(ierr);
559         i    = j;
560       }
561     }
562     ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);
563 
564     baij->roworiented = r1;
565     a->roworiented    = r2;
566 
567     ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
568   }
569 
570   ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
571   ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);
572 
573   /* determine if any processor has disassembled; if so we must
574      also disassemble ourselves, so that we may reassemble. */
575   /*
576      if nonzero structure of submatrix B cannot change then we know that
577      no processor disassembled thus we can skip this stuff
578   */
579   if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
580     ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
581     if (mat->was_assembled && !other_disassembled) {
582       ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
583     }
584   }
585 
586   if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
587     ierr = MatSetUpMultiply_MPISBAIJ(mat);CHKERRQ(ierr); /* setup Mvctx and sMvctx */
588   }
589   ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
590   ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);
591 
592   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
593 
594   baij->rowvalues = 0;
595 
596   /* if no new nonzero locations are allowed in the matrix then only set the matrix state the first time through */
597   if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
598     PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
599     ierr = MPI_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
600   }
601   PetscFunctionReturn(0);
602 }
603 
604 extern PetscErrorCode MatView_SeqSBAIJ_ASCII(Mat,PetscViewer);
605 extern PetscErrorCode MatSetValues_MPIBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
606 #include <petscdraw.h>
607 #undef __FUNCT__
608 #define __FUNCT__ "MatView_MPISBAIJ_ASCIIorDraworSocket"
609 static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
610 {
611   Mat_MPISBAIJ      *baij = (Mat_MPISBAIJ*)mat->data;
612   PetscErrorCode    ierr;
613   PetscInt          bs   = mat->rmap->bs;
614   PetscMPIInt       rank = baij->rank;
615   PetscBool         iascii,isdraw;
616   PetscViewer       sviewer;
617   PetscViewerFormat format;
618 
619   PetscFunctionBegin;
620   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
621   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
622   if (iascii) {
623     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
624     if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
625       MatInfo info;
626       ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
627       ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
628       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
629       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
630       ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
631       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
632       ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
633       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
634       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
635       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
636       ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
637       ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
638       PetscFunctionReturn(0);
639     } else if (format == PETSC_VIEWER_ASCII_INFO) {
640       ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
641       PetscFunctionReturn(0);
642     } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
643       PetscFunctionReturn(0);
644     }
645   }
646 
647   if (isdraw) {
648     PetscDraw draw;
649     PetscBool isnull;
650     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
651     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
652   }
653 
654   {
655     /* assemble the entire matrix onto the first processor. */
656     Mat          A;
657     Mat_SeqSBAIJ *Aloc;
658     Mat_SeqBAIJ  *Bloc;
659     PetscInt     M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
660     MatScalar    *a;
661 
662     /* Should this be the same type as mat? */
663     ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
664     if (!rank) {
665       ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
666     } else {
667       ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
668     }
669     ierr = MatSetType(A,MATMPISBAIJ);CHKERRQ(ierr);
670     ierr = MatMPISBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
671     ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
672     ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);
673 
674     /* copy over the A part */
675     Aloc = (Mat_SeqSBAIJ*)baij->A->data;
676     ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
677     ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
678 
679     for (i=0; i<mbs; i++) {
680       rvals[0] = bs*(baij->rstartbs + i);
681       for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
682       for (j=ai[i]; j<ai[i+1]; j++) {
683         col = (baij->cstartbs+aj[j])*bs;
684         for (k=0; k<bs; k++) {
685           ierr = MatSetValues_MPISBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
686           col++;
687           a += bs;
688         }
689       }
690     }
691     /* copy over the B part */
692     Bloc = (Mat_SeqBAIJ*)baij->B->data;
693     ai   = Bloc->i; aj = Bloc->j; a = Bloc->a;
694     for (i=0; i<mbs; i++) {
695 
696       rvals[0] = bs*(baij->rstartbs + i);
697       for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
698       for (j=ai[i]; j<ai[i+1]; j++) {
699         col = baij->garray[aj[j]]*bs;
700         for (k=0; k<bs; k++) {
701           ierr = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
702           col++;
703           a += bs;
704         }
705       }
706     }
707     ierr = PetscFree(rvals);CHKERRQ(ierr);
708     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
709     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
710     /*
711        Everyone has to participate in drawing the matrix since the graphics waits are
712        synchronized across all processors that share the PetscDraw object
713     */
714     ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
715     if (!rank) {
716         ierr = MatView_SeqSBAIJ_ASCII(((Mat_MPISBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
717     }
718     ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
719     ierr = MatDestroy(&A);CHKERRQ(ierr);
720   }
721   PetscFunctionReturn(0);
722 }
723 
724 #undef __FUNCT__
725 #define __FUNCT__ "MatView_MPISBAIJ"
726 PetscErrorCode MatView_MPISBAIJ(Mat mat,PetscViewer viewer)
727 {
728   PetscErrorCode ierr;
729   PetscBool      iascii,isdraw,issocket,isbinary;
730 
731   PetscFunctionBegin;
732   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
733   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
734   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
735   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
736   if (iascii || isdraw || issocket || isbinary) {
737     ierr = MatView_MPISBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
738   }
739   PetscFunctionReturn(0);
740 }
741 
742 #undef __FUNCT__
743 #define __FUNCT__ "MatDestroy_MPISBAIJ"
744 PetscErrorCode MatDestroy_MPISBAIJ(Mat mat)
745 {
746   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
747   PetscErrorCode ierr;
748 
749   PetscFunctionBegin;
750 #if defined(PETSC_USE_LOG)
751   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
752 #endif
753   ierr = MatDestroy_Redundant(&mat->redundant);CHKERRQ(ierr);
754   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
755   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
756   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
757   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
758 #if defined(PETSC_USE_CTABLE)
759   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
760 #else
761   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
762 #endif
763   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
764   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
765   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
766   ierr = VecDestroy(&baij->slvec0);CHKERRQ(ierr);
767   ierr = VecDestroy(&baij->slvec0b);CHKERRQ(ierr);
768   ierr = VecDestroy(&baij->slvec1);CHKERRQ(ierr);
769   ierr = VecDestroy(&baij->slvec1a);CHKERRQ(ierr);
770   ierr = VecDestroy(&baij->slvec1b);CHKERRQ(ierr);
771   ierr = VecScatterDestroy(&baij->sMvctx);CHKERRQ(ierr);
772   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
773   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
774   ierr = PetscFree(baij->hd);CHKERRQ(ierr);
775   ierr = VecDestroy(&baij->diag);CHKERRQ(ierr);
776   ierr = VecDestroy(&baij->bb1);CHKERRQ(ierr);
777   ierr = VecDestroy(&baij->xx1);CHKERRQ(ierr);
778 #if defined(PETSC_USE_REAL_MAT_SINGLE)
779   ierr = PetscFree(baij->setvaluescopy);CHKERRQ(ierr);
780 #endif
781   ierr = PetscFree(baij->in_loc);CHKERRQ(ierr);
782   ierr = PetscFree(baij->v_loc);CHKERRQ(ierr);
783   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
784   ierr = PetscFree(mat->data);CHKERRQ(ierr);
785 
786   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
787   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
788   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
789   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
790   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPISBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
791   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpisbaij_mpisbstrm_C",NULL);CHKERRQ(ierr);
792   PetscFunctionReturn(0);
793 }
794 
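/*
   Outline of the symmetric multiply y = A*x implemented below (and in the
   non-Hermitian MatMult_MPISBAIJ): only the upper triangular blocks are
   stored, so

     slvec1a = A_d * x_loc               (diagonal block)
     slvec0b = B^H * x_loc               (B^T in MatMult_MPISBAIJ)
     slvec0  = [ x_loc ; slvec0b ]       (x_loc copied into the leading part)

   A single ADD_VALUES scatter of slvec0 into slvec1 then delivers the needed
   ghost values of x into slvec1b and accumulates the remote B^T*x
   contributions into the owning processes' slvec1a, and finally

     y = B * slvec1b + slvec1a.
*/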
795 #undef __FUNCT__
796 #define __FUNCT__ "MatMult_MPISBAIJ_Hermitian"
797 PetscErrorCode MatMult_MPISBAIJ_Hermitian(Mat A,Vec xx,Vec yy)
798 {
799   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
800   PetscErrorCode ierr;
801   PetscInt       nt,mbs=a->mbs,bs=A->rmap->bs;
802   PetscScalar    *x,*from;
803 
804   PetscFunctionBegin;
805   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
806   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
807 
808   /* diagonal part */
809   ierr = (*a->A->ops->mult)(a->A,xx,a->slvec1a);CHKERRQ(ierr);
810   ierr = VecSet(a->slvec1b,0.0);CHKERRQ(ierr);
811 
812   /* subdiagonal part */
813   ierr = (*a->B->ops->multhermitiantranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
814 
815   /* copy x into the vec slvec0 */
816   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
817   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
818 
819   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
820   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
821   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
822 
823   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
824   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
825   /* superdiagonal part */
826   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,yy);CHKERRQ(ierr);
827   PetscFunctionReturn(0);
828 }
829 
830 #undef __FUNCT__
831 #define __FUNCT__ "MatMult_MPISBAIJ"
832 PetscErrorCode MatMult_MPISBAIJ(Mat A,Vec xx,Vec yy)
833 {
834   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
835   PetscErrorCode ierr;
836   PetscInt       nt,mbs=a->mbs,bs=A->rmap->bs;
837   PetscScalar    *x,*from;
838 
839   PetscFunctionBegin;
840   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
841   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
842 
843   /* diagonal part */
844   ierr = (*a->A->ops->mult)(a->A,xx,a->slvec1a);CHKERRQ(ierr);
845   ierr = VecSet(a->slvec1b,0.0);CHKERRQ(ierr);
846 
847   /* subdiagonal part */
848   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
849 
850   /* copy x into the vec slvec0 */
851   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
852   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
853 
854   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
855   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
856   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
857 
858   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
859   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
860   /* superdiagonal part */
861   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,yy);CHKERRQ(ierr);
862   PetscFunctionReturn(0);
863 }
864 
865 #undef __FUNCT__
866 #define __FUNCT__ "MatMult_MPISBAIJ_2comm"
867 PetscErrorCode MatMult_MPISBAIJ_2comm(Mat A,Vec xx,Vec yy)
868 {
869   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
870   PetscErrorCode ierr;
871   PetscInt       nt;
872 
873   PetscFunctionBegin;
874   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
875   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
876 
877   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
878   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
879 
880   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
881   /* do diagonal part */
882   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
883   /* do superdiagonal part */
884   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
885   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
886   /* do subdiagonal part */
887   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
888   ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
889   ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
890   PetscFunctionReturn(0);
891 }
892 
893 #undef __FUNCT__
894 #define __FUNCT__ "MatMultAdd_MPISBAIJ"
895 PetscErrorCode MatMultAdd_MPISBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
896 {
897   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
898   PetscErrorCode ierr;
899   PetscInt       mbs=a->mbs,bs=A->rmap->bs;
900   PetscScalar    *x,*from,zero=0.0;
901 
902   PetscFunctionBegin;
903   /*
904   PetscSynchronizedPrintf(PetscObjectComm((PetscObject)A)," MatMultAdd is called ...\n");
905   PetscSynchronizedFlush(PetscObjectComm((PetscObject)A),PETSC_STDOUT);
906   */
907   /* diagonal part */
908   ierr = (*a->A->ops->multadd)(a->A,xx,yy,a->slvec1a);CHKERRQ(ierr);
909   ierr = VecSet(a->slvec1b,zero);CHKERRQ(ierr);
910 
911   /* subdiagonal part */
912   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
913 
914   /* copy x into the vec slvec0 */
915   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
916   ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
917   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
918   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
919 
920   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
921   ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
922   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
923 
924   /* superdiagonal part */
925   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,zz);CHKERRQ(ierr);
926   PetscFunctionReturn(0);
927 }
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "MatMultAdd_MPISBAIJ_2comm"
931 PetscErrorCode MatMultAdd_MPISBAIJ_2comm(Mat A,Vec xx,Vec yy,Vec zz)
932 {
933   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
934   PetscErrorCode ierr;
935 
936   PetscFunctionBegin;
937   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
938   /* do diagonal part */
939   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
940   /* do superdiagonal part */
941   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
942   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
943 
944   /* do subdiagonal part */
945   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
946   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
947   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
948   PetscFunctionReturn(0);
949 }
950 
951 /*
952   This only works correctly for square matrices where the subblock A->A is the
953    diagonal block
954 */
955 #undef __FUNCT__
956 #define __FUNCT__ "MatGetDiagonal_MPISBAIJ"
957 PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A,Vec v)
958 {
959   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
960   PetscErrorCode ierr;
961 
962   PetscFunctionBegin;
963   /* if (a->rmap->N != a->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */
964   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
965   PetscFunctionReturn(0);
966 }
967 
968 #undef __FUNCT__
969 #define __FUNCT__ "MatScale_MPISBAIJ"
970 PetscErrorCode MatScale_MPISBAIJ(Mat A,PetscScalar aa)
971 {
972   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
973   PetscErrorCode ierr;
974 
975   PetscFunctionBegin;
976   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
977   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
978   PetscFunctionReturn(0);
979 }
980 
981 #undef __FUNCT__
982 #define __FUNCT__ "MatGetRow_MPISBAIJ"
983 PetscErrorCode MatGetRow_MPISBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
984 {
985   Mat_MPISBAIJ   *mat = (Mat_MPISBAIJ*)matin->data;
986   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
987   PetscErrorCode ierr;
988   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
989   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
990   PetscInt       *cmap,*idx_p,cstart = mat->rstartbs;
991 
992   PetscFunctionBegin;
993   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
994   mat->getrowactive = PETSC_TRUE;
995 
996   if (!mat->rowvalues && (idx || v)) {
997     /*
998         allocate enough space to hold information from the longest row.
999     */
1000     Mat_SeqSBAIJ *Aa = (Mat_SeqSBAIJ*)mat->A->data;
1001     Mat_SeqBAIJ  *Ba = (Mat_SeqBAIJ*)mat->B->data;
1002     PetscInt     max = 1,mbs = mat->mbs,tmp;
1003     for (i=0; i<mbs; i++) {
1004       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; /* row length */
1005       if (max < tmp) max = tmp;
1006     }
1007     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1008   }
1009 
1010   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1011   lrow = row - brstart;  /* local row index */
1012 
1013   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1014   if (!v)   {pvA = 0; pvB = 0;}
1015   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1016   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1017   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1018   nztot = nzA + nzB;
1019 
1020   cmap = mat->garray;
1021   if (v  || idx) {
1022     if (nztot) {
1023       /* Sort by increasing column numbers, assuming A and B already sorted */
1024       PetscInt imark = -1;
1025       if (v) {
1026         *v = v_p = mat->rowvalues;
1027         for (i=0; i<nzB; i++) {
1028           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1029           else break;
1030         }
1031         imark = i;
1032         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1033         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1034       }
1035       if (idx) {
1036         *idx = idx_p = mat->rowindices;
1037         if (imark > -1) {
1038           for (i=0; i<imark; i++) {
1039             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1040           }
1041         } else {
1042           for (i=0; i<nzB; i++) {
1043             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1044             else break;
1045           }
1046           imark = i;
1047         }
1048         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1049         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
1050       }
1051     } else {
1052       if (idx) *idx = 0;
1053       if (v)   *v   = 0;
1054     }
1055   }
1056   *nz  = nztot;
1057   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1058   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1059   PetscFunctionReturn(0);
1060 }
1061 
1062 #undef __FUNCT__
1063 #define __FUNCT__ "MatRestoreRow_MPISBAIJ"
1064 PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1065 {
1066   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
1067 
1068   PetscFunctionBegin;
1069   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
1070   baij->getrowactive = PETSC_FALSE;
1071   PetscFunctionReturn(0);
1072 }
1073 
1074 #undef __FUNCT__
1075 #define __FUNCT__ "MatGetRowUpperTriangular_MPISBAIJ"
1076 PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A)
1077 {
1078   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ*)A->data;
1079   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data;
1080 
1081   PetscFunctionBegin;
1082   aA->getrow_utriangular = PETSC_TRUE;
1083   PetscFunctionReturn(0);
1084 }
1085 #undef __FUNCT__
1086 #define __FUNCT__ "MatRestoreRowUpperTriangular_MPISBAIJ"
1087 PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A)
1088 {
1089   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ*)A->data;
1090   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data;
1091 
1092   PetscFunctionBegin;
1093   aA->getrow_utriangular = PETSC_FALSE;
1094   PetscFunctionReturn(0);
1095 }
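/*
   Typical calling pattern (a sketch, not part of this file; mat and row are
   assumed to be declared and assembled elsewhere): for SBAIJ matrices
   MatGetRow() only returns the stored upper triangular part of the row and
   must be bracketed by the calls below.

     PetscInt          ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;

     MatGetRowUpperTriangular(mat);
     MatGetRow(mat,row,&ncols,&cols,&vals);
     ...
     MatRestoreRow(mat,row,&ncols,&cols,&vals);
     MatRestoreRowUpperTriangular(mat);
*/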
1096 
1097 #undef __FUNCT__
1098 #define __FUNCT__ "MatRealPart_MPISBAIJ"
1099 PetscErrorCode MatRealPart_MPISBAIJ(Mat A)
1100 {
1101   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1102   PetscErrorCode ierr;
1103 
1104   PetscFunctionBegin;
1105   ierr = MatRealPart(a->A);CHKERRQ(ierr);
1106   ierr = MatRealPart(a->B);CHKERRQ(ierr);
1107   PetscFunctionReturn(0);
1108 }
1109 
1110 #undef __FUNCT__
1111 #define __FUNCT__ "MatImaginaryPart_MPISBAIJ"
1112 PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A)
1113 {
1114   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1115   PetscErrorCode ierr;
1116 
1117   PetscFunctionBegin;
1118   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
1119   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
1120   PetscFunctionReturn(0);
1121 }
1122 
1123 #undef __FUNCT__
1124 #define __FUNCT__ "MatZeroEntries_MPISBAIJ"
1125 PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A)
1126 {
1127   Mat_MPISBAIJ   *l = (Mat_MPISBAIJ*)A->data;
1128   PetscErrorCode ierr;
1129 
1130   PetscFunctionBegin;
1131   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1132   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1133   PetscFunctionReturn(0);
1134 }
1135 
1136 #undef __FUNCT__
1137 #define __FUNCT__ "MatGetInfo_MPISBAIJ"
1138 PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1139 {
1140   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)matin->data;
1141   Mat            A  = a->A,B = a->B;
1142   PetscErrorCode ierr;
1143   PetscReal      isend[5],irecv[5];
1144 
1145   PetscFunctionBegin;
1146   info->block_size = (PetscReal)matin->rmap->bs;
1147 
1148   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1149 
1150   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1151   isend[3] = info->memory;  isend[4] = info->mallocs;
1152 
1153   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1154 
1155   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1156   isend[3] += info->memory;  isend[4] += info->mallocs;
1157   if (flag == MAT_LOCAL) {
1158     info->nz_used      = isend[0];
1159     info->nz_allocated = isend[1];
1160     info->nz_unneeded  = isend[2];
1161     info->memory       = isend[3];
1162     info->mallocs      = isend[4];
1163   } else if (flag == MAT_GLOBAL_MAX) {
1164     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1165 
1166     info->nz_used      = irecv[0];
1167     info->nz_allocated = irecv[1];
1168     info->nz_unneeded  = irecv[2];
1169     info->memory       = irecv[3];
1170     info->mallocs      = irecv[4];
1171   } else if (flag == MAT_GLOBAL_SUM) {
1172     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1173 
1174     info->nz_used      = irecv[0];
1175     info->nz_allocated = irecv[1];
1176     info->nz_unneeded  = irecv[2];
1177     info->memory       = irecv[3];
1178     info->mallocs      = irecv[4];
1179   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1180   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1181   info->fill_ratio_needed = 0;
1182   info->factor_mallocs    = 0;
1183   PetscFunctionReturn(0);
1184 }
1185 
1186 #undef __FUNCT__
1187 #define __FUNCT__ "MatSetOption_MPISBAIJ"
1188 PetscErrorCode MatSetOption_MPISBAIJ(Mat A,MatOption op,PetscBool flg)
1189 {
1190   Mat_MPISBAIJ   *a  = (Mat_MPISBAIJ*)A->data;
1191   Mat_SeqSBAIJ   *aA = (Mat_SeqSBAIJ*)a->A->data;
1192   PetscErrorCode ierr;
1193 
1194   PetscFunctionBegin;
1195   switch (op) {
1196   case MAT_NEW_NONZERO_LOCATIONS:
1197   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1198   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1199   case MAT_KEEP_NONZERO_PATTERN:
1200   case MAT_NEW_NONZERO_LOCATION_ERR:
1201     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1202     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1203     break;
1204   case MAT_ROW_ORIENTED:
1205     a->roworiented = flg;
1206 
1207     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1208     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1209     break;
1210   case MAT_NEW_DIAGONALS:
1211     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1212     break;
1213   case MAT_IGNORE_OFF_PROC_ENTRIES:
1214     a->donotstash = flg;
1215     break;
1216   case MAT_USE_HASH_TABLE:
1217     a->ht_flag = flg;
1218     break;
1219   case MAT_HERMITIAN:
1220     if (!A->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call MatAssemblyEnd() first");
1221     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1222 
1223     A->ops->mult = MatMult_MPISBAIJ_Hermitian;
1224     break;
1225   case MAT_SPD:
1226     A->spd_set = PETSC_TRUE;
1227     A->spd     = flg;
1228     if (flg) {
1229       A->symmetric                  = PETSC_TRUE;
1230       A->structurally_symmetric     = PETSC_TRUE;
1231       A->symmetric_set              = PETSC_TRUE;
1232       A->structurally_symmetric_set = PETSC_TRUE;
1233     }
1234     break;
1235   case MAT_SYMMETRIC:
1236     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1237     break;
1238   case MAT_STRUCTURALLY_SYMMETRIC:
1239     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1240     break;
1241   case MAT_SYMMETRY_ETERNAL:
1242     if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix must be symmetric");
1243     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1244     break;
1245   case MAT_IGNORE_LOWER_TRIANGULAR:
1246     aA->ignore_ltriangular = flg;
1247     break;
1248   case MAT_ERROR_LOWER_TRIANGULAR:
1249     aA->ignore_ltriangular = flg;
1250     break;
1251   case MAT_GETROW_UPPERTRIANGULAR:
1252     aA->getrow_utriangular = flg;
1253     break;
1254   default:
1255     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %d",op);
1256   }
1257   PetscFunctionReturn(0);
1258 }
1259 
1260 #undef __FUNCT__
1261 #define __FUNCT__ "MatTranspose_MPISBAIJ"
1262 PetscErrorCode MatTranspose_MPISBAIJ(Mat A,MatReuse reuse,Mat *B)
1263 {
1264   PetscErrorCode ierr;
1265 
1266   PetscFunctionBegin;
1267   if (reuse == MAT_INITIAL_MATRIX || *B != A) {
1268     ierr = MatDuplicate(A,MAT_COPY_VALUES,B);CHKERRQ(ierr);
1269   }
1270   PetscFunctionReturn(0);
1271 }
1272 
1273 #undef __FUNCT__
1274 #define __FUNCT__ "MatDiagonalScale_MPISBAIJ"
1275 PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat,Vec ll,Vec rr)
1276 {
1277   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
1278   Mat            a     = baij->A, b=baij->B;
1279   PetscErrorCode ierr;
1280   PetscInt       nv,m,n;
1281   PetscBool      flg;
1282 
1283   PetscFunctionBegin;
1284   if (ll != rr) {
1285     ierr = VecEqual(ll,rr,&flg);CHKERRQ(ierr);
1286     if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"For symmetric format, left and right scaling vectors must be the same");
1287   }
1288   if (!ll) PetscFunctionReturn(0);
1289 
1290   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
1291   if (m != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"For symmetric format, local sizes %D %D must be the same",m,n);
1292 
1293   ierr = VecGetLocalSize(rr,&nv);CHKERRQ(ierr);
1294   if (nv!=n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Left and right vectors have non-conforming local sizes");
1295 
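  /* gather the off-process entries of rr (needed to column-scale the
     off-diagonal part B) into baij->lvec; the local scalings below can
     proceed while the scatter is in flight */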
1296   ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1297 
1298   /* left diagonalscale the off-diagonal part */
1299   ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1300 
1301   /* scale the diagonal part */
1302   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1303 
1304   /* right diagonalscale the off-diagonal part */
1305   ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1306   ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1307   PetscFunctionReturn(0);
1308 }
1309 
1310 #undef __FUNCT__
1311 #define __FUNCT__ "MatSetUnfactored_MPISBAIJ"
1312 PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A)
1313 {
1314   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1315   PetscErrorCode ierr;
1316 
1317   PetscFunctionBegin;
1318   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1319   PetscFunctionReturn(0);
1320 }
1321 
1322 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat,MatDuplicateOption,Mat*);
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "MatEqual_MPISBAIJ"
1326 PetscErrorCode MatEqual_MPISBAIJ(Mat A,Mat B,PetscBool  *flag)
1327 {
1328   Mat_MPISBAIJ   *matB = (Mat_MPISBAIJ*)B->data,*matA = (Mat_MPISBAIJ*)A->data;
1329   Mat            a,b,c,d;
1330   PetscBool      flg;
1331   PetscErrorCode ierr;
1332 
1333   PetscFunctionBegin;
1334   a = matA->A; b = matA->B;
1335   c = matB->A; d = matB->B;
1336 
1337   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1338   if (flg) {
1339     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1340   }
1341   ierr = MPI_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1342   PetscFunctionReturn(0);
1343 }
1344 
1345 #undef __FUNCT__
1346 #define __FUNCT__ "MatCopy_MPISBAIJ"
1347 PetscErrorCode MatCopy_MPISBAIJ(Mat A,Mat B,MatStructure str)
1348 {
1349   PetscErrorCode ierr;
1350   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1351   Mat_MPISBAIJ   *b = (Mat_MPISBAIJ*)B->data;
1352 
1353   PetscFunctionBegin;
1354   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1355   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1356     ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1357     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1358     ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1359   } else {
1360     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1361     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1362   }
1363   PetscFunctionReturn(0);
1364 }
1365 
1366 #undef __FUNCT__
1367 #define __FUNCT__ "MatSetUp_MPISBAIJ"
1368 PetscErrorCode MatSetUp_MPISBAIJ(Mat A)
1369 {
1370   PetscErrorCode ierr;
1371 
1372   PetscFunctionBegin;
1373   ierr = MatMPISBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1374   PetscFunctionReturn(0);
1375 }
1376 
1377 #undef __FUNCT__
1378 #define __FUNCT__ "MatAXPY_MPISBAIJ"
1379 PetscErrorCode MatAXPY_MPISBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1380 {
1381   PetscErrorCode ierr;
1382   Mat_MPISBAIJ   *xx=(Mat_MPISBAIJ*)X->data,*yy=(Mat_MPISBAIJ*)Y->data;
1383   PetscBLASInt   bnz,one=1;
1384   Mat_SeqSBAIJ   *xa,*ya;
1385   Mat_SeqBAIJ    *xb,*yb;
1386 
1387   PetscFunctionBegin;
1388   if (str == SAME_NONZERO_PATTERN) {
1389     PetscScalar alpha = a;
1390     xa   = (Mat_SeqSBAIJ*)xx->A->data;
1391     ya   = (Mat_SeqSBAIJ*)yy->A->data;
1392     ierr = PetscBLASIntCast(xa->nz,&bnz);CHKERRQ(ierr);
1393     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,xa->a,&one,ya->a,&one));
1394     xb   = (Mat_SeqBAIJ*)xx->B->data;
1395     yb   = (Mat_SeqBAIJ*)yy->B->data;
1396     ierr = PetscBLASIntCast(xb->nz,&bnz);CHKERRQ(ierr);
1397     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,xb->a,&one,yb->a,&one));
1398     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
1399   } else {
1400     Mat      B;
1401     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
1402     if (bs != X->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrices must have same block size");
1403     ierr = MatGetRowUpperTriangular(X);CHKERRQ(ierr);
1404     ierr = MatGetRowUpperTriangular(Y);CHKERRQ(ierr);
1405     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
1406     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
1407     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
1408     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
1409     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
1410     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
1411     ierr = MatSetType(B,MATMPISBAIJ);CHKERRQ(ierr);
1412     ierr = MatAXPYGetPreallocation_SeqSBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
1413     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
1414     ierr = MatMPISBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
1415     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
1416     ierr = MatHeaderReplace(Y,B);CHKERRQ(ierr);
1417     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
1418     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
1419     ierr = MatRestoreRowUpperTriangular(X);CHKERRQ(ierr);
1420     ierr = MatRestoreRowUpperTriangular(Y);CHKERRQ(ierr);
1421   }
1422   PetscFunctionReturn(0);
1423 }
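
     /*
        Sketch of the fast path above (illustrative): when X and Y share a nonzero
        pattern, Y = Y + a*X reduces to two dense BLAS axpy calls over the stored
        value arrays of the diagonal (SeqSBAIJ) and off-diagonal (SeqBAIJ) parts,
        e.g. from user code (X and Y here are hypothetical matrices with identical
        preallocation):

          ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
     */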
1424 
1425 #undef __FUNCT__
1426 #define __FUNCT__ "MatGetSubMatrices_MPISBAIJ"
1427 PetscErrorCode MatGetSubMatrices_MPISBAIJ(Mat A,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[])
1428 {
1429   PetscErrorCode ierr;
1430   PetscInt       i;
1431   PetscBool      flg;
1432 
1433   PetscFunctionBegin;
1434   ierr = MatGetSubMatrices_MPIBAIJ(A,n,irow,icol,scall,B);CHKERRQ(ierr);
1435   for (i=0; i<n; i++) {
1436     ierr = ISEqual(irow[i],icol[i],&flg);CHKERRQ(ierr);
1437     if (!flg) { /* *B[i] is non-symmetric, set flag */
1438       ierr = MatSetOption(*B[i],MAT_SYMMETRIC,PETSC_FALSE);CHKERRQ(ierr);
1439     }
1440   }
1441   PetscFunctionReturn(0);
1442 }
1443 
1444 /* -------------------------------------------------------------------*/
1445 static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ,
1446                                        MatGetRow_MPISBAIJ,
1447                                        MatRestoreRow_MPISBAIJ,
1448                                        MatMult_MPISBAIJ,
1449                                /*  4*/ MatMultAdd_MPISBAIJ,
1450                                        MatMult_MPISBAIJ,       /* transpose versions are same as non-transpose */
1451                                        MatMultAdd_MPISBAIJ,
1452                                        0,
1453                                        0,
1454                                        0,
1455                                /* 10*/ 0,
1456                                        0,
1457                                        0,
1458                                        MatSOR_MPISBAIJ,
1459                                        MatTranspose_MPISBAIJ,
1460                                /* 15*/ MatGetInfo_MPISBAIJ,
1461                                        MatEqual_MPISBAIJ,
1462                                        MatGetDiagonal_MPISBAIJ,
1463                                        MatDiagonalScale_MPISBAIJ,
1464                                        MatNorm_MPISBAIJ,
1465                                /* 20*/ MatAssemblyBegin_MPISBAIJ,
1466                                        MatAssemblyEnd_MPISBAIJ,
1467                                        MatSetOption_MPISBAIJ,
1468                                        MatZeroEntries_MPISBAIJ,
1469                                /* 24*/ 0,
1470                                        0,
1471                                        0,
1472                                        0,
1473                                        0,
1474                                /* 29*/ MatSetUp_MPISBAIJ,
1475                                        0,
1476                                        0,
1477                                        0,
1478                                        0,
1479                                /* 34*/ MatDuplicate_MPISBAIJ,
1480                                        0,
1481                                        0,
1482                                        0,
1483                                        0,
1484                                /* 39*/ MatAXPY_MPISBAIJ,
1485                                        MatGetSubMatrices_MPISBAIJ,
1486                                        MatIncreaseOverlap_MPISBAIJ,
1487                                        MatGetValues_MPISBAIJ,
1488                                        MatCopy_MPISBAIJ,
1489                                /* 44*/ 0,
1490                                        MatScale_MPISBAIJ,
1491                                        0,
1492                                        0,
1493                                        0,
1494                                /* 49*/ 0,
1495                                        0,
1496                                        0,
1497                                        0,
1498                                        0,
1499                                /* 54*/ 0,
1500                                        0,
1501                                        MatSetUnfactored_MPISBAIJ,
1502                                        0,
1503                                        MatSetValuesBlocked_MPISBAIJ,
1504                                /* 59*/ 0,
1505                                        0,
1506                                        0,
1507                                        0,
1508                                        0,
1509                                /* 64*/ 0,
1510                                        0,
1511                                        0,
1512                                        0,
1513                                        0,
1514                                /* 69*/ MatGetRowMaxAbs_MPISBAIJ,
1515                                        0,
1516                                        0,
1517                                        0,
1518                                        0,
1519                                /* 74*/ 0,
1520                                        0,
1521                                        0,
1522                                        0,
1523                                        0,
1524                                /* 79*/ 0,
1525                                        0,
1526                                        0,
1527                                        0,
1528                                        MatLoad_MPISBAIJ,
1529                                /* 84*/ 0,
1530                                        0,
1531                                        0,
1532                                        0,
1533                                        0,
1534                                /* 89*/ 0,
1535                                        0,
1536                                        0,
1537                                        0,
1538                                        0,
1539                                /* 94*/ 0,
1540                                        0,
1541                                        0,
1542                                        0,
1543                                        0,
1544                                /* 99*/ 0,
1545                                        0,
1546                                        0,
1547                                        0,
1548                                        0,
1549                                /*104*/ 0,
1550                                        MatRealPart_MPISBAIJ,
1551                                        MatImaginaryPart_MPISBAIJ,
1552                                        MatGetRowUpperTriangular_MPISBAIJ,
1553                                        MatRestoreRowUpperTriangular_MPISBAIJ,
1554                                /*109*/ 0,
1555                                        0,
1556                                        0,
1557                                        0,
1558                                        0,
1559                                /*114*/ 0,
1560                                        0,
1561                                        0,
1562                                        0,
1563                                        0,
1564                                /*119*/ 0,
1565                                        0,
1566                                        0,
1567                                        0,
1568                                        0,
1569                                /*124*/ 0,
1570                                        0,
1571                                        0,
1572                                        0,
1573                                        0,
1574                                /*129*/ 0,
1575                                        0,
1576                                        0,
1577                                        0,
1578                                        0,
1579                                /*134*/ 0,
1580                                        0,
1581                                        0,
1582                                        0,
1583                                        0,
1584                                /*139*/ 0,
1585                                        0,
1586                                        0,
1587                                        0,
1588                                        0,
1589                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPISBAIJ
1590 };
1591 
1592 #undef __FUNCT__
1593 #define __FUNCT__ "MatGetDiagonalBlock_MPISBAIJ"
1594 PetscErrorCode  MatGetDiagonalBlock_MPISBAIJ(Mat A,Mat *a)
1595 {
1596   PetscFunctionBegin;
1597   *a = ((Mat_MPISBAIJ*)A->data)->A;
1598   PetscFunctionReturn(0);
1599 }
1600 
1601 #undef __FUNCT__
1602 #define __FUNCT__ "MatMPISBAIJSetPreallocation_MPISBAIJ"
1603 PetscErrorCode  MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
1604 {
1605   Mat_MPISBAIJ   *b;
1606   PetscErrorCode ierr;
1607   PetscInt       i,mbs,Mbs;
1608 
1609   PetscFunctionBegin;
1610   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
1611   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
1612   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
1613   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
1614 
1615   b   = (Mat_MPISBAIJ*)B->data;
1616   mbs = B->rmap->n/bs;
1617   Mbs = B->rmap->N/bs;
1618   if (mbs*bs != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of local rows %D must be divisible by blocksize %D",B->rmap->n,bs);
1619 
1620   B->rmap->bs = bs;
1621   b->bs2      = bs*bs;
1622   b->mbs      = mbs;
1623   b->Mbs      = Mbs;
1624   b->nbs      = B->cmap->n/bs;
1625   b->Nbs      = B->cmap->N/bs;
1626 
1627   for (i=0; i<=b->size; i++) {
1628     b->rangebs[i] = B->rmap->range[i]/bs;
1629   }
1630   b->rstartbs = B->rmap->rstart/bs;
1631   b->rendbs   = B->rmap->rend/bs;
1632 
1633   b->cstartbs = B->cmap->rstart/bs;
1634   b->cendbs   = B->cmap->rend/bs;
1635 
1636   if (!B->preallocated) {
1637     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
1638     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
1639     ierr = MatSetType(b->A,MATSEQSBAIJ);CHKERRQ(ierr);
1640     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
1641     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
1642     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
1643     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
1644     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
1645     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
1646   }
1647 
1648   ierr = MatSeqSBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
1649   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
1650 
1651   B->preallocated = PETSC_TRUE;
1652   PetscFunctionReturn(0);
1653 }
1654 
1655 #undef __FUNCT__
1656 #define __FUNCT__ "MatMPISBAIJSetPreallocationCSR_MPISBAIJ"
1657 PetscErrorCode MatMPISBAIJSetPreallocationCSR_MPISBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
1658 {
1659   PetscInt       m,rstart,cstart,cend;
1660   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
1661   const PetscInt *JJ    =0;
1662   PetscScalar    *values=0;
1663   PetscErrorCode ierr;
1664 
1665   PetscFunctionBegin;
1666   if (bs < 1) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_OUTOFRANGE,"Invalid block size specified, must be positive but it is %D",bs);
1667   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
1668   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
1669   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
1670   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
1671   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
1672   m      = B->rmap->n/bs;
1673   rstart = B->rmap->rstart/bs;
1674   cstart = B->cmap->rstart/bs;
1675   cend   = B->cmap->rend/bs;
1676 
1677   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
1678   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
1679   for (i=0; i<m; i++) {
1680     nz = ii[i+1] - ii[i];
1681     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
1682     nz_max = PetscMax(nz_max,nz);
1683     JJ     = jj + ii[i];
1684     for (j=0; j<nz; j++) {
1685       if (*JJ >= cstart) break;
1686       JJ++;
1687     }
1688     d = 0;
1689     for (; j<nz; j++) {
1690       if (*JJ++ >= cend) break;
1691       d++;
1692     }
1693     d_nnz[i] = d;
1694     o_nnz[i] = nz - d;
1695   }
1696   ierr = MatMPISBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
1697   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
1698 
1699   values = (PetscScalar*)V;
1700   if (!values) {
1701     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
1702     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
1703   }
1704   for (i=0; i<m; i++) {
1705     PetscInt          row    = i + rstart;
1706     PetscInt          ncols  = ii[i+1] - ii[i];
1707     const PetscInt    *icols = jj + ii[i];
1708     const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
1709     ierr = MatSetValuesBlocked_MPISBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
1710   }
1711 
1712   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
1713   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1714   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1715   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
1716   PetscFunctionReturn(0);
1717 }
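
     /*
        Worked sketch of the d_nnz/o_nnz split above (the numbers are illustrative
        assumptions): with bs = 1 and a process owning block columns
        [cstart,cend) = [3,6), a row with column indices {1,3,5,9} yields
        d_nnz = 2 (columns 3 and 5) and o_nnz = nz - d = 2 (columns 1 and 9);
        note that columns below cstart are counted as off-diagonal.
     */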
1718 
1719 #if defined(PETSC_HAVE_MUMPS)
1720 PETSC_EXTERN PetscErrorCode MatGetFactor_sbaij_mumps(Mat,MatFactorType,Mat*);
1721 #endif
1722 #if defined(PETSC_HAVE_PASTIX)
1723 PETSC_EXTERN PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat,MatFactorType,Mat*);
1724 #endif
1725 
1726 /*MC
1727    MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices,
1728    based on block compressed sparse row format.  Only the upper triangular portion of the "diagonal" portion of
1729    the matrix is stored.
1730 
1731   For complex numbers by default this matrix is symmetric, NOT Hermitian symmetric. To make it Hermitian symmetric you
1732   can call MatSetOption(A,MAT_HERMITIAN,PETSC_TRUE).
1733 
1734    Options Database Keys:
1735 . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to MatSetFromOptions()
1736 
1737   Level: beginner
1738 
1739 .seealso: MatCreateMPISBAIJ
1740 M*/
1741 
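     /*
        A minimal creation sketch for this type (illustrative only; the communicator,
        sizes, block size, and preallocation numbers are assumptions, not taken from
        this file):

          Mat A;
          ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
          ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,100,100);CHKERRQ(ierr);
          ierr = MatSetType(A,MATMPISBAIJ);CHKERRQ(ierr);
          ierr = MatMPISBAIJSetPreallocation(A,2,5,NULL,2,NULL);CHKERRQ(ierr);
          ... insert entries in the upper triangle with MatSetValues(), then ...
          ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
          ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     */
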
1742 PETSC_EXTERN PetscErrorCode MatConvert_MPISBAIJ_MPISBSTRM(Mat,MatType,MatReuse,Mat*);
1743 
1744 #undef __FUNCT__
1745 #define __FUNCT__ "MatCreate_MPISBAIJ"
1746 PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B)
1747 {
1748   Mat_MPISBAIJ   *b;
1749   PetscErrorCode ierr;
1750   PetscBool      flg;
1751 
1752   PetscFunctionBegin;
1753   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
1754   B->data = (void*)b;
1755   ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1756 
1757   B->ops->destroy = MatDestroy_MPISBAIJ;
1758   B->ops->view    = MatView_MPISBAIJ;
1759   B->assembled    = PETSC_FALSE;
1760   B->insertmode   = NOT_SET_VALUES;
1761 
1762   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
1763   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
1764 
1765   /* build local table of row and column ownerships */
1766   ierr = PetscMalloc1((b->size+2),&b->rangebs);CHKERRQ(ierr);
1767 
1768   /* build cache for off array entries formed */
1769   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
1770 
1771   b->donotstash  = PETSC_FALSE;
1772   b->colmap      = NULL;
1773   b->garray      = NULL;
1774   b->roworiented = PETSC_TRUE;
1775 
1776   /* stuff used in block assembly */
1777   b->barray = 0;
1778 
1779   /* stuff used for matrix vector multiply */
1780   b->lvec    = 0;
1781   b->Mvctx   = 0;
1782   b->slvec0  = 0;
1783   b->slvec0b = 0;
1784   b->slvec1  = 0;
1785   b->slvec1a = 0;
1786   b->slvec1b = 0;
1787   b->sMvctx  = 0;
1788 
1789   /* stuff for MatGetRow() */
1790   b->rowindices   = 0;
1791   b->rowvalues    = 0;
1792   b->getrowactive = PETSC_FALSE;
1793 
1794   /* hash table stuff */
1795   b->ht           = 0;
1796   b->hd           = 0;
1797   b->ht_size      = 0;
1798   b->ht_flag      = PETSC_FALSE;
1799   b->ht_fact      = 0;
1800   b->ht_total_ct  = 0;
1801   b->ht_insert_ct = 0;
1802 
1803   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
1804   b->ijonly = PETSC_FALSE;
1805 
1806   b->in_loc = 0;
1807   b->v_loc  = 0;
1808   b->n_loc  = 0;
1809   ierr      = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPISBAIJ matrix 1","Mat");CHKERRQ(ierr);
1810   ierr      = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",PETSC_FALSE,&flg,NULL);CHKERRQ(ierr);
1811   if (flg) {
1812     PetscReal fact = 1.39;
1813     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
1814     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
1815     if (fact <= 1.0) fact = 1.39;
1816     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
1817     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
1818   }
1819   ierr = PetscOptionsEnd();CHKERRQ(ierr);
1820 
1821 #if defined(PETSC_HAVE_PASTIX)
1822   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_pastix_C",MatGetFactor_mpisbaij_pastix);CHKERRQ(ierr);
1823 #endif
1824 #if defined(PETSC_HAVE_MUMPS)
1825   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_mumps_C",MatGetFactor_sbaij_mumps);CHKERRQ(ierr);
1826 #endif
1827   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPISBAIJ);CHKERRQ(ierr);
1828   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPISBAIJ);CHKERRQ(ierr);
1829   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPISBAIJ);CHKERRQ(ierr);
1830   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPISBAIJSetPreallocation_C",MatMPISBAIJSetPreallocation_MPISBAIJ);CHKERRQ(ierr);
1831   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPISBAIJSetPreallocationCSR_C",MatMPISBAIJSetPreallocationCSR_MPISBAIJ);CHKERRQ(ierr);
1832   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpisbaij_mpisbstrm_C",MatConvert_MPISBAIJ_MPISBSTRM);CHKERRQ(ierr);
1833 
1834   B->symmetric                  = PETSC_TRUE;
1835   B->structurally_symmetric     = PETSC_TRUE;
1836   B->symmetric_set              = PETSC_TRUE;
1837   B->structurally_symmetric_set = PETSC_TRUE;
1838 
1839   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPISBAIJ);CHKERRQ(ierr);
1840   PetscFunctionReturn(0);
1841 }
1842 
1843 /*MC
1844    MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices.
1845 
1846    This matrix type is identical to MATSEQSBAIJ when constructed with a single process communicator,
1847    and MATMPISBAIJ otherwise.
1848 
1849    Options Database Keys:
1850 . -mat_type sbaij - sets the matrix type to "sbaij" during a call to MatSetFromOptions()
1851 
1852   Level: beginner
1853 
1854 .seealso: MatCreateMPISBAIJ,MATSEQSBAIJ,MATMPISBAIJ
1855 M*/
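
     /*
        Dispatch sketch (illustrative): setting the generic type on an existing Mat,

          ierr = MatSetType(A,MATSBAIJ);CHKERRQ(ierr);

        yields MATSEQSBAIJ on a one-process communicator and MATMPISBAIJ otherwise,
        as described above.
     */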
1856 
1857 #undef __FUNCT__
1858 #define __FUNCT__ "MatMPISBAIJSetPreallocation"
1859 /*@C
1860    MatMPISBAIJSetPreallocation - For good matrix assembly performance
1861    the user should preallocate the matrix storage by setting the parameters
1862    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
1863    performance can be increased by more than a factor of 50.
1864 
1865    Collective on Mat
1866 
1867    Input Parameters:
1868 +  B - the matrix
1869 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
1870           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
1871 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
1872            submatrix  (same for all local rows)
1873 .  d_nnz - array containing the number of block nonzeros in the various block rows
1874            in the upper triangular and diagonal part of the diagonal portion of the local submatrix
1875            (possibly different for each block row) or NULL.  If you plan to factor the matrix you must leave room
1876            for the diagonal entry and set a value even if it is zero.
1877 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
1878            submatrix (same for all local rows).
1879 -  o_nnz - array containing the number of nonzeros in the various block rows of the
1880            off-diagonal portion of the local submatrix that is right of the diagonal
1881            (possibly different for each block row) or NULL.
1882 
1883 
1884    Options Database Keys:
1885 +   -mat_no_unroll - uses code that does not unroll the loops in the
1886                      block calculations (much slower)
1887 -   -mat_block_size - size of the blocks to use
1888 
1889    Notes:
1890 
1891    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
1892    then it must be used on all processors that share the object for that argument.
1893 
1894    If the *_nnz parameter is given then the *_nz parameter is ignored
1895 
1896    Storage Information:
1897    For a square global matrix we define each processor's diagonal portion
1898    to be its local rows and the corresponding columns (a square submatrix);
1899    each processor's off-diagonal portion encompasses the remainder of the
1900    local matrix (a rectangular submatrix).
1901 
1902    The user can specify preallocated storage for the diagonal part of
1903    the local submatrix with either d_nz or d_nnz (not both).  Set
1904    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
1905    memory allocation.  Likewise, specify preallocated storage for the
1906    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
1907 
1908    You can call MatGetInfo() to get information on how effective the preallocation was;
1909    for example the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
1910    You can also run with the option -info and look for messages with the string
1911    malloc in them to see if additional memory allocation was needed.
1912 
1913    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
1914    the figure below we depict these three local rows and all columns (0-11).
1915 
1916 .vb
1917            0 1 2 3 4 5 6 7 8 9 10 11
1918           --------------------------
1919    row 3  |. . . d d d o o o o  o  o
1920    row 4  |. . . d d d o o o o  o  o
1921    row 5  |. . . d d d o o o o  o  o
1922           --------------------------
1923 .ve
1924 
1925    Thus, any entries in the d locations are stored in the d (diagonal)
1926    submatrix, and any entries in the o locations are stored in the
1927    o (off-diagonal) submatrix.  Note that the d matrix is stored in
1928    MatSeqSBAIJ format and the o submatrix in MATSEQBAIJ format.
1929 
1930    Now d_nz should indicate the number of block nonzeros per row in the upper triangular
1931    plus the diagonal part of the d matrix,
1932    and o_nz should indicate the number of block nonzeros per row in the o matrix.
1933 
1934    In general, for PDE problems in which most nonzeros are near the diagonal,
1935    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
1936    or you will get TERRIBLE performance; see the users' manual chapter on
1937    matrices.
1938 
1939    Level: intermediate
1940 
1941 .keywords: matrix, block, aij, compressed row, sparse, parallel
1942 
1943 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateBAIJ(), PetscSplitOwnership()
1944 @*/
1945 PetscErrorCode  MatMPISBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
1946 {
1947   PetscErrorCode ierr;
1948 
1949   PetscFunctionBegin;
1950   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
1951   PetscValidType(B,1);
1952   PetscValidLogicalCollectiveInt(B,bs,2);
1953   ierr = PetscTryMethod(B,"MatMPISBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
1954   PetscFunctionReturn(0);
1955 }
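
     /*
        Preallocation sketch for the figure in the man page above (values are
        assumptions for bs = 1): for the three local rows 3,4,5 only the upper
        triangle of the diagonal block is stored, so

          PetscInt d_nnz[3] = {3,2,1},o_nnz[3] = {6,6,6};
          ierr = MatMPISBAIJSetPreallocation(B,1,0,d_nnz,0,o_nnz);CHKERRQ(ierr);

        row 3 keeps columns 3,4,5; row 4 keeps 4,5; row 5 keeps 5; all six
        off-diagonal columns 6-11 lie to the right and are counted in o_nnz.
     */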
1956 
1957 #undef __FUNCT__
1958 #define __FUNCT__ "MatCreateSBAIJ"
1959 /*@C
1960    MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format
1961    (block compressed row).  For good matrix assembly performance
1962    the user should preallocate the matrix storage by setting the parameters
1963    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
1964    performance can be increased by more than a factor of 50.
1965 
1966    Collective on MPI_Comm
1967 
1968    Input Parameters:
1969 +  comm - MPI communicator
1970 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
1971           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
1972 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
1973            This value should be the same as the local size used in creating the
1974            y vector for the matrix-vector product y = Ax.
1975 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
1976            This value should be the same as the local size used in creating the
1977            x vector for the matrix-vector product y = Ax.
1978 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
1979 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
1980 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
1981            submatrix  (same for all local rows)
1982 .  d_nnz - array containing the number of block nonzeros in the various block rows
1983            in the upper triangular portion of the diagonal portion of the local submatrix
1984            (possibly different for each block row) or NULL.
1985            If you plan to factor the matrix you must leave room for the diagonal entry and
1986            set its value even if it is zero.
1987 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
1988            submatrix (same for all local rows).
1989 -  o_nnz - array containing the number of nonzeros in the various block rows of the
1990            off-diagonal portion of the local submatrix (possibly different for
1991            each block row) or NULL.
1992 
1993    Output Parameter:
1994 .  A - the matrix
1995 
1996    Options Database Keys:
1997 +   -mat_no_unroll - uses code that does not unroll the loops in the
1998                      block calculations (much slower)
1999 .   -mat_block_size - size of the blocks to use
2000 -   -mat_mpi - use the parallel matrix data structures even on one processor
2001                (defaults to using SeqSBAIJ format on one processor)
2002 
2003    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
2004    MatXXXXSetPreallocation() paradigm instead of this routine directly.
2005    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
2006 
2007    Notes:
2008    The number of rows and columns must be divisible by blocksize.
2009    This matrix type does not support complex Hermitian operations.
2010 
2011    The user MUST specify either the local or global matrix dimensions
2012    (possibly both).
2013 
2014    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
2015    then it must be used on all processors that share the object for that argument.
2016 
2017    If the *_nnz parameter is given then the *_nz parameter is ignored
2018 
2019    Storage Information:
2020    For a square global matrix we define each processor's diagonal portion
2021    to be its local rows and the corresponding columns (a square submatrix);
2022    each processor's off-diagonal portion encompasses the remainder of the
2023    local matrix (a rectangular submatrix).
2024 
2025    The user can specify preallocated storage for the diagonal part of
2026    the local submatrix with either d_nz or d_nnz (not both).  Set
2027    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
2028    memory allocation.  Likewise, specify preallocated storage for the
2029    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
2030 
2031    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2032    the figure below we depict these three local rows and all columns (0-11).
2033 
2034 .vb
2035            0 1 2 3 4 5 6 7 8 9 10 11
2036           --------------------------
2037    row 3  |. . . d d d o o o o  o  o
2038    row 4  |. . . d d d o o o o  o  o
2039    row 5  |. . . d d d o o o o  o  o
2040           --------------------------
2041 .ve
2042 
2043    Thus, any entries in the d locations are stored in the d (diagonal)
2044    submatrix, and any entries in the o locations are stored in the
2045    o (off-diagonal) submatrix.  Note that the d matrix is stored in
2046    MatSeqSBAIJ format and the o submatrix in MATSEQBAIJ format.
2047 
2048    Now d_nz should indicate the number of block nonzeros per row in the upper triangular
2049    plus the diagonal part of the d matrix,
2050    and o_nz should indicate the number of block nonzeros per row in the o matrix.
2051    In general, for PDE problems in which most nonzeros are near the diagonal,
2052    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
2053    or you will get TERRIBLE performance; see the users' manual chapter on
2054    matrices.
2055 
2056    Level: intermediate
2057 
2058 .keywords: matrix, block, aij, compressed row, sparse, parallel
2059 
2060 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateBAIJ()
2061 @*/
2062 
2063 PetscErrorCode  MatCreateSBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
2064 {
2065   PetscErrorCode ierr;
2066   PetscMPIInt    size;
2067 
2068   PetscFunctionBegin;
2069   ierr = MatCreate(comm,A);CHKERRQ(ierr);
2070   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
2071   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2072   if (size > 1) {
2073     ierr = MatSetType(*A,MATMPISBAIJ);CHKERRQ(ierr);
2074     ierr = MatMPISBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
2075   } else {
2076     ierr = MatSetType(*A,MATSEQSBAIJ);CHKERRQ(ierr);
2077     ierr = MatSeqSBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
2078   }
2079   PetscFunctionReturn(0);
2080 }
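
     /*
        One-call sketch (illustrative; the block size, global sizes, and fill
        numbers are assumptions):

          Mat A;
          ierr = MatCreateSBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,100,100,5,NULL,2,NULL,&A);CHKERRQ(ierr);

        which performs the MatCreate()/MatSetType()/preallocation sequence above.
     */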
2081 
2082 
2083 #undef __FUNCT__
2084 #define __FUNCT__ "MatDuplicate_MPISBAIJ"
2085 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
2086 {
2087   Mat            mat;
2088   Mat_MPISBAIJ   *a,*oldmat = (Mat_MPISBAIJ*)matin->data;
2089   PetscErrorCode ierr;
2090   PetscInt       len=0,nt,bs=matin->rmap->bs,mbs=oldmat->mbs;
2091   PetscScalar    *array;
2092 
2093   PetscFunctionBegin;
2094   *newmat = 0;
2095 
2096   ierr = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
2097   ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
2098   ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
2099   ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
2100   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
2101   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
2102 
2103   mat->factortype   = matin->factortype;
2104   mat->preallocated = PETSC_TRUE;
2105   mat->assembled    = PETSC_TRUE;
2106   mat->insertmode   = NOT_SET_VALUES;
2107 
2108   a      = (Mat_MPISBAIJ*)mat->data;
2109   a->bs2 = oldmat->bs2;
2110   a->mbs = oldmat->mbs;
2111   a->nbs = oldmat->nbs;
2112   a->Mbs = oldmat->Mbs;
2113   a->Nbs = oldmat->Nbs;
2114 
2115 
2116   a->size         = oldmat->size;
2117   a->rank         = oldmat->rank;
2118   a->donotstash   = oldmat->donotstash;
2119   a->roworiented  = oldmat->roworiented;
2120   a->rowindices   = 0;
2121   a->rowvalues    = 0;
2122   a->getrowactive = PETSC_FALSE;
2123   a->barray       = 0;
2124   a->rstartbs     = oldmat->rstartbs;
2125   a->rendbs       = oldmat->rendbs;
2126   a->cstartbs     = oldmat->cstartbs;
2127   a->cendbs       = oldmat->cendbs;
2128 
2129   /* hash table stuff */
2130   a->ht           = 0;
2131   a->hd           = 0;
2132   a->ht_size      = 0;
2133   a->ht_flag      = oldmat->ht_flag;
2134   a->ht_fact      = oldmat->ht_fact;
2135   a->ht_total_ct  = 0;
2136   a->ht_insert_ct = 0;
2137 
2138   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+2)*sizeof(PetscInt));CHKERRQ(ierr);
2139   if (oldmat->colmap) {
2140 #if defined(PETSC_USE_CTABLE)
2141     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
2142 #else
2143     ierr = PetscMalloc1((a->Nbs),&a->colmap);CHKERRQ(ierr);
2144     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
2145     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
2146 #endif
2147   } else a->colmap = 0;
2148 
2149   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
2150     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
2151     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
2152     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
2153   } else a->garray = 0;
2154 
2155   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
2156   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
2157   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
2158   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
2159   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
2160 
2161   ierr =  VecDuplicate(oldmat->slvec0,&a->slvec0);CHKERRQ(ierr);
2162   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0);CHKERRQ(ierr);
2163   ierr =  VecDuplicate(oldmat->slvec1,&a->slvec1);CHKERRQ(ierr);
2164   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1);CHKERRQ(ierr);
2165 
2166   ierr = VecGetLocalSize(a->slvec1,&nt);CHKERRQ(ierr);
2167   ierr = VecGetArray(a->slvec1,&array);CHKERRQ(ierr);
2168   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs*mbs,array,&a->slvec1a);CHKERRQ(ierr);
2169   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nt-bs*mbs,array+bs*mbs,&a->slvec1b);CHKERRQ(ierr);
2170   ierr = VecRestoreArray(a->slvec1,&array);CHKERRQ(ierr);
2171   ierr = VecGetArray(a->slvec0,&array);CHKERRQ(ierr);
2172   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nt-bs*mbs,array+bs*mbs,&a->slvec0b);CHKERRQ(ierr);
2173   ierr = VecRestoreArray(a->slvec0,&array);CHKERRQ(ierr);
2174   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0);CHKERRQ(ierr);
2175   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1);CHKERRQ(ierr);
2176   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0b);CHKERRQ(ierr);
2177   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1a);CHKERRQ(ierr);
2178   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1b);CHKERRQ(ierr);
2179 
2180   /* ierr =  VecScatterCopy(oldmat->sMvctx,&a->sMvctx); - not written yet, replaced by the lazy trick: */
2181   ierr      = PetscObjectReference((PetscObject)oldmat->sMvctx);CHKERRQ(ierr);
2182   a->sMvctx = oldmat->sMvctx;
2183   ierr      = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->sMvctx);CHKERRQ(ierr);
2184 
2185   ierr    =  MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
2186   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
2187   ierr    =  MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
2188   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
2189   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
2190   *newmat = mat;
2191   PetscFunctionReturn(0);
2192 }
2193 
2194 #undef __FUNCT__
2195 #define __FUNCT__ "MatLoad_MPISBAIJ"
2196 PetscErrorCode MatLoad_MPISBAIJ(Mat newmat,PetscViewer viewer)
2197 {
2198   PetscErrorCode ierr;
2199   PetscInt       i,nz,j,rstart,rend;
2200   PetscScalar    *vals,*buf;
2201   MPI_Comm       comm;
2202   MPI_Status     status;
2203   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,*sndcounts = 0,*browners,maxnz,*rowners,mmbs;
2204   PetscInt       header[4],*rowlengths = 0,M,N,m,*cols,*locrowlens;
2205   PetscInt       *procsnz = 0,jj,*mycols,*ibuf;
2206   PetscInt       bs       =1,Mbs,mbs,extra_rows;
2207   PetscInt       *dlens,*odlens,*mask,*masked1,*masked2,rowcount,odcount;
2208   PetscInt       dcount,kmax,k,nzcount,tmp,sizesset=1,grows,gcols;
2209   int            fd;
2210 
2211   PetscFunctionBegin;
2212   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
2213   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPISBAIJ matrix 2","Mat");CHKERRQ(ierr);
2214   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
2215   ierr = PetscOptionsEnd();CHKERRQ(ierr);
2216 
2217   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2218   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2219   if (!rank) {
2220     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
2221     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
2222     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
2223     if (header[3] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as MPISBAIJ");
2224   }
2225 
2226   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) sizesset = 0;
2227 
2228   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
2229   M    = header[1];
2230   N    = header[2];
2231 
2232   /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
2233   if (sizesset && newmat->rmap->N < 0) newmat->rmap->N = M;
2234   if (sizesset && newmat->cmap->N < 0) newmat->cmap->N = N;
2235 
2236   /* If global sizes are set, check if they are consistent with that given in the file */
2237   if (sizesset) {
2238     ierr = MatGetSize(newmat,&grows,&gcols);CHKERRQ(ierr);
2239   }
2240   if (sizesset && newmat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows: matrix in file has (%D) and input matrix has (%D)",M,grows);
2241   if (sizesset && newmat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols: matrix in file has (%D) and input matrix has (%D)",N,gcols);
2242 
2243   if (M != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only do square matrices");
2244 
2245   /*
2246      This code adds extra rows to make sure the number of rows is
2247      divisible by the blocksize
2248   */
2249   Mbs        = M/bs;
2250   extra_rows = bs - M + bs*(Mbs);
2251   if (extra_rows == bs) extra_rows = 0;
2252   else                  Mbs++;
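       /* e.g. (illustrative): M = 10, bs = 4 gives Mbs = 2 and extra_rows = 4 - 10 + 8 = 2,
          so Mbs becomes 3 and the matrix is padded to 12 rows with unit diagonal entries */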
2253   if (extra_rows && !rank) {
2254     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
2255   }
2256 
2257   /* determine ownership of all rows */
2258   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
2259     mbs = Mbs/size + ((Mbs % size) > rank);
2260     m   = mbs*bs;
2261   } else { /* User Set */
2262     m   = newmat->rmap->n;
2263     mbs = m/bs;
2264   }
2265   ierr       = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
2266   ierr       = PetscMPIIntCast(mbs,&mmbs);CHKERRQ(ierr);
2267   ierr       = MPI_Allgather(&mmbs,1,MPI_INT,rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
2268   rowners[0] = 0;
2269   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
2270   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
2271   rstart = rowners[rank];
2272   rend   = rowners[rank+1];
2273 
2274   /* distribute row lengths to all processors */
2275   ierr = PetscMalloc1((rend-rstart)*bs,&locrowlens);CHKERRQ(ierr);
2276   if (!rank) {
2277     ierr = PetscMalloc1((M+extra_rows),&rowlengths);CHKERRQ(ierr);
2278     ierr = PetscBinaryRead(fd,rowlengths,M,PETSC_INT);CHKERRQ(ierr);
2279     for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;
2280     ierr = PetscMalloc1(size,&sndcounts);CHKERRQ(ierr);
2281     for (i=0; i<size; i++) sndcounts[i] = browners[i+1] - browners[i];
2282     ierr = MPI_Scatterv(rowlengths,sndcounts,browners,MPIU_INT,locrowlens,(rend-rstart)*bs,MPIU_INT,0,comm);CHKERRQ(ierr);
2283     ierr = PetscFree(sndcounts);CHKERRQ(ierr);
2284   } else {
2285     ierr = MPI_Scatterv(0,0,0,MPIU_INT,locrowlens,(rend-rstart)*bs,MPIU_INT,0,comm);CHKERRQ(ierr);
2286   }
2287 
2288   if (!rank) {   /* procs[0] */
2289     /* calculate the number of nonzeros on each processor */
2290     ierr = PetscMalloc1(size,&procsnz);CHKERRQ(ierr);
2291     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
2292     for (i=0; i<size; i++) {
2293       for (j=rowners[i]*bs; j< rowners[i+1]*bs; j++) {
2294         procsnz[i] += rowlengths[j];
2295       }
2296     }
2297     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
2298 
2299     /* determine max buffer needed and allocate it */
2300     maxnz = 0;
2301     for (i=0; i<size; i++) {
2302       maxnz = PetscMax(maxnz,procsnz[i]);
2303     }
2304     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
2305 
2306     /* read in my part of the matrix column indices  */
2307     nz     = procsnz[0];
2308     ierr   = PetscMalloc1(nz,&ibuf);CHKERRQ(ierr);
2309     mycols = ibuf;
2310     if (size == 1) nz -= extra_rows;
2311     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
2312     if (size == 1) {
2313       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
2314     }
2315 
2316     /* read in everyone else's part (except the last proc's) and ship it off */
2317     for (i=1; i<size-1; i++) {
2318       nz   = procsnz[i];
2319       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
2320       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
2321     }
2322     /* read in the stuff for the last proc */
2323     if (size != 1) {
2324       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
2325       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
2326       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
2327       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
2328     }
2329     ierr = PetscFree(cols);CHKERRQ(ierr);
2330   } else {  /* procs[i], i>0 */
2331     /* determine buffer space needed for message */
2332     nz = 0;
2333     for (i=0; i<m; i++) nz += locrowlens[i];
2334     ierr   = PetscMalloc1(nz,&ibuf);CHKERRQ(ierr);
2335     mycols = ibuf;
2336     /* receive message of column indices */
2337     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
2338     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
2339     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Something is wrong with the file");
2340   }
2341 
2342   /* loop over local rows, determining the number of off-diagonal entries */
2343   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
2344   ierr     = PetscMalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
2345   ierr     = PetscMemzero(mask,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
2346   ierr     = PetscMemzero(masked1,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
2347   ierr     = PetscMemzero(masked2,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
2348   rowcount = 0;
2349   nzcount  = 0;
2350   for (i=0; i<mbs; i++) {
2351     dcount  = 0;
2352     odcount = 0;
2353     for (j=0; j<bs; j++) {
2354       kmax = locrowlens[rowcount];
2355       for (k=0; k<kmax; k++) {
2356         tmp = mycols[nzcount++]/bs; /* block col. index */
2357         if (!mask[tmp]) {
2358           mask[tmp] = 1;
2359           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp; /* entry in off-diag portion */
2360           else masked1[dcount++] = tmp; /* entry in diag portion */
2361         }
2362       }
2363       rowcount++;
2364     }
2365 
2366     dlens[i]  = dcount;  /* d_nzz[i] */
2367     odlens[i] = odcount; /* o_nzz[i] */
2368 
2369     /* zero out the mask elements we set */
2370     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
2371     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
2372   }
2373   if (!sizesset) {
2374     ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
2375   }
2376   ierr = MatMPISBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
2377   ierr = MatSetOption(newmat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
2378 
2379   if (!rank) {
2380     ierr = PetscMalloc1(maxnz,&buf);CHKERRQ(ierr);
2381     /* read in my part of the matrix numerical values  */
2382     nz     = procsnz[0];
2383     vals   = buf;
2384     mycols = ibuf;
2385     if (size == 1) nz -= extra_rows;
2386     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2387     if (size == 1) {
2388       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
2389     }
2390 
2391     /* insert into matrix */
2392     jj = rstart*bs;
2393     for (i=0; i<m; i++) {
2394       ierr    = MatSetValues(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
2395       mycols += locrowlens[i];
2396       vals   += locrowlens[i];
2397       jj++;
2398     }
2399 
2400     /* read in other processors (except the last one) and ship out */
2401     for (i=1; i<size-1; i++) {
2402       nz   = procsnz[i];
2403       vals = buf;
2404       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2405       ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
2406     }
2407     /* the last proc */
2408     if (size != 1) {
2409       nz   = procsnz[size-1] - extra_rows;
2410       vals = buf;
2411       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
2412       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
2413       ierr = MPI_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
2414     }
2415     ierr = PetscFree(procsnz);CHKERRQ(ierr);
2416 
2417   } else {
2418     /* receive numeric values */
2419     ierr = PetscMalloc1(nz,&buf);CHKERRQ(ierr);
2420 
2421     /* receive message of values */
2422     vals   = buf;
2423     mycols = ibuf;
2424     ierr   = MPI_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm,&status);CHKERRQ(ierr);
2425     ierr   = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr);
2426     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Something is wrong with the file");
2427 
2428     /* insert into matrix */
2429     jj = rstart*bs;
2430     for (i=0; i<m; i++) {
2431       ierr    = MatSetValues_MPISBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
2432       mycols += locrowlens[i];
2433       vals   += locrowlens[i];
2434       jj++;
2435     }
2436   }
2437 
2438   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
2439   ierr = PetscFree(buf);CHKERRQ(ierr);
2440   ierr = PetscFree(ibuf);CHKERRQ(ierr);
2441   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
2442   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
2443   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
2444   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2445   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2446   PetscFunctionReturn(0);
2447 }
2448 
2449 #undef __FUNCT__
2450 #define __FUNCT__ "MatMPISBAIJSetHashTableFactor"
2451 /*XXXXX@
2452    MatMPISBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
2453 
2454    Input Parameters:
2455 .  mat  - the matrix
2456 .  fact - factor
2457 
2458    Not Collective on Mat, each process can have a different hash factor
2459 
2460    Level: advanced
2461 
2462   Notes:
2463    This can also be set by the command line option: -mat_use_hash_table fact
2464 
2465 .keywords: matrix, hashtable, factor, HT
2466 
2467 .seealso: MatSetOption()
2468 @XXXXX*/
2469 
2470 
2471 #undef __FUNCT__
2472 #define __FUNCT__ "MatGetRowMaxAbs_MPISBAIJ"
2473 PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A,Vec v,PetscInt idx[])
2474 {
2475   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
2476   Mat_SeqBAIJ    *b = (Mat_SeqBAIJ*)(a->B)->data;
2477   PetscReal      atmp;
2478   PetscReal      *work,*svalues,*rvalues;
2479   PetscErrorCode ierr;
2480   PetscInt       i,bs,mbs,*bi,*bj,brow,j,ncols,krow,kcol,col,row,Mbs,bcol;
2481   PetscMPIInt    rank,size;
2482   PetscInt       *rowners_bs,dest,count,source;
2483   PetscScalar    *va;
2484   MatScalar      *ba;
2485   MPI_Status     stat;
2486 
2487   PetscFunctionBegin;
2488   if (idx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Send email to petsc-maint@mcs.anl.gov");
2489   ierr = MatGetRowMaxAbs(a->A,v,NULL);CHKERRQ(ierr);
2490   ierr = VecGetArray(v,&va);CHKERRQ(ierr);
2491 
2492   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2493   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2494 
2495   bs  = A->rmap->bs;
2496   mbs = a->mbs;
2497   Mbs = a->Mbs;
2498   ba  = b->a;
2499   bi  = b->i;
2500   bj  = b->j;
2501 
2502   /* find ownerships */
2503   rowners_bs = A->rmap->range;
2504 
2505   /* each proc creates an array to be distributed */
2506   ierr = PetscMalloc1(bs*Mbs,&work);CHKERRQ(ierr);
2507   ierr = PetscMemzero(work,bs*Mbs*sizeof(PetscReal));CHKERRQ(ierr);
2508 
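       /* Only the upper triangle is stored, so an off-diagonal entry a_ij of B (j > i)
          is also a candidate maximum for row j, which lives on a later process; those
          candidates are accumulated in work[] and then shipped to their owners below. */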
2509   /* row_max for B */
2510   if (rank != size-1) {
2511     for (i=0; i<mbs; i++) {
2512       ncols = bi[1] - bi[0]; bi++;
2513       brow  = bs*i;
2514       for (j=0; j<ncols; j++) {
2515         bcol = bs*(*bj);
2516         for (kcol=0; kcol<bs; kcol++) {
2517           col  = bcol + kcol;                /* local col index */
2518           col += rowners_bs[rank+1];      /* global col index */
2519           for (krow=0; krow<bs; krow++) {
2520             atmp = PetscAbsScalar(*ba); ba++;
2521             row  = brow + krow;   /* local row index */
2522             if (PetscRealPart(va[row]) < atmp) va[row] = atmp;
2523             if (work[col] < atmp) work[col] = atmp;
2524           }
2525         }
2526         bj++;
2527       }
2528     }
2529 
2530     /* send values to its owners */
2531     for (dest=rank+1; dest<size; dest++) {
2532       svalues = work + rowners_bs[dest];
2533       count   = rowners_bs[dest+1]-rowners_bs[dest];
2534       ierr    = MPI_Send(svalues,count,MPIU_REAL,dest,rank,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2535     }
2536   }
2537 
2538   /* receive values */
2539   if (rank) {
2540     rvalues = work;
2541     count   = rowners_bs[rank+1]-rowners_bs[rank];
2542     for (source=0; source<rank; source++) {
2543       ierr = MPI_Recv(rvalues,count,MPIU_REAL,MPI_ANY_SOURCE,MPI_ANY_TAG,PetscObjectComm((PetscObject)A),&stat);CHKERRQ(ierr);
2544       /* process values */
2545       for (i=0; i<count; i++) {
2546         if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i];
2547       }
2548     }
2549   }
2550 
2551   ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
2552   ierr = PetscFree(work);CHKERRQ(ierr);
2553   PetscFunctionReturn(0);
2554 }
2555 
2556 #undef __FUNCT__
2557 #define __FUNCT__ "MatSOR_MPISBAIJ"
2558 PetscErrorCode MatSOR_MPISBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2559 {
2560   Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)matin->data;
2561   PetscErrorCode    ierr;
2562   PetscInt          mbs=mat->mbs,bs=matin->rmap->bs;
2563   PetscScalar       *x,*ptr,*from;
2564   Vec               bb1;
2565   const PetscScalar *b;
2566 
2567   PetscFunctionBegin;
2568   if (its <= 0 || lits <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
2569   if (bs > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"SSOR for block size > 1 is not yet implemented");
2570 
2571   if (flag == SOR_APPLY_UPPER) {
2572     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2573     PetscFunctionReturn(0);
2574   }
2575 
2576   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2577     if (flag & SOR_ZERO_INITIAL_GUESS) {
2578       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
2579       its--;
2580     }
2581 
2582     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2583     while (its--) {
2584 
2585       /* lower triangular part: slvec0b = - B^T*xx */
2586       ierr = (*mat->B->ops->multtranspose)(mat->B,xx,mat->slvec0b);CHKERRQ(ierr);
2587 
2588       /* copy xx into slvec0a */
2589       ierr = VecGetArray(mat->slvec0,&ptr);CHKERRQ(ierr);
2590       ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
2591       ierr = PetscMemcpy(ptr,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
2592       ierr = VecRestoreArray(mat->slvec0,&ptr);CHKERRQ(ierr);
2593 
2594       ierr = VecScale(mat->slvec0,-1.0);CHKERRQ(ierr);
2595 
2596       /* copy bb into slvec1a */
2597       ierr = VecGetArray(mat->slvec1,&ptr);CHKERRQ(ierr);
2598       ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
2599       ierr = PetscMemcpy(ptr,b,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
2600       ierr = VecRestoreArray(mat->slvec1,&ptr);CHKERRQ(ierr);
2601 
2602       /* set slvec1b = 0 */
2603       ierr = VecSet(mat->slvec1b,0.0);CHKERRQ(ierr);
2604 
2605       ierr = VecScatterBegin(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2606       ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
2607       ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
2608       ierr = VecScatterEnd(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2609 
2610       /* upper triangular part: bb1 = bb1 - B*x */
2611       ierr = (*mat->B->ops->multadd)(mat->B,mat->slvec1b,mat->slvec1a,bb1);CHKERRQ(ierr);
2612 
2613       /* local diagonal sweep */
2614       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
2615     }
2616     ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2617   } else if ((flag & SOR_LOCAL_FORWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2618     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2619   } else if ((flag & SOR_LOCAL_BACKWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2620     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2621   } else if (flag & SOR_EISENSTAT) {
2622     Vec               xx1;
2623     PetscBool         hasop;
2624     const PetscScalar *diag;
2625     PetscScalar       *sl,scale = (omega - 2.0)/omega;
2626     PetscInt          i,n;
2627 
2628     if (!mat->xx1) {
2629       ierr = VecDuplicate(bb,&mat->xx1);CHKERRQ(ierr);
2630       ierr = VecDuplicate(bb,&mat->bb1);CHKERRQ(ierr);
2631     }
2632     xx1 = mat->xx1;
2633     bb1 = mat->bb1;
2634 
2635     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr);
2636 
2637     if (!mat->diag) {
2638       /* this is wrong for same matrix with new nonzero values */
2639       ierr = MatCreateVecs(matin,&mat->diag,NULL);CHKERRQ(ierr);
2640       ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr);
2641     }
2642     ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
2643 
2644     if (hasop) {
2645       ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr);
2646       ierr = VecAYPX(mat->slvec1a,scale,bb);CHKERRQ(ierr);
2647     } else {
2648       /*
2649           These two lines are replaced by code that may be a bit faster for a good compiler
2650       ierr = VecPointwiseMult(mat->slvec1a,mat->diag,xx);CHKERRQ(ierr);
2651       ierr = VecAYPX(mat->slvec1a,scale,bb);CHKERRQ(ierr);
2652       */
2653       ierr = VecGetArray(mat->slvec1a,&sl);CHKERRQ(ierr);
2654       ierr = VecGetArrayRead(mat->diag,&diag);CHKERRQ(ierr);
2655       ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
2656       ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
2657       ierr = VecGetLocalSize(xx,&n);CHKERRQ(ierr);
2658       if (omega == 1.0) {
2659         for (i=0; i<n; i++) sl[i] = b[i] - diag[i]*x[i];
2660         ierr = PetscLogFlops(2.0*n);CHKERRQ(ierr);
2661       } else {
2662         for (i=0; i<n; i++) sl[i] = b[i] + scale*diag[i]*x[i];
2663         ierr = PetscLogFlops(3.0*n);CHKERRQ(ierr);
2664       }
2665       ierr = VecRestoreArray(mat->slvec1a,&sl);CHKERRQ(ierr);
2666       ierr = VecRestoreArrayRead(mat->diag,&diag);CHKERRQ(ierr);
2667       ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
2668       ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
2669     }
2670 
2671     /* multiply off-diagonal portion of matrix */
2672     ierr = VecSet(mat->slvec1b,0.0);CHKERRQ(ierr);
2673     ierr = (*mat->B->ops->multtranspose)(mat->B,xx,mat->slvec0b);CHKERRQ(ierr);
2674     ierr = VecGetArray(mat->slvec0,&from);CHKERRQ(ierr);
2675     ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
2676     ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
2677     ierr = VecRestoreArray(mat->slvec0,&from);CHKERRQ(ierr);
2678     ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
2679     ierr = VecScatterBegin(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2680     ierr = VecScatterEnd(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2681     ierr = (*mat->B->ops->multadd)(mat->B,mat->slvec1b,mat->slvec1a,mat->slvec1a);CHKERRQ(ierr);
2682 
2683     /* local sweep */
2684     ierr = (*mat->A->ops->sor)(mat->A,mat->slvec1a,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr);
2685     ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr);
2686   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatSORType is not supported for SBAIJ matrix format");
2687   PetscFunctionReturn(0);
2688 }
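
/*
   A usage sketch, assuming an assembled MPISBAIJ matrix A with block size 1 and vectors
   b and x (error checking omitted): the SOR kernel above is normally reached through the
   PCSOR preconditioner, for example

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCSOR);
     PCSORSetSymmetric(pc,SOR_LOCAL_SYMMETRIC_SWEEP);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/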
2689 
2690 #undef __FUNCT__
2691 #define __FUNCT__ "MatSOR_MPISBAIJ_2comm"
2692 PetscErrorCode MatSOR_MPISBAIJ_2comm(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2693 {
2694   Mat_MPISBAIJ   *mat = (Mat_MPISBAIJ*)matin->data;
2695   PetscErrorCode ierr;
2696   Vec            lvec1,bb1;
2697 
2698   PetscFunctionBegin;
2699   if (its <= 0 || lits <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
2700   if (matin->rmap->bs > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"SSOR for block size > 1 is not yet implemented");
2701 
2702   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2703     if (flag & SOR_ZERO_INITIAL_GUESS) {
2704       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
2705       its--;
2706     }
2707 
2708     ierr = VecDuplicate(mat->lvec,&lvec1);CHKERRQ(ierr);
2709     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2710     while (its--) {
2711       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2712 
2713       /* lower diagonal part: bb1 = bb - B^T*xx */
2714       ierr = (*mat->B->ops->multtranspose)(mat->B,xx,lvec1);CHKERRQ(ierr);
2715       ierr = VecScale(lvec1,-1.0);CHKERRQ(ierr);
2716 
2717       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2718       ierr = VecCopy(bb,bb1);CHKERRQ(ierr);
2719       ierr = VecScatterBegin(mat->Mvctx,lvec1,bb1,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
2720 
2721       /* upper diagonal part: bb1 = bb1 - B*x */
2722       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2723       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr);
2724 
2725       ierr = VecScatterEnd(mat->Mvctx,lvec1,bb1,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
2726 
2727       /* diagonal sweep */
2728       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
2729     }
2730     ierr = VecDestroy(&lvec1);CHKERRQ(ierr);
2731     ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2732   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatSORType is not supported for SBAIJ matrix format");
2733   PetscFunctionReturn(0);
2734 }
2735 
2736 #undef __FUNCT__
2737 #define __FUNCT__ "MatCreateMPISBAIJWithArrays"
2738 /*@
2739      MatCreateMPISBAIJWithArrays - creates an MPI SBAIJ matrix using arrays that contain the local
2740          rows in standard CSR format.
2741 
2742    Collective on MPI_Comm
2743 
2744    Input Parameters:
2745 +  comm - MPI communicator
2746 .  bs - the block size, only a block size of 1 is supported
2747 .  m - number of local rows (Cannot be PETSC_DECIDE)
2748 .  n - This value should be the same as the local size used in creating the
2749        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
2750        calculated if N is given). For square matrices n is almost always m.
2751 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
2752 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
2753 .   i - row indices
2754 .   j - column indices
2755 -   a - matrix values
2756 
2757    Output Parameter:
2758 .   mat - the matrix
2759 
2760    Level: intermediate
2761 
2762    Notes:
2763        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
2764      thus you CANNOT change the matrix entries by changing the values of a[] after you have
2765      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
2766 
2767        The i and j indices are 0 based, and the i entries are offsets into the local j array.
2768 
2769 .keywords: matrix, aij, compressed row, sparse, parallel
2770 
2771 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
2772           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
2773 @*/
2774 PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
2775 {
2776   PetscErrorCode ierr;
2777 
2778 
2779   PetscFunctionBegin;
2780   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
2781   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
2782   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
2783   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
2784   ierr = MatSetType(*mat,MATMPISBAIJ);CHKERRQ(ierr);
2785   ierr = MatMPISBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
2786   PetscFunctionReturn(0);
2787 }
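
/*
   A minimal usage sketch for MatCreateMPISBAIJWithArrays(), assuming bs = 1, one locally
   owned row per process, and a single diagonal entry in that row (upper triangle only);
   the arrays shown are hypothetical and error checking is omitted:

     PetscMPIInt rank;
     PetscInt    ii[2],jj[1];
     PetscScalar vv[1] = {2.0};
     Mat         A;

     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     ii[0] = 0; ii[1] = 1;
     jj[0] = (PetscInt)rank;
     MatCreateMPISBAIJWithArrays(PETSC_COMM_WORLD,1,1,1,PETSC_DETERMINE,PETSC_DETERMINE,ii,jj,vv,&A);
     MatDestroy(&A);
*/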
2788 
2789 
2790 #undef __FUNCT__
2791 #define __FUNCT__ "MatMPISBAIJSetPreallocationCSR"
2792 /*@C
2793    MatMPISBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in SBAIJ format
2794    (block compressed row, symmetric storage).
2795 
2796    Collective on MPI_Comm
2797 
2798    Input Parameters:
2799 +  B - the matrix
2800 .  bs - the block size
2801 .  i - the indices into j for the start of each local row (starts with zero)
2802 .  j - the column indices for each local row (starts with zero); these must be sorted for each row
2803 -  v - optional values in the matrix
2804 
2805    Level: developer
2806 
2807 .keywords: matrix, aij, compressed row, sparse, parallel
2808 
2809 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ
2810 @*/
2811 PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2812 {
2813   PetscErrorCode ierr;
2814 
2815   PetscFunctionBegin;
2816   ierr = PetscTryMethod(B,"MatMPISBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2817   PetscFunctionReturn(0);
2818 }
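
/*
   A usage sketch for MatMPISBAIJSetPreallocationCSR(), assuming bs = 1 and one locally
   owned row per process; the structure is preallocated without values (v = NULL) and a
   diagonal entry already present in the given structure is inserted afterwards. The
   arrays are hypothetical and error checking is omitted:

     PetscMPIInt rank;
     PetscInt    ii[2],jj[1],row;
     PetscScalar one = 1.0;
     Mat         B;

     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     ii[0] = 0; ii[1] = 1;
     jj[0] = (PetscInt)rank;
     MatCreate(PETSC_COMM_WORLD,&B);
     MatSetSizes(B,1,1,PETSC_DETERMINE,PETSC_DETERMINE);
     MatSetType(B,MATMPISBAIJ);
     MatMPISBAIJSetPreallocationCSR(B,1,ii,jj,NULL);
     row = (PetscInt)rank;
     MatSetValues(B,1,&row,1,&row,&one,INSERT_VALUES);
     MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
     MatDestroy(&B);
*/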
2819 
2820 #undef __FUNCT__
2821 #define __FUNCT__ "MatCreateMPISBAIJConcatenateSeqSBAIJSymbolic"
2822 PetscErrorCode MatCreateMPISBAIJConcatenateSeqSBAIJSymbolic(MPI_Comm comm,Mat inmat,PetscInt n,Mat *outmat)
2823 {
2824   PetscErrorCode ierr;
2825   Mat_SeqSBAIJ   *a = (Mat_SeqSBAIJ*)inmat->data;
2826   PetscInt       m,N,i,rstart,nnz,*dnz,*onz,sum,bs,cbs;
2827   PetscInt       *indx,*bindx,rmax=a->rmax,j;
2828 
2829   PetscFunctionBegin;
2830   /* This routine will ONLY return an MPISBAIJ type matrix */
2831   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
2832   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
2833   m = m/bs; N = N/cbs;
2834   if (n == PETSC_DECIDE) {
2835     ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
2836   }
2837   /* Check sum(n) = N */
2838   ierr = MPI_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2839   if (sum != N) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local columns != global columns %d",N);
2840 
2841   ierr    = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2842   rstart -= m;
2843 
2844   ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
2845   ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
2846   ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
2847   for (i=0; i<m; i++) {
2848     ierr = MatGetRow_SeqSBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
2849     nnz = nnz/bs;
2850     for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
2851     ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
2852     ierr = MatRestoreRow_SeqSBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
2853   }
2854   ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_FALSE);CHKERRQ(ierr);
2855   ierr = PetscFree(bindx);CHKERRQ(ierr);
2856 
2857   ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
2858   ierr = MatSetSizes(*outmat,m*bs,n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2859   ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
2860   ierr = MatSetType(*outmat,MATMPISBAIJ);CHKERRQ(ierr);
2861   ierr = MatMPISBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
2862   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
2863   PetscFunctionReturn(0);
2864 }
2865 
2866 #undef __FUNCT__
2867 #define __FUNCT__ "MatCreateMPISBAIJConcatenateSeqSBAIJNumeric"
2868 PetscErrorCode MatCreateMPISBAIJConcatenateSeqSBAIJNumeric(MPI_Comm comm,Mat inmat,PetscInt n,Mat outmat)
2869 {
2870   PetscErrorCode ierr;
2871   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
2872   PetscInt       *indx;
2873   PetscScalar    *values;
2874 
2875   PetscFunctionBegin;
2876   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
2877   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
2878   ierr = MatGetOwnershipRange(outmat,&rstart,NULL);CHKERRQ(ierr);
2879 
2880   ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
2881   for (i=0; i<m; i++) {
2882     ierr = MatGetRow_SeqSBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
2883     Ii   = i + rstart;
2884     ierr = MatSetValues(outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
2885     ierr = MatRestoreRow_SeqSBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
2886   }
2887   ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_FALSE);CHKERRQ(ierr);
2888   ierr = MatAssemblyBegin(outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2889   ierr = MatAssemblyEnd(outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2890   PetscFunctionReturn(0);
2891 }
2892 
2893 #undef __FUNCT__
2894 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPISBAIJ"
2895 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPISBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
2896 {
2897   PetscErrorCode ierr;
2898   PetscMPIInt    size;
2899 
2900   PetscFunctionBegin;
2901   /* same as MatCreateMPIAIJConcatenateSeqAIJ() */
2902   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2903   ierr = PetscLogEventBegin(MAT_Merge,inmat,0,0,0);CHKERRQ(ierr);
2904   if (size == 1) {
2905     if (scall == MAT_INITIAL_MATRIX) {
2906       ierr = MatDuplicate(inmat,MAT_COPY_VALUES,outmat);CHKERRQ(ierr);
2907     } else {
2908       ierr = MatCopy(inmat,*outmat,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
2909     }
2910   } else {
2911     if (scall == MAT_INITIAL_MATRIX) {
2912       ierr = MatCreateMPISBAIJConcatenateSeqSBAIJSymbolic(comm,inmat,n,outmat);CHKERRQ(ierr);
2913     }
2914     ierr = MatCreateMPISBAIJConcatenateSeqSBAIJNumeric(comm,inmat,n,*outmat);CHKERRQ(ierr);
2915   }
2916   ierr = PetscLogEventEnd(MAT_Merge,inmat,0,0,0);CHKERRQ(ierr);
2917   PetscFunctionReturn(0);
2918 }
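
/*
   A calling sketch, assuming each process already holds an assembled SeqSBAIJ matrix seqA
   containing its locally owned block rows of the global matrix (error checking omitted).
   The public dispatcher MatCreateMPIMatConcatenateSeqMat() is expected to reach the routine
   above for SBAIJ input:

     Mat mpiA;
     MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD,seqA,PETSC_DECIDE,MAT_INITIAL_MATRIX,&mpiA);
     MatDestroy(&mpiA);

   With MAT_REUSE_MATRIX and an existing mpiA, only the numeric phase above is repeated,
   as the size > 1 branch of the routine shows.
*/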
2919