xref: /petsc/src/mat/impls/sbaij/mpi/mpisbaij.c (revision 417c5e9afaacda2cf655e58897fdfc9d4dd1055e)
1 
2 #include <../src/mat/impls/baij/mpi/mpibaij.h>    /*I "petscmat.h" I*/
3 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
4 #include <../src/mat/impls/sbaij/seq/sbaij.h>
5 #include <petscblaslapack.h>
6 
7 #if defined(PETSC_HAVE_ELEMENTAL)
8 PETSC_EXTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat,MatType,MatReuse,Mat*);
9 #endif
10 #undef __FUNCT__
11 #define __FUNCT__ "MatStoreValues_MPISBAIJ"
12 PetscErrorCode  MatStoreValues_MPISBAIJ(Mat mat)
13 {
14   Mat_MPISBAIJ   *aij = (Mat_MPISBAIJ*)mat->data;
15   PetscErrorCode ierr;
16 
17   PetscFunctionBegin;
18   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
19   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
20   PetscFunctionReturn(0);
21 }
22 
23 #undef __FUNCT__
24 #define __FUNCT__ "MatRetrieveValues_MPISBAIJ"
25 PetscErrorCode  MatRetrieveValues_MPISBAIJ(Mat mat)
26 {
27   Mat_MPISBAIJ   *aij = (Mat_MPISBAIJ*)mat->data;
28   PetscErrorCode ierr;
29 
30   PetscFunctionBegin;
31   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
32   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
33   PetscFunctionReturn(0);
34 }
35 
/* MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv,orow,ocol):
   Insert (INSERT_VALUES) or accumulate (ADD_VALUES) one scalar at local
   point indices (row,col) into the local diagonal block A.  Expands to code
   that uses locals declared by the caller (MatSetValues_MPISBAIJ): brow/bcol
   are block indices, ridx/cidx the position inside the bs x bs block.
   A short binary search narrows the row, then a linear scan finds the block
   column; if absent and a->nonew permits, the row is reallocated and later
   entries shifted up to make room.  orow/ocol are the original global
   indices, used only in the error message. */
#define  MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv,orow,ocol)      \
  { \
 \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap + bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
    A->nonzerostate++;\
a_noinsert:; \
    ailen[brow] = nrow; \
  }
75 
/* MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv,orow,ocol):
   Same insertion scheme as the A-block macro above, but targeting the local
   off-diagonal block B (arrays bj/bi/ba/bimax/bilen, nonew flag b->nonew).
   Expands in the caller's scope; orow/ocol are the original global indices,
   used only in the error message. */
#define  MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv,orow,ocol) \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap + bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
    B->nonzerostate++;\
b_noinsert:; \
    bilen[brow] = nrow; \
  }
114 
115 /* Only add/insert a(i,j) with i<=j (blocks).
   Any a(i,j) with i>j input by user is ignored.
117 */
/* Point-wise MatSetValues for the parallel symmetric block AIJ format.
   Locally-owned rows are routed either to the diagonal block A (column in
   the diagonal ownership range; lower-triangular blocks of A are skipped)
   or to the off-diagonal block B, translating global columns through
   baij->colmap once the matrix has been assembled before.  Rows owned by
   other processes are filtered of lower-triangular entries and stashed for
   communication at assembly time.  Entries with block row > block column are
   ignored or raise an error depending on a->ignore_ltriangular. */
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPISBAIJ"
PetscErrorCode MatSetValues_MPISBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some Variables required in the macro */
  Mat          A     = baij->A;
  Mat_SeqSBAIJ *a    = (Mat_SeqSBAIJ*)(A)->data;
  PetscInt     *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar    *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  /* for stash: scratch buffers reused across calls, grown on demand */
  PetscInt  n_loc, *in_loc = NULL;
  MatScalar *v_loc = NULL;

  PetscFunctionBegin;
  if (!baij->donotstash) {
    /* grow the cached stash scratch arrays if this call has more columns */
    if (n > baij->n_loc) {
      ierr = PetscFree(baij->in_loc);CHKERRQ(ierr);
      ierr = PetscFree(baij->v_loc);CHKERRQ(ierr);
      ierr = PetscMalloc1(n,&baij->in_loc);CHKERRQ(ierr);
      ierr = PetscMalloc1(n,&baij->v_loc);CHKERRQ(ierr);

      baij->n_loc = n;
    }
    in_loc = baij->in_loc;
    v_loc  = baij->v_loc;
  }

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
      row = im[i] - rstart_orig;              /* local row index */
      for (j=0; j<n; j++) {
        if (im[i]/bs > in[j]/bs) {
          if (a->ignore_ltriangular) {
            continue;    /* ignore lower triangular blocks */
          } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
        }
        if (in[j] >= cstart_orig && in[j] < cend_orig) {  /* diag entry (A) */
          col  = in[j] - cstart_orig;         /* local col index */
          brow = row/bs; bcol = col/bs;
          if (brow > bcol) continue;  /* ignore lower triangular blocks of A */
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {  /* off-diag entry (B) */
          if (mat->was_assembled) {
            /* after first assembly B is compacted; translate the global
               block column via the colmap (1-based so 0 means "absent") */
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqSBAIJ*)(baij->A->data))->nonew) {
              /* new off-diagonal block: disassemble so it can be inserted */
              ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax= b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   = b->a;
            } else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {  /* off processor entry */
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        n_loc          = 0;
        for (j=0; j<n; j++) {
          if (im[i]/bs > in[j]/bs) continue; /* ignore lower triangular blocks */
          in_loc[n_loc] = in[j];
          if (roworiented) {
            v_loc[n_loc] = v[i*n+j];
          } else {
            v_loc[n_loc] = v[j*m+i];
          }
          n_loc++;
        }
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n_loc,in_loc,v_loc,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
237 
/* Insert/add one bs x bs block of values at block indices (row,col) into a
   sequential SBAIJ matrix; lower-triangular blocks (col < row) are skipped
   or rejected per a->ignore_ltriangular.  v holds bs2 values whose layout
   depends on a->roworiented; orow/ocol are the original (global) block
   indices used only in error messages.  A binary search narrows the row,
   a linear scan finds the column; if absent and nonew permits, the row is
   reallocated and later blocks shifted up. */
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqSBAIJ_Inlined"
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqSBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqSBAIJ      *a = (Mat_SeqSBAIJ*)A->data;
  PetscErrorCode    ierr;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax      =a->imax,*ai=a->i,*ailen=a->ilen;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  if (col < row) {
    if (a->ignore_ltriangular) PetscFunctionReturn(0); /* ignore lower triangular block */
    else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
  }
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  value = v;
  low   = 0;
  high  = nrow;

  /* binary search down to a window of at most 7 entries, then scan */
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        /* v is row-oriented; stored blocks are column-major, hence jj+=bs */
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        /* v already column-oriented: straight copy/accumulate */
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=0; jj<bs; jj++) {
              *bap++ += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=0; jj<bs; jj++) {
              *bap++  = *value++;
            }
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new block index nonzero block (%D, %D) in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  /* freshly inserted block: INSERT and ADD are equivalent here */
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}
336 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqBAIJ_Inlined"
/*
   This routine is exactly duplicated in mpibaij.c

   Insert/add one bs x bs block at block indices (row,col) of a sequential
   BAIJ matrix (no symmetry filtering here, unlike the SBAIJ variant above).
   v holds bs2 values, row- or column-oriented per a->roworiented; orow/ocol
   are the original block indices used only in error messages.
*/
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  low  = 0;
  high = nrow;
  value = v;
  /* binary search down to a window of at most 7 entries, then scan */
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        /* v is row-oriented; stored blocks are column-major, hence jj+=bs */
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new global block indexed nonzero block (%D, %D) in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  /* freshly inserted block: INSERT and ADD are equivalent here */
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}
435 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPISBAIJ"
/*
    This routine could be optimized by removing the need for the block copy below and passing stride information
  to the above inline routines; similarly in MatSetValuesBlocked_MPIBAIJ()

    Block-wise MatSetValues: each (im[i],in[j]) addresses one bs x bs block.
  Locally-owned block rows go to the diagonal (A) or off-diagonal (B) local
  matrix; blocks with im[i] > in[j] are below the diagonal and are skipped or
  rejected per ignore_ltriangular.  Off-process rows are stashed in bstash.
  Note cstart/cend use rstartbs/rendbs: for a symmetric matrix the diagonal
  ownership range of rows and columns coincides.
*/
PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const MatScalar v[],InsertMode addv)
{
  Mat_MPISBAIJ    *baij = (Mat_MPISBAIJ*)mat->data;
  const MatScalar *value;
  MatScalar       *barray     =baij->barray;
  PetscBool       roworiented = baij->roworiented,ignore_ltriangular = ((Mat_SeqSBAIJ*)baij->A->data)->ignore_ltriangular;
  PetscErrorCode  ierr;
  PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt        rend=baij->rendbs,cstart=baij->rstartbs,stepval;
  PetscInt        cend=baij->rendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  /* lazily allocate the bs2 scratch block used to reorient input values */
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) {
    stepval = (n-1)*bs;
  } else {
    stepval = (m-1)*bs;
  }
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        if (im[i] > in[j]) {
          if (ignore_ltriangular) continue; /* ignore lower triangular blocks */
          else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
        }
        /* If NumCol = 1 then a copy is not required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*) v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*) v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + i*(stepval+bs)*bs + j*bs;
          } else {
            value = v + j*(stepval+bs)*bs + i*bs;
          }
          /* gather the strided bs x bs block into the contiguous scratch */
          for (ii=0; ii<bs; ii++,value+=stepval) {
            for (jj=0; jj<bs; jj++) {
              *barray++ = *value++;
            }
          }
          barray -=bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqSBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            /* translate global point column to local B column via colmap */
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
              /* unknown block column: disassemble so it can be inserted */
              ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
            }
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
545 
#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPISBAIJ"
/* Retrieve entries v[i*n+j] = mat(idxm[i],idxn[j]) for locally-owned rows
   only (errors otherwise).  Columns in the diagonal ownership range are read
   from A; other columns are translated through colmap and read from B,
   with 0.0 returned for positions whose block is not stored locally. */
PetscErrorCode MatGetValues_MPISBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]); */
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqSBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          /* colmap is 1-based, 0 meaning "block not present on this rank" */
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}
588 
#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPISBAIJ"
/* Norm of a parallel symmetric matrix.  Frobenius: ||A||^2 + 2*||B||^2
   summed over ranks (off-diagonal blocks appear twice in the full symmetric
   matrix).  1-norm and infinity-norm coincide for a symmetric matrix and are
   computed as the maximum column sum, accumulating each stored entry into
   both its row and column (except the diagonal block of A). */
PetscErrorCode MatNorm_MPISBAIJ(Mat mat,NormType type,PetscReal *norm)
{
  Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscReal      sum[2],*lnorm2;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr =  MatNorm(baij->A,type,norm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      ierr    = PetscMalloc1(2,&lnorm2);CHKERRQ(ierr);
      ierr    =  MatNorm(baij->A,type,lnorm2);CHKERRQ(ierr);
      *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2++;            /* square of norm(A) */
      ierr    =  MatNorm(baij->B,type,lnorm2);CHKERRQ(ierr);
      *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2--;             /* square of norm(B) */
      ierr    = MPI_Allreduce(lnorm2,sum,2,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      /* factor 2 accounts for the unstored lower-triangular mirror of B */
      *norm   = PetscSqrtReal(sum[0] + 2*sum[1]);
      ierr    = PetscFree(lnorm2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */
      Mat_SeqSBAIJ *amat=(Mat_SeqSBAIJ*)baij->A->data;
      Mat_SeqBAIJ  *bmat=(Mat_SeqBAIJ*)baij->B->data;
      PetscReal    *rsum,*rsum2,vabs;
      PetscInt     *jj,*garray=baij->garray,rstart=baij->rstartbs,nz;
      PetscInt     brow,bcol,col,bs=baij->A->rmap->bs,row,grow,gcol,mbs=amat->mbs;
      MatScalar    *v;

      ierr = PetscMalloc2(mat->cmap->N,&rsum,mat->cmap->N,&rsum2);CHKERRQ(ierr);
      ierr = PetscMemzero(rsum,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      /* Amat */
      v = amat->a; jj = amat->j;
      for (brow=0; brow<mbs; brow++) {
        grow = bs*(rstart + brow);
        nz   = amat->i[brow+1] - amat->i[brow];
        for (bcol=0; bcol<nz; bcol++) {
          gcol = bs*(rstart + *jj); jj++;
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              vabs            = PetscAbsScalar(*v); v++;
              rsum[gcol+col] += vabs;
              /* non-diagonal block */
              if (bcol > 0 && vabs > 0.0) rsum[grow+row] += vabs;
            }
          }
        }
      }
      /* Bmat */
      v = bmat->a; jj = bmat->j;
      for (brow=0; brow<mbs; brow++) {
        grow = bs*(rstart + brow);
        nz = bmat->i[brow+1] - bmat->i[brow];
        for (bcol=0; bcol<nz; bcol++) {
          gcol = bs*garray[*jj]; jj++;
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              vabs            = PetscAbsScalar(*v); v++;
              rsum[gcol+col] += vabs;
              rsum[grow+row] += vabs;
            }
          }
        }
      }
      ierr  = MPI_Allreduce(rsum,rsum2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *norm = 0.0;
      for (col=0; col<mat->cmap->N; col++) {
        if (rsum2[col] > *norm) *norm = rsum2[col];
      }
      ierr = PetscFree2(rsum,rsum2);CHKERRQ(ierr);
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}
663 
664 #undef __FUNCT__
665 #define __FUNCT__ "MatAssemblyBegin_MPISBAIJ"
666 PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat,MatAssemblyType mode)
667 {
668   Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
669   PetscErrorCode ierr;
670   PetscInt       nstash,reallocs;
671 
672   PetscFunctionBegin;
673   if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);
674 
675   ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
676   ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
677   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
678   ierr = PetscInfo2(mat,"Stash has %D entries,uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
679   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
680   ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
681   PetscFunctionReturn(0);
682 }
683 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPISBAIJ"
/* Finish parallel assembly: drain the scalar and block stashes into the
   local matrices (block stash values are column-oriented, so roworiented
   flags are temporarily flipped), assemble A and B, coordinate a collective
   disassemble/reassemble of B if any rank disassembled, set up the scatter
   contexts on first final assembly, and update the collective nonzerostate. */
PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPISBAIJ   *baij=(Mat_MPISBAIJ*)mat->data;
  Mat_SeqSBAIJ   *a   =(Mat_SeqSBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      other_disassembled;
  PetscMPIInt    n;
  PetscBool      r1,r2,r3;
  MatScalar      *val;

  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!baij->donotstash &&  !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPISBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the roworiented flag to column oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPISBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPISBAIJ(mat);CHKERRQ(ierr); /* setup Mvctx and sMvctx */
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPI_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
785 
786 extern PetscErrorCode MatView_SeqSBAIJ_ASCII(Mat,PetscViewer);
787 extern PetscErrorCode MatSetValues_MPIBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
788 #include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPISBAIJ_ASCIIorDraworSocket"
/*
   Viewer kernel for MPISBAIJ matrices: the ASCII "info" formats are printed
   directly from the distributed data; all other cases gather the entire
   matrix onto process 0 and view it there.
*/
static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPISBAIJ      *baij = (Mat_MPISBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscInt          bs   = mat->rmap->bs;
  PetscMPIInt       rank = baij->rank;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* per-process summary: local rows/nonzeros, the split between the
         diagonal (A) and off-diagonal (B) blocks, then the scatter context */
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      /* nothing to print for an unfactored parallel matrix */
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto first processor. */
    Mat          A;
    Mat_SeqSBAIJ *Aloc;
    Mat_SeqBAIJ  *Bloc;
    PetscInt     M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar    *a;
    const char   *matname;

    /* Should this be the same type as mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      /* rank 0 owns everything; the other ranks hold empty local parts */
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPISBAIJ);CHKERRQ(ierr);
    ierr = MatMPISBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqSBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      /* rvals holds the bs consecutive global row indices of block row i */
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          /* insert the bs x bs block one column at a time; a walks the block storage */
          ierr = MatSetValues_MPISBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++;
          a += bs;
        }
      }
    }
    /* copy over the B part */
    Bloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Bloc->i; aj = Bloc->j; a = Bloc->a;
    for (i=0; i<mbs; i++) {

      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        /* garray maps local block columns of B to global block columns */
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          /* NOTE(review): the B part uses the BAIJ (not SBAIJ) SetValues here,
             unlike the A part above -- presumably to bypass SBAIJ's
             lower-triangular filtering; confirm */
          ierr = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++;
          a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      /* on rank 0 the whole matrix now lives in the local A block of the gathered matrix */
      ierr = PetscObjectSetName((PetscObject)((Mat_MPISBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqSBAIJ_ASCII(((Mat_MPISBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
909 
910 #undef __FUNCT__
911 #define __FUNCT__ "MatView_MPISBAIJ"
912 PetscErrorCode MatView_MPISBAIJ(Mat mat,PetscViewer viewer)
913 {
914   PetscErrorCode ierr;
915   PetscBool      iascii,isdraw,issocket,isbinary;
916 
917   PetscFunctionBegin;
918   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
919   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
920   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
921   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
922   if (iascii || isdraw || issocket || isbinary) {
923     ierr = MatView_MPISBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
924   }
925   PetscFunctionReturn(0);
926 }
927 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPISBAIJ"
/*
   Destroy an MPISBAIJ matrix: release the two local blocks, the stashes,
   the column map, all communication work vectors/scatters, and finally
   unregister the composed methods so the object can change type cleanly.
*/
PetscErrorCode MatDestroy_MPISBAIJ(Mat mat)
{
  Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
  /* local diagonal (A) and off-diagonal (B) blocks */
  ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
#else
  ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(baij->garray);CHKERRQ(ierr);
  /* work vectors and scatters used by the matrix-vector products */
  ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->slvec0);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->slvec0b);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->slvec1);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->slvec1a);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->slvec1b);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&baij->sMvctx);CHKERRQ(ierr);
  /* MatGetRow() workspace */
  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
  ierr = PetscFree(baij->barray);CHKERRQ(ierr);
  ierr = PetscFree(baij->hd);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->diag);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->bb1);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->xx1);CHKERRQ(ierr);
#if defined(PETSC_USE_REAL_MAT_SINGLE)
  ierr = PetscFree(baij->setvaluescopy);CHKERRQ(ierr);
#endif
  ierr = PetscFree(baij->in_loc);CHKERRQ(ierr);
  ierr = PetscFree(baij->v_loc);CHKERRQ(ierr);
  ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);

  /* clear the type name and the composed query functions registered at creation */
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPISBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpisbaij_mpisbstrm_C",NULL);CHKERRQ(ierr);
#if defined(PETSC_HAVE_ELEMENTAL)
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpisbaij_elemental_C",NULL);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
982 
983 #undef __FUNCT__
984 #define __FUNCT__ "MatMult_MPISBAIJ_Hermitian"
985 PetscErrorCode MatMult_MPISBAIJ_Hermitian(Mat A,Vec xx,Vec yy)
986 {
987   Mat_MPISBAIJ      *a = (Mat_MPISBAIJ*)A->data;
988   PetscErrorCode    ierr;
989   PetscInt          nt,mbs=a->mbs,bs=A->rmap->bs;
990   PetscScalar       *from;
991   const PetscScalar *x;
992 
993   PetscFunctionBegin;
994   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
995   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
996 
997   /* diagonal part */
998   ierr = (*a->A->ops->mult)(a->A,xx,a->slvec1a);CHKERRQ(ierr);
999   ierr = VecSet(a->slvec1b,0.0);CHKERRQ(ierr);
1000 
1001   /* subdiagonal part */
1002   ierr = (*a->B->ops->multhermitiantranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
1003 
1004   /* copy x into the vec slvec0 */
1005   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
1006   ierr = VecGetArrayRead(xx,&x);CHKERRQ(ierr);
1007 
1008   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
1009   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
1010   ierr = VecRestoreArrayRead(xx,&x);CHKERRQ(ierr);
1011 
1012   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1013   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1014   /* supperdiagonal part */
1015   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,yy);CHKERRQ(ierr);
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 #undef __FUNCT__
1020 #define __FUNCT__ "MatMult_MPISBAIJ"
1021 PetscErrorCode MatMult_MPISBAIJ(Mat A,Vec xx,Vec yy)
1022 {
1023   Mat_MPISBAIJ      *a = (Mat_MPISBAIJ*)A->data;
1024   PetscErrorCode    ierr;
1025   PetscInt          nt,mbs=a->mbs,bs=A->rmap->bs;
1026   PetscScalar       *from;
1027   const PetscScalar *x;
1028 
1029   PetscFunctionBegin;
1030   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1031   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1032 
1033   /* diagonal part */
1034   ierr = (*a->A->ops->mult)(a->A,xx,a->slvec1a);CHKERRQ(ierr);
1035   ierr = VecSet(a->slvec1b,0.0);CHKERRQ(ierr);
1036 
1037   /* subdiagonal part */
1038   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
1039 
1040   /* copy x into the vec slvec0 */
1041   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
1042   ierr = VecGetArrayRead(xx,&x);CHKERRQ(ierr);
1043 
1044   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
1045   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
1046   ierr = VecRestoreArrayRead(xx,&x);CHKERRQ(ierr);
1047 
1048   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1049   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1050   /* supperdiagonal part */
1051   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,yy);CHKERRQ(ierr);
1052   PetscFunctionReturn(0);
1053 }
1054 
1055 #undef __FUNCT__
1056 #define __FUNCT__ "MatMult_MPISBAIJ_2comm"
1057 PetscErrorCode MatMult_MPISBAIJ_2comm(Mat A,Vec xx,Vec yy)
1058 {
1059   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1060   PetscErrorCode ierr;
1061   PetscInt       nt;
1062 
1063   PetscFunctionBegin;
1064   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1065   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1066 
1067   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1068   if (nt != A->rmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible parition of A and yy");
1069 
1070   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1071   /* do diagonal part */
1072   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1073   /* do supperdiagonal part */
1074   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1075   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1076   /* do subdiagonal part */
1077   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1078   ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1079   ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1080   PetscFunctionReturn(0);
1081 }
1082 
1083 #undef __FUNCT__
1084 #define __FUNCT__ "MatMultAdd_MPISBAIJ"
1085 PetscErrorCode MatMultAdd_MPISBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1086 {
1087   Mat_MPISBAIJ      *a = (Mat_MPISBAIJ*)A->data;
1088   PetscErrorCode    ierr;
1089   PetscInt          mbs=a->mbs,bs=A->rmap->bs;
1090   PetscScalar       *from,zero=0.0;
1091   const PetscScalar *x;
1092 
1093   PetscFunctionBegin;
1094   /*
1095   PetscSynchronizedPrintf(PetscObjectComm((PetscObject)A)," MatMultAdd is called ...\n");
1096   PetscSynchronizedFlush(PetscObjectComm((PetscObject)A),PETSC_STDOUT);
1097   */
1098   /* diagonal part */
1099   ierr = (*a->A->ops->multadd)(a->A,xx,yy,a->slvec1a);CHKERRQ(ierr);
1100   ierr = VecSet(a->slvec1b,zero);CHKERRQ(ierr);
1101 
1102   /* subdiagonal part */
1103   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->slvec0b);CHKERRQ(ierr);
1104 
1105   /* copy x into the vec slvec0 */
1106   ierr = VecGetArray(a->slvec0,&from);CHKERRQ(ierr);
1107   ierr = VecGetArrayRead(xx,&x);CHKERRQ(ierr);
1108   ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
1109   ierr = VecRestoreArray(a->slvec0,&from);CHKERRQ(ierr);
1110 
1111   ierr = VecScatterBegin(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1112   ierr = VecRestoreArrayRead(xx,&x);CHKERRQ(ierr);
1113   ierr = VecScatterEnd(a->sMvctx,a->slvec0,a->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1114 
1115   /* supperdiagonal part */
1116   ierr = (*a->B->ops->multadd)(a->B,a->slvec1b,a->slvec1a,zz);CHKERRQ(ierr);
1117   PetscFunctionReturn(0);
1118 }
1119 
#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_MPISBAIJ_2comm"
/*
   zz = A*xx + yy using the two-communication scheme: a forward scatter
   supplies the superdiagonal contribution, a reverse scatter accumulates the
   subdiagonal (B^T) contribution back to its owners.
*/
PetscErrorCode MatMultAdd_MPISBAIJ_2comm(Mat A,Vec xx,Vec yy,Vec zz)
{
  Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* start communicating ghost values of xx while the local work proceeds */
  ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* do diagonal part */
  ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
  /* do superdiagonal part */
  ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);

  /* do subdiagonal part: scatter-add B^T*xx back to the owning processes */
  ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
  ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1141 
1142 /*
1143   This only works correctly for square matrices where the subblock A->A is the
1144    diagonal block
1145 */
1146 #undef __FUNCT__
1147 #define __FUNCT__ "MatGetDiagonal_MPISBAIJ"
1148 PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A,Vec v)
1149 {
1150   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1151   PetscErrorCode ierr;
1152 
1153   PetscFunctionBegin;
1154   /* if (a->rmap->N != a->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */
1155   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1156   PetscFunctionReturn(0);
1157 }
1158 
1159 #undef __FUNCT__
1160 #define __FUNCT__ "MatScale_MPISBAIJ"
1161 PetscErrorCode MatScale_MPISBAIJ(Mat A,PetscScalar aa)
1162 {
1163   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1164   PetscErrorCode ierr;
1165 
1166   PetscFunctionBegin;
1167   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1168   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1169   PetscFunctionReturn(0);
1170 }
1171 
#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPISBAIJ"
/*
   Return one (scalar) local row: the rows of the diagonal (A) and
   off-diagonal (B) blocks are merged into a single list sorted by global
   column.  Results live in the persistent work arrays mat->rowvalues /
   mat->rowindices, so only one row may be "gotten" at a time (guarded by
   getrowactive and released by MatRestoreRow_MPISBAIJ()).
*/
PetscErrorCode MatGetRow_MPISBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPISBAIJ   *mat = (Mat_MPISBAIJ*)matin->data;
  PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
  PetscErrorCode ierr;
  PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
  PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
  PetscInt       *cmap,*idx_p,cstart = mat->rstartbs;

  PetscFunctionBegin;
  if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqSBAIJ *Aa = (Mat_SeqSBAIJ*)mat->A->data;
    Mat_SeqBAIJ  *Ba = (Mat_SeqBAIJ*)mat->B->data;
    PetscInt     max = 1,mbs = mat->mbs,tmp;
    for (i=0; i<mbs; i++) {
      tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; /* row length */
      if (max < tmp) max = tmp;
    }
    /* max block entries per block row, bs2 scalars per block entry */
    ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
  }

  if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
  lrow = row - brstart;  /* local row index */

  /* only request the pieces (values/columns) the caller asked for */
  pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
  if (!v)   {pvA = 0; pvB = 0;}
  if (!idx) {pcA = 0; if (!v) pcB = 0;}
  ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  nztot = nzA + nzB;

  cmap = mat->garray;
  if (v  || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      /* imark = number of B entries whose global block column precedes the
         diagonal block; those go first, then all A entries, then the rest of B */
      PetscInt imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        for (i=0; i<nzB; i++) {
          if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
        for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* imark already computed in the values pass above */
          for (i=0; i<imark; i++) {
            idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
          }
        } else {
          for (i=0; i<nzB; i++) {
            if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
            else break;
          }
          imark = i;
        }
        /* A columns are local block columns: shift by the owning block-column start */
        for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
        for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
      }
    } else {
      if (idx) *idx = 0;
      if (v)   *v   = 0;
    }
  }
  *nz  = nztot;
  ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1252 
1253 #undef __FUNCT__
1254 #define __FUNCT__ "MatRestoreRow_MPISBAIJ"
1255 PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1256 {
1257   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
1258 
1259   PetscFunctionBegin;
1260   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
1261   baij->getrowactive = PETSC_FALSE;
1262   PetscFunctionReturn(0);
1263 }
1264 
1265 #undef __FUNCT__
1266 #define __FUNCT__ "MatGetRowUpperTriangular_MPISBAIJ"
1267 PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A)
1268 {
1269   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ*)A->data;
1270   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data;
1271 
1272   PetscFunctionBegin;
1273   aA->getrow_utriangular = PETSC_TRUE;
1274   PetscFunctionReturn(0);
1275 }
1276 #undef __FUNCT__
1277 #define __FUNCT__ "MatRestoreRowUpperTriangular_MPISBAIJ"
1278 PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A)
1279 {
1280   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ*)A->data;
1281   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ*)a->A->data;
1282 
1283   PetscFunctionBegin;
1284   aA->getrow_utriangular = PETSC_FALSE;
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 #undef __FUNCT__
1289 #define __FUNCT__ "MatRealPart_MPISBAIJ"
1290 PetscErrorCode MatRealPart_MPISBAIJ(Mat A)
1291 {
1292   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1293   PetscErrorCode ierr;
1294 
1295   PetscFunctionBegin;
1296   ierr = MatRealPart(a->A);CHKERRQ(ierr);
1297   ierr = MatRealPart(a->B);CHKERRQ(ierr);
1298   PetscFunctionReturn(0);
1299 }
1300 
1301 #undef __FUNCT__
1302 #define __FUNCT__ "MatImaginaryPart_MPISBAIJ"
1303 PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A)
1304 {
1305   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1306   PetscErrorCode ierr;
1307 
1308   PetscFunctionBegin;
1309   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
1310   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
1311   PetscFunctionReturn(0);
1312 }
1313 
1314 /* Check if isrow is a subset of iscol_local, called by MatGetSubMatrix_MPISBAIJ()
1315    Input: isrow       - distributed(parallel),
1316           iscol_local - locally owned (seq)
1317 */
1318 #undef __FUNCT__
1319 #define __FUNCT__ "ISEqual_private"
1320 PetscErrorCode ISEqual_private(IS isrow,IS iscol_local,PetscBool  *flg)
1321 {
1322   PetscErrorCode ierr;
1323   PetscInt       sz1,sz2,*a1,*a2,i,j,k,nmatch;
1324   const PetscInt *ptr1,*ptr2;
1325 
1326   PetscFunctionBegin;
1327   ierr = ISGetLocalSize(isrow,&sz1);CHKERRQ(ierr);
1328   ierr = ISGetLocalSize(iscol_local,&sz2);CHKERRQ(ierr);
1329   if (sz1 > sz2) {
1330     *flg = PETSC_FALSE;
1331     PetscFunctionReturn(0);
1332   }
1333 
1334   ierr = ISGetIndices(isrow,&ptr1);CHKERRQ(ierr);
1335   ierr = ISGetIndices(iscol_local,&ptr2);CHKERRQ(ierr);
1336 
1337   ierr = PetscMalloc1(sz1,&a1);CHKERRQ(ierr);
1338   ierr = PetscMalloc1(sz2,&a2);CHKERRQ(ierr);
1339   ierr = PetscMemcpy(a1,ptr1,sz1*sizeof(PetscInt));CHKERRQ(ierr);
1340   ierr = PetscMemcpy(a2,ptr2,sz2*sizeof(PetscInt));CHKERRQ(ierr);
1341   ierr = PetscSortInt(sz1,a1);CHKERRQ(ierr);
1342   ierr = PetscSortInt(sz2,a2);CHKERRQ(ierr);
1343 
1344   nmatch=0;
1345   k     = 0;
1346   for (i=0; i<sz1; i++){
1347     for (j=k; j<sz2; j++){
1348       if (a1[i] == a2[j]) {
1349         k = j; nmatch++;
1350         break;
1351       }
1352     }
1353   }
1354   ierr = ISRestoreIndices(isrow,&ptr1);CHKERRQ(ierr);
1355   ierr = ISRestoreIndices(iscol_local,&ptr2);CHKERRQ(ierr);
1356   ierr = PetscFree(a1);CHKERRQ(ierr);
1357   ierr = PetscFree(a2);CHKERRQ(ierr);
1358   if (nmatch < sz1) {
1359     *flg = PETSC_FALSE;
1360   } else {
1361     *flg = PETSC_TRUE;
1362   }
1363   PetscFunctionReturn(0);
1364 }
1365 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPISBAIJ"
/*
   Extract a parallel submatrix.  Because the storage is symmetric the row
   index set must equal the (gathered) column index set; the gathered
   sequential column IS is cached on the new matrix under "ISAllGather" so
   MAT_REUSE_MATRIX calls can recover it.
*/
PetscErrorCode MatGetSubMatrix_MPISBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
{
  PetscErrorCode ierr;
  IS             iscol_local;
  PetscInt       csize;
  PetscBool      isequal;

  PetscFunctionBegin;
  ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
  if (call == MAT_REUSE_MATRIX) {
    /* recover the gathered column IS cached on the previously returned matrix */
    ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
    if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
  } else {
    ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
    ierr = ISEqual_private(isrow,iscol_local,&isequal);CHKERRQ(ierr);
    if (!isequal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"For symmetric format, iscol must equal isrow");
  }

  /* now call MatGetSubMatrix_MPIBAIJ() */
  ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
  if (call == MAT_INITIAL_MATRIX) {
    /* newmat keeps its own reference via PetscObjectCompose(); drop ours */
    ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
    ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1394 
1395 #undef __FUNCT__
1396 #define __FUNCT__ "MatZeroEntries_MPISBAIJ"
1397 PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A)
1398 {
1399   Mat_MPISBAIJ   *l = (Mat_MPISBAIJ*)A->data;
1400   PetscErrorCode ierr;
1401 
1402   PetscFunctionBegin;
1403   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1404   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1405   PetscFunctionReturn(0);
1406 }
1407 
#undef __FUNCT__
#define __FUNCT__ "MatGetInfo_MPISBAIJ"
/*
   Collect matrix statistics: the local numbers are the sums over the
   diagonal (A) and off-diagonal (B) blocks; the global variants reduce those
   sums with MPI max or sum across the matrix's communicator.
*/
PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
{
  Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)matin->data;
  Mat            A  = a->A,B = a->B;
  PetscErrorCode ierr;
  PetscReal      isend[5],irecv[5];

  PetscFunctionBegin;
  info->block_size = (PetscReal)matin->rmap->bs;

  ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);

  /* stage the A-block statistics */
  isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
  isend[3] = info->memory;  isend[4] = info->mallocs;

  ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);

  /* accumulate the B-block statistics on top */
  isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
  isend[3] += info->memory;  isend[4] += info->mallocs;
  if (flag == MAT_LOCAL) {
    info->nz_used      = isend[0];
    info->nz_allocated = isend[1];
    info->nz_unneeded  = isend[2];
    info->memory       = isend[3];
    info->mallocs      = isend[4];
  } else if (flag == MAT_GLOBAL_MAX) {
    ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);

    info->nz_used      = irecv[0];
    info->nz_allocated = irecv[1];
    info->nz_unneeded  = irecv[2];
    info->memory       = irecv[3];
    info->mallocs      = irecv[4];
  } else if (flag == MAT_GLOBAL_SUM) {
    ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);

    info->nz_used      = irecv[0];
    info->nz_allocated = irecv[1];
    info->nz_unneeded  = irecv[2];
    info->memory       = irecv[3];
    info->mallocs      = irecv[4];
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
  info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(0);
}
1457 
#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPISBAIJ"
/*
   Set a matrix option.  Most options are forwarded to the local diagonal (A)
   and/or off-diagonal (B) blocks; others only record flags on the parallel
   object itself.
*/
PetscErrorCode MatSetOption_MPISBAIJ(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPISBAIJ   *a  = (Mat_MPISBAIJ*)A->data;
  Mat_SeqSBAIJ   *aA = (Mat_SeqSBAIJ*)a->A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_UNUSED_NONZERO_LOCATION_ERR:
  case MAT_KEEP_NONZERO_PATTERN:
  case MAT_NEW_NONZERO_LOCATION_ERR:
    /* nonzero-pattern policies apply to both local blocks */
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;

    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
    break;
  case MAT_NEW_DIAGONALS:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  case MAT_IGNORE_OFF_PROC_ENTRIES:
    a->donotstash = flg;
    break;
  case MAT_USE_HASH_TABLE:
    a->ht_flag = flg;
    break;
  case MAT_HERMITIAN:
    if (!A->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call MatAssemblyEnd() first");
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);

    /* a Hermitian matrix needs the B^H variant of the matrix-vector product */
    A->ops->mult = MatMult_MPISBAIJ_Hermitian;
    break;
  case MAT_SPD:
    A->spd_set = PETSC_TRUE;
    A->spd     = flg;
    if (flg) {
      /* SPD implies symmetry and structural symmetry */
      A->symmetric                  = PETSC_TRUE;
      A->structurally_symmetric     = PETSC_TRUE;
      A->symmetric_set              = PETSC_TRUE;
      A->structurally_symmetric_set = PETSC_TRUE;
    }
    break;
  case MAT_SYMMETRIC:
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    break;
  case MAT_SYMMETRY_ETERNAL:
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix must be symmetric");
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  case MAT_IGNORE_LOWER_TRIANGULAR:
    aA->ignore_ltriangular = flg;
    break;
  case MAT_ERROR_LOWER_TRIANGULAR:
    /* NOTE(review): sets the same flag as MAT_IGNORE_LOWER_TRIANGULAR above --
       possibly intended to set a distinct error flag; confirm */
    aA->ignore_ltriangular = flg;
    break;
  case MAT_GETROW_UPPERTRIANGULAR:
    aA->getrow_utriangular = flg;
    break;
  default:
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %d",op);
  }
  PetscFunctionReturn(0);
}
1531 
1532 #undef __FUNCT__
1533 #define __FUNCT__ "MatTranspose_MPISBAIJ"
1534 PetscErrorCode MatTranspose_MPISBAIJ(Mat A,MatReuse reuse,Mat *B)
1535 {
1536   PetscErrorCode ierr;
1537 
1538   PetscFunctionBegin;
1539   if (MAT_INITIAL_MATRIX || *B != A) {
1540     ierr = MatDuplicate(A,MAT_COPY_VALUES,B);CHKERRQ(ierr);
1541   }
1542   PetscFunctionReturn(0);
1543 }
1544 
#undef __FUNCT__
#define __FUNCT__ "MatDiagonalScale_MPISBAIJ"
/*
   mat = diag(ll) * mat * diag(rr).  For the symmetric format the left and
   right scaling vectors must be equal so that symmetry is preserved.
*/
PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat,Vec ll,Vec rr)
{
  Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
  Mat            a     = baij->A, b=baij->B;
  PetscErrorCode ierr;
  PetscInt       nv,m,n;
  PetscBool      flg;

  PetscFunctionBegin;
  if (ll != rr) {
    /* distinct Vec objects are still acceptable if their entries agree */
    ierr = VecEqual(ll,rr,&flg);CHKERRQ(ierr);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"For symmetric format, left and right scaling vectors must be same\n");
  }
  if (!ll) PetscFunctionReturn(0);

  ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
  if (m != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"For symmetric format, local size %d %d must be same",m,n);

  ierr = VecGetLocalSize(rr,&nv);CHKERRQ(ierr);
  if (nv!=n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Left and right vector non-conforming local size");

  /* start communicating the ghost values of rr needed to scale B's columns */
  ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* left diagonalscale the off-diagonal part */
  ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);

  /* scale the diagonal part */
  ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);

  /* right diagonalscale the off-diagonal part */
  ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1581 
1582 #undef __FUNCT__
1583 #define __FUNCT__ "MatSetUnfactored_MPISBAIJ"
1584 PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A)
1585 {
1586   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1587   PetscErrorCode ierr;
1588 
1589   PetscFunctionBegin;
1590   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1591   PetscFunctionReturn(0);
1592 }
1593 
1594 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat,MatDuplicateOption,Mat*);
1595 
1596 #undef __FUNCT__
1597 #define __FUNCT__ "MatEqual_MPISBAIJ"
1598 PetscErrorCode MatEqual_MPISBAIJ(Mat A,Mat B,PetscBool  *flag)
1599 {
1600   Mat_MPISBAIJ   *matB = (Mat_MPISBAIJ*)B->data,*matA = (Mat_MPISBAIJ*)A->data;
1601   Mat            a,b,c,d;
1602   PetscBool      flg;
1603   PetscErrorCode ierr;
1604 
1605   PetscFunctionBegin;
1606   a = matA->A; b = matA->B;
1607   c = matB->A; d = matB->B;
1608 
1609   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1610   if (flg) {
1611     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1612   }
1613   ierr = MPI_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1614   PetscFunctionReturn(0);
1615 }
1616 
1617 #undef __FUNCT__
1618 #define __FUNCT__ "MatCopy_MPISBAIJ"
1619 PetscErrorCode MatCopy_MPISBAIJ(Mat A,Mat B,MatStructure str)
1620 {
1621   PetscErrorCode ierr;
1622   Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
1623   Mat_MPISBAIJ   *b = (Mat_MPISBAIJ*)B->data;
1624 
1625   PetscFunctionBegin;
1626   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1627   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1628     ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1629     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1630     ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1631   } else {
1632     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1633     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1634   }
1635   PetscFunctionReturn(0);
1636 }
1637 
#undef __FUNCT__
#define __FUNCT__ "MatSetUp_MPISBAIJ"
/* Default MatSetUp(): preallocate with default sizes (block size from the row
   layout, PETSC_DEFAULT nonzeros per row, no per-row arrays). */
PetscErrorCode MatSetUp_MPISBAIJ(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMPISBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1648 
#undef __FUNCT__
#define __FUNCT__ "MatAXPY_MPISBAIJ"
/*
   MatAXPY_MPISBAIJ - Computes Y = a*X + Y.

   Three paths depending on the structural relationship of X and Y:
   - SAME_NONZERO_PATTERN: two BLAS axpy calls directly on the stored values of
     the local diagonal (SBAIJ) and off-diagonal (BAIJ) parts.
   - SUBSET_NONZERO_PATTERN: generic MatAXPY_Basic(), with X temporarily in
     "getrow upper-triangular" mode so its full rows are visible.
   - otherwise: a new matrix B preallocated for the union sparsity pattern is
     filled and swapped into Y's header via MatHeaderReplace().
*/
PetscErrorCode MatAXPY_MPISBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;
  Mat_MPISBAIJ   *xx=(Mat_MPISBAIJ*)X->data,*yy=(Mat_MPISBAIJ*)Y->data;
  PetscBLASInt   bnz,one=1;
  Mat_SeqSBAIJ   *xa,*ya;
  Mat_SeqBAIJ    *xb,*yb;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    PetscScalar alpha = a;
    /* identical patterns: the value arrays line up entry for entry */
    xa   = (Mat_SeqSBAIJ*)xx->A->data;
    ya   = (Mat_SeqSBAIJ*)yy->A->data;
    ierr = PetscBLASIntCast(xa->nz,&bnz);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,xa->a,&one,ya->a,&one));
    xb   = (Mat_SeqBAIJ*)xx->B->data;
    yb   = (Mat_SeqBAIJ*)yy->B->data;
    ierr = PetscBLASIntCast(xb->nz,&bnz);CHKERRQ(ierr);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,xb->a,&one,yb->a,&one));
    ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
  } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
    ierr = MatSetOption(X,MAT_GETROW_UPPERTRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
    ierr = MatSetOption(X,MAT_GETROW_UPPERTRIANGULAR,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    Mat      B;
    PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
    if (bs != X->rmap->bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrices must have same block size");
    ierr = MatGetRowUpperTriangular(X);CHKERRQ(ierr);
    ierr = MatGetRowUpperTriangular(Y);CHKERRQ(ierr);
    ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
    ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
    ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
    ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
    ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
    ierr = MatSetType(B,MATMPISBAIJ);CHKERRQ(ierr);
    /* per-row nonzero counts of the union pattern of X and Y */
    ierr = MatAXPYGetPreallocation_SeqSBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
    ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
    ierr = MatMPISBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
    ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
    /* B takes over Y's header; Y's previous internals are released */
    ierr = MatHeaderReplace(Y,B);CHKERRQ(ierr);
    ierr = PetscFree(nnz_d);CHKERRQ(ierr);
    ierr = PetscFree(nnz_o);CHKERRQ(ierr);
    ierr = MatRestoreRowUpperTriangular(X);CHKERRQ(ierr);
    ierr = MatRestoreRowUpperTriangular(Y);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1700 
1701 #undef __FUNCT__
1702 #define __FUNCT__ "MatGetSubMatrices_MPISBAIJ"
1703 PetscErrorCode MatGetSubMatrices_MPISBAIJ(Mat A,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[])
1704 {
1705   PetscErrorCode ierr;
1706   PetscInt       i;
1707   PetscBool      flg,sorted;
1708 
1709   PetscFunctionBegin;
1710   for (i = 0; i < n; i++) {
1711     ierr = ISSorted(irow[i],&sorted);CHKERRQ(ierr);
1712     if (!sorted) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Row index set %d not sorted",i);
1713     ierr = ISSorted(icol[i],&sorted);CHKERRQ(ierr);
1714     if (!sorted) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Column index set %d not sorted",i);
1715   }
1716   ierr = MatGetSubMatrices_MPIBAIJ(A,n,irow,icol,scall,B);CHKERRQ(ierr);
1717   for (i=0; i<n; i++) {
1718     ierr = ISEqual(irow[i],icol[i],&flg);CHKERRQ(ierr);
1719     if (!flg) { /* *B[i] is non-symmetric, set flag */
1720       ierr = MatSetOption(*B[i],MAT_SYMMETRIC,PETSC_FALSE);CHKERRQ(ierr);
1721     }
1722   }
1723   PetscFunctionReturn(0);
1724 }
1725 
1726 #undef __FUNCT__
1727 #define __FUNCT__ "MatShift_MPISBAIJ"
1728 PetscErrorCode MatShift_MPISBAIJ(Mat Y,PetscScalar a)
1729 {
1730   PetscErrorCode ierr;
1731   Mat_MPISBAIJ    *maij = (Mat_MPISBAIJ*)Y->data;
1732   Mat_SeqSBAIJ    *aij = (Mat_SeqSBAIJ*)maij->A->data;
1733 
1734   PetscFunctionBegin;
1735   if (!Y->preallocated) {
1736     ierr = MatMPISBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
1737   } else if (!aij->nz) {
1738     ierr = MatSeqSBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
1739   }
1740   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
1741   PetscFunctionReturn(0);
1742 }
1743 
/* -------------------------------------------------------------------*/
/* Function table for MATMPISBAIJ. Entries are positional: the slot comments
   give the index of the following member of struct _MatOps; a 0 entry means
   the operation is not implemented for this matrix type. */
static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ,
                                       MatGetRow_MPISBAIJ,
                                       MatRestoreRow_MPISBAIJ,
                                       MatMult_MPISBAIJ,
                               /*  4*/ MatMultAdd_MPISBAIJ,
                                       MatMult_MPISBAIJ,       /* transpose versions are same as non-transpose */
                                       MatMultAdd_MPISBAIJ,
                                       0,
                                       0,
                                       0,
                               /* 10*/ 0,
                                       0,
                                       0,
                                       MatSOR_MPISBAIJ,
                                       MatTranspose_MPISBAIJ,
                               /* 15*/ MatGetInfo_MPISBAIJ,
                                       MatEqual_MPISBAIJ,
                                       MatGetDiagonal_MPISBAIJ,
                                       MatDiagonalScale_MPISBAIJ,
                                       MatNorm_MPISBAIJ,
                               /* 20*/ MatAssemblyBegin_MPISBAIJ,
                                       MatAssemblyEnd_MPISBAIJ,
                                       MatSetOption_MPISBAIJ,
                                       MatZeroEntries_MPISBAIJ,
                               /* 24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 29*/ MatSetUp_MPISBAIJ,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 34*/ MatDuplicate_MPISBAIJ,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 39*/ MatAXPY_MPISBAIJ,
                                       MatGetSubMatrices_MPISBAIJ,
                                       MatIncreaseOverlap_MPISBAIJ,
                                       MatGetValues_MPISBAIJ,
                                       MatCopy_MPISBAIJ,
                               /* 44*/ 0,
                                       MatScale_MPISBAIJ,
                                       MatShift_MPISBAIJ,
                                       0,
                                       0,
                               /* 49*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 54*/ 0,
                                       0,
                                       MatSetUnfactored_MPISBAIJ,
                                       0,
                                       MatSetValuesBlocked_MPISBAIJ,
                               /* 59*/ MatGetSubMatrix_MPISBAIJ,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 69*/ MatGetRowMaxAbs_MPISBAIJ,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 79*/ 0,
                                       0,
                                       0,
                                       0,
                                       MatLoad_MPISBAIJ,
                               /* 84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /* 99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       MatRealPart_MPISBAIJ,
                                       MatImaginaryPart_MPISBAIJ,
                                       MatGetRowUpperTriangular_MPISBAIJ,
                                       MatRestoreRowUpperTriangular_MPISBAIJ,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*144*/MatCreateMPIMatConcatenateSeqMat_MPISBAIJ
};
1891 
1892 #undef __FUNCT__
1893 #define __FUNCT__ "MatGetDiagonalBlock_MPISBAIJ"
1894 PetscErrorCode  MatGetDiagonalBlock_MPISBAIJ(Mat A,Mat *a)
1895 {
1896   PetscFunctionBegin;
1897   *a = ((Mat_MPISBAIJ*)A->data)->A;
1898   PetscFunctionReturn(0);
1899 }
1900 
1901 #undef __FUNCT__
1902 #define __FUNCT__ "MatMPISBAIJSetPreallocation_MPISBAIJ"
1903 PetscErrorCode  MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
1904 {
1905   Mat_MPISBAIJ   *b;
1906   PetscErrorCode ierr;
1907   PetscInt       i,mbs,Mbs;
1908 
1909   PetscFunctionBegin;
1910   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
1911   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
1912   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
1913   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
1914 
1915   b   = (Mat_MPISBAIJ*)B->data;
1916   mbs = B->rmap->n/bs;
1917   Mbs = B->rmap->N/bs;
1918   if (mbs*bs != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"No of local rows %D must be divisible by blocksize %D",B->rmap->N,bs);
1919 
1920   B->rmap->bs = bs;
1921   b->bs2      = bs*bs;
1922   b->mbs      = mbs;
1923   b->Mbs      = Mbs;
1924   b->nbs      = B->cmap->n/bs;
1925   b->Nbs      = B->cmap->N/bs;
1926 
1927   for (i=0; i<=b->size; i++) {
1928     b->rangebs[i] = B->rmap->range[i]/bs;
1929   }
1930   b->rstartbs = B->rmap->rstart/bs;
1931   b->rendbs   = B->rmap->rend/bs;
1932 
1933   b->cstartbs = B->cmap->rstart/bs;
1934   b->cendbs   = B->cmap->rend/bs;
1935 
1936   if (!B->preallocated) {
1937     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
1938     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
1939     ierr = MatSetType(b->A,MATSEQSBAIJ);CHKERRQ(ierr);
1940     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
1941     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
1942     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
1943     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
1944     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
1945     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
1946   }
1947 
1948   ierr = MatSeqSBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
1949   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
1950 
1951   B->preallocated = PETSC_TRUE;
1952   PetscFunctionReturn(0);
1953 }
1954 
1955 #undef __FUNCT__
1956 #define __FUNCT__ "MatMPISBAIJSetPreallocationCSR_MPISBAIJ"
1957 PetscErrorCode MatMPISBAIJSetPreallocationCSR_MPISBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
1958 {
1959   PetscInt       m,rstart,cstart,cend;
1960   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
1961   const PetscInt *JJ    =0;
1962   PetscScalar    *values=0;
1963   PetscErrorCode ierr;
1964 
1965   PetscFunctionBegin;
1966   if (bs < 1) SETERRQ1(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_OUTOFRANGE,"Invalid block size specified, must be positive but it is %D",bs);
1967   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
1968   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
1969   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
1970   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
1971   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
1972   m      = B->rmap->n/bs;
1973   rstart = B->rmap->rstart/bs;
1974   cstart = B->cmap->rstart/bs;
1975   cend   = B->cmap->rend/bs;
1976 
1977   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
1978   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
1979   for (i=0; i<m; i++) {
1980     nz = ii[i+1] - ii[i];
1981     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
1982     nz_max = PetscMax(nz_max,nz);
1983     JJ     = jj + ii[i];
1984     for (j=0; j<nz; j++) {
1985       if (*JJ >= cstart) break;
1986       JJ++;
1987     }
1988     d = 0;
1989     for (; j<nz; j++) {
1990       if (*JJ++ >= cend) break;
1991       d++;
1992     }
1993     d_nnz[i] = d;
1994     o_nnz[i] = nz - d;
1995   }
1996   ierr = MatMPISBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
1997   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
1998 
1999   values = (PetscScalar*)V;
2000   if (!values) {
2001     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2002     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2003   }
2004   for (i=0; i<m; i++) {
2005     PetscInt          row    = i + rstart;
2006     PetscInt          ncols  = ii[i+1] - ii[i];
2007     const PetscInt    *icols = jj + ii[i];
2008     const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2009     ierr = MatSetValuesBlocked_MPISBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2010   }
2011 
2012   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2013   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2014   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2015   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2016   PetscFunctionReturn(0);
2017 }
2018 
2019 /*MC
2020    MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices,
2021    based on block compressed sparse row format.  Only the upper triangular portion of the "diagonal" portion of
2022    the matrix is stored.
2023 
2024   For complex numbers by default this matrix is symmetric, NOT Hermitian symmetric. To make it Hermitian symmetric you
2025   can call MatSetOption(Mat, MAT_HERMITIAN);
2026 
2027    Options Database Keys:
2028 . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to MatSetFromOptions()
2029 
2030   Level: beginner
2031 
2032 .seealso: MatCreateMPISBAIJ
2033 M*/
2034 
2035 PETSC_EXTERN PetscErrorCode MatConvert_MPISBAIJ_MPISBSTRM(Mat,MatType,MatReuse,Mat*);
2036 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPISBAIJ"
/*
   MatCreate_MPISBAIJ - Constructor registered for the MATMPISBAIJ type.

   Installs the MatOps table, zero-initializes the type-specific data
   (Mat_MPISBAIJ), composes the name-looked-up type-specific methods, marks the
   matrix symmetric (SBAIJ stores only the upper triangle of the diagonal
   part), and processes the -mat_use_hash_table option.
*/
PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B)
{
  Mat_MPISBAIJ   *b;
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE;

  PetscFunctionBegin;
  ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data = (void*)b;
  ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);

  B->ops->destroy = MatDestroy_MPISBAIJ;
  B->ops->view    = MatView_MPISBAIJ;
  B->assembled    = PETSC_FALSE;
  B->insertmode   = NOT_SET_VALUES;

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);

  /* build local table of row and column ownerships */
  ierr = PetscMalloc1(b->size+2,&b->rangebs);CHKERRQ(ierr);

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);

  b->donotstash  = PETSC_FALSE;
  b->colmap      = NULL;
  b->garray      = NULL;
  b->roworiented = PETSC_TRUE;

  /* stuff used in block assembly */
  b->barray = 0;

  /* stuff used for matrix vector multiply */
  b->lvec    = 0;
  b->Mvctx   = 0;
  b->slvec0  = 0;
  b->slvec0b = 0;
  b->slvec1  = 0;
  b->slvec1a = 0;
  b->slvec1b = 0;
  b->sMvctx  = 0;

  /* stuff for MatGetRow() */
  b->rowindices   = 0;
  b->rowvalues    = 0;
  b->getrowactive = PETSC_FALSE;

  /* hash table stuff */
  b->ht           = 0;
  b->hd           = 0;
  b->ht_size      = 0;
  b->ht_flag      = PETSC_FALSE;
  b->ht_fact      = 0;
  b->ht_total_ct  = 0;
  b->ht_insert_ct = 0;

  /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
  b->ijonly = PETSC_FALSE;

  b->in_loc = 0;
  b->v_loc  = 0;
  b->n_loc  = 0;

  /* compose the type-specific methods that generic Mat routines look up by name */
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPISBAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPISBAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPISBAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPISBAIJSetPreallocation_C",MatMPISBAIJSetPreallocation_MPISBAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPISBAIJSetPreallocationCSR_C",MatMPISBAIJSetPreallocationCSR_MPISBAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpisbaij_mpisbstrm_C",MatConvert_MPISBAIJ_MPISBSTRM);CHKERRQ(ierr);
#if defined(PETSC_HAVE_ELEMENTAL)
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpisbaij_elemental_C",MatConvert_MPISBAIJ_Elemental);CHKERRQ(ierr);
#endif

  /* SBAIJ matrices are symmetric by construction */
  B->symmetric                  = PETSC_TRUE;
  B->structurally_symmetric     = PETSC_TRUE;
  B->symmetric_set              = PETSC_TRUE;
  B->structurally_symmetric_set = PETSC_TRUE;

  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPISBAIJ);CHKERRQ(ierr);
  ierr      = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPISBAIJ matrix 1","Mat");CHKERRQ(ierr);
  ierr      = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",flg,&flg,NULL);CHKERRQ(ierr);
  if (flg) {
    PetscReal fact = 1.39;
    ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
    if (fact <= 1.0) fact = 1.39;  /* reject non-sensible factors; fall back to the default */
    ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
    ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2132 
2133 /*MC
2134    MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices.
2135 
2136    This matrix type is identical to MATSEQSBAIJ when constructed with a single process communicator,
2137    and MATMPISBAIJ otherwise.
2138 
2139    Options Database Keys:
2140 . -mat_type sbaij - sets the matrix type to "sbaij" during a call to MatSetFromOptions()
2141 
2142   Level: beginner
2143 
2144 .seealso: MatCreateMPISBAIJ,MATSEQSBAIJ,MATMPISBAIJ
2145 M*/
2146 
2147 #undef __FUNCT__
2148 #define __FUNCT__ "MatMPISBAIJSetPreallocation"
2149 /*@C
2150    MatMPISBAIJSetPreallocation - For good matrix assembly performance
2151    the user should preallocate the matrix storage by setting the parameters
2152    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2153    performance can be increased by more than a factor of 50.
2154 
2155    Collective on Mat
2156 
2157    Input Parameters:
2158 +  B - the matrix
2159 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
2160           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
2161 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
2162            submatrix  (same for all local rows)
2163 .  d_nnz - array containing the number of block nonzeros in the various block rows
           in the upper triangular and diagonal part of the diagonal portion of the local
2165            (possibly different for each block row) or NULL.  If you plan to factor the matrix you must leave room
2166            for the diagonal entry and set a value even if it is zero.
2167 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2168            submatrix (same for all local rows).
2169 -  o_nnz - array containing the number of nonzeros in the various block rows of the
2170            off-diagonal portion of the local submatrix that is right of the diagonal
2171            (possibly different for each block row) or NULL.
2172 
2173 
2174    Options Database Keys:
2175 .   -mat_no_unroll - uses code that does not unroll the loops in the
2176                      block calculations (much slower)
2177 .   -mat_block_size - size of the blocks to use
2178 
2179    Notes:
2180 
2181    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one processor
   then it must be used on all processors that share the object for that argument.
2183 
2184    If the *_nnz parameter is given then the *_nz parameter is ignored
2185 
2186    Storage Information:
2187    For a square global matrix we define each processor's diagonal portion
2188    to be its local rows and the corresponding columns (a square submatrix);
2189    each processor's off-diagonal portion encompasses the remainder of the
2190    local matrix (a rectangular submatrix).
2191 
2192    The user can specify preallocated storage for the diagonal part of
2193    the local submatrix with either d_nz or d_nnz (not both).  Set
2194    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
2195    memory allocation.  Likewise, specify preallocated storage for the
2196    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
2197 
2198    You can call MatGetInfo() to get information on how effective the preallocation was;
2199    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
2200    You can also run with the option -info and look for messages with the string
2201    malloc in them to see if additional memory allocation was needed.
2202 
2203    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2204    the figure below we depict these three local rows and all columns (0-11).
2205 
2206 .vb
2207            0 1 2 3 4 5 6 7 8 9 10 11
2208           --------------------------
2209    row 3  |. . . d d d o o o o  o  o
2210    row 4  |. . . d d d o o o o  o  o
2211    row 5  |. . . d d d o o o o  o  o
2212           --------------------------
2213 .ve
2214 
2215    Thus, any entries in the d locations are stored in the d (diagonal)
2216    submatrix, and any entries in the o locations are stored in the
2217    o (off-diagonal) submatrix.  Note that the d matrix is stored in
2218    MatSeqSBAIJ format and the o submatrix in MATSEQBAIJ format.
2219 
2220    Now d_nz should indicate the number of block nonzeros per row in the upper triangular
2221    plus the diagonal part of the d matrix,
2222    and o_nz should indicate the number of block nonzeros per row in the o matrix
2223 
2224    In general, for PDE problems in which most nonzeros are near the diagonal,
2225    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
2226    or you will get TERRIBLE performance; see the users' manual chapter on
2227    matrices.
2228 
2229    Level: intermediate
2230 
2231 .keywords: matrix, block, aij, compressed row, sparse, parallel
2232 
2233 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateBAIJ(), PetscSplitOwnership()
2234 @*/
/* Public entry point; the manual page above documents the parameters. */
PetscErrorCode  MatMPISBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  PetscValidLogicalCollectiveInt(B,bs,2);
  /* dispatch to the type-specific implementation composed on B (a no-op for other matrix types) */
  ierr = PetscTryMethod(B,"MatMPISBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2246 
2247 #undef __FUNCT__
2248 #define __FUNCT__ "MatCreateSBAIJ"
2249 /*@C
2250    MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format
2251    (block compressed row).  For good matrix assembly performance
2252    the user should preallocate the matrix storage by setting the parameters
2253    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2254    performance can be increased by more than a factor of 50.
2255 
2256    Collective on MPI_Comm
2257 
2258    Input Parameters:
2259 +  comm - MPI communicator
2260 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
2261           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
2262 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
2263            This value should be the same as the local size used in creating the
2264            y vector for the matrix-vector product y = Ax.
2265 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
2266            This value should be the same as the local size used in creating the
2267            x vector for the matrix-vector product y = Ax.
2268 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
2269 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
2270 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
2271            submatrix  (same for all local rows)
.  d_nnz - array containing the number of block nonzeros in the various block rows
           in the upper triangular and diagonal portion of the local submatrix
           (possibly different for each block row) or NULL.
2275            If you plan to factor the matrix you must leave room for the diagonal entry and
2276            set its value even if it is zero.
2277 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2278            submatrix (same for all local rows).
-  o_nnz - array containing the number of block nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row) or NULL.
2282 
2283    Output Parameter:
2284 .  A - the matrix
2285 
2286    Options Database Keys:
2287 .   -mat_no_unroll - uses code that does not unroll the loops in the
2288                      block calculations (much slower)
2289 .   -mat_block_size - size of the blocks to use
2290 .   -mat_mpi - use the parallel matrix data structures even on one processor
2291                (defaults to using SeqBAIJ format on one processor)
2292 
2293    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
   MatXXXXSetPreallocation() paradigm instead of this routine directly.
2295    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
2296 
2297    Notes:
2298    The number of rows and columns must be divisible by blocksize.
2299    This matrix type does not support complex Hermitian operation.
2300 
2301    The user MUST specify either the local or global matrix dimensions
2302    (possibly both).
2303 
2304    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one processor
   then it must be used on all processors that share the object for that argument.
2306 
2307    If the *_nnz parameter is given then the *_nz parameter is ignored
2308 
2309    Storage Information:
2310    For a square global matrix we define each processor's diagonal portion
2311    to be its local rows and the corresponding columns (a square submatrix);
2312    each processor's off-diagonal portion encompasses the remainder of the
2313    local matrix (a rectangular submatrix).
2314 
2315    The user can specify preallocated storage for the diagonal part of
2316    the local submatrix with either d_nz or d_nnz (not both).  Set
2317    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
2318    memory allocation.  Likewise, specify preallocated storage for the
2319    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
2320 
2321    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2322    the figure below we depict these three local rows and all columns (0-11).
2323 
2324 .vb
2325            0 1 2 3 4 5 6 7 8 9 10 11
2326           --------------------------
2327    row 3  |. . . d d d o o o o  o  o
2328    row 4  |. . . d d d o o o o  o  o
2329    row 5  |. . . d d d o o o o  o  o
2330           --------------------------
2331 .ve
2332 
2333    Thus, any entries in the d locations are stored in the d (diagonal)
2334    submatrix, and any entries in the o locations are stored in the
2335    o (off-diagonal) submatrix.  Note that the d matrix is stored in
2336    MatSeqSBAIJ format and the o submatrix in MATSEQBAIJ format.
2337 
2338    Now d_nz should indicate the number of block nonzeros per row in the upper triangular
2339    plus the diagonal part of the d matrix,
2340    and o_nz should indicate the number of block nonzeros per row in the o matrix.
2341    In general, for PDE problems in which most nonzeros are near the diagonal,
2342    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
2343    or you will get TERRIBLE performance; see the users' manual chapter on
2344    matrices.
2345 
2346    Level: intermediate
2347 
2348 .keywords: matrix, block, aij, compressed row, sparse, parallel
2349 
2350 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateBAIJ()
2351 @*/
2352 
2353 PetscErrorCode  MatCreateSBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
2354 {
2355   PetscErrorCode ierr;
2356   PetscMPIInt    size;
2357 
2358   PetscFunctionBegin;
2359   ierr = MatCreate(comm,A);CHKERRQ(ierr);
2360   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
2361   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2362   if (size > 1) {
2363     ierr = MatSetType(*A,MATMPISBAIJ);CHKERRQ(ierr);
2364     ierr = MatMPISBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
2365   } else {
2366     ierr = MatSetType(*A,MATSEQSBAIJ);CHKERRQ(ierr);
2367     ierr = MatSeqSBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
2368   }
2369   PetscFunctionReturn(0);
2370 }
2371 
2372 
2373 #undef __FUNCT__
2374 #define __FUNCT__ "MatDuplicate_MPISBAIJ"
/* Duplicate an MPISBAIJ matrix: create a new Mat of identical size/type,
   copy the per-process block bookkeeping from the old Mat_MPISBAIJ, clone
   the colmap/garray communication metadata, rebuild the slvec work vectors
   (sub-vectors aliasing slvec0/slvec1), and duplicate the diagonal (A) and
   off-diagonal (B) sequential blocks; values are copied per cpvalues. */
static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
{
  Mat            mat;
  Mat_MPISBAIJ   *a,*oldmat = (Mat_MPISBAIJ*)matin->data;
  PetscErrorCode ierr;
  PetscInt       len=0,nt,bs=matin->rmap->bs,mbs=oldmat->mbs;
  PetscScalar    *array;

  PetscFunctionBegin;
  *newmat = 0;

  /* create the shell of the duplicate with the same sizes, type and layouts */
  ierr = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
  ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
  /* copy the whole function table so any overridden operations carry over */
  ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);

  /* the duplicate starts out preallocated and assembled */
  mat->factortype   = matin->factortype;
  mat->preallocated = PETSC_TRUE;
  mat->assembled    = PETSC_TRUE;
  mat->insertmode   = NOT_SET_VALUES;

  /* copy block-count bookkeeping (local/global block rows and columns) */
  a      = (Mat_MPISBAIJ*)mat->data;
  a->bs2 = oldmat->bs2;
  a->mbs = oldmat->mbs;
  a->nbs = oldmat->nbs;
  a->Mbs = oldmat->Mbs;
  a->Nbs = oldmat->Nbs;


  a->size         = oldmat->size;
  a->rank         = oldmat->rank;
  a->donotstash   = oldmat->donotstash;
  a->roworiented  = oldmat->roworiented;
  /* per-instance scratch state is not copied; it is rebuilt on demand */
  a->rowindices   = 0;
  a->rowvalues    = 0;
  a->getrowactive = PETSC_FALSE;
  a->barray       = 0;
  a->rstartbs     = oldmat->rstartbs;
  a->rendbs       = oldmat->rendbs;
  a->cstartbs     = oldmat->cstartbs;
  a->cendbs       = oldmat->cendbs;

  /* hash table stuff: keep the configuration flags but start with an empty table */
  a->ht           = 0;
  a->hd           = 0;
  a->ht_size      = 0;
  a->ht_flag      = oldmat->ht_flag;
  a->ht_fact      = oldmat->ht_fact;
  a->ht_total_ct  = 0;
  a->ht_insert_ct = 0;

  ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+2)*sizeof(PetscInt));CHKERRQ(ierr);
  /* colmap: global-to-local block-column lookup for B (table or dense array) */
  if (oldmat->colmap) {
#if defined(PETSC_USE_CTABLE)
    ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
#else
    ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  } else a->colmap = 0;

  /* garray: one entry per block column of B (len = nbs of B) */
  if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
    ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
  } else a->garray = 0;

  ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
  ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
  ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);

  ierr = VecDuplicate(oldmat->slvec0,&a->slvec0);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0);CHKERRQ(ierr);
  ierr = VecDuplicate(oldmat->slvec1,&a->slvec1);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1);CHKERRQ(ierr);

  /* slvec1a/slvec1b (and slvec0b) are sub-vectors aliasing the storage of
     slvec1 (slvec0): the first bs*mbs entries are the local part, the
     remaining nt-bs*mbs entries the ghost part */
  ierr = VecGetLocalSize(a->slvec1,&nt);CHKERRQ(ierr);
  ierr = VecGetArray(a->slvec1,&array);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,bs*mbs,array,&a->slvec1a);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nt-bs*mbs,array+bs*mbs,&a->slvec1b);CHKERRQ(ierr);
  ierr = VecRestoreArray(a->slvec1,&array);CHKERRQ(ierr);
  ierr = VecGetArray(a->slvec0,&array);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,nt-bs*mbs,array+bs*mbs,&a->slvec0b);CHKERRQ(ierr);
  ierr = VecRestoreArray(a->slvec0,&array);CHKERRQ(ierr);
  /* NOTE(review): slvec0 and slvec1 were already logged as children right
     after their VecDuplicate above; the next two calls log them a second
     time — looks redundant, confirm before removing */
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec0b);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1a);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->slvec1b);CHKERRQ(ierr);

  /* ierr =  VecScatterCopy(oldmat->sMvctx,&a->sMvctx); - not written yet, replaced by the lazy trick: */
  ierr      = PetscObjectReference((PetscObject)oldmat->sMvctx);CHKERRQ(ierr);
  a->sMvctx = oldmat->sMvctx;
  ierr      = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->sMvctx);CHKERRQ(ierr);

  /* finally duplicate the two sequential blocks (values copied per cpvalues) */
  ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
  ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
  ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
  ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
  ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
  *newmat = mat;
  PetscFunctionReturn(0);
}
2483 
2484 #undef __FUNCT__
2485 #define __FUNCT__ "MatLoad_MPISBAIJ"
/* Load an MPISBAIJ matrix from a PETSc binary viewer.  Process 0 reads the
   header, per-row lengths, column indices and values from the file and
   ships each process its slice with point-to-point messages; every process
   then counts diagonal/off-diagonal block nonzeros to preallocate before
   inserting its own rows.  If the global size is not divisible by the
   blocksize, extra identity rows are appended for padding. */
PetscErrorCode MatLoad_MPISBAIJ(Mat newmat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscInt       i,nz,j,rstart,rend;
  PetscScalar    *vals,*buf;
  MPI_Comm       comm;
  MPI_Status     status;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,*sndcounts = 0,*browners,maxnz,*rowners,mmbs;
  PetscInt       header[4],*rowlengths = 0,M,N,m,*cols,*locrowlens;
  PetscInt       *procsnz = 0,jj,*mycols,*ibuf;
  PetscInt       bs = newmat->rmap->bs,Mbs,mbs,extra_rows;
  PetscInt       *dlens,*odlens,*mask,*masked1,*masked2,rowcount,odcount;
  PetscInt       dcount,kmax,k,nzcount,tmp;
  int            fd;

  PetscFunctionBegin;
  /* force binary viewer to load .info file if it has not yet done so */
  ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPISBAIJ matrix 2","Mat");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  if (bs < 0) bs = 1;

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  /* only process 0 reads the file; header = classid, M, N, and a format
     tag (a negative tag marks a special format this loader cannot read) */
  if (!rank) {
    ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
    if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
    if (header[3] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as MPISBAIJ");
  }

  ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
  M    = header[1];
  N    = header[2];

  /* If global sizes are set, check if they are consistent with that given in the file */
  if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
  if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols:Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);

  if (M != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Can only do square matrices");

  /*
     This code adds extra rows to make sure the number of rows is
     divisible by the blocksize
  */
  Mbs        = M/bs;
  extra_rows = bs - M + bs*(Mbs);
  if (extra_rows == bs) extra_rows = 0;
  else                  Mbs++;
  if (extra_rows &&!rank) {
    ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
  }

  /* determine ownership of all rows */
  if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
    mbs = Mbs/size + ((Mbs % size) > rank);
    m   = mbs*bs;
  } else { /* User Set */
    m   = newmat->rmap->n;
    mbs = m/bs;
  }
  /* rowners/browners: prefix sums of block-row / point-row counts per process */
  ierr       = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
  ierr       = PetscMPIIntCast(mbs,&mmbs);CHKERRQ(ierr);
  ierr       = MPI_Allgather(&mmbs,1,MPI_INT,rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
  rowners[0] = 0;
  for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
  for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  ierr = PetscMalloc1((rend-rstart)*bs,&locrowlens);CHKERRQ(ierr);
  if (!rank) {
    /* padding rows get length 1 (their diagonal entry) */
    ierr = PetscMalloc1(M+extra_rows,&rowlengths);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,rowlengths,M,PETSC_INT);CHKERRQ(ierr);
    for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;
    ierr = PetscMalloc1(size,&sndcounts);CHKERRQ(ierr);
    for (i=0; i<size; i++) sndcounts[i] = browners[i+1] - browners[i];
    ierr = MPI_Scatterv(rowlengths,sndcounts,browners,MPIU_INT,locrowlens,(rend-rstart)*bs,MPIU_INT,0,comm);CHKERRQ(ierr);
    ierr = PetscFree(sndcounts);CHKERRQ(ierr);
  } else {
    ierr = MPI_Scatterv(0,0,0,MPIU_INT,locrowlens,(rend-rstart)*bs,MPIU_INT,0,comm);CHKERRQ(ierr);
  }

  if (!rank) {   /* procs[0] */
    /* calculate the number of nonzeros on each processor */
    ierr = PetscMalloc1(size,&procsnz);CHKERRQ(ierr);
    ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0; i<size; i++) {
      for (j=rowners[i]*bs; j< rowners[i+1]*bs; j++) {
        procsnz[i] += rowlengths[j];
      }
    }
    ierr = PetscFree(rowlengths);CHKERRQ(ierr);

    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);

    /* read in my part of the matrix column indices  */
    nz     = procsnz[0];
    ierr   = PetscMalloc1(nz,&ibuf);CHKERRQ(ierr);
    mycols = ibuf;
    /* on one process the padding entries are not in the file; append them */
    if (size == 1) nz -= extra_rows;
    ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
    if (size == 1) {
      for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
    }

    /* read in the indices of every other process (except the last) and ship them off */
    for (i=1; i<size-1; i++) {
      nz   = procsnz[i];
      ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
      ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
    }
    /* read in the stuff for the last proc */
    if (size != 1) {
      nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
      ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
      for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
      ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(cols);CHKERRQ(ierr);
  } else {  /* procs[i], i>0 */
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) nz += locrowlens[i];
    ierr   = PetscMalloc1(nz,&ibuf);CHKERRQ(ierr);
    mycols = ibuf;
    /* receive message of column indices*/
    ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
    ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
    if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
  }

  /* loop over local rows, determining number of off diagonal entries */
  ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
  ierr     = PetscMalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
  ierr     = PetscMemzero(mask,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr     = PetscMemzero(masked1,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr     = PetscMemzero(masked2,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
  rowcount = 0;
  nzcount  = 0;
  for (i=0; i<mbs; i++) {
    dcount  = 0;
    odcount = 0;
    for (j=0; j<bs; j++) {
      kmax = locrowlens[rowcount];
      for (k=0; k<kmax; k++) {
        /* tmp is a block column index; comparing it against the block-row
           ownership range [rstart,rend) is valid because the matrix is square */
        tmp = mycols[nzcount++]/bs; /* block col. index */
        if (!mask[tmp]) {
          mask[tmp] = 1;
          if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp; /* entry in off-diag portion */
          else masked1[dcount++] = tmp; /* entry in diag portion */
        }
      }
      rowcount++;
    }

    dlens[i]  = dcount;  /* d_nzz[i] */
    odlens[i] = odcount; /* o_nzz[i] */

    /* zero out the mask elements we set */
    for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
    for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
  }
  ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
  ierr = MatSetOption(newmat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);

  if (!rank) {
    ierr = PetscMalloc1(maxnz,&buf);CHKERRQ(ierr);
    /* read in my part of the matrix numerical values  */
    nz     = procsnz[0];
    vals   = buf;
    mycols = ibuf;
    if (size == 1) nz -= extra_rows;
    ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
    /* padding rows get a unit diagonal so the matrix stays nonsingular */
    if (size == 1) {
      for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
    }

    /* insert into matrix */
    jj = rstart*bs;
    for (i=0; i<m; i++) {
      ierr    = MatSetValues(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
      mycols += locrowlens[i];
      vals   += locrowlens[i];
      jj++;
    }

    /* read in other processors (except the last one) and ship out */
    for (i=1; i<size-1; i++) {
      nz   = procsnz[i];
      vals = buf;
      ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
    }
    /* the last proc; note i == size-1 here, left over from the loop above */
    if (size != 1) {
      nz   = procsnz[i] - extra_rows;
      vals = buf;
      ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
      ierr = MPI_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(procsnz);CHKERRQ(ierr);

  } else {
    /* receive numeric values */
    ierr = PetscMalloc1(nz,&buf);CHKERRQ(ierr);

    /* receive message of values*/
    vals   = buf;
    mycols = ibuf;
    ierr   = MPI_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm,&status);CHKERRQ(ierr);
    ierr   = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr);
    if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");

    /* insert into matrix */
    jj = rstart*bs;
    for (i=0; i<m; i++) {
      ierr    = MatSetValues_MPISBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
      mycols += locrowlens[i];
      vals   += locrowlens[i];
      jj++;
    }
  }

  ierr = PetscFree(locrowlens);CHKERRQ(ierr);
  ierr = PetscFree(buf);CHKERRQ(ierr);
  ierr = PetscFree(ibuf);CHKERRQ(ierr);
  ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
  ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
  ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2730 
2731 #undef __FUNCT__
2732 #define __FUNCT__ "MatMPISBAIJSetHashTableFactor"
2733 /*XXXXX@
2734    MatMPISBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
2735 
2736    Input Parameters:
2737 .  mat  - the matrix
2738 .  fact - factor
2739 
2740    Not Collective on Mat, each process can have a different hash factor
2741 
2742    Level: advanced
2743 
2744   Notes:
2745    This can also be set by the command line option: -mat_use_hash_table fact
2746 
2747 .keywords: matrix, hashtable, factor, HT
2748 
2749 .seealso: MatSetOption()
2750 @XXXXX*/
2751 
2752 
2753 #undef __FUNCT__
2754 #define __FUNCT__ "MatGetRowMaxAbs_MPISBAIJ"
/* Compute the maximum absolute value in each row of an MPISBAIJ matrix.
   Only the upper triangle is stored, so an entry of the off-diagonal block
   B contributes both to a row owned here and, by symmetry, to a row owned
   by a later process; those symmetric contributions are accumulated in
   `work` and forwarded downstream with point-to-point messages.
   Returning the column index of the max (idx != NULL) is not supported. */
PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPISBAIJ   *a = (Mat_MPISBAIJ*)A->data;
  Mat_SeqBAIJ    *b = (Mat_SeqBAIJ*)(a->B)->data;
  PetscReal      atmp;
  PetscReal      *work,*svalues,*rvalues;
  PetscErrorCode ierr;
  PetscInt       i,bs,mbs,*bi,*bj,brow,j,ncols,krow,kcol,col,row,Mbs,bcol;
  PetscMPIInt    rank,size;
  PetscInt       *rowners_bs,dest,count,source;
  PetscScalar    *va;
  MatScalar      *ba;
  MPI_Status     stat;

  PetscFunctionBegin;
  if (idx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Send email to petsc-maint@mcs.anl.gov");
  /* start from the row maxima of the diagonal block A */
  ierr = MatGetRowMaxAbs(a->A,v,NULL);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);

  bs  = A->rmap->bs;
  mbs = a->mbs;
  Mbs = a->Mbs;
  ba  = b->a;
  bi  = b->i;
  bj  = b->j;

  /* find ownerships (point-row ranges of every process) */
  rowners_bs = A->rmap->range;

  /* each proc creates an array to be distributed */
  ierr = PetscMalloc1(bs*Mbs,&work);CHKERRQ(ierr);
  ierr = PetscMemzero(work,bs*Mbs*sizeof(PetscReal));CHKERRQ(ierr);

  /* row_max for B: the last process owns no stored upper-triangular
     off-diagonal entries, hence the rank != size-1 guard */
  if (rank != size-1) {
    for (i=0; i<mbs; i++) {
      ncols = bi[1] - bi[0]; bi++;
      brow  = bs*i;
      for (j=0; j<ncols; j++) {
        bcol = bs*(*bj);
        for (kcol=0; kcol<bs; kcol++) {
          col  = bcol + kcol;                /* local col index */
          /* NOTE(review): this assumes B's column numbering starts right at the
             end of this rank's own row range (no garray translation) — verify */
          col += rowners_bs[rank+1];      /* global col index */
          for (krow=0; krow<bs; krow++) {
            atmp = PetscAbsScalar(*ba); ba++;
            row  = brow + krow;   /* local row index */
            if (PetscRealPart(va[row]) < atmp) va[row] = atmp;
            if (work[col] < atmp) work[col] = atmp;  /* symmetric contribution to row `col` */
          }
        }
        bj++;
      }
    }

    /* send values to its owners */
    for (dest=rank+1; dest<size; dest++) {
      svalues = work + rowners_bs[dest];
      count   = rowners_bs[dest+1]-rowners_bs[dest];
      ierr    = MPI_Send(svalues,count,MPIU_REAL,dest,rank,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    }
  }

  /* receive values: every earlier process sends exactly one message */
  if (rank) {
    rvalues = work;
    count   = rowners_bs[rank+1]-rowners_bs[rank];
    for (source=0; source<rank; source++) {
      ierr = MPI_Recv(rvalues,count,MPIU_REAL,MPI_ANY_SOURCE,MPI_ANY_TAG,PetscObjectComm((PetscObject)A),&stat);CHKERRQ(ierr);
      /* process values */
      for (i=0; i<count; i++) {
        if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i];
      }
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = PetscFree(work);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2837 
2838 #undef __FUNCT__
2839 #define __FUNCT__ "MatSOR_MPISBAIJ"
/* SOR/SSOR relaxation for MPISBAIJ matrices (block size 1 only).  Supported
   modes: SOR_APPLY_UPPER (delegated to the diagonal block), local symmetric
   sweeps (with off-process coupling gathered through the slvec scatter),
   single zero-guess local forward/backward sweeps, and Eisenstat's trick. */
PetscErrorCode MatSOR_MPISBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ*)matin->data;
  PetscErrorCode    ierr;
  PetscInt          mbs=mat->mbs,bs=matin->rmap->bs;
  PetscScalar       *x,*ptr,*from;
  Vec               bb1;
  const PetscScalar *b;

  PetscFunctionBegin;
  if (its <= 0 || lits <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (bs > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"SSOR for block size > 1 is not yet implemented");

  /* SOR_APPLY_UPPER is purely local: forward to the diagonal block's sor */
  if (flag == SOR_APPLY_UPPER) {
    ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
    /* with a zero initial guess the first sweep needs no off-process data */
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
      its--;
    }

    ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
    while (its--) {

      /* lower triangular part: slvec0b = - B^T*xx */
      ierr = (*mat->B->ops->multtranspose)(mat->B,xx,mat->slvec0b);CHKERRQ(ierr);

      /* copy xx into slvec0a */
      ierr = VecGetArray(mat->slvec0,&ptr);CHKERRQ(ierr);
      ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
      ierr = PetscMemcpy(ptr,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
      ierr = VecRestoreArray(mat->slvec0,&ptr);CHKERRQ(ierr);

      /* negate so the scatter below subtracts the coupling terms from bb */
      ierr = VecScale(mat->slvec0,-1.0);CHKERRQ(ierr);

      /* copy bb into slvec1a */
      ierr = VecGetArray(mat->slvec1,&ptr);CHKERRQ(ierr);
      ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
      ierr = PetscMemcpy(ptr,b,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
      ierr = VecRestoreArray(mat->slvec1,&ptr);CHKERRQ(ierr);

      /* set slvec1b = 0 */
      ierr = VecSet(mat->slvec1b,0.0);CHKERRQ(ierr);

      /* scatter-add slvec0 into slvec1 (exchanges the off-process pieces) */
      ierr = VecScatterBegin(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
      ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
      ierr = VecScatterEnd(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* upper triangular part: bb1 = B*slvec1b + slvec1a; since slvec0 was
         negated above this yields bb minus the off-diagonal coupling terms */
      ierr = (*mat->B->ops->multadd)(mat->B,mat->slvec1b,mat->slvec1a,bb1);CHKERRQ(ierr);

      /* local diagonal sweep */
      ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&bb1);CHKERRQ(ierr);
  } else if ((flag & SOR_LOCAL_FORWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
    ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
  } else if ((flag & SOR_LOCAL_BACKWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
    ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
  } else if (flag & SOR_EISENSTAT) {
    Vec               xx1;
    PetscBool         hasop;
    const PetscScalar *diag;
    PetscScalar       *sl,scale = (omega - 2.0)/omega;
    PetscInt          i,n;

    /* persistent work vectors, created lazily on first use */
    if (!mat->xx1) {
      ierr = VecDuplicate(bb,&mat->xx1);CHKERRQ(ierr);
      ierr = VecDuplicate(bb,&mat->bb1);CHKERRQ(ierr);
    }
    xx1 = mat->xx1;
    bb1 = mat->bb1;

    ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr);

    if (!mat->diag) {
      /* this is wrong for same matrix with new nonzero values */
      ierr = MatCreateVecs(matin,&mat->diag,NULL);CHKERRQ(ierr);
      ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr);
    }
    ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);

    if (hasop) {
      /* NOTE(review): the diagonal-block product is written into bb1 but the
         AYPX below updates mat->slvec1a instead — inconsistent with the else
         branch (which writes slvec1a = b - diag*x); looks like a bug, verify */
      ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr);
      ierr = VecAYPX(mat->slvec1a,scale,bb);CHKERRQ(ierr);
    } else {
      /*
          These two lines are replaced by code that may be a bit faster for a good compiler
      ierr = VecPointwiseMult(mat->slvec1a,mat->diag,xx);CHKERRQ(ierr);
      ierr = VecAYPX(mat->slvec1a,scale,bb);CHKERRQ(ierr);
      */
      ierr = VecGetArray(mat->slvec1a,&sl);CHKERRQ(ierr);
      ierr = VecGetArrayRead(mat->diag,&diag);CHKERRQ(ierr);
      ierr = VecGetArrayRead(bb,&b);CHKERRQ(ierr);
      ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
      ierr = VecGetLocalSize(xx,&n);CHKERRQ(ierr);
      if (omega == 1.0) {
        /* scale == -1 when omega == 1, so sl = b - diag*x with one fewer multiply */
        for (i=0; i<n; i++) sl[i] = b[i] - diag[i]*x[i];
        ierr = PetscLogFlops(2.0*n);CHKERRQ(ierr);
      } else {
        for (i=0; i<n; i++) sl[i] = b[i] + scale*diag[i]*x[i];
        ierr = PetscLogFlops(3.0*n);CHKERRQ(ierr);
      }
      ierr = VecRestoreArray(mat->slvec1a,&sl);CHKERRQ(ierr);
      ierr = VecRestoreArrayRead(mat->diag,&diag);CHKERRQ(ierr);
      ierr = VecRestoreArrayRead(bb,&b);CHKERRQ(ierr);
      ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
    }

    /* multiply off-diagonal portion of matrix */
    ierr = VecSet(mat->slvec1b,0.0);CHKERRQ(ierr);
    ierr = (*mat->B->ops->multtranspose)(mat->B,xx,mat->slvec0b);CHKERRQ(ierr);
    ierr = VecGetArray(mat->slvec0,&from);CHKERRQ(ierr);
    ierr = VecGetArray(xx,&x);CHKERRQ(ierr);
    ierr = PetscMemcpy(from,x,bs*mbs*sizeof(MatScalar));CHKERRQ(ierr);
    ierr = VecRestoreArray(mat->slvec0,&from);CHKERRQ(ierr);
    ierr = VecRestoreArray(xx,&x);CHKERRQ(ierr);
    ierr = VecScatterBegin(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mat->sMvctx,mat->slvec0,mat->slvec1,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = (*mat->B->ops->multadd)(mat->B,mat->slvec1b,mat->slvec1a,mat->slvec1a);CHKERRQ(ierr);

    /* local sweep */
    ierr = (*mat->A->ops->sor)(mat->A,mat->slvec1a,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr);
    ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr);
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatSORType is not supported for SBAIJ matrix format");
  PetscFunctionReturn(0);
}
2971 
2972 #undef __FUNCT__
2973 #define __FUNCT__ "MatSOR_MPISBAIJ_2comm"
/*
   MatSOR_MPISBAIJ_2comm - SSOR relaxation for an MPI SBAIJ matrix using two
   vector scatters per iteration (forward to gather off-process x, reverse to
   accumulate the lower-triangular contribution B^T*x back to its owners).

   Because only the upper triangle of the symmetric matrix is stored (diagonal
   block A plus off-diagonal block B), each sweep must account for the missing
   lower part explicitly: bb1 = bb - B^T*xx - B*x, then a local symmetric sweep
   with A is applied to bb1.

   Only SOR_LOCAL_SYMMETRIC_SWEEP is supported, and only for block size 1.
   Interface is the standard MatSOR operation signature.
*/
PetscErrorCode MatSOR_MPISBAIJ_2comm(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
{
  Mat_MPISBAIJ   *mat = (Mat_MPISBAIJ*)matin->data;
  PetscErrorCode ierr;
  Vec            lvec1,bb1;

  PetscFunctionBegin;
  if (its <= 0 || lits <= 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D and local its %D both positive",its,lits);
  if (matin->rmap->bs > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"SSOR for block size > 1 is not yet implemented");

  if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      /* first iteration needs no off-process data since the initial guess is zero */
      ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr);
      its--;
    }

    ierr = VecDuplicate(mat->lvec,&lvec1);CHKERRQ(ierr);
    ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
    while (its--) {
      /* start gathering off-process entries of xx; local work below overlaps the communication */
      ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

      /* lower diagonal part: bb1 = bb - B^T*xx */
      ierr = (*mat->B->ops->multtranspose)(mat->B,xx,lvec1);CHKERRQ(ierr);
      ierr = VecScale(lvec1,-1.0);CHKERRQ(ierr);

      ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecCopy(bb,bb1);CHKERRQ(ierr);
      /* reverse scatter adds each process's -B^T*xx contribution into the owning rows of bb1 */
      ierr = VecScatterBegin(mat->Mvctx,lvec1,bb1,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);

      /* upper diagonal part: bb1 = bb1 - B*x */
      ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
      ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr);

      ierr = VecScatterEnd(mat->Mvctx,lvec1,bb1,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);

      /* diagonal sweep */
      ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr);
    }
    ierr = VecDestroy(&lvec1);CHKERRQ(ierr);
    ierr = VecDestroy(&bb1);CHKERRQ(ierr);
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatSORType is not supported for SBAIJ matrix format");
  PetscFunctionReturn(0);
}
3017 
3018 #undef __FUNCT__
3019 #define __FUNCT__ "MatCreateMPISBAIJWithArrays"
3020 /*@
3021      MatCreateMPISBAIJWithArrays - creates a MPI SBAIJ matrix using arrays that contain in standard
3022          CSR format the local rows.
3023 
3024    Collective on MPI_Comm
3025 
3026    Input Parameters:
3027 +  comm - MPI communicator
3028 .  bs - the block size, only a block size of 1 is supported
3029 .  m - number of local rows (Cannot be PETSC_DECIDE)
3030 .  n - This value should be the same as the local size used in creating the
3031        x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
3032        calculated if N is given) For square matrices n is almost always m.
3033 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3034 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3035 .   i - row indices
3036 .   j - column indices
3037 -   a - matrix values
3038 
3039    Output Parameter:
3040 .   mat - the matrix
3041 
3042    Level: intermediate
3043 
3044    Notes:
       The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
     thus you CANNOT change the matrix entries by changing the values of a[] after you have
     called this routine. (MatCreateMPIAIJWithSplitArrays() avoids the copy, but note that it
     creates an AIJ matrix, not an SBAIJ matrix; there is currently no split-arrays variant for SBAIJ.)
3048 
3049        The i and j indices are 0 based, and i indices are indices corresponding to the local j array.
3050 
3051 .keywords: matrix, aij, compressed row, sparse, parallel
3052 
3053 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
3054           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
3055 @*/
3056 PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
3057 {
3058   PetscErrorCode ierr;
3059 
3060 
3061   PetscFunctionBegin;
3062   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
3063   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
3064   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
3065   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
3066   ierr = MatSetType(*mat,MATMPISBAIJ);CHKERRQ(ierr);
3067   ierr = MatMPISBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
3068   PetscFunctionReturn(0);
3069 }
3070 
3071 
3072 #undef __FUNCT__
3073 #define __FUNCT__ "MatMPISBAIJSetPreallocationCSR"
3074 /*@C
   MatMPISBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in SBAIJ format
   (block compressed row storage of the upper triangular part of a symmetric matrix).
3077 
3078    Collective on MPI_Comm
3079 
3080    Input Parameters:
3081 +  B - the matrix
3082 .  bs - the block size
3083 .  i - the indices into j for the start of each local row (starts with zero)
3084 .  j - the column indices for each local row (starts with zero) these must be sorted for each row
3085 -  v - optional values in the matrix
3086 
3087    Level: developer
3088 
3089 .keywords: matrix, aij, compressed row, sparse, parallel
3090 
3091 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ
3092 @*/
PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Dispatch to the type-specific implementation registered as
     "MatMPISBAIJSetPreallocationCSR_C" on this matrix; PetscTryMethod is a
     silent no-op if the matrix type does not provide one */
  ierr = PetscTryMethod(B,"MatMPISBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3101 
3102 #undef __FUNCT__
3103 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPISBAIJ"
3104 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPISBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
3105 {
3106   PetscErrorCode ierr;
3107   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
3108   PetscInt       *indx;
3109   PetscScalar    *values;
3110 
3111   PetscFunctionBegin;
3112   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
3113   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
3114     Mat_SeqSBAIJ   *a = (Mat_SeqSBAIJ*)inmat->data;
3115     PetscInt       *dnz,*onz,sum,bs,cbs,mbs,Nbs;
3116     PetscInt       *bindx,rmax=a->rmax,j;
3117 
3118     ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
3119     mbs = m/bs; Nbs = N/cbs;
3120     if (n == PETSC_DECIDE) {
3121       ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
3122     }
3123     /* Check sum(n) = Nbs */
3124     ierr = MPI_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
3125     if (sum != Nbs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local columns != global columns %d",Nbs);
3126 
3127     ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
3128     rstart -= mbs;
3129 
3130     ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
3131     ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
3132     ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
3133     for (i=0; i<mbs; i++) {
3134       ierr = MatGetRow_SeqSBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
3135       nnz = nnz/bs;
3136       for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
3137       ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
3138       ierr = MatRestoreRow_SeqSBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
3139     }
3140     ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_FALSE);CHKERRQ(ierr);
3141     ierr = PetscFree(bindx);CHKERRQ(ierr);
3142 
3143     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
3144     ierr = MatSetSizes(*outmat,m,n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
3145     ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
3146     ierr = MatSetType(*outmat,MATMPISBAIJ);CHKERRQ(ierr);
3147     ierr = MatMPISBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
3148     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
3149   }
3150 
3151   /* numeric phase */
3152   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
3153   ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);
3154 
3155   ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_TRUE);CHKERRQ(ierr);
3156   for (i=0; i<m; i++) {
3157     ierr = MatGetRow_SeqSBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
3158     Ii   = i + rstart;
3159     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
3160     ierr = MatRestoreRow_SeqSBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
3161   }
3162   ierr = MatSetOption(inmat,MAT_GETROW_UPPERTRIANGULAR,PETSC_FALSE);CHKERRQ(ierr);
3163   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3164   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3165   PetscFunctionReturn(0);
3166 }
3167