xref: /petsc/src/mat/impls/sbaij/mpi/mpisbaij.c (revision de8b6608baf6aee60dffeb7dae859cca4fb467b0)
1 /*$Id: mpisbaij.c,v 1.7 2000/07/19 16:34:18 hzhang Exp hzhang $*/
2 
3 #include "src/mat/impls/baij/mpi/mpibaij.h"
4 #include "src/vec/vecimpl.h"
5 #include "mpisbaij.h"
6 #include "src/mat/impls/sbaij/seq/sbaij.h"
7 
8 extern int MatSetUpMultiply_MPISBAIJ(Mat);
9 extern int DisAssemble_MPISBAIJ(Mat);
10 extern int MatIncreaseOverlap_MPISBAIJ(Mat,int,IS *,int);
11 extern int MatGetSubMatrices_MPISBAIJ(Mat,int,IS *,IS *,MatReuse,Mat **);
12 extern int MatGetValues_SeqSBAIJ(Mat,int,int *,int,int *,Scalar *);
13 extern int MatSetValues_SeqSBAIJ(Mat,int,int *,int,int *,Scalar *,InsertMode);
14 extern int MatSetValuesBlocked_SeqSBAIJ(Mat,int,int*,int,int*,Scalar*,InsertMode);
15 extern int MatGetRow_SeqSBAIJ(Mat,int,int*,int**,Scalar**);
16 extern int MatRestoreRow_SeqSBAIJ(Mat,int,int*,int**,Scalar**);
17 extern int MatPrintHelp_SeqSBAIJ(Mat);
18 extern int MatZeroRows_SeqSBAIJ(Mat,IS,Scalar*);
19 
20 /*  UGLY, ugly, ugly
21    When MatScalar == Scalar the function MatSetValuesBlocked_MPIBAIJ_MatScalar() does
22    not exist. Otherwise ..._MatScalar() takes matrix elements in single precision and
23    inserts them into the single precision data structure. The function MatSetValuesBlocked_MPIBAIJ()
24    converts the entries into single precision and then calls ..._MatScalar() to put them
25    into the single precision data structures.
26 */
27 #if defined(PETSC_USE_MAT_SINGLE)
28 extern int MatSetValuesBlocked_SeqSBAIJ_MatScalar(Mat,int,int*,int,int*,MatScalar*,InsertMode);
29 extern int MatSetValues_MPISBAIJ_MatScalar(Mat,int,int*,int,int*,MatScalar*,InsertMode);
30 extern int MatSetValuesBlocked_MPISBAIJ_MatScalar(Mat,int,int*,int,int*,MatScalar*,InsertMode);
31 extern int MatSetValues_MPISBAIJ_HT_MatScalar(Mat,int,int*,int,int*,MatScalar*,InsertMode);
32 extern int MatSetValuesBlocked_MPISBAIJ_HT_MatScalar(Mat,int,int*,int,int*,MatScalar*,InsertMode);
33 #else
34 #define MatSetValuesBlocked_SeqSBAIJ_MatScalar      MatSetValuesBlocked_SeqSBAIJ
35 #define MatSetValues_MPISBAIJ_MatScalar             MatSetValues_MPISBAIJ
36 #define MatSetValuesBlocked_MPISBAIJ_MatScalar      MatSetValuesBlocked_MPISBAIJ
37 #define MatSetValues_MPISBAIJ_HT_MatScalar          MatSetValues_MPISBAIJ_HT
38 #define MatSetValuesBlocked_MPISBAIJ_HT_MatScalar   MatSetValuesBlocked_MPISBAIJ_HT
39 #endif
40 
41 EXTERN_C_BEGIN
42 #undef __FUNC__
43 #define __FUNC__ /*<a name="MatStoreValues_MPIBAIJ"></a>*/"MatStoreValues_MPIBAIJ"
44 int MatStoreValues_MPISBAIJ(Mat mat)
45 {
46   Mat_MPIBAIJ *aij = (Mat_MPIBAIJ *)mat->data;
47   int         ierr;
48 
49   PetscFunctionBegin;
50   ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
51   ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
52   PetscFunctionReturn(0);
53 }
54 EXTERN_C_END
55 
56 EXTERN_C_BEGIN
57 #undef __FUNC__
58 #define __FUNC__ /*<a name="MatRetrieveValues_MPIBAIJ"></a>*/"MatRetrieveValues_MPIBAIJ"
59 int MatRetrieveValues_MPISBAIJ(Mat mat)
60 {
61   Mat_MPIBAIJ *aij = (Mat_MPIBAIJ *)mat->data;
62   int         ierr;
63 
64   PetscFunctionBegin;
65   ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
66   ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
67   PetscFunctionReturn(0);
68 }
69 EXTERN_C_END
70 
71 /*
72      Local utility routine that creates a mapping from the global column
73    number to the local number in the off-diagonal part of the local
74    storage of the matrix.  This is done in a non scable way since the
75    length of colmap equals the global matrix length.
76 */
#undef __FUNC__
#define __FUNC__ /*<a name="CreateColmap_MPISBAIJ_Private"></a>*/"CreateColmap_MPISBAIJ_Private"
/* Builds baij->colmap, mapping a global block-column number to the scalar
   offset (i*bs)+1 of that block column inside the local off-diagonal matrix B.
   The stored values are shifted by +1 so that 0 means "not present here". */
static int CreateColmap_MPISBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ *B = (Mat_SeqBAIJ*)baij->B->data;
  int         nbs = B->nbs,i,bs=B->bs,ierr;

  PetscFunctionBegin;
#if defined (PETSC_USE_CTABLE)
  /* sparse table: only the nbs block columns actually present in B are entered;
     keys are also +1-shifted because 0 is the table's "not found" value */
  ierr = PetscTableCreate(baij->nbs/5,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++){
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1);CHKERRQ(ierr);
  }
#else
  /* dense array of length Nbs (global block columns) -- not scalable, as noted
     in the comment above this routine */
  baij->colmap = (int*)PetscMalloc((baij->Nbs+1)*sizeof(int));CHKPTRQ(baij->colmap);
  PLogObjectMemory(mat,baij->Nbs*sizeof(int));
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(int));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}
99 
100 #define CHUNKSIZE  10
101 
/*
   MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv) - inserts or adds one
   scalar entry at local (row,col) into the diagonal block matrix A, growing
   the row (and, if needed, the whole storage by CHUNKSIZE block entries).
   Relies on the caller having a, aa, ai, aj, ailen, aimax, bs, bs2 and the
   scratch variables (rp, ap, brow, bcol, ...) in scope.

   Fix: the PetscMemzero of the newly created gap used sizeof(Scalar); the
   buffer holds MatScalar (cf. the B variant below), so under
   PETSC_USE_MAT_SINGLE the zeroing overran the allocation.
*/
#define  MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv) \
{ \
 \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
      bcol = col/bs; \
      ridx = row % bs; cidx = col % bs; \
      low = 0; high = nrow; \
      while (high-low > 3) { \
        t = (low+high)/2; \
        if (rp[t] > bcol) high = t; \
        else              low  = t; \
      } \
      for (_i=low; _i<high; _i++) { \
        if (rp[_i] > bcol) break; \
        if (rp[_i] == bcol) { \
          bap  = ap +  bs2*_i + bs*cidx + ridx; \
          if (addv == ADD_VALUES) *bap += value;  \
          else                    *bap  = value;  \
          goto a_noinsert; \
        } \
      } \
      if (a->nonew == 1) goto a_noinsert; \
      else if (a->nonew == -1) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Inserting a new nonzero into matrix"); \
      if (nrow >= rmax) { \
        /* there is no extra room in row, therefore enlarge */ \
        int       new_nz = ai[a->mbs] + CHUNKSIZE,len,*new_i,*new_j; \
        MatScalar *new_a; \
 \
        if (a->nonew == -2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Inserting a new nonzero in the matrix"); \
 \
        /* malloc new storage space */ \
        len     = new_nz*(sizeof(int)+bs2*sizeof(MatScalar))+(a->mbs+1)*sizeof(int); \
        new_a   = (MatScalar*)PetscMalloc(len);CHKPTRQ(new_a); \
        new_j   = (int*)(new_a + bs2*new_nz); \
        new_i   = new_j + new_nz; \
 \
        /* copy over old data into new slots */ \
        for (ii=0; ii<brow+1; ii++) {new_i[ii] = ai[ii];} \
        for (ii=brow+1; ii<a->mbs+1; ii++) {new_i[ii] = ai[ii]+CHUNKSIZE;} \
        ierr = PetscMemcpy(new_j,aj,(ai[brow]+nrow)*sizeof(int));CHKERRQ(ierr); \
        len = (new_nz - CHUNKSIZE - ai[brow] - nrow); \
        ierr = PetscMemcpy(new_j+ai[brow]+nrow+CHUNKSIZE,aj+ai[brow]+nrow,len*sizeof(int));CHKERRQ(ierr); \
        ierr = PetscMemcpy(new_a,aa,(ai[brow]+nrow)*bs2*sizeof(MatScalar));CHKERRQ(ierr); \
        ierr = PetscMemzero(new_a+bs2*(ai[brow]+nrow),bs2*CHUNKSIZE*sizeof(MatScalar));CHKERRQ(ierr); \
        ierr = PetscMemcpy(new_a+bs2*(ai[brow]+nrow+CHUNKSIZE), \
                    aa+bs2*(ai[brow]+nrow),bs2*len*sizeof(MatScalar));CHKERRQ(ierr);  \
        /* free up old matrix storage */ \
        ierr = PetscFree(a->a);CHKERRQ(ierr);  \
        if (!a->singlemalloc) { \
          ierr = PetscFree(a->i);CHKERRQ(ierr); \
          ierr = PetscFree(a->j);CHKERRQ(ierr);\
        } \
        aa = a->a = new_a; ai = a->i = new_i; aj = a->j = new_j;  \
        a->singlemalloc = PETSC_TRUE; \
 \
        rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
        rmax = aimax[brow] = aimax[brow] + CHUNKSIZE; \
        PLogObjectMemory(A,CHUNKSIZE*(sizeof(int) + bs2*sizeof(MatScalar))); \
        a->s_maxnz += bs2*CHUNKSIZE; \
        a->reallocs++; \
        a->s_nz++; /* NOTE(review): nz counter bumped only on realloc, unlike per-insert -- confirm intended */ \
      } \
      N = nrow++ - 1;  \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp[ii+1] = rp[ii]; \
        ierr = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
      } \
      if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
      rp[_i]                      = bcol;  \
      ap[bs2*_i + bs*cidx + ridx] = value;  \
      a_noinsert:; \
    ailen[brow] = nrow; \
}
/*
   MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv) - same insertion logic
   as the A variant above, but targeting the off-diagonal block matrix B
   (variables b, ba, bi, bj, bilen, bimax must be in scope in the caller).

   Fix: the include-style guard tested MatSetValues_SeqBAIJ_B_Private (a
   different name) while defining MatSetValues_SeqSBAIJ_B_Private, so it could
   never prevent a redefinition; the guard now matches the defined macro.
*/
#ifndef MatSetValues_SeqSBAIJ_B_Private
#define  MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv) \
{ \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
      bcol = col/bs; \
      ridx = row % bs; cidx = col % bs; \
      low = 0; high = nrow; \
      while (high-low > 3) { \
        t = (low+high)/2; \
        if (rp[t] > bcol) high = t; \
        else              low  = t; \
      } \
      for (_i=low; _i<high; _i++) { \
        if (rp[_i] > bcol) break; \
        if (rp[_i] == bcol) { \
          bap  = ap +  bs2*_i + bs*cidx + ridx; \
          if (addv == ADD_VALUES) *bap += value;  \
          else                    *bap  = value;  \
          goto b_noinsert; \
        } \
      } \
      if (b->nonew == 1) goto b_noinsert; \
      else if (b->nonew == -1) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Inserting a new nonzero into matrix"); \
      if (nrow >= rmax) { \
        /* there is no extra room in row, therefore enlarge */ \
        int       new_nz = bi[b->mbs] + CHUNKSIZE,len,*new_i,*new_j; \
        MatScalar *new_a; \
 \
        if (b->nonew == -2) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Inserting a new nonzero in the matrix"); \
 \
        /* malloc new storage space */ \
        len     = new_nz*(sizeof(int)+bs2*sizeof(MatScalar))+(b->mbs+1)*sizeof(int); \
        new_a   = (MatScalar*)PetscMalloc(len);CHKPTRQ(new_a); \
        new_j   = (int*)(new_a + bs2*new_nz); \
        new_i   = new_j + new_nz; \
 \
        /* copy over old data into new slots */ \
        for (ii=0; ii<brow+1; ii++) {new_i[ii] = bi[ii];} \
        for (ii=brow+1; ii<b->mbs+1; ii++) {new_i[ii] = bi[ii]+CHUNKSIZE;} \
        ierr = PetscMemcpy(new_j,bj,(bi[brow]+nrow)*sizeof(int));CHKERRQ(ierr); \
        len  = (new_nz - CHUNKSIZE - bi[brow] - nrow); \
        ierr = PetscMemcpy(new_j+bi[brow]+nrow+CHUNKSIZE,bj+bi[brow]+nrow,len*sizeof(int));CHKERRQ(ierr); \
        ierr = PetscMemcpy(new_a,ba,(bi[brow]+nrow)*bs2*sizeof(MatScalar));CHKERRQ(ierr); \
        ierr = PetscMemzero(new_a+bs2*(bi[brow]+nrow),bs2*CHUNKSIZE*sizeof(MatScalar));CHKERRQ(ierr); \
        ierr = PetscMemcpy(new_a+bs2*(bi[brow]+nrow+CHUNKSIZE), \
                    ba+bs2*(bi[brow]+nrow),bs2*len*sizeof(MatScalar));CHKERRQ(ierr);  \
        /* free up old matrix storage */ \
        ierr = PetscFree(b->a);CHKERRQ(ierr);  \
        if (!b->singlemalloc) { \
          ierr = PetscFree(b->i);CHKERRQ(ierr); \
          ierr = PetscFree(b->j);CHKERRQ(ierr); \
        } \
        ba = b->a = new_a; bi = b->i = new_i; bj = b->j = new_j;  \
        b->singlemalloc = PETSC_TRUE; \
 \
        rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
        rmax = bimax[brow] = bimax[brow] + CHUNKSIZE; \
        PLogObjectMemory(B,CHUNKSIZE*(sizeof(int) + bs2*sizeof(MatScalar))); \
        b->maxnz += bs2*CHUNKSIZE; \
        b->reallocs++; \
        b->nz++; \
      } \
      N = nrow++ - 1;  \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp[ii+1] = rp[ii]; \
        ierr = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
      } \
      if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
      rp[_i]                      = bcol;  \
      ap[bs2*_i + bs*cidx + ridx] = value;  \
      b_noinsert:; \
    bilen[brow] = nrow; \
}
#endif
255 
256 #if defined(PETSC_USE_MAT_SINGLE)
257 #undef __FUNC__
258 #define __FUNC__ /*<a name="MatSetValues_MPISBAIJ"></a>*/"MatSetValues_MPISBAIJ"
259 int MatSetValues_MPISBAIJ(Mat mat,int m,int *im,int n,int *in,Scalar *v,InsertMode addv)
260 {
261   Mat_MPISBAIJ *b = (Mat_MPISBAIJ*)mat->data;
262   int         ierr,i,N = m*n;
263   MatScalar   *vsingle;
264 
265   PetscFunctionBegin;
266   if (N > b->setvalueslen) {
267     if (b->setvaluescopy) {ierr = PetscFree(b->setvaluescopy);CHKERRQ(ierr);}
268     b->setvaluescopy = (MatScalar*)PetscMalloc(N*sizeof(MatScalar));CHKPTRQ(b->setvaluescopy);
269     b->setvalueslen  = N;
270   }
271   vsingle = b->setvaluescopy;
272 
273   for (i=0; i<N; i++) {
274     vsingle[i] = v[i];
275   }
276   ierr = MatSetValues_MPISBAIJ_MatScalar(mat,m,im,n,in,vsingle,addv);CHKERRQ(ierr);
277   PetscFunctionReturn(0);
278 }
279 
280 #undef __FUNC__
281 #define __FUNC__ /*<a name="MatSetValuesBlocked_MPISBAIJ"></a>*/"MatSetValuesBlocked_MPISBAIJ"
282 int MatSetValuesBlocked_MPISBAIJ(Mat mat,int m,int *im,int n,int *in,Scalar *v,InsertMode addv)
283 {
284   Mat_MPIBAIJ *b = (Mat_MPIBAIJ*)mat->data;
285   int         ierr,i,N = m*n*b->bs2;
286   MatScalar   *vsingle;
287 
288   PetscFunctionBegin;
289   if (N > b->setvalueslen) {
290     if (b->setvaluescopy) {ierr = PetscFree(b->setvaluescopy);CHKERRQ(ierr);}
291     b->setvaluescopy = (MatScalar*)PetscMalloc(N*sizeof(MatScalar));CHKPTRQ(b->setvaluescopy);
292     b->setvalueslen  = N;
293   }
294   vsingle = b->setvaluescopy;
295   for (i=0; i<N; i++) {
296     vsingle[i] = v[i];
297   }
298   ierr = MatSetValuesBlocked_MPISBAIJ_MatScalar(mat,m,im,n,in,vsingle,addv);CHKERRQ(ierr);
299   PetscFunctionReturn(0);
300 }
301 
302 #undef __FUNC__
303 #define __FUNC__ /*<a name="MatSetValues_MPISBAIJ_HT"></a>*/"MatSetValues_MPISBAIJ_HT"
304 int MatSetValues_MPISBAIJ_HT(Mat mat,int m,int *im,int n,int *in,Scalar *v,InsertMode addv)
305 {
306   Mat_MPIBAIJ *b = (Mat_MPIBAIJ*)mat->data;
307   int         ierr,i,N = m*n;
308   MatScalar   *vsingle;
309 
310   PetscFunctionBegin;
311   if (N > b->setvalueslen) {
312     if (b->setvaluescopy) {ierr = PetscFree(b->setvaluescopy);CHKERRQ(ierr);}
313     b->setvaluescopy = (MatScalar*)PetscMalloc(N*sizeof(MatScalar));CHKPTRQ(b->setvaluescopy);
314     b->setvalueslen  = N;
315   }
316   vsingle = b->setvaluescopy;
317   for (i=0; i<N; i++) {
318     vsingle[i] = v[i];
319   }
320   ierr = MatSetValues_MPIBAIJ_HT_MatScalar(mat,m,im,n,in,vsingle,addv);CHKERRQ(ierr);
321   PetscFunctionReturn(0);
322 }
323 
324 #undef __FUNC__
325 #define __FUNC__ /*<a name="MatSetValuesBlocked_MPISBAIJ_HT"></a>*/"MatSetValuesBlocked_MPISBAIJ_HT"
326 int MatSetValuesBlocked_MPISBAIJ_HT(Mat mat,int m,int *im,int n,int *in,Scalar *v,InsertMode addv)
327 {
328   Mat_MPIBAIJ *b = (Mat_MPIBAIJ*)mat->data;
329   int         ierr,i,N = m*n*b->bs2;
330   MatScalar   *vsingle;
331 
332   PetscFunctionBegin;
333   if (N > b->setvalueslen) {
334     if (b->setvaluescopy) {ierr = PetscFree(b->setvaluescopy);CHKERRQ(ierr);}
335     b->setvaluescopy = (MatScalar*)PetscMalloc(N*sizeof(MatScalar));CHKPTRQ(b->setvaluescopy);
336     b->setvalueslen  = N;
337   }
338   vsingle = b->setvaluescopy;
339   for (i=0; i<N; i++) {
340     vsingle[i] = v[i];
341   }
342   ierr = MatSetValuesBlocked_MPIBAIJ_HT_MatScalar(mat,m,im,n,in,vsingle,addv);CHKERRQ(ierr);
343   PetscFunctionReturn(0);
344 }
345 #endif
346 
347 /* Only add/insert a(i,j) with i<=j (blocks).
348    Any a(i,j) with i>j input by user is ingored.
349 */
350 #undef __FUNC__
351 #define __FUNC__ /*<a name="MatSetValues_MPIBAIJ"></a>*/"MatSetValues_MPIBAIJ"
352 int MatSetValues_MPISBAIJ_MatScalar(Mat mat,int m,int *im,int n,int *in,MatScalar *v,InsertMode addv)
353 {
354   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
355   MatScalar   value;
356   int         ierr,i,j,row,col;
357   int         roworiented = baij->roworiented,rstart_orig=baij->rstart_bs ;
358   int         rend_orig=baij->rend_bs,cstart_orig=baij->cstart_bs;
359   int         cend_orig=baij->cend_bs,bs=baij->bs;
360 
361   /* Some Variables required in the macro */
362   Mat         A = baij->A;
363   Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ*)(A)->data;
364   int         *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
365   MatScalar   *aa=a->a;
366 
367   Mat         B = baij->B;
368   Mat_SeqBAIJ *b = (Mat_SeqBAIJ*)(B)->data;
369   int         *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
370   MatScalar   *ba=b->a;
371 
372   int         *rp,ii,nrow,_i,rmax,N,brow,bcol;
373   int         low,high,t,ridx,cidx,bs2=a->bs2;
374   MatScalar   *ap,*bap;
375 
376   /* for stash */
377   int         n_loc, *in_loc=0;
378   MatScalar   *v_loc=0;
379 
380   PetscFunctionBegin;
381 
382   if(!baij->donotstash){
383     in_loc = (int*)PetscMalloc(n*sizeof(int)); CHKPTRQ(in_loc);
384     v_loc  = (MatScalar*)PetscMalloc(n*sizeof(MatScalar)); CHKPTRQ(v_loc);
385   }
386 
387   for (i=0; i<m; i++) {
388     if (im[i] < 0) continue;
389 #if defined(PETSC_USE_BOPT_g)
390     if (im[i] >= baij->M) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row too large");
391 #endif
392     if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
393       row = im[i] - rstart_orig;              /* local row index */
394       for (j=0; j<n; j++) {
395         if (im[i]/bs > in[j]/bs) continue;    /* ignore lower triangular blocks */
396         if (in[j] >= cstart_orig && in[j] < cend_orig){  /* diag entry (A) */
397           col = in[j] - cstart_orig;          /* local col index */
398           brow = row/bs; bcol = col/bs;
399           if (brow > bcol) continue;  /* ignore lower triangular blocks of A */
400           if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
401           MatSetValues_SeqSBAIJ_A_Private(row,col,value,addv);
402           /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
403         } else if (in[j] < 0) continue;
404 #if defined(PETSC_USE_BOPT_g)
405         else if (in[j] >= baij->N) {SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Col too large");}
406 #endif
407         else {  /* off-diag entry (B) */
408           if (mat->was_assembled) {
409             if (!baij->colmap) {
410               ierr = CreateColmap_MPISBAIJ_Private(mat);CHKERRQ(ierr);
411             }
412 #if defined (PETSC_USE_CTABLE)
413             ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
414             col  = col - 1 + in[j]%bs;
415 #else
416             col = baij->colmap[in[j]/bs] - 1 + in[j]%bs;
417 #endif
418             if (col < 0 && !((Mat_SeqSBAIJ*)(baij->A->data))->nonew) {
419               ierr = DisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
420               col =  in[j];
421               /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
422               B = baij->B;
423               b = (Mat_SeqBAIJ*)(B)->data;
424               bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
425               ba=b->a;
426             }
427           } else col = in[j];
428           if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
429           MatSetValues_SeqSBAIJ_B_Private(row,col,value,addv);
430           /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
431         }
432       }
433     } else {  /* off processor entry */
434       if (!baij->donotstash) {
435         n_loc = 0;
436         for (j=0; j<n; j++){
437           if (im[i]/bs > in[j]/bs) continue; /* ignore lower triangular blocks */
438           in_loc[n_loc] = in[j];
439           if (roworiented) {
440             v_loc[n_loc] = v[i*n+j];
441           } else {
442             v_loc[n_loc] = v[j*m+i];
443           }
444           n_loc++;
445         }
446         ierr = MatStashValuesRow_Private(&mat->stash,im[i],n_loc,in_loc,v_loc);CHKERRQ(ierr);
447       }
448     }
449   }
450 
451   if(!baij->donotstash){
452     ierr = PetscFree(in_loc);CHKERRQ(ierr);
453     ierr = PetscFree(v_loc);CHKERRQ(ierr);
454   }
455   PetscFunctionReturn(0);
456 }
457 
#undef __FUNC__
#define __FUNC__ /*<a name="MatSetValuesBlocked_MPISBAIJ"></a>*/"MatSetValuesBlocked_MPISBAIJ"
/*
   MatSetValuesBlocked_MPISBAIJ_MatScalar - Block-entry insertion for the MPI
   SBAIJ format. Each (im[i],in[j]) pair addresses a bs x bs block; on-process
   blocks are routed to the sequential A (diagonal part) or B (off-diagonal
   part), off-process blocks are stashed for assembly. The block values are
   copied into the persistent scratch buffer baij->barray in column-major
   block layout unless the orientation makes a copy unnecessary.

   NOTE(review): unlike the scalar routine above, this one does not skip
   lower-triangular blocks (im[i] > in[j]) -- presumably the Seq routines
   handle that; confirm.
*/
int MatSetValuesBlocked_MPISBAIJ_MatScalar(Mat mat,int m,int *im,int n,int *in,MatScalar *v,InsertMode addv)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar   *value,*barray=baij->barray;
  int         ierr,i,j,ii,jj,row,col;
  int         roworiented = baij->roworiented,rstart=baij->rstart ;
  int         rend=baij->rend,cstart=baij->cstart,stepval;
  int         cend=baij->cend,bs=baij->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  /* lazily allocate the one-block scratch buffer */
  if(!barray) {
    baij->barray = barray = (MatScalar*)PetscMalloc(bs2*sizeof(MatScalar));CHKPTRQ(barray);
  }

  /* distance (in scalars) between consecutive rows of one block inside v */
  if (roworiented) {
    stepval = (n-1)*bs;
  } else {
    stepval = (m-1)*bs;
  }
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_BOPT_g)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,0,"Row too large, row %d max %d",im[i],baij->Mbs);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If NumCol = 1 then a copy is not required */
        if ((roworiented) && (n == 1)) {
          barray = v + i*bs2;
        } else if((!roworiented) && (m == 1)) {
          barray = v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + i*(stepval+bs)*bs + j*bs;
          } else {
            value = v + j*(stepval+bs)*bs + i*bs;
          }
          for (ii=0; ii<bs; ii++,value+=stepval) {
            for (jj=0; jj<bs; jj++) {
              *barray++  = *value++;
            }
          }
          /* rewind to the start of the copied block */
          barray -=bs2;
        }

        if (in[j] >= cstart && in[j] < cend){
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqSBAIJ_MatScalar(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
        }
        else if (in[j] < 0) continue;
#if defined(PETSC_USE_BOPT_g)
        else if (in[j] >= baij->Nbs) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,0,"Column too large, col %d max %d",in[j],baij->Nbs);}
#endif
        else {
          if (mat->was_assembled) {
            /* translate global block column through colmap (built lazily) */
            if (!baij->colmap) {
              ierr = CreateColmap_MPISBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_BOPT_g)
#if defined (PETSC_USE_CTABLE)
            { int data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_ERR_PLIB,0,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_ERR_PLIB,0,"Incorrect colmap");
#endif
#endif
#if defined (PETSC_USE_CTABLE)
	    ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            /* NOTE(review): nonew is read through Mat_SeqBAIJ here but through
               Mat_SeqSBAIJ in the scalar routine -- confirm layouts agree */
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
              ierr = DisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
              col =  in[j];
            }
          }
          else col = in[j];
          ierr = MatSetValuesBlocked_SeqSBAIJ_MatScalar(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
        }
      }
    } else {
      /* off-process block row: stash the whole row of blocks */
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
557 
558 #define HASH_KEY 0.6180339887
559 #define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(int)((size)*(tmp-(int)tmp)))
560 /* #define HASH(size,key) ((int)((size)*fmod(((key)*HASH_KEY),1))) */
561 /* #define HASH(size,key,tmp) ((int)((size)*fmod(((key)*HASH_KEY),1))) */
#undef __FUNC__
#define __FUNC__ /*<a name="MatSetValues_MPISBAIJ_HT_MatScalar"></a>*/"MatSetValues_MPISBAIJ_HT_MatScalar"
/*
   MatSetValues_MPISBAIJ_HT_MatScalar - Scalar-entry insertion using the
   precomputed hash table: each existing block has an entry keyed by
   (blockrow*Nbs + blockcol + 1) whose payload HD[idx] points directly at the
   block's values. New nonzeros cannot be created on this path; a missing key
   is an error. Collisions are resolved by linear probing with wrap-around.
*/
int MatSetValues_MPISBAIJ_HT_MatScalar(Mat mat,int m,int *im,int n,int *in,MatScalar *v,InsertMode addv)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
  int         ierr,i,j,row,col;
  int         roworiented = baij->roworiented,rstart_orig=baij->rstart_bs ;
  int         rend_orig=baij->rend_bs,Nbs=baij->Nbs;
  int         h1,key,size=baij->ht_size,bs=baij->bs,*HT=baij->ht,idx;
  PetscReal   tmp;
  MatScalar   **HD = baij->hd,value;
#if defined(PETSC_USE_BOPT_g)
  int         total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_BOPT_g)
    if (im[i] < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Negative row");
    if (im[i] >= baij->M) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row too large");
#endif
      row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j]; else value = v[i+j*m];
        /* Look up into the Hash Table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);


        idx = h1;
#if defined(PETSC_USE_BOPT_g)
        /* debug build: identical probe loop, but counts probes for profiling */
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++);
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++);
            if (idx == h1) {
              SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"(row,col) has no entry in the hash table");
            }
          }
        }
#else
        /* linear probe from h1 to end, then wrap to the start */
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++);
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++);
            if (idx == h1) {
              SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"(row,col) has no entry in the hash table");
            }
          }
        }
#endif
        /* A HASH table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else {
      /* off-process row: stash for assembly-time communication */
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_BOPT_g)
  baij->ht_total_ct = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}
638 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatSetValuesBlocked_MPISBAIJ_HT_MatScalar"
/*
   MatSetValuesBlocked_MPISBAIJ_HT_MatScalar - Block-entry insertion via the
   hash table: keys are (blockrow*Nbs + blockcol + 1), HD[idx] points at the
   bs2 values of the block, and new nonzero blocks cannot be created (missing
   key is an error). Row-oriented input is transposed on the fly into the
   column-major block storage; column-oriented input is copied row-by-row.
*/
int MatSetValuesBlocked_MPISBAIJ_HT_MatScalar(Mat mat,int m,int *im,int n,int *in,MatScalar *v,InsertMode addv)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
  int         ierr,i,j,ii,jj,row,col;
  int         roworiented = baij->roworiented,rstart=baij->rstart ;
  int         rend=baij->rend,stepval,bs=baij->bs,bs2=baij->bs2;
  int         h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal   tmp;
  MatScalar   **HD = baij->hd,*baij_a;
  MatScalar   *v_t,*value;
#if defined(PETSC_USE_BOPT_g)
  int         total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;

  /* distance (in scalars) between consecutive rows of one block inside v */
  if (roworiented) {
    stepval = (n-1)*bs;
  } else {
    stepval = (m-1)*bs;
  }
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_BOPT_g)
    if (im[i] < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Negative row");
    if (im[i] >= baij->Mbs) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row too large");
#endif
    row   = im[i];
    v_t   = v + i*bs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up into the Hash Table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_BOPT_g)
        /* debug build: identical probe loop, but counts probes for profiling */
        total_ct++;
        insert_ct++;
       if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++);
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++);
            if (idx == h1) {
              SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"(row,col) has no entry in the hash table");
            }
          }
        }
#else
        /* linear probe from h1 to end, then wrap to the start */
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++);
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++);
            if (idx == h1) {
              SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"(row,col) has no entry in the hash table");
            }
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          /* scatter one input row across the block's columns (transpose) */
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj]  += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj]  = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          /* column-oriented input matches storage: straight bs-row copies */
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj]  += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj]  = *value++;
              }
            }
          }
        }
      }
    } else {
      /* off-process block row: stash for assembly-time communication */
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_BOPT_g)
  baij->ht_total_ct = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}
753 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetValues_MPISBAIJ"
/*
   MatGetValues_MPISBAIJ - Retrieves the m x n entries (idxm x idxn, global
   scalar indices) into v, row-major. Only rows owned by this process are
   supported; entries in columns not present in the off-diagonal part are
   returned as 0.0.
*/
int MatGetValues_MPISBAIJ(Mat mat,int m,int *idxm,int n,int *idxn,Scalar *v)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
  /* ownership range in scalar (not block) indices */
  int        bs=baij->bs,ierr,i,j,bsrstart = baij->rstart*bs,bsrend = baij->rend*bs;
  int        bscstart = baij->cstart*bs,bscend = baij->cend*bs,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Negative row");
    if (idxm[i] >= baij->M) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row too large");
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Negative column");
        if (idxn[j] >= baij->N) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Column too large");
        if (idxn[j] >= bscstart && idxn[j] < bscend){
          /* diagonal block A owns this column */
          col = idxn[j] - bscstart;
          ierr = MatGetValues_SeqSBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          /* off-diagonal: translate global column through colmap (built lazily) */
          if (!baij->colmap) {
            ierr = CreateColmap_MPISBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data --;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          /* block column not stored locally -> entry is structurally zero */
          if((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqSBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else {
      SETERRQ(PETSC_ERR_SUP,0,"Only local values currently supported");
    }
  }
 PetscFunctionReturn(0);
}
797 
798 #undef __FUNC__
799 #define __FUNC__ /*<a name=""></a>*/"MatNorm_MPISBAIJ"
800 int MatNorm_MPISBAIJ(Mat mat,NormType type,PetscReal *norm)
801 {
802   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
803   /* Mat_SeqSBAIJ *amat = (Mat_SeqSBAIJ*)baij->A->data; */
804   /* Mat_SeqBAIJ  *bmat = (Mat_SeqBAIJ*)baij->B->data; */
805   int        ierr;
806   PetscReal  sum[2],*lnorm2;
807 
808   PetscFunctionBegin;
809   if (baij->size == 1) {
810     ierr =  MatNorm(baij->A,type,norm);CHKERRQ(ierr);
811   } else {
812     if (type == NORM_FROBENIUS) {
813       lnorm2 = (double*)PetscMalloc(2*sizeof(double));CHKPTRQ(lnorm2);
814       ierr =  MatNorm(baij->A,type,lnorm2);CHKERRQ(ierr);
815       *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2++;            /* squar power of norm(A) */
816       ierr =  MatNorm(baij->B,type,lnorm2);CHKERRQ(ierr);
817       *lnorm2 = (*lnorm2)*(*lnorm2); lnorm2--;             /* squar power of norm(B) */
818       /*
819       ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
820       PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d], lnorm2=%g, %g\n",rank,lnorm2[0],lnorm2[1]);
821       */
822       ierr = MPI_Allreduce(lnorm2,&sum,2,MPI_DOUBLE,MPI_SUM,mat->comm);CHKERRQ(ierr);
823       /*
824       PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d], sum=%g, %g\n",rank,sum[0],sum[1]);
825       PetscSynchronizedFlush(PETSC_COMM_WORLD); */
826 
827       *norm = sqrt(sum[0] + 2*sum[1]);
828       ierr = PetscFree(lnorm2);CHKERRQ(ierr);
829     } else {
830       SETERRQ(PETSC_ERR_SUP,0,"No support for this norm yet");
831     }
832   }
833   PetscFunctionReturn(0);
834 }
835 
836 /*
837   Creates the hash table, and sets the table
838   This table is created only once.
839   If new entried need to be added to the matrix
840   then the hash table has to be destroyed and
841   recreated.
842 */
843 #undef __FUNC__
844 #define __FUNC__ /*<a name=""></a>*/"MatCreateHashTable_MPISBAIJ_Private"
845 int MatCreateHashTable_MPISBAIJ_Private(Mat mat,PetscReal factor)
846 {
847   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
848   Mat         A = baij->A,B=baij->B;
849   Mat_SeqBAIJ *a=(Mat_SeqBAIJ *)A->data,*b=(Mat_SeqBAIJ *)B->data;
850   int         i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
851   int         size,bs2=baij->bs2,rstart=baij->rstart,ierr;
852   int         cstart=baij->cstart,*garray=baij->garray,row,col,Nbs=baij->Nbs;
853   int         *HT,key;
854   MatScalar   **HD;
855   PetscReal   tmp;
856 #if defined(PETSC_USE_BOPT_g)
857   int         ct=0,max=0;
858 #endif
859 
860   PetscFunctionBegin;
861   baij->ht_size=(int)(factor*nz);
862   size = baij->ht_size;
863 
864   if (baij->ht) {
865     PetscFunctionReturn(0);
866   }
867 
868   /* Allocate Memory for Hash Table */
869   baij->hd = (MatScalar**)PetscMalloc((size)*(sizeof(int)+sizeof(MatScalar*))+1);CHKPTRQ(baij->hd);
870   baij->ht = (int*)(baij->hd + size);
871   HD = baij->hd;
872   HT = baij->ht;
873 
874 
875   ierr = PetscMemzero(HD,size*(sizeof(int)+sizeof(Scalar*)));CHKERRQ(ierr);
876 
877 
878   /* Loop Over A */
879   for (i=0; i<a->mbs; i++) {
880     for (j=ai[i]; j<ai[i+1]; j++) {
881       row = i+rstart;
882       col = aj[j]+cstart;
883 
884       key = row*Nbs + col + 1;
885       h1  = HASH(size,key,tmp);
886       for (k=0; k<size; k++){
887         if (HT[(h1+k)%size] == 0.0) {
888           HT[(h1+k)%size] = key;
889           HD[(h1+k)%size] = a->a + j*bs2;
890           break;
891 #if defined(PETSC_USE_BOPT_g)
892         } else {
893           ct++;
894 #endif
895         }
896       }
897 #if defined(PETSC_USE_BOPT_g)
898       if (k> max) max = k;
899 #endif
900     }
901   }
902   /* Loop Over B */
903   for (i=0; i<b->mbs; i++) {
904     for (j=bi[i]; j<bi[i+1]; j++) {
905       row = i+rstart;
906       col = garray[bj[j]];
907       key = row*Nbs + col + 1;
908       h1  = HASH(size,key,tmp);
909       for (k=0; k<size; k++){
910         if (HT[(h1+k)%size] == 0.0) {
911           HT[(h1+k)%size] = key;
912           HD[(h1+k)%size] = b->a + j*bs2;
913           break;
914 #if defined(PETSC_USE_BOPT_g)
915         } else {
916           ct++;
917 #endif
918         }
919       }
920 #if defined(PETSC_USE_BOPT_g)
921       if (k> max) max = k;
922 #endif
923     }
924   }
925 
926   /* Print Summary */
927 #if defined(PETSC_USE_BOPT_g)
928   for (i=0,j=0; i<size; i++) {
929     if (HT[i]) {j++;}
930   }
931   PLogInfo(0,"MatCreateHashTable_MPIBAIJ_Private: Average Search = %5.2f,max search = %d\n",(j== 0)? 0.0:((PetscReal)(ct+j))/j,max);
932 #endif
933   PetscFunctionReturn(0);
934 }
935 
936 #undef __FUNC__
937 #define __FUNC__ /*<a name=""></a>*/"MatAssemblyBegin_MPISBAIJ"
938 int MatAssemblyBegin_MPISBAIJ(Mat mat,MatAssemblyType mode)
939 {
940   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
941   int         ierr,nstash,reallocs;
942   InsertMode  addv;
943 
944   PetscFunctionBegin;
945   if (baij->donotstash) {
946     PetscFunctionReturn(0);
947   }
948 
949   /* make sure all processors are either in INSERTMODE or ADDMODE */
950   ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,mat->comm);CHKERRQ(ierr);
951   if (addv == (ADD_VALUES|INSERT_VALUES)) {
952     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,0,"Some processors inserted others added");
953   }
954   mat->insertmode = addv; /* in case this processor had no cache */
955 
956   ierr = MatStashScatterBegin_Private(&mat->stash,baij->rowners_bs);CHKERRQ(ierr);
957   ierr = MatStashScatterBegin_Private(&mat->bstash,baij->rowners);CHKERRQ(ierr);
958   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
959   PLogInfo(0,"MatAssemblyBegin_MPISBAIJ:Stash has %d entries,uses %d mallocs.\n",nstash,reallocs);
960   ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
961   PLogInfo(0,"MatAssemblyBegin_MPISBAIJ:Block-Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
962   PetscFunctionReturn(0);
963 }
964 
965 #undef __FUNC__
966 #define __FUNC__ /*<a name=""></a>*/"MatAssemblyEnd_MPISBAIJ"
967 int MatAssemblyEnd_MPISBAIJ(Mat mat,MatAssemblyType mode)
968 {
969   Mat_MPISBAIJ *baij=(Mat_MPISBAIJ*)mat->data;
970   Mat_SeqSBAIJ  *a=(Mat_SeqSBAIJ*)baij->A->data;
971   Mat_SeqBAIJ  *b=(Mat_SeqBAIJ*)baij->B->data;
972   int         i,j,rstart,ncols,n,ierr,flg,bs2=baij->bs2;
973   int         *row,*col,other_disassembled;
974   PetscTruth  r1,r2,r3;
975   MatScalar   *val;
976   InsertMode  addv = mat->insertmode;
977   int         rank;
978 
979   PetscFunctionBegin;
980   /* remove 2 line below later */
981   ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank);CHKERRQ(ierr);
982 
983   if (!baij->donotstash) {
984     while (1) {
985       ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
986       /*
987       PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d]: in AssemblyEnd, stash, flg=%d\n",rank,flg);
988       PetscSynchronizedFlush(PETSC_COMM_WORLD);
989       */
990       if (!flg) break;
991 
992       for (i=0; i<n;) {
993         /* Now identify the consecutive vals belonging to the same row */
994         for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
995         if (j < n) ncols = j-i;
996         else       ncols = n-i;
997         /* Now assemble all these values with a single function call */
998         ierr = MatSetValues_MPISBAIJ_MatScalar(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
999         i = j;
1000       }
1001     }
1002     ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
1003     /* Now process the block-stash. Since the values are stashed column-oriented,
1004        set the roworiented flag to column oriented, and after MatSetValues()
1005        restore the original flags */
1006     r1 = baij->roworiented;
1007     r2 = a->roworiented;
1008     r3 = b->roworiented;
1009     baij->roworiented = PETSC_FALSE;
1010     a->roworiented    = PETSC_FALSE;
1011     b->roworiented    = PETSC_FALSE;
1012     while (1) {
1013       ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
1014       if (!flg) break;
1015 
1016       for (i=0; i<n;) {
1017         /* Now identify the consecutive vals belonging to the same row */
1018         for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
1019         if (j < n) ncols = j-i;
1020         else       ncols = n-i;
1021         ierr = MatSetValuesBlocked_MPISBAIJ_MatScalar(mat,1,row+i,ncols,col+i,val+i*bs2,addv);CHKERRQ(ierr);
1022         i = j;
1023       }
1024     }
1025     ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);
1026     baij->roworiented = r1;
1027     a->roworiented    = r2;
1028     b->roworiented    = r3;
1029   }
1030 
1031   ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
1032   ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);
1033 
1034   /* determine if any processor has disassembled, if so we must
1035      also disassemble ourselfs, in order that we may reassemble. */
1036   /*
1037      if nonzero structure of submatrix B cannot change then we know that
1038      no processor disassembled thus we can skip this stuff
1039   */
1040   if (!((Mat_SeqBAIJ*)baij->B->data)->nonew)  {
1041     ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,mat->comm);CHKERRQ(ierr);
1042     if (mat->was_assembled && !other_disassembled) {
1043       ierr = DisAssemble_MPISBAIJ(mat);CHKERRQ(ierr);
1044     }
1045   }
1046 
1047   if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
1048     ierr = MatSetUpMultiply_MPISBAIJ(mat);CHKERRQ(ierr);
1049   }
1050   ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
1051   ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);
1052 
1053 #if defined(PETSC_USE_BOPT_g)
1054   if (baij->ht && mode== MAT_FINAL_ASSEMBLY) {
1055     PLogInfo(0,"MatAssemblyEnd_MPISBAIJ:Average Hash Table Search in MatSetValues = %5.2f\n",((double)baij->ht_total_ct)/baij->ht_insert_ct);
1056     baij->ht_total_ct  = 0;
1057     baij->ht_insert_ct = 0;
1058   }
1059 #endif
1060   if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
1061     ierr = MatCreateHashTable_MPISBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);
1062     mat->ops->setvalues        = MatSetValues_MPISBAIJ_HT;
1063     mat->ops->setvaluesblocked = MatSetValuesBlocked_MPISBAIJ_HT;
1064   }
1065 
1066   if (baij->rowvalues) {
1067     ierr = PetscFree(baij->rowvalues);CHKERRQ(ierr);
1068     baij->rowvalues = 0;
1069   }
1070   PetscFunctionReturn(0);
1071 }
1072 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatView_MPISBAIJ_ASCIIorDraworSocket"
/*
   MatView_MPISBAIJ_ASCIIorDraworSocket - Views a parallel SBAIJ matrix.

   For ASCII "info" formats it prints per-processor statistics and returns.
   Otherwise (full ASCII, draw, socket) every processor participates in
   assembling a copy of the entire matrix whose local part lives on
   processor 0, which then views it through the singleton viewer.
*/
static int MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat,Viewer viewer)
{
  Mat_MPISBAIJ  *baij = (Mat_MPISBAIJ*)mat->data;
  int          ierr,format,bs = baij->bs,size = baij->size,rank = baij->rank;
  PetscTruth   isascii,isdraw;
  Viewer       sviewer;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,DRAW_VIEWER,&isdraw);CHKERRQ(ierr);
  if (isascii) {
    /* the two "info" formats print summaries only and return early */
    ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == VIEWER_FORMAT_ASCII_INFO_LONG) {
      MatInfo info;
      ierr = MPI_Comm_rank(mat->comm,&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      /* nz counts are in blocks; multiply by bs for scalar entries */
      ierr = ViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %d nz %d nz alloced %d bs %d mem %d\n",
              rank,baij->m,(int)info.nz_used*bs,(int)info.nz_allocated*bs,
              baij->bs,(int)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = ViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %d \n",rank,(int)info.nz_used*bs);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = ViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %d \n",rank,(int)info.nz_used*bs);CHKERRQ(ierr);
      ierr = ViewerFlush(viewer);CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == VIEWER_FORMAT_ASCII_INFO) {
      ierr = ViewerASCIIPrintf(viewer,"  block size is %d\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    /* nothing to do if the draw window is the null device */
    Draw       draw;
    PetscTruth isnull;
    ierr = ViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = DrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
  }

  if (size == 1) {
    ierr = MatView(baij->A,viewer);CHKERRQ(ierr);
  } else {
    /* assemble the entire matrix onto first processor. */
    Mat         A;
    Mat_SeqSBAIJ *Aloc;
    Mat_SeqBAIJ *Bloc;
    int         M = baij->M,N = baij->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;

    /* processor 0 owns all M rows of the gathered copy; others own none */
    if (!rank) {
      ierr = MatCreateMPISBAIJ(mat->comm,baij->bs,M,N,M,N,0,PETSC_NULL,0,PETSC_NULL,&A);CHKERRQ(ierr);
    } else {
      ierr = MatCreateMPISBAIJ(mat->comm,baij->bs,0,0,M,N,0,PETSC_NULL,0,PETSC_NULL,&A);CHKERRQ(ierr);
    }
    PLogObjectParent(mat,A);

    /* copy over the A part */
    Aloc  = (Mat_SeqSBAIJ*)baij->A->data;
    ai    = Aloc->i; aj = Aloc->j; a = Aloc->a;
    /* rvals holds the bs global point-row indices of block row i */
    rvals = (int*)PetscMalloc(bs*sizeof(int));CHKPTRQ(rvals);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstart + i);
      for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstart+aj[j])*bs;
        /* insert one bs-length column of the block at a time; `a` walks
           through the column-oriented block storage */
        for (k=0; k<bs; k++) {
          ierr = MatSetValues_MPISBAIJ_MatScalar(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Bloc = (Mat_SeqBAIJ*)baij->B->data;
    ai = Bloc->i; aj = Bloc->j; a = Bloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstart + i);
      for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
      for (j=ai[i]; j<ai[i+1]; j++) {
        /* garray translates the local block column to the global block column */
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr = MatSetValues_MPISBAIJ_MatScalar(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the Draw object
    */
    ierr = ViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatView(((Mat_MPISBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = ViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
    ierr = MatDestroy(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1177 
1178 #undef __FUNC__
1179 #define __FUNC__ /*<a name=""></a>*/"MatView_MPISBAIJ"
1180 int MatView_MPISBAIJ(Mat mat,Viewer viewer)
1181 {
1182   int        ierr;
1183   PetscTruth isascii,isdraw,issocket,isbinary;
1184 
1185   PetscFunctionBegin;
1186   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
1187   ierr = PetscTypeCompare((PetscObject)viewer,DRAW_VIEWER,&isdraw);CHKERRQ(ierr);
1188   ierr = PetscTypeCompare((PetscObject)viewer,SOCKET_VIEWER,&issocket);CHKERRQ(ierr);
1189   ierr = PetscTypeCompare((PetscObject)viewer,BINARY_VIEWER,&isbinary);CHKERRQ(ierr);
1190   if (isascii || isdraw || issocket || isbinary) {
1191     ierr = MatView_MPISBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
1192   } else {
1193     SETERRQ1(1,1,"Viewer type %s not supported by MPISBAIJ matrices",((PetscObject)viewer)->type_name);
1194   }
1195   PetscFunctionReturn(0);
1196 }
1197 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPISBAIJ"
/*
   MatDestroy_MPISBAIJ - Releases everything owned by a parallel SBAIJ
   matrix: mappings, maps, stashes, the sequential pieces A and B, the
   column map, scatter context, and assorted scratch arrays, then the
   data structure and the matrix header itself.
*/
int MatDestroy_MPISBAIJ(Mat mat)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
  int         ierr;

  PetscFunctionBegin;

  /* optional local-to-global mappings (point and block versions) */
  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }
#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d,Cols=%d",baij->M,baij->N);
#endif

  /* point and block stashes used during assembly */
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);

  ierr = PetscFree(baij->rowners);CHKERRQ(ierr);
  /* sequential diagonal (A) and off-diagonal (B) pieces */
  ierr = MatDestroy(baij->A);CHKERRQ(ierr);
  ierr = MatDestroy(baij->B);CHKERRQ(ierr);
  /* colmap is either a hash table or a plain array depending on build option */
#if defined (PETSC_USE_CTABLE)
  if (baij->colmap) {ierr = PetscTableDelete(baij->colmap);CHKERRQ(ierr);}
#else
  if (baij->colmap) {ierr = PetscFree(baij->colmap);CHKERRQ(ierr);}
#endif
  if (baij->garray) {ierr = PetscFree(baij->garray);CHKERRQ(ierr);}
  if (baij->lvec)   {ierr = VecDestroy(baij->lvec);CHKERRQ(ierr);}
  if (baij->Mvctx)  {ierr = VecScatterDestroy(baij->Mvctx);CHKERRQ(ierr);}
  if (baij->rowvalues) {ierr = PetscFree(baij->rowvalues);CHKERRQ(ierr);}
  if (baij->barray) {ierr = PetscFree(baij->barray);CHKERRQ(ierr);}
  /* hd/ht share one allocation; freeing hd releases both */
  if (baij->hd) {ierr = PetscFree(baij->hd);CHKERRQ(ierr);}
#if defined(PETSC_USE_MAT_SINGLE)
  if (baij->setvaluescopy) {ierr = PetscFree(baij->setvaluescopy);CHKERRQ(ierr);}
#endif
  ierr = PetscFree(baij);CHKERRQ(ierr);
  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}
1248 
1249 #undef __FUNC__
1250 #define __FUNC__ /*<a name=""></a>*/"MatMult_MPISBAIJ"
1251 int MatMult_MPISBAIJ(Mat A,Vec xx,Vec yy)
1252 {
1253   Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data;
1254   int         ierr,nt;
1255 
1256   PetscFunctionBegin;
1257   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1258   if (nt != a->n) {
1259     SETERRQ(PETSC_ERR_ARG_SIZ,0,"Incompatible partition of A and xx");
1260   }
1261   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1262   if (nt != a->m) {
1263     SETERRQ(PETSC_ERR_ARG_SIZ,0,"Incompatible parition of A and yy");
1264   }
1265 
1266   ierr = VecScatterBegin(xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD,a->Mvctx);CHKERRQ(ierr);
1267   /* do diagonal part */
1268   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1269   /* do supperdiagonal part */
1270   ierr = VecScatterEnd(xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD,a->Mvctx);CHKERRQ(ierr);
1271   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1272   /* do subdiagonal part */
1273   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1274   ierr = VecScatterBegin(a->lvec,yy,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1275   ierr = VecScatterEnd(a->lvec,yy,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1276 
1277   PetscFunctionReturn(0);
1278 }
1279 
1280 #undef __FUNC__
1281 #define __FUNC__ /*<a name=""></a>*/"MatMultAdd_MPISBAIJ"
1282 int MatMultAdd_MPISBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1283 {
1284   Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data;
1285   int        ierr,i,high;
1286   Scalar     zero = 0.0;
1287 
1288   PetscFunctionBegin;
1289   /*
1290   PetscSynchronizedPrintf(PETSC_COMM_WORLD,"calledMatMultAdd_MPISBAIJ \n");
1291   PetscSynchronizedFlush(PETSC_COMM_WORLD);
1292   */
1293 
1294   ierr = VecScatterBegin(xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD,a->Mvctx);CHKERRQ(ierr);
1295   /* do diagonal part */
1296   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1297   /* do supperdiagonal part */
1298   ierr = VecScatterEnd(xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD,a->Mvctx);CHKERRQ(ierr);
1299   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1300 
1301   /* do subdiagonal part */
1302   ierr = VecGetOwnershipRange(yy,PETSC_NULL,&high); CHKERRA(ierr);
1303   PetscSynchronizedPrintf(PETSC_COMM_WORLD,"high=%d \n",high);
1304   PetscSynchronizedFlush(PETSC_COMM_WORLD);
1305 
1306   ierr = VecScatterBegin(yy,a->lvec,INSERT_VALUES,SCATTER_FORWARD,a->Mvctx);CHKERRQ(ierr);
1307   ierr = VecScatterEnd(yy,a->lvec,INSERT_VALUES,SCATTER_FORWARD,a->Mvctx);CHKERRQ(ierr);
1308   /* set the begining part of lvec zero ? */
1309 
1310   /*
1311   for (i=0; i<high; i++){
1312     ierr = VecSetValues(a->lvec,1,&i,&zero,INSERT_VALUES);CHKERRA(ierr);
1313   }
1314   */
1315   /* VecView(a->lvec, VIEWER_STDOUT_WORLD);  */
1316 
1317   ierr = (*a->B->ops->multtransposeadd)(a->B,xx,a->lvec,a->lvec);CHKERRQ(ierr);
1318   ierr = VecScatterBegin(a->lvec,zz,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1319   ierr = VecScatterEnd(a->lvec,zz,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1320 #ifdef old
1321   /* do subdiagonal part */
1322   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1323   /* send it on its way */
1324   ierr = VecScatterBegin(a->lvec,zz,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1325   /* do local part */
1326   ierr = (*a->B->ops->multadd)(a->B,xx,yy,zz);CHKERRQ(ierr);
1327   ierr = (*a->A->ops->multadd)(a->A,xx,zz,zz);CHKERRQ(ierr);
1328   /* receive remote parts */
1329   ierr = VecScatterEnd(a->lvec,zz,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1330 #endif
1331   PetscFunctionReturn(0);
1332 }
1333 
1334 #undef __FUNC__
1335 #define __FUNC__ /*<a name=""></a>*/"MatMultTranspose_MPISBAIJ"
1336 int MatMultTranspose_MPISBAIJ(Mat A,Vec xx,Vec yy)
1337 {
1338   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
1339   int         ierr;
1340 
1341   PetscFunctionBegin;
1342   /* do nondiagonal part */
1343   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1344   /* send it on its way */
1345   ierr = VecScatterBegin(a->lvec,yy,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1346   /* do local part */
1347   ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1348   /* receive remote parts: note this assumes the values are not actually */
1349   /* inserted in yy until the next line, which is true for my implementation*/
1350   /* but is not perhaps always true. */
1351   ierr = VecScatterEnd(a->lvec,yy,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1352   PetscFunctionReturn(0);
1353 }
1354 
1355 #undef __FUNC__
1356 #define __FUNC__ /*<a name=""></a>*/"MatMultTransposeAdd_MPISBAIJ"
1357 int MatMultTransposeAdd_MPISBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1358 {
1359   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
1360   int         ierr;
1361 
1362   PetscFunctionBegin;
1363   /* do nondiagonal part */
1364   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1365   /* send it on its way */
1366   ierr = VecScatterBegin(a->lvec,zz,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1367   /* do local part */
1368   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1369   /* receive remote parts: note this assumes the values are not actually */
1370   /* inserted in yy until the next line, which is true for my implementation*/
1371   /* but is not perhaps always true. */
1372   ierr = VecScatterEnd(a->lvec,zz,ADD_VALUES,SCATTER_REVERSE,a->Mvctx);CHKERRQ(ierr);
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 /*
1377   This only works correctly for square matrices where the subblock A->A is the
1378    diagonal block
1379 */
1380 #undef __FUNC__
1381 #define __FUNC__ /*<a name=""></a>*/"MatGetDiagonal_MPISBAIJ"
1382 int MatGetDiagonal_MPISBAIJ(Mat A,Vec v)
1383 {
1384   Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data;
1385   int         ierr;
1386 
1387   PetscFunctionBegin;
1388   /* if (a->M != a->N) SETERRQ(PETSC_ERR_SUP,0,"Supports only square matrix where A->A is diag block"); */
1389   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 #undef __FUNC__
1394 #define __FUNC__ /*<a name=""></a>*/"MatScale_MPISBAIJ"
1395 int MatScale_MPISBAIJ(Scalar *aa,Mat A)
1396 {
1397   Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data;
1398   int         ierr;
1399 
1400   PetscFunctionBegin;
1401   ierr = MatScale(aa,a->A);CHKERRQ(ierr);
1402   ierr = MatScale(aa,a->B);CHKERRQ(ierr);
1403   PetscFunctionReturn(0);
1404 }
1405 
1406 #undef __FUNC__
1407 #define __FUNC__ /*<a name=""></a>*/"MatGetSize_MPISBAIJ"
1408 int MatGetSize_MPISBAIJ(Mat matin,int *m,int *n)
1409 {
1410   Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)matin->data;
1411 
1412   PetscFunctionBegin;
1413   if (m) *m = mat->M;
1414   if (n) *n = mat->N;
1415   PetscFunctionReturn(0);
1416 }
1417 
1418 #undef __FUNC__
1419 #define __FUNC__ /*<a name=""></a>*/"MatGetLocalSize_MPISBAIJ"
1420 int MatGetLocalSize_MPISBAIJ(Mat matin,int *m,int *n)
1421 {
1422   Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)matin->data;
1423 
1424   PetscFunctionBegin;
1425   *m = mat->m; *n = mat->n;
1426   PetscFunctionReturn(0);
1427 }
1428 
1429 #undef __FUNC__
1430 #define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPISBAIJ"
1431 int MatGetOwnershipRange_MPISBAIJ(Mat matin,int *m,int *n)
1432 {
1433   Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)matin->data;
1434 
1435   PetscFunctionBegin;
1436   if (m) *m = mat->rstart*mat->bs;
1437   if (n) *n = mat->rend*mat->bs;
1438   PetscFunctionReturn(0);
1439 }
1440 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPISBAIJ"
/*
   MatGetRow_MPISBAIJ - Returns one locally-owned row (global point index
   `row`) as merged column indices and values from the diagonal (A) and
   off-diagonal (B) pieces, sorted by increasing global column.

   Only one row may be "gotten" at a time; MatRestoreRow_MPISBAIJ must be
   called before the next MatGetRow.  The merged output is copied into the
   per-matrix scratch arrays mat->rowvalues / mat->rowindices, which are
   lazily allocated to hold the longest local row.
*/
int MatGetRow_MPISBAIJ(Mat matin,int row,int *nz,int **idx,Scalar **v)
{
  Mat_MPISBAIJ *mat = (Mat_MPISBAIJ*)matin->data;
  Scalar     *vworkA,*vworkB,**pvA,**pvB,*v_p;
  int        bs = mat->bs,bs2 = mat->bs2,i,ierr,*cworkA,*cworkB,**pcA,**pcB;
  int        nztot,nzA,nzB,lrow,brstart = mat->rstart*bs,brend = mat->rend*bs;
  int        *cmap,*idx_p,cstart = mat->cstart;

  PetscFunctionBegin;
  if (mat->getrowactive == PETSC_TRUE) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,0,"Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqSBAIJ *Aa = (Mat_SeqSBAIJ*)mat->A->data;
    Mat_SeqBAIJ  *Ba = (Mat_SeqBAIJ*)mat->B->data;
    int     max = 1,mbs = mat->mbs,tmp;
    for (i=0; i<mbs; i++) {
      tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; /* row length */
      if (max < tmp) { max = tmp; }
    }
    /* one allocation: values followed by the index array */
    mat->rowvalues = (Scalar*)PetscMalloc(max*bs2*(sizeof(int)+sizeof(Scalar)));CHKPTRQ(mat->rowvalues);
    mat->rowindices = (int*)(mat->rowvalues + max*bs2);
  }

  if (row < brstart || row >= brend) SETERRQ(PETSC_ERR_SUP,0,"Only local rows")
  lrow = row - brstart;  /* local row index */

  /* request only what the caller asked for from the sequential getrow */
  pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
  if (!v)   {pvA = 0; pvB = 0;}
  if (!idx) {pcA = 0; if (!v) pcB = 0;}
  ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  nztot = nzA + nzB;

  cmap  = mat->garray;       /* local block column of B -> global block column */
  if (v  || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      /* imark splits B's columns into those left of the diagonal block
         (global column < cstart) and those right of it */
      int imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        for (i=0; i<nzB; i++) {
          if (cmap[cworkB[i]/bs] < cstart)   v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
        for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* imark already determined by the values pass above */
          for (i=0; i<imark; i++) {
            idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
          }
        } else {
          for (i=0; i<nzB; i++) {
            if (cmap[cworkB[i]/bs] < cstart)
              idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
            else break;
          }
          imark = i;
        }
        for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
        for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
      }
    } else {
      if (idx) *idx = 0;
      if (v)   *v   = 0;
    }
  }
  *nz = nztot;
  /* hand the sequential rows back before returning the merged copy */
  ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1522 
1523 #undef __FUNC__
1524 #define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPISBAIJ"
1525 int MatRestoreRow_MPISBAIJ(Mat mat,int row,int *nz,int **idx,Scalar **v)
1526 {
1527   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
1528 
1529   PetscFunctionBegin;
1530   if (baij->getrowactive == PETSC_FALSE) {
1531     SETERRQ(PETSC_ERR_ARG_WRONGSTATE,0,"MatGetRow not called");
1532   }
1533   baij->getrowactive = PETSC_FALSE;
1534   PetscFunctionReturn(0);
1535 }
1536 
1537 #undef __FUNC__
1538 #define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPISBAIJ"
1539 int MatGetBlockSize_MPISBAIJ(Mat mat,int *bs)
1540 {
1541   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
1542 
1543   PetscFunctionBegin;
1544   *bs = baij->bs;
1545   PetscFunctionReturn(0);
1546 }
1547 
1548 #undef __FUNC__
1549 #define __FUNC__ /*<a name=""></a>*/"MatZeroEntries_MPISBAIJ"
1550 int MatZeroEntries_MPISBAIJ(Mat A)
1551 {
1552   Mat_MPISBAIJ *l = (Mat_MPISBAIJ*)A->data;
1553   int         ierr;
1554 
1555   PetscFunctionBegin;
1556   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1557   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1558   PetscFunctionReturn(0);
1559 }
1560 
1561 #undef __FUNC__
1562 #define __FUNC__ /*<a name=""></a>*/"MatGetInfo_MPISBAIJ"
1563 int MatGetInfo_MPISBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1564 {
1565   Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)matin->data;
1566   Mat         A = a->A,B = a->B;
1567   int         ierr;
1568   PetscReal   isend[5],irecv[5];
1569 
1570   PetscFunctionBegin;
1571   info->block_size     = (double)a->bs;
1572   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1573   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1574   isend[3] = info->memory;  isend[4] = info->mallocs;
1575   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1576   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1577   isend[3] += info->memory;  isend[4] += info->mallocs;
1578   if (flag == MAT_LOCAL) {
1579     info->nz_used      = isend[0];
1580     info->nz_allocated = isend[1];
1581     info->nz_unneeded  = isend[2];
1582     info->memory       = isend[3];
1583     info->mallocs      = isend[4];
1584   } else if (flag == MAT_GLOBAL_MAX) {
1585     ierr = MPI_Allreduce(isend,irecv,5,MPI_DOUBLE,MPI_MAX,matin->comm);CHKERRQ(ierr);
1586     info->nz_used      = irecv[0];
1587     info->nz_allocated = irecv[1];
1588     info->nz_unneeded  = irecv[2];
1589     info->memory       = irecv[3];
1590     info->mallocs      = irecv[4];
1591   } else if (flag == MAT_GLOBAL_SUM) {
1592     ierr = MPI_Allreduce(isend,irecv,5,MPI_DOUBLE,MPI_SUM,matin->comm);CHKERRQ(ierr);
1593     info->nz_used      = irecv[0];
1594     info->nz_allocated = irecv[1];
1595     info->nz_unneeded  = irecv[2];
1596     info->memory       = irecv[3];
1597     info->mallocs      = irecv[4];
1598   } else {
1599     SETERRQ1(1,1,"Unknown MatInfoType argument %d",flag);
1600   }
1601   info->rows_global       = (double)a->M;
1602   info->columns_global    = (double)a->N;
1603   info->rows_local        = (double)a->m;
1604   info->columns_local     = (double)a->N;
1605   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1606   info->fill_ratio_needed = 0;
1607   info->factor_mallocs    = 0;
1608   PetscFunctionReturn(0);
1609 }
1610 
1611 #undef __FUNC__
1612 #define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPISBAIJ"
1613 int MatSetOption_MPISBAIJ(Mat A,MatOption op)
1614 {
1615   Mat_MPISBAIJ *a = (Mat_MPISBAIJ*)A->data;
1616   int         ierr;
1617 
1618   PetscFunctionBegin;
1619   if (op == MAT_NO_NEW_NONZERO_LOCATIONS ||
1620       op == MAT_YES_NEW_NONZERO_LOCATIONS ||
1621       op == MAT_COLUMNS_UNSORTED ||
1622       op == MAT_COLUMNS_SORTED ||
1623       op == MAT_NEW_NONZERO_ALLOCATION_ERR ||
1624       op == MAT_KEEP_ZEROED_ROWS ||
1625       op == MAT_NEW_NONZERO_LOCATION_ERR) {
1626         ierr = MatSetOption(a->A,op);CHKERRQ(ierr);
1627         ierr = MatSetOption(a->B,op);CHKERRQ(ierr);
1628   } else if (op == MAT_ROW_ORIENTED) {
1629         a->roworiented = PETSC_TRUE;
1630         ierr = MatSetOption(a->A,op);CHKERRQ(ierr);
1631         ierr = MatSetOption(a->B,op);CHKERRQ(ierr);
1632   } else if (op == MAT_ROWS_SORTED ||
1633              op == MAT_ROWS_UNSORTED ||
1634              op == MAT_SYMMETRIC ||
1635              op == MAT_STRUCTURALLY_SYMMETRIC ||
1636              op == MAT_YES_NEW_DIAGONALS ||
1637              op == MAT_USE_HASH_TABLE) {
1638     PLogInfo(A,"Info:MatSetOption_MPIBAIJ:Option ignored\n");
1639   } else if (op == MAT_COLUMN_ORIENTED) {
1640     a->roworiented = PETSC_FALSE;
1641     ierr = MatSetOption(a->A,op);CHKERRQ(ierr);
1642     ierr = MatSetOption(a->B,op);CHKERRQ(ierr);
1643   } else if (op == MAT_IGNORE_OFF_PROC_ENTRIES) {
1644     a->donotstash = PETSC_TRUE;
1645   } else if (op == MAT_NO_NEW_DIAGONALS) {
1646     SETERRQ(PETSC_ERR_SUP,0,"MAT_NO_NEW_DIAGONALS");
1647   } else if (op == MAT_USE_HASH_TABLE) {
1648     a->ht_flag = PETSC_TRUE;
1649   } else {
1650     SETERRQ(PETSC_ERR_SUP,0,"unknown option");
1651   }
1652   PetscFunctionReturn(0);
1653 }
1654 
1655 #undef __FUNC__
1656 #define __FUNC__ /*<a name=""></a>*/"MatTranspose_MPISBAIJ("
1657 int MatTranspose_MPISBAIJ(Mat A,Mat *matout)
1658 {
1659   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)A->data;
1660   Mat_SeqBAIJ *Aloc;
1661   Mat         B;
1662   int         ierr,M=baij->M,N=baij->N,*ai,*aj,i,*rvals,j,k,col;
1663   int         bs=baij->bs,mbs=baij->mbs;
1664   MatScalar   *a;
1665 
1666   PetscFunctionBegin;
1667   if (!matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,0,"Square matrix only for in-place");
1668   ierr = MatCreateMPIBAIJ(A->comm,baij->bs,baij->n,baij->m,N,M,0,PETSC_NULL,0,PETSC_NULL,&B);CHKERRQ(ierr);
1669 
1670   /* copy over the A part */
1671   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1672   ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1673   rvals = (int*)PetscMalloc(bs*sizeof(int));CHKPTRQ(rvals);
1674 
1675   for (i=0; i<mbs; i++) {
1676     rvals[0] = bs*(baij->rstart + i);
1677     for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
1678     for (j=ai[i]; j<ai[i+1]; j++) {
1679       col = (baij->cstart+aj[j])*bs;
1680       for (k=0; k<bs; k++) {
1681         ierr = MatSetValues_MPISBAIJ_MatScalar(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1682         col++; a += bs;
1683       }
1684     }
1685   }
1686   /* copy over the B part */
1687   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1688   ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
1689   for (i=0; i<mbs; i++) {
1690     rvals[0] = bs*(baij->rstart + i);
1691     for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
1692     for (j=ai[i]; j<ai[i+1]; j++) {
1693       col = baij->garray[aj[j]]*bs;
1694       for (k=0; k<bs; k++) {
1695         ierr = MatSetValues_MPISBAIJ_MatScalar(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1696         col++; a += bs;
1697       }
1698     }
1699   }
1700   ierr = PetscFree(rvals);CHKERRQ(ierr);
1701   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1702   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1703 
1704   if (matout) {
1705     *matout = B;
1706   } else {
1707     PetscOps *Abops;
1708     MatOps   Aops;
1709 
1710     /* This isn't really an in-place transpose .... but free data structures from baij */
1711     ierr = PetscFree(baij->rowners);CHKERRQ(ierr);
1712     ierr = MatDestroy(baij->A);CHKERRQ(ierr);
1713     ierr = MatDestroy(baij->B);CHKERRQ(ierr);
1714 #if defined (PETSC_USE_CTABLE)
1715     if (baij->colmap) {ierr = PetscTableDelete(baij->colmap);CHKERRQ(ierr);}
1716 #else
1717     if (baij->colmap) {ierr = PetscFree(baij->colmap);CHKERRQ(ierr);}
1718 #endif
1719     if (baij->garray) {ierr = PetscFree(baij->garray);CHKERRQ(ierr);}
1720     if (baij->lvec) {ierr = VecDestroy(baij->lvec);CHKERRQ(ierr);}
1721     if (baij->Mvctx) {ierr = VecScatterDestroy(baij->Mvctx);CHKERRQ(ierr);}
1722     ierr = PetscFree(baij);CHKERRQ(ierr);
1723 
1724     /*
1725        This is horrible, horrible code. We need to keep the
1726       A pointers for the bops and ops but copy everything
1727       else from C.
1728     */
1729     Abops   = A->bops;
1730     Aops    = A->ops;
1731     ierr    = PetscMemcpy(A,B,sizeof(struct _p_Mat));CHKERRQ(ierr);
1732     A->bops = Abops;
1733     A->ops  = Aops;
1734 
1735     PetscHeaderDestroy(B);
1736   }
1737   PetscFunctionReturn(0);
1738 }
1739 
1740 #undef __FUNC__
1741 #define __FUNC__ /*<a name=""></a>*/"MatDiagonalScale_MPISBAIJ"
1742 int MatDiagonalScale_MPISBAIJ(Mat mat,Vec ll,Vec rr)
1743 {
1744   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
1745   Mat         a = baij->A,b = baij->B;
1746   int         ierr,s1,s2,s3;
1747 
1748   PetscFunctionBegin;
1749   if (ll != rr) {
1750     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"For symmetric format, left and right scaling vectors must be same\n");
1751   }
1752   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1753   if (rr) {
1754     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1755     if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,0,"right vector non-conforming local size");
1756     /* Overlap communication with computation. */
1757     ierr = VecScatterBegin(rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD,baij->Mvctx);CHKERRQ(ierr);
1758     /*} if (ll) { */
1759     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1760     if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,0,"left vector non-conforming local size");
1761     ierr = (*b->ops->diagonalscale)(b,ll,PETSC_NULL);CHKERRQ(ierr);
1762     /* } */
1763   /* scale  the diagonal block */
1764   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1765 
1766   /* if (rr) { */
1767     /* Do a scatter end and then right scale the off-diagonal block */
1768     ierr = VecScatterEnd(rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD,baij->Mvctx);CHKERRQ(ierr);
1769     ierr = (*b->ops->diagonalscale)(b,PETSC_NULL,baij->lvec);CHKERRQ(ierr);
1770   }
1771 
1772   PetscFunctionReturn(0);
1773 }
1774 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatZeroRows_MPISBAIJ"
/*
   MatZeroRows_MPISBAIJ - Zeros the global rows of A listed in is; if diag is
   non-null, diag is placed on the diagonal of each zeroed row.

   The index set may name rows owned by other processes, so the routine first
   routes every index to its owning process with point-to-point MPI messages,
   then zeros the locally owned rows.
*/
int MatZeroRows_MPISBAIJ(Mat A,IS is,Scalar *diag)
{
  Mat_MPISBAIJ    *l = (Mat_MPISBAIJ*)A->data;
  int            i,ierr,N,*rows,*owners = l->rowners,size = l->size;
  int            *procs,*nprocs,j,found,idx,nsends,*work,row;
  int            nmax,*svalues,*starts,*owner,nrecvs,rank = l->rank;
  int            *rvalues,tag = A->tag,count,base,slen,n,*source;
  int            *lens,imdex,*lrows,*values,bs=l->bs,rstart_bs=l->rstart_bs;
  MPI_Comm       comm = A->comm;
  MPI_Request    *send_waits,*recv_waits;
  MPI_Status     recv_status,*send_status;
  IS             istmp;

  PetscFunctionBegin;
  ierr = ISGetSize(is,&N);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&rows);CHKERRQ(ierr);

  /*  first count number of contributors to each processor */
  /* nprocs[j] = number of rows destined for process j; procs[j] = 1 if any */
  nprocs = (int*)PetscMalloc(2*size*sizeof(int));CHKPTRQ(nprocs);
  ierr   = PetscMemzero(nprocs,2*size*sizeof(int));CHKERRQ(ierr);
  procs  = nprocs + size;
  owner  = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(owner); /* see note*/
  for (i=0; i<N; i++) {
    idx   = rows[i];
    found = 0;
    /* owners[] holds block-row ownership ranges, hence the *bs scaling */
    for (j=0; j<size; j++) {
      if (idx >= owners[j]*bs && idx < owners[j+1]*bs) {
        nprocs[j]++; procs[j] = 1; owner[i] = j; found = 1; break;
      }
    }
    if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Index out of range");
  }
  nsends = 0;  for (i=0; i<size; i++) { nsends += procs[i];}

  /* inform other processors of number of messages and max length*/
  /* PetscMaxSum_Op yields, per rank, the max message length and the number
     of incoming messages in one reduction */
  work   = (int*)PetscMalloc(2*size*sizeof(int));CHKPTRQ(work);
  ierr   = MPI_Allreduce(nprocs,work,2*size,MPI_INT,PetscMaxSum_Op,comm);CHKERRQ(ierr);
  nmax   = work[rank];
  nrecvs = work[size+rank];
  ierr = PetscFree(work);CHKERRQ(ierr);

  /* post receives:   */
  /* each incoming message gets its own nmax-sized slot in rvalues */
  rvalues = (int*)PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(int));CHKPTRQ(rvalues);
  recv_waits = (MPI_Request*)PetscMalloc((nrecvs+1)*sizeof(MPI_Request));CHKPTRQ(recv_waits);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPI_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
     1) starts[i] gives the starting index in svalues for stuff going to
     the ith processor
  */
  svalues    = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(svalues);
  send_waits = (MPI_Request*)PetscMalloc((nsends+1)*sizeof(MPI_Request));CHKPTRQ(send_waits);
  starts     = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(starts);
  starts[0]  = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[i-1];}
  /* bucket the row indices by destination process */
  for (i=0; i<N; i++) {
    svalues[starts[owner[i]]++] = rows[i];
  }
  ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr);

  /* starts[] was consumed by the bucketing pass; rebuild it for the sends */
  starts[0] = 0;
  for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[i-1];}
  count = 0;
  for (i=0; i<size; i++) {
    if (procs[i]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[i],MPI_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* first locally-owned point row; received global indices are shifted by this */
  base = owners[rank]*bs;

  /*  wait on receives */
  lens   = (int*)PetscMalloc(2*(nrecvs+1)*sizeof(int));CHKPTRQ(lens);
  source = lens + nrecvs;
  count  = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPI_INT,&n);CHKERRQ(ierr);
    source[imdex]  = recv_status.MPI_SOURCE;
    lens[imdex]    = n;
    slen          += n;
    count--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* move the data into the send scatter */
  /* lrows collects the locally owned rows to zero, in local numbering */
  lrows = (int*)PetscMalloc((slen+1)*sizeof(int));CHKPTRQ(lrows);
  count = 0;
  for (i=0; i<nrecvs; i++) {
    values = rvalues + i*nmax;
    for (j=0; j<lens[i]; j++) {
      lrows[count++] = values[j] - base;
    }
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree(lens);CHKERRQ(ierr);
  ierr = PetscFree(owner);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* actually zap the local rows */
  ierr = ISCreateGeneral(PETSC_COMM_SELF,slen,lrows,&istmp);CHKERRQ(ierr);
  PLogObjectParent(A,istmp);

  /*
        Zero the required rows. If the "diagonal block" of the matrix
     is square and the user wishes to set the diagonal we use seperate
     code so that MatSetValues() is not called for each diagonal allocating
     new memory, thus calling lots of mallocs and slowing things down.

       Contributed by: Mathew Knepley
  */
  /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
  ierr = MatZeroRows_SeqSBAIJ(l->B,istmp,0);CHKERRQ(ierr);
  if (diag && (l->A->M == l->A->N)) {
    ierr = MatZeroRows_SeqSBAIJ(l->A,istmp,diag);CHKERRQ(ierr);
  } else if (diag) {
    /* rectangular diagonal block: zero first, then insert the diagonal
       entries one by one (requires new-nonzero insertion to be allowed) */
    ierr = MatZeroRows_SeqSBAIJ(l->A,istmp,0);CHKERRQ(ierr);
    if (((Mat_SeqSBAIJ*)l->A->data)->nonew) {
      SETERRQ(PETSC_ERR_SUP,0,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
MAT_NO_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
    }
    for (i=0; i<slen; i++) {
      row  = lrows[i] + rstart_bs;
      ierr = MatSetValues(A,1,&row,1,&row,diag,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatZeroRows_SeqSBAIJ(l->A,istmp,0);CHKERRQ(ierr);
  }

  ierr = ISDestroy(istmp);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);

  /* wait on sends */
  /* svalues must stay alive until all sends complete */
  if (nsends) {
    send_status = (MPI_Status*)PetscMalloc(nsends*sizeof(MPI_Status));CHKPTRQ(send_status);
    ierr        = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr        = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(svalues);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
1926 
1927 #undef __FUNC__
1928 #define __FUNC__ /*<a name=""></a>*/"MatPrintHelp_MPISBAIJ"
1929 int MatPrintHelp_MPISBAIJ(Mat A)
1930 {
1931   Mat_MPISBAIJ *a   = (Mat_MPISBAIJ*)A->data;
1932   MPI_Comm    comm = A->comm;
1933   static int  called = 0;
1934   int         ierr;
1935 
1936   PetscFunctionBegin;
1937   if (!a->rank) {
1938     ierr = MatPrintHelp_SeqSBAIJ(a->A);CHKERRQ(ierr);
1939   }
1940   if (called) {PetscFunctionReturn(0);} else called = 1;
1941   ierr = (*PetscHelpPrintf)(comm," Options for MATMPISBAIJ matrix format (the defaults):\n");CHKERRQ(ierr);
1942   ierr = (*PetscHelpPrintf)(comm,"  -mat_use_hash_table <factor>: Use hashtable for efficient matrix assembly\n");CHKERRQ(ierr);
1943   PetscFunctionReturn(0);
1944 }
1945 
1946 #undef __FUNC__
1947 #define __FUNC__ /*<a name=""></a>*/"MatSetUnfactored_MPISBAIJ"
1948 int MatSetUnfactored_MPISBAIJ(Mat A)
1949 {
1950   Mat_MPIBAIJ *a   = (Mat_MPIBAIJ*)A->data;
1951   int         ierr;
1952 
1953   PetscFunctionBegin;
1954   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1955   PetscFunctionReturn(0);
1956 }
1957 
1958 static int MatDuplicate_MPISBAIJ(Mat,MatDuplicateOption,Mat *);
1959 
1960 #undef __FUNC__
1961 #define __FUNC__ /*<a name=""></a>*/"MatEqual_MPISBAIJ"
1962 int MatEqual_MPISBAIJ(Mat A,Mat B,PetscTruth *flag)
1963 {
1964   Mat_MPISBAIJ *matB = (Mat_MPISBAIJ*)B->data,*matA = (Mat_MPISBAIJ*)A->data;
1965   Mat         a,b,c,d;
1966   PetscTruth  flg;
1967   int         ierr;
1968 
1969   PetscFunctionBegin;
1970   if (B->type != MATMPISBAIJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");
1971   a = matA->A; b = matA->B;
1972   c = matB->A; d = matB->B;
1973 
1974   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1975   if (flg == PETSC_TRUE) {
1976     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1977   }
1978   ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
1979   PetscFunctionReturn(0);
1980 }
1981 
1982 /* -------------------------------------------------------------------*/
/*
   Function-pointer table installed into every MATMPISBAIJ matrix header.
   Entries appear in _MatOps slot order; a 0 marks an operation this format
   does not implement.
*/
static struct _MatOps MatOps_Values = {
  MatSetValues_MPISBAIJ,
  MatGetRow_MPISBAIJ,
  MatRestoreRow_MPISBAIJ,
  MatMult_MPISBAIJ,
  MatMultAdd_MPISBAIJ,
  MatMultTranspose_MPISBAIJ,
  MatMultTransposeAdd_MPISBAIJ,
  0,
  0,
  0,
  0,
  0,
  0,
  0,
  MatTranspose_MPISBAIJ,
  MatGetInfo_MPISBAIJ,
  MatEqual_MPISBAIJ,
  MatGetDiagonal_MPISBAIJ,
  MatDiagonalScale_MPISBAIJ,
  MatNorm_MPISBAIJ,
  MatAssemblyBegin_MPISBAIJ,
  MatAssemblyEnd_MPISBAIJ,
  0,
  MatSetOption_MPISBAIJ,
  MatZeroEntries_MPISBAIJ,
  MatZeroRows_MPISBAIJ,
  0,
  0,
  0,
  0,
  MatGetSize_MPISBAIJ,
  MatGetLocalSize_MPISBAIJ,
  MatGetOwnershipRange_MPISBAIJ,
  0,
  0,
  0,
  0,
  MatDuplicate_MPISBAIJ,
  0,
  0,
  0,
  0,
  0,
  MatGetSubMatrices_MPISBAIJ,
  MatIncreaseOverlap_MPISBAIJ,
  MatGetValues_MPISBAIJ,
  0,
  MatPrintHelp_MPISBAIJ,
  MatScale_MPISBAIJ,
  0,
  0,
  0,
  MatGetBlockSize_MPISBAIJ,
  0,
  0,
  0,
  0,
  0,
  0,
  MatSetUnfactored_MPISBAIJ,
  0,
  MatSetValuesBlocked_MPISBAIJ,
  0,
  0,
  0,
  MatGetMaps_Petsc};
2050 
2051 
2052 EXTERN_C_BEGIN
2053 #undef __FUNC__
2054 #define __FUNC__ /*<a name=""></a>*/"MatGetDiagonalBlock_MPISBAIJ"
2055 int MatGetDiagonalBlock_MPISBAIJ(Mat A,PetscTruth *iscopy,MatReuse reuse,Mat *a)
2056 {
2057   PetscFunctionBegin;
2058   *a      = ((Mat_MPISBAIJ *)A->data)->A;
2059   *iscopy = PETSC_FALSE;
2060   PetscFunctionReturn(0);
2061 }
2062 EXTERN_C_END
2063 
2064 #undef __FUNC__
2065 #define __FUNC__ /*<a name=""></a>*/"MatCreateMPISBAIJ"
2066 /*@C
2067    MatCreateMPISBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format
2068    (block compressed row).  For good matrix assembly performance
2069    the user should preallocate the matrix storage by setting the parameters
2070    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2071    performance can be increased by more than a factor of 50.
2072 
2073    Collective on MPI_Comm
2074 
2075    Input Parameters:
2076 +  comm - MPI communicator
.  bs   - size of block
2078 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
2079            This value should be the same as the local size used in creating the
2080            y vector for the matrix-vector product y = Ax.
2081 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
2082            This value should be the same as the local size used in creating the
2083            x vector for the matrix-vector product y = Ax.
2084 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
2085 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
2086 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
2087            submatrix  (same for all local rows)
2088 .  d_nnz - array containing the number of block nonzeros in the various block rows
2089            of the in diagonal portion of the local (possibly different for each block
2090            row) or PETSC_NULL.  You must leave room for the diagonal entry even if it is zero.
2091 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2092            submatrix (same for all local rows).
2093 -  o_nnz - array containing the number of nonzeros in the various block rows of the
2094            off-diagonal portion of the local submatrix (possibly different for
2095            each block row) or PETSC_NULL.
2096 
2097    Output Parameter:
2098 .  A - the matrix
2099 
2100    Options Database Keys:
2101 .   -mat_no_unroll - uses code that does not unroll the loops in the
2102                      block calculations (much slower)
2103 .   -mat_block_size - size of the blocks to use
2104 .   -mat_mpi - use the parallel matrix data structures even on one processor
2105                (defaults to using SeqBAIJ format on one processor)
2106 
2107    Notes:
2108    The user MUST specify either the local or global matrix dimensions
2109    (possibly both).
2110 
2111    If PETSC_DECIDE or  PETSC_DETERMINE is used for a particular argument on one processor
2112    than it must be used on all processors that share the object for that argument.
2113 
2114    Storage Information:
2115    For a square global matrix we define each processor's diagonal portion
2116    to be its local rows and the corresponding columns (a square submatrix);
2117    each processor's off-diagonal portion encompasses the remainder of the
2118    local matrix (a rectangular submatrix).
2119 
2120    The user can specify preallocated storage for the diagonal part of
2121    the local submatrix with either d_nz or d_nnz (not both).  Set
2122    d_nz=PETSC_DEFAULT and d_nnz=PETSC_NULL for PETSc to control dynamic
2123    memory allocation.  Likewise, specify preallocated storage for the
2124    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
2125 
2126    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2127    the figure below we depict these three local rows and all columns (0-11).
2128 
2129 .vb
2130            0 1 2 3 4 5 6 7 8 9 10 11
2131           -------------------
2132    row 3  |  o o o d d d o o o o o o
2133    row 4  |  o o o d d d o o o o o o
2134    row 5  |  o o o d d d o o o o o o
2135           -------------------
2136 .ve
2137 
2138    Thus, any entries in the d locations are stored in the d (diagonal)
2139    submatrix, and any entries in the o locations are stored in the
2140    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
2141    stored simply in the MATSEQBAIJ format for compressed row storage.
2142 
2143    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
2144    and o_nz should indicate the number of block nonzeros per row in the o matrix.
2145    In general, for PDE problems in which most nonzeros are near the diagonal,
2146    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
2147    or you will get TERRIBLE performance; see the users' manual chapter on
2148    matrices.
2149 
2150    Level: intermediate
2151 
2152 .keywords: matrix, block, aij, compressed row, sparse, parallel
2153 
2154 .seealso: MatCreate(), MatCreateSeqSBAIJ(), MatSetValues(), MatCreateMPISBAIJ()
2155 @*/
2156 
2157 int MatCreateMPISBAIJ(MPI_Comm comm,int bs,int m,int n,int M,int N,int d_nz,int *d_nnz,int o_nz,int *o_nnz,Mat *A)
2158 {
2159   Mat          B;
2160   Mat_MPISBAIJ  *b;
2161   int          ierr,i,sum[1],work[1],mbs,Mbs=PETSC_DECIDE,size;
2162   PetscTruth   flag1,flag2,flg;
2163 
2164   PetscFunctionBegin;
2165   if (M != N || m != n){ /* N and n are not used after this */
2166     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"For symmetric format, set M=N and m=n");
2167   }
2168   ierr = OptionsGetInt(PETSC_NULL,"-mat_block_size",&bs,PETSC_NULL);CHKERRQ(ierr);
2169 
2170   if (bs < 1) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Invalid block size specified, must be positive");
2171   if (d_nz < -2) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,0,"d_nz cannot be less than -2: value %d",d_nz);
2172   if (o_nz < -2) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,0,"o_nz cannot be less than -2: value %d",o_nz);
2173   if (d_nnz) {
2174     for (i=0; i<m/bs; i++) {
2175       if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,0,"d_nnz cannot be less than -1: local row %d value %d",i,d_nnz[i]);
2176     }
2177   }
2178   if (o_nnz) {
2179     for (i=0; i<m/bs; i++) {
2180       if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,0,"o_nnz cannot be less than -1: local row %d value %d",i,o_nnz[i]);
2181     }
2182   }
2183 
2184   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2185   ierr = OptionsHasName(PETSC_NULL,"-mat_mpisbaij",&flag1);CHKERRQ(ierr);
2186   ierr = OptionsHasName(PETSC_NULL,"-mat_mpi",&flag2);CHKERRQ(ierr);
2187   if (!flag1 && !flag2 && size == 1) {
2188     if (M == PETSC_DECIDE) M = m;
2189     ierr = MatCreateSeqSBAIJ(comm,bs,M,M,d_nz,d_nnz,A);CHKERRQ(ierr);
2190     PetscFunctionReturn(0);
2191   }
2192 
2193   *A = 0;
2194   PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPISBAIJ,"Mat",comm,MatDestroy,MatView);
2195   PLogObjectCreate(B);
2196   B->data = (void*)(b = PetscNew(Mat_MPISBAIJ));CHKPTRQ(b);
2197   ierr    = PetscMemzero(b,sizeof(Mat_MPISBAIJ));CHKERRQ(ierr);
2198   ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
2199 
2200   B->ops->destroy    = MatDestroy_MPISBAIJ;
2201   B->ops->view       = MatView_MPISBAIJ;
2202   B->mapping    = 0;
2203   B->factor     = 0;
2204   B->assembled  = PETSC_FALSE;
2205 
2206   B->insertmode = NOT_SET_VALUES;
2207   ierr = MPI_Comm_rank(comm,&b->rank);CHKERRQ(ierr);
2208   ierr = MPI_Comm_size(comm,&b->size);CHKERRQ(ierr);
2209 
2210   if (m == PETSC_DECIDE && (d_nnz || o_nnz)) {
2211     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Cannot have PETSC_DECIDE rows but set d_nnz or o_nnz");
2212   }
2213   if (M == PETSC_DECIDE && m == PETSC_DECIDE) {
2214     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"either M or m should be specified");
2215   }
2216   if (M != PETSC_DECIDE && m != PETSC_DECIDE) M = PETSC_DECIDE;
2217 
2218   if (M == PETSC_DECIDE) {
2219     work[0] = m; mbs = m/bs;
2220     ierr = MPI_Allreduce(work,sum,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
2221     M = sum[0]; Mbs = M/bs;
2222   } else { /* M is specified */
2223     Mbs = M/bs;
2224     if (Mbs*bs != M) SETERRQ(PETSC_ERR_ARG_SIZ,0,"No of global rows must be divisible by blocksize");
2225     mbs = Mbs/b->size + ((Mbs % b->size) > b->rank);
2226     m   = mbs*bs;
2227   }
2228 
2229   if (mbs*bs != m) {
2230     SETERRQ(PETSC_ERR_ARG_SIZ,0,"No of local rows/cols must be divisible by blocksize");
2231   }
2232 
2233   b->m = m; B->m = m;
2234   b->n = m; B->n = m;
2235   b->N = M; B->N = M;
2236   b->M = M;
2237   B->M = M;
2238   b->bs  = bs;
2239   b->bs2 = bs*bs;
2240   b->mbs = mbs;
2241   b->nbs = mbs;
2242   b->Mbs = Mbs;
2243   b->Nbs = Mbs;
2244 
2245   /* the information in the maps duplicates the information computed below, eventually
2246      we should remove the duplicate information that is not contained in the maps */
2247   ierr = MapCreateMPI(B->comm,m,M,&B->rmap);CHKERRQ(ierr);
2248   ierr = MapCreateMPI(B->comm,m,M,&B->cmap);CHKERRQ(ierr);
2249 
2250   /* build local table of row and column ownerships */
2251   b->rowners = (int*)PetscMalloc(3*(b->size+2)*sizeof(int));CHKPTRQ(b->rowners);
2252   PLogObjectMemory(B,3*(b->size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPISBAIJ));
2253   b->cowners    = b->rowners + b->size + 2;
2254   b->rowners_bs = b->cowners + b->size + 2;
2255   ierr = MPI_Allgather(&mbs,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
2256   b->rowners[0]    = 0;
2257   for (i=2; i<=b->size; i++) {
2258     b->rowners[i] += b->rowners[i-1];
2259   }
2260   for (i=0; i<=b->size; i++) {
2261     b->rowners_bs[i] = b->rowners[i]*bs;
2262   }
2263   b->rstart    = b->rowners[b->rank];
2264   b->rend      = b->rowners[b->rank+1];
2265   b->rstart_bs = b->rstart * bs;
2266   b->rend_bs   = b->rend * bs;
2267 
2268   b->cstart    = b->rstart;
2269   b->cend      = b->rend;
2270   b->cstart_bs = b->cstart * bs;
2271   b->cend_bs   = b->cend * bs;
2272 
2273 
2274   if (d_nz == PETSC_DEFAULT) d_nz = 5;
2275   ierr = MatCreateSeqSBAIJ(PETSC_COMM_SELF,bs,m,m,d_nz,d_nnz,&b->A);CHKERRQ(ierr);
2276   PLogObjectParent(B,b->A);
2277   if (o_nz == PETSC_DEFAULT) o_nz = 0;
2278   ierr = MatCreateSeqBAIJ(PETSC_COMM_SELF,bs,m,M,o_nz,o_nnz,&b->B);CHKERRQ(ierr);
2279   PLogObjectParent(B,b->B);
2280 
2281   /* build cache for off array entries formed */
2282   ierr = MatStashCreate_Private(B->comm,1,&B->stash);CHKERRQ(ierr);
2283   ierr = MatStashCreate_Private(B->comm,bs,&B->bstash);CHKERRQ(ierr);
2284   b->donotstash  = PETSC_FALSE;
2285   b->colmap      = PETSC_NULL;
2286   b->garray      = PETSC_NULL;
2287   b->roworiented = PETSC_TRUE;
2288 
2289 #if defined(PEYSC_USE_MAT_SINGLE)
2290   /* stuff for MatSetValues_XXX in single precision */
2291   b->lensetvalues     = 0;
2292   b->setvaluescopy    = PETSC_NULL;
2293 #endif
2294 
2295   /* stuff used in block assembly */
2296   b->barray       = 0;
2297 
2298   /* stuff used for matrix vector multiply */
2299   b->lvec         = 0;
2300   b->Mvctx        = 0;
2301 
2302   /* stuff for MatGetRow() */
2303   b->rowindices   = 0;
2304   b->rowvalues    = 0;
2305   b->getrowactive = PETSC_FALSE;
2306 
2307   /* hash table stuff */
2308   b->ht           = 0;
2309   b->hd           = 0;
2310   b->ht_size      = 0;
2311   b->ht_flag      = PETSC_FALSE;
2312   b->ht_fact      = 0;
2313   b->ht_total_ct  = 0;
2314   b->ht_insert_ct = 0;
2315 
2316   *A = B;
2317   ierr = OptionsHasName(PETSC_NULL,"-mat_use_hash_table",&flg);CHKERRQ(ierr);
2318   if (flg) {
2319     double fact = 1.39;
2320     ierr = MatSetOption(B,MAT_USE_HASH_TABLE);CHKERRQ(ierr);
2321     ierr = OptionsGetDouble(PETSC_NULL,"-mat_use_hash_table",&fact,PETSC_NULL);CHKERRQ(ierr);
2322     if (fact <= 1.0) fact = 1.39;
2323     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
2324     PLogInfo(0,"MatCreateMPISBAIJ:Hash table Factor used %5.2f\n",fact);
2325   }
2326   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
2327                                      "MatStoreValues_MPISBAIJ",
2328                                      MatStoreValues_MPISBAIJ);CHKERRQ(ierr);
2329   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
2330                                      "MatRetrieveValues_MPISBAIJ",
2331                                      MatRetrieveValues_MPISBAIJ);CHKERRQ(ierr);
2332   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
2333                                      "MatGetDiagonalBlock_MPISBAIJ",
2334                                      MatGetDiagonalBlock_MPISBAIJ);CHKERRQ(ierr);
2335   PetscFunctionReturn(0);
2336 }
2337 
2338 
2339 #undef __FUNC__
2340 #define __FUNC__ /*<a name=""></a>*/"MatDuplicate_MPISBAIJ"
2341 static int MatDuplicate_MPISBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
2342 {
2343   Mat         mat;
2344   Mat_MPISBAIJ *a,*oldmat = (Mat_MPISBAIJ*)matin->data;
2345   int         ierr,len=0;
2346   PetscTruth  flg;
2347 
2348   PetscFunctionBegin;
2349   *newmat       = 0;
2350   PetscHeaderCreate(mat,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPISBAIJ,"Mat",matin->comm,MatDestroy,MatView);
2351   PLogObjectCreate(mat);
2352   mat->data         = (void*)(a = PetscNew(Mat_MPISBAIJ));CHKPTRQ(a);
2353   ierr              = PetscMemcpy(mat->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
2354   mat->ops->destroy = MatDestroy_MPISBAIJ;
2355   mat->ops->view    = MatView_MPISBAIJ;
2356   mat->factor       = matin->factor;
2357   mat->assembled    = PETSC_TRUE;
2358   mat->insertmode   = NOT_SET_VALUES;
2359 
2360   a->m = mat->m   = oldmat->m;
2361   a->n = mat->n   = oldmat->n;
2362   a->M = mat->M   = oldmat->M;
2363   a->N = mat->N   = oldmat->N;
2364 
2365   a->bs  = oldmat->bs;
2366   a->bs2 = oldmat->bs2;
2367   a->mbs = oldmat->mbs;
2368   a->nbs = oldmat->nbs;
2369   a->Mbs = oldmat->Mbs;
2370   a->Nbs = oldmat->Nbs;
2371 
2372   a->rstart       = oldmat->rstart;
2373   a->rend         = oldmat->rend;
2374   a->cstart       = oldmat->cstart;
2375   a->cend         = oldmat->cend;
2376   a->size         = oldmat->size;
2377   a->rank         = oldmat->rank;
2378   a->donotstash   = oldmat->donotstash;
2379   a->roworiented  = oldmat->roworiented;
2380   a->rowindices   = 0;
2381   a->rowvalues    = 0;
2382   a->getrowactive = PETSC_FALSE;
2383   a->barray       = 0;
2384   a->rstart_bs    = oldmat->rstart_bs;
2385   a->rend_bs      = oldmat->rend_bs;
2386   a->cstart_bs    = oldmat->cstart_bs;
2387   a->cend_bs      = oldmat->cend_bs;
2388 
2389   /* hash table stuff */
2390   a->ht           = 0;
2391   a->hd           = 0;
2392   a->ht_size      = 0;
2393   a->ht_flag      = oldmat->ht_flag;
2394   a->ht_fact      = oldmat->ht_fact;
2395   a->ht_total_ct  = 0;
2396   a->ht_insert_ct = 0;
2397 
2398 
2399   a->rowners = (int*)PetscMalloc(3*(a->size+2)*sizeof(int));CHKPTRQ(a->rowners);
2400   PLogObjectMemory(mat,3*(a->size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPISBAIJ));
2401   a->cowners    = a->rowners + a->size + 2;
2402   a->rowners_bs = a->cowners + a->size + 2;
2403   ierr = PetscMemcpy(a->rowners,oldmat->rowners,3*(a->size+2)*sizeof(int));CHKERRQ(ierr);
2404   ierr = MatStashCreate_Private(matin->comm,1,&mat->stash);CHKERRQ(ierr);
2405   ierr = MatStashCreate_Private(matin->comm,oldmat->bs,&mat->bstash);CHKERRQ(ierr);
2406   if (oldmat->colmap) {
2407 #if defined (PETSC_USE_CTABLE)
2408   ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
2409 #else
2410     a->colmap = (int*)PetscMalloc((a->Nbs)*sizeof(int));CHKPTRQ(a->colmap);
2411     PLogObjectMemory(mat,(a->Nbs)*sizeof(int));
2412     ierr      = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(int));CHKERRQ(ierr);
2413 #endif
2414   } else a->colmap = 0;
2415   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
2416     a->garray = (int*)PetscMalloc(len*sizeof(int));CHKPTRQ(a->garray);
2417     PLogObjectMemory(mat,len*sizeof(int));
2418     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(int));CHKERRQ(ierr);
2419   } else a->garray = 0;
2420 
2421   ierr =  VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
2422   PLogObjectParent(mat,a->lvec);
2423   ierr =  VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
2424 
2425   PLogObjectParent(mat,a->Mvctx);
2426   ierr =  MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
2427   PLogObjectParent(mat,a->A);
2428   ierr =  MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
2429   PLogObjectParent(mat,a->B);
2430   ierr = OptionsHasName(PETSC_NULL,"-help",&flg);CHKERRQ(ierr);
2431   ierr = FListDuplicate(mat->qlist,&matin->qlist);CHKERRQ(ierr);
2432   if (flg) {
2433     ierr = MatPrintHelp(mat);CHKERRQ(ierr);
2434   }
2435   *newmat = mat;
2436   PetscFunctionReturn(0);
2437 }
2438 
2439 #include "petscsys.h"
2440 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatLoad_MPISBAIJ"
/*
   MatLoad_MPISBAIJ - Loads an MPISBAIJ matrix from a binary viewer.

   Input Parameters:
.  viewer - binary viewer containing the matrix (header, row lengths,
            column indices, then numerical values)
.  type   - matrix type (unused here beyond the calling convention)

   Output Parameter:
.  newmat - the assembled parallel matrix

   Notes:
   Process 0 reads the entire file and ships each process its slice of the
   row lengths, column indices, and values via MPI point-to-point messages.
   If the global size is not divisible by the block size, extra padded rows
   (with a unit diagonal entry) are appended so that it becomes divisible.
*/
int MatLoad_MPISBAIJ(Viewer viewer,MatType type,Mat *newmat)
{
  Mat          A;
  int          i,nz,ierr,j,rstart,rend,fd;
  Scalar       *vals,*buf;
  MPI_Comm     comm = ((PetscObject)viewer)->comm;
  MPI_Status   status;
  int          header[4],rank,size,*rowlengths = 0,M,N,m,*rowners,*browners,maxnz,*cols;
  int          *locrowlens,*sndcounts = 0,*procsnz = 0,jj,*mycols,*ibuf;
  int          tag = ((PetscObject)viewer)->tag,bs=1,Mbs,mbs,extra_rows;
  int          *dlens,*odlens,*mask,*masked1,*masked2,rowcount,odcount;
  int          dcount,kmax,k,nzcount,tmp;

  PetscFunctionBegin;
  /* block size may be overridden from the command line; default is 1 */
  ierr = OptionsGetInt(PETSC_NULL,"-matload_block_size",&bs,PETSC_NULL);CHKERRQ(ierr);

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* only process 0 touches the file; it validates the 4-int header */
  if (!rank) {
    ierr = ViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
    if (header[0] != MAT_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,0,"not matrix object");
    if (header[3] < 0) {
      SETERRQ(PETSC_ERR_FILE_UNEXPECTED,1,"Matrix stored in special format, cannot load as MPISBAIJ");
    }
  }

  /* broadcast M, N, nz (header[1..3]) to all processes */
  ierr = MPI_Bcast(header+1,3,MPI_INT,0,comm);CHKERRQ(ierr);
  M = header[1]; N = header[2];

  if (M != N) SETERRQ(PETSC_ERR_SUP,0,"Can only do square matrices");

  /*
     This code adds extra rows to make sure the number of rows is
     divisible by the blocksize
  */
  Mbs        = M/bs;
  extra_rows = bs - M + bs*(Mbs);
  if (extra_rows == bs) extra_rows = 0;
  else                  Mbs++;
  if (extra_rows &&!rank) {
    PLogInfo(0,"MatLoad_MPISBAIJ:Padding loaded matrix to match blocksize\n");
  }

  /* determine ownership of all rows */
  /* block rows are dealt out as evenly as possible; the first Mbs%size
     processes get one extra block row */
  mbs = Mbs/size + ((Mbs % size) > rank);
  m   = mbs * bs;
  /* rowners (block units) and browners (scalar units) share one allocation */
  rowners = (int*)PetscMalloc(2*(size+2)*sizeof(int));CHKPTRQ(rowners);
  browners = rowners + size + 1;
  ierr = MPI_Allgather(&mbs,1,MPI_INT,rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
  rowners[0] = 0;
  /* prefix-sum the per-process counts into ownership offsets */
  for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
  for (i=0; i<=size;  i++) browners[i] = rowners[i]*bs;
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  locrowlens = (int*)PetscMalloc((rend-rstart)*bs*sizeof(int));CHKPTRQ(locrowlens);
  if (!rank) {
    rowlengths = (int*)PetscMalloc((M+extra_rows)*sizeof(int));CHKPTRQ(rowlengths);
    ierr = PetscBinaryRead(fd,rowlengths,M,PETSC_INT);CHKERRQ(ierr);
    /* each padded row holds exactly one entry (its unit diagonal) */
    for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;
    sndcounts = (int*)PetscMalloc(size*sizeof(int));CHKPTRQ(sndcounts);
    for (i=0; i<size; i++) sndcounts[i] = browners[i+1] - browners[i];
    ierr = MPI_Scatterv(rowlengths,sndcounts,browners,MPI_INT,locrowlens,(rend-rstart)*bs,MPI_INT,0,comm);CHKERRQ(ierr);
    ierr = PetscFree(sndcounts);CHKERRQ(ierr);
  } else {
    ierr = MPI_Scatterv(0,0,0,MPI_INT,locrowlens,(rend-rstart)*bs,MPI_INT,0,comm);CHKERRQ(ierr);
  }

  if (!rank) {
    /* calculate the number of nonzeros on each processor */
    procsnz = (int*)PetscMalloc(size*sizeof(int));CHKPTRQ(procsnz);
    ierr    = PetscMemzero(procsnz,size*sizeof(int));CHKERRQ(ierr);
    for (i=0; i<size; i++) {
      for (j=rowners[i]*bs; j< rowners[i+1]*bs; j++) {
        procsnz[i] += rowlengths[j];
      }
    }
    ierr = PetscFree(rowlengths);CHKERRQ(ierr);

    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    cols = (int*)PetscMalloc(maxnz*sizeof(int));CHKPTRQ(cols);

    /* read in my part of the matrix column indices  */
    nz = procsnz[0];
    ibuf = (int*)PetscMalloc(nz*sizeof(int));CHKPTRQ(ibuf);
    mycols = ibuf;
    /* uniprocessor case: padded rows are not in the file, so read fewer
       entries and synthesize the padding diagonal indices afterwards */
    if (size == 1)  nz -= extra_rows;
    ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
    if (size == 1)  for (i=0; i< extra_rows; i++) { mycols[nz+i] = M+i; }

    /* read in every ones (except the last) and ship off */
    for (i=1; i<size-1; i++) {
      nz   = procsnz[i];
      ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
      ierr = MPI_Send(cols,nz,MPI_INT,i,tag,comm);CHKERRQ(ierr);
    }
    /* read in the stuff for the last proc */
    if (size != 1) {
      nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
      ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
      for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
      ierr = MPI_Send(cols,nz+extra_rows,MPI_INT,size-1,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(cols);CHKERRQ(ierr);
  } else {
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) {
      nz += locrowlens[i];
    }
    ibuf   = (int*)PetscMalloc(nz*sizeof(int));CHKPTRQ(ibuf);
    mycols = ibuf;
    /* receive message of column indices*/
    ierr = MPI_Recv(mycols,nz,MPI_INT,0,tag,comm,&status);CHKERRQ(ierr);
    ierr = MPI_Get_count(&status,MPI_INT,&maxnz);CHKERRQ(ierr);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,0,"something is wrong with file");
  }

  /* loop over local rows, determining number of off diagonal entries */
  /* dlens/odlens share one allocation; mask/masked1/masked2 share another.
     mask marks which block columns have been seen in the current block row */
  dlens  = (int*)PetscMalloc(2*(rend-rstart+1)*sizeof(int));CHKPTRQ(dlens);
  odlens = dlens + (rend-rstart);
  mask   = (int*)PetscMalloc(3*Mbs*sizeof(int));CHKPTRQ(mask);
  ierr   = PetscMemzero(mask,3*Mbs*sizeof(int));CHKERRQ(ierr);
  masked1 = mask    + Mbs;
  masked2 = masked1 + Mbs;
  rowcount = 0; nzcount = 0;
  for (i=0; i<mbs; i++) {
    dcount  = 0;
    odcount = 0;
    /* scan all bs point rows making up block row i */
    for (j=0; j<bs; j++) {
      kmax = locrowlens[rowcount];
      for (k=0; k<kmax; k++) {
        /* convert point column to block column */
        tmp = mycols[nzcount++]/bs;
        if (!mask[tmp]) {
          mask[tmp] = 1;
          /* block columns outside [rstart,rend) belong to the off-diagonal part */
          if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
          else masked1[dcount++] = tmp;
        }
      }
      rowcount++;
    }

    dlens[i]  = dcount;
    odlens[i] = odcount;

    /* zero out the mask elements we set */
    for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
    for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
  }

  /* create our matrix */
  ierr = MatCreateMPISBAIJ(comm,bs,m,PETSC_DECIDE,M+extra_rows,N+extra_rows,0,dlens,0,odlens,newmat);CHKERRQ(ierr);
  A = *newmat;
  MatSetOption(A,MAT_COLUMNS_SORTED);

  if (!rank) {
    buf = (Scalar*)PetscMalloc(maxnz*sizeof(Scalar));CHKPTRQ(buf);
    /* read in my part of the matrix numerical values  */
    nz = procsnz[0];
    vals = buf;
    mycols = ibuf;
    /* uniprocessor case: as with the indices, synthesize unit values for
       the padding rows that are not present on disk */
    if (size == 1)  nz -= extra_rows;
    ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
    if (size == 1)  for (i=0; i< extra_rows; i++) { vals[nz+i] = 1.0; }

    /* insert into matrix */
    jj      = rstart*bs;
    for (i=0; i<m; i++) {
      ierr = MatSetValues_MPISBAIJ(A,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
      /* advance past this row's entries in the shared index/value buffers */
      mycols += locrowlens[i];
      vals   += locrowlens[i];
      jj++;
    }
    /* read in other processors (except the last one) and ship out */
    for (i=1; i<size-1; i++) {
      nz   = procsnz[i];
      vals = buf;
      ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,A->tag,comm);CHKERRQ(ierr);
    }
    /* the last proc */
    /* NOTE: i == size-1 here (left over from the loop above), so procsnz[i]
       is the last process's count */
    if (size != 1){
      nz   = procsnz[i] - extra_rows;
      vals = buf;
      ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
      ierr = MPI_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,A->tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(procsnz);CHKERRQ(ierr);
  } else {
    /* receive numeric values */
    buf = (Scalar*)PetscMalloc(nz*sizeof(Scalar));CHKPTRQ(buf);

    /* receive message of values*/
    vals   = buf;
    mycols = ibuf;
    ierr   = MPI_Recv(vals,nz,MPIU_SCALAR,0,A->tag,comm,&status);CHKERRQ(ierr);
    ierr   = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,0,"something is wrong with file");

    /* insert into matrix */
    jj      = rstart*bs;
    for (i=0; i<m; i++) {
      ierr    = MatSetValues_MPISBAIJ(A,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
      mycols += locrowlens[i];
      vals   += locrowlens[i];
      jj++;
    }
  }
  ierr = PetscFree(locrowlens);CHKERRQ(ierr);
  ierr = PetscFree(buf);CHKERRQ(ierr);
  ierr = PetscFree(ibuf);CHKERRQ(ierr);
  ierr = PetscFree(rowners);CHKERRQ(ierr);
  ierr = PetscFree(dlens);CHKERRQ(ierr);
  ierr = PetscFree(mask);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2668 
2669 #undef __FUNC__
2670 #define __FUNC__ /*<a name=""></a>*/"MatMPISBAIJSetHashTableFactor"
2671 /*@
2672    MatMPISBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
2673 
2674    Input Parameters:
2675 .  mat  - the matrix
2676 .  fact - factor
2677 
2678    Collective on Mat
2679 
2680    Level: advanced
2681 
2682   Notes:
2683    This can also be set by the command line option: -mat_use_hash_table fact
2684 
2685 .keywords: matrix, hashtable, factor, HT
2686 
2687 .seealso: MatSetOption()
2688 @*/
2689 int MatMPISBAIJSetHashTableFactor(Mat mat,PetscReal fact)
2690 {
2691   Mat_MPIBAIJ *baij;
2692 
2693   PetscFunctionBegin;
2694   PetscValidHeaderSpecific(mat,MAT_COOKIE);
2695   if (mat->type != MATMPIBAIJ) {
2696     SETERRQ(PETSC_ERR_ARG_WRONG,1,"Incorrect matrix type. Use MPIBAIJ only.");
2697   }
2698   baij = (Mat_MPIBAIJ*)mat->data;
2699   baij->ht_fact = fact;
2700   PetscFunctionReturn(0);
2701 }
2702