/* xref: /petsc/src/mat/impls/baij/mpi/mpibaij.c (revision 3bbf0e9209c918da710d8f50ca5c48af17a42e60) */

#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/
#include <petscblaslapack.h>

extern PetscErrorCode MatSetUpMultiply_MPIBAIJ(Mat);
extern PetscErrorCode MatDisAssemble_MPIBAIJ(Mat);
extern PetscErrorCode MatGetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt [],PetscScalar []);
extern PetscErrorCode MatSetValues_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt [],const PetscScalar [],InsertMode);
extern PetscErrorCode MatSetValuesBlocked_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatGetRow_SeqBAIJ(Mat,PetscInt,PetscInt*,PetscInt*[],PetscScalar*[]);
extern PetscErrorCode MatRestoreRow_SeqBAIJ(Mat,PetscInt,PetscInt*,PetscInt*[],PetscScalar*[]);
extern PetscErrorCode MatZeroRows_SeqBAIJ(Mat,PetscInt,const PetscInt[],PetscScalar,Vec,Vec);

#undef __FUNCT__
#define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);

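  /*
     Merge step: wherever the off-diagonal part B holds a larger entry, take it
     and translate its index back to global numbering. idxb[i] is a local
     (block-scaled) column index of B, garray[idxb[i]/bs] is the corresponding
     global block column, and idxb[i]%bs is the offset within the block.
  */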
  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIBAIJ"
PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix.  This is done in a non-scalable way since the
   length of colmap equals the global number of block columns.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc((baij->Nbs+1)*sizeof(PetscInt),&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}
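
/*
   A minimal sketch (illustrative only, not code from this file) of how the
   colmap built above is consulted later, e.g. in MatSetValues_MPIBAIJ():
   entries are stored shifted by +1 so that 0 can mean "no such block column".

     gbcol = gcol/bs;                          global block column of point column gcol
   #if defined(PETSC_USE_CTABLE)
     PetscTableFind(baij->colmap,gbcol+1,&col);
     col = col - 1;                            undo the +1 shift; col < 0 means absent
   #else
     col = baij->colmap[gbcol] - 1;
   #endif
     if (col >= 0) col += gcol % bs;           local point column within B
*/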

#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv) \
  { \
 \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }
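
/*
   A small worked example of the block addressing used in this macro and its
   twin below (illustrative only): blocks are stored column-major, so entry
   (ridx,cidx) of the _i-th block of a block row lives at

     ap[bs2*_i + bs*cidx + ridx]

   e.g. with bs = 2 (so bs2 = 4), the third block (_i = 2) and entry
   ridx = 1, cidx = 0 give ap[4*2 + 2*0 + 1] = ap[9].
*/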

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv) \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ"
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some Variables required in the macro */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If there is only a single block (n == 1 row-oriented, or m == 1 column-oriented) a copy is not required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", bs*im[i], bs*in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
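
/*
   HASH() above is the classic multiplicative hash: the slot is the fractional
   part of key times the golden-ratio conjugate, scaled to the table size.
   A worked example (illustrative): size = 100, key = 10 gives
   tmp = 10*0.6180339887 = 6.180339887, fractional part 0.180339887, so
   h1 = (PetscInt)(100*0.180339887) = 18.  Collisions are resolved by the
   linear-probing loops in the two routines below.
*/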
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (v) PetscValidScalarPointer(v,6);
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up the entry in the hash table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A HASH table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =mat->rmap->rend,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up into the Hash Table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIBAIJ"
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIBAIJ"
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
      ierr = PetscMalloc2(mat->cmap->N,PetscReal,&tmp,mat->cmap->N,PetscReal,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v);  v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc(bs*sizeof(PetscReal),&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}
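
/*
   For reference, the three norms computed above, with each local sum split
   between the diagonal block A and the off-diagonal block B of this process:

     NORM_FROBENIUS:  ||M||_F   = sqrt(sum_ij |m_ij|^2)    (Allreduce with SUM, then sqrt)
     NORM_1:          ||M||_1   = max_j sum_i |m_ij|       (Allreduce the column sums, then max)
     NORM_INFINITY:   ||M||_oo  = max_i sum_j |m_ij|       (local max of row sums, Allreduce MAX)
*/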

/*
  Creates and fills the hash table.
  This table is created only once.
  If new entries need to be added to the matrix,
  the hash table has to be destroyed and
  recreated.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate Memory for Hash Table */
  ierr = PetscMalloc2(ht_size,MatScalar*,&baij->hd,ht_size,PetscInt,&baij->ht);CHKERRQ(ierr);
  ierr = PetscMemzero(baij->hd,ht_size*sizeof(MatScalar*));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->ht,ht_size*sizeof(PetscInt));CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop Over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }
  /* Loop Over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }

  /* Print Summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average Search = %5.2f, max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;
  InsertMode     addv;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  /* make sure all processors are either in INSERTMODE or ADDMODE */
  ierr = MPI_Allreduce((PetscEnum*)&mat->insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
  mat->insertmode = addv; /* in case this processor had no cache */

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
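
/*
   Typical user-level sequence that exercises the stash logic above together
   with the matching receive loops in MatAssemblyEnd_MPIBAIJ() (a hedged
   sketch; error checking omitted):

     MatSetValues(mat,1,&row,1,&col,&val,ADD_VALUES);   off-process rows go to the stash
     MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);          stashes are scattered to their owners
     MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);            received values inserted locally
*/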

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  InsertMode     addv = mat->insertmode;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the roworiented flag to column oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,addv);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* Determine if any processor has disassembled; if so we must
     also disassemble ourselves, so that we may reassemble. */
  /*
     If the nonzero structure of the submatrix B cannot change then we know
     that no processor disassembled, so we can skip this step.
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode== MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;
  PetscFunctionReturn(0);
}

#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       size = baij->size,rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
  }

  if (size == 1) {
    ierr = PetscObjectSetName((PetscObject)baij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
    ierr = MatView(baij->A,viewer);CHKERRQ(ierr);
  } else {
    /* assemble the entire matrix onto first processor. */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;

    /* Here we are creating a temporary matrix, so will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
      /* Set the type name to MATMPIBAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqBAIJ_ASCII()*/
      ierr = PetscStrcpy(((PetscObject)((Mat_MPIBAIJ*)(A->data))->A)->type_name,MATMPIBAIJ);CHKERRQ(ierr);
      ierr = MatView(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_Binary"
static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc((rlen/bs)*sizeof(PetscInt),&crow_lens);CHKERRQ(ierr);
  /* compute lengths of each row  */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc(rlen*sizeof(PetscInt),&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* Load up the local column indices. Include them for every row, not just one per block row,
     since process 0 does not have the information needed to expand block rows into rows.
     This does require more communication, but still no more than the communication needed
     for the nonzero values. */
  nzmax = nz; /* space the largest processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc(nzmax*sizeof(PetscInt),&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc(nzmax*sizeof(PetscScalar),&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ"
PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
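
/*
   Example driver (a hedged sketch; error checking abbreviated) that reaches
   MatView_MPIBAIJ_Binary() above:

     PetscViewer v;
     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"mat.dat",FILE_MODE_WRITE,&v);CHKERRQ(ierr);
     ierr = MatView(mat,v);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&v);CHKERRQ(ierr);

   An ASCII, draw, or socket viewer instead dispatches to
   MatView_MPIBAIJ_ASCIIorDraworSocket().
*/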

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIBAIJ"
PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
  ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
#else
  ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(baij->garray);CHKERRQ(ierr);
  ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
  ierr = PetscFree(baij->barray);CHKERRQ(ierr);
  ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
  ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_MPIBAIJ"
PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       nt;

  PetscFunctionBegin;
  ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
  if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
  ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1318   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1319   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1320   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1321   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1322   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1323   PetscFunctionReturn(0);
1324 }
1325 
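/*
   Note that MatMult_MPIBAIJ above overlaps communication with computation:
   the scatter of the ghost entries of xx is started, the diagonal block a->A
   is applied to the purely local part, and only after the scatter completes
   is the off-diagonal block a->B applied, i.e. yy = A_d*x_local + B_o*x_ghost.

   A minimal usage sketch through the public interface (hypothetical, not
   part of this file; assumes an assembled MPIBAIJ matrix A):

     Vec x,y;
     ierr = MatGetVecs(A,&x,&y);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = MatMult(A,x,y);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/
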
1326 #undef __FUNCT__
1327 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1328 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1329 {
1330   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1331   PetscErrorCode ierr;
1332 
1333   PetscFunctionBegin;
1334   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1335   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1336   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1337   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 #undef __FUNCT__
1342 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1343 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1344 {
1345   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1346   PetscErrorCode ierr;
1347   PetscBool      merged;
1348 
1349   PetscFunctionBegin;
1350   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1351   /* do nondiagonal part */
1352   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1353   if (!merged) {
1354     /* send it on its way */
1355     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1356     /* do local part */
1357     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1358     /* receive remote parts: note this assumes the values are not actually */
1359     /* inserted in yy until the next line */
1360     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1361   } else {
1362     /* do local part */
1363     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1364     /* send it on its way */
1365     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1366     /* values actually were received in the Begin() but we need to call this nop */
1367     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1368   }
1369   PetscFunctionReturn(0);
1370 }
1371 
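/*
   The transpose multiply above reverses the communication pattern of
   MatMult: the off-diagonal contribution B_o^T*xx is computed into the local
   work vector first and then scatter-added back to its owning processes,
   while the diagonal contribution A_d^T*xx is computed around the
   communication, giving yy = A_d^T*xx + scatter-add(B_o^T*xx). The merged
   branch merely reorders the calls for scatters whose Begin() already moves
   the data.
*/
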
1372 #undef __FUNCT__
1373 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1374 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1375 {
1376   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1377   PetscErrorCode ierr;
1378 
1379   PetscFunctionBegin;
1380   /* do nondiagonal part */
1381   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1382   /* send it on its way */
1383   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1384   /* do local part */
1385   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1386   /* receive remote parts: note this assumes the values are not actually */
1387   /* inserted in zz until the VecScatterEnd() below, which is true for the */
1388   /* current implementation but is not guaranteed to always hold. */
1389   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 /*
1394   This only works correctly for square matrices where the subblock A->A is the
1395    diagonal block
1396 */
1397 #undef __FUNCT__
1398 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1399 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1400 {
1401   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1402   PetscErrorCode ierr;
1403 
1404   PetscFunctionBegin;
1405   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1406   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1407   PetscFunctionReturn(0);
1408 }
1409 
1410 #undef __FUNCT__
1411 #define __FUNCT__ "MatScale_MPIBAIJ"
1412 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1413 {
1414   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1415   PetscErrorCode ierr;
1416 
1417   PetscFunctionBegin;
1418   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1419   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1420   PetscFunctionReturn(0);
1421 }
1422 
1423 #undef __FUNCT__
1424 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1425 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1426 {
1427   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1428   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1429   PetscErrorCode ierr;
1430   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1431   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1432   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1433 
1434   PetscFunctionBegin;
1435   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1436   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1437   mat->getrowactive = PETSC_TRUE;
1438 
1439   if (!mat->rowvalues && (idx || v)) {
1440     /*
1441         allocate enough space to hold information from the longest row.
1442     */
1443     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1444     PetscInt    max = 1,mbs = mat->mbs,tmp;
1445     for (i=0; i<mbs; i++) {
1446       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1447       if (max < tmp) max = tmp;
1448     }
1449     ierr = PetscMalloc2(max*bs2,PetscScalar,&mat->rowvalues,max*bs2,PetscInt,&mat->rowindices);CHKERRQ(ierr);
1450   }
1451   lrow = row - brstart;
1452 
1453   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1454   if (!v)   {pvA = 0; pvB = 0;}
1455   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1456   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1457   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1458   nztot = nzA + nzB;
1459 
1460   cmap = mat->garray;
1461   if (v  || idx) {
1462     if (nztot) {
1463       /* Sort by increasing column numbers, assuming A and B already sorted */
1464       PetscInt imark = -1;
1465       if (v) {
1466         *v = v_p = mat->rowvalues;
1467         for (i=0; i<nzB; i++) {
1468           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1469           else break;
1470         }
1471         imark = i;
1472         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1473         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1474       }
1475       if (idx) {
1476         *idx = idx_p = mat->rowindices;
1477         if (imark > -1) {
1478           for (i=0; i<imark; i++) {
1479             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1480           }
1481         } else {
1482           for (i=0; i<nzB; i++) {
1483             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1484             else break;
1485           }
1486           imark = i;
1487         }
1488         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1489         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs ;
1490       }
1491     } else {
1492       if (idx) *idx = 0;
1493       if (v)   *v   = 0;
1494     }
1495   }
1496   *nz  = nztot;
1497   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1498   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1499   PetscFunctionReturn(0);
1500 }
1501 
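/*
   A sketch of how the MatGetRow()/MatRestoreRow() pair above is typically
   driven from user code (hypothetical, not part of this file): only locally
   owned rows may be requested, and only one row may be active at a time.

     PetscInt          row,rstart,rend,ncols,j;
     const PetscInt    *cols;
     const PetscScalar *vals;
     PetscScalar       rowsum;

     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     for (row=rstart; row<rend; row++) {
       ierr   = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
       rowsum = 0.0;
       for (j=0; j<ncols; j++) rowsum += vals[j];
       ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
     }
*/
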
1502 #undef __FUNCT__
1503 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1504 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1505 {
1506   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1507 
1508   PetscFunctionBegin;
1509   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1510   baij->getrowactive = PETSC_FALSE;
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 #undef __FUNCT__
1515 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1516 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1517 {
1518   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1519   PetscErrorCode ierr;
1520 
1521   PetscFunctionBegin;
1522   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1523   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1524   PetscFunctionReturn(0);
1525 }
1526 
1527 #undef __FUNCT__
1528 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1529 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1530 {
1531   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1532   Mat            A  = a->A,B = a->B;
1533   PetscErrorCode ierr;
1534   PetscReal      isend[5],irecv[5];
1535 
1536   PetscFunctionBegin;
1537   info->block_size = (PetscReal)matin->rmap->bs;
1538 
1539   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1540 
1541   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1542   isend[3] = info->memory;  isend[4] = info->mallocs;
1543 
1544   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1545 
1546   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1547   isend[3] += info->memory;  isend[4] += info->mallocs;
1548 
1549   if (flag == MAT_LOCAL) {
1550     info->nz_used      = isend[0];
1551     info->nz_allocated = isend[1];
1552     info->nz_unneeded  = isend[2];
1553     info->memory       = isend[3];
1554     info->mallocs      = isend[4];
1555   } else if (flag == MAT_GLOBAL_MAX) {
1556     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1557 
1558     info->nz_used      = irecv[0];
1559     info->nz_allocated = irecv[1];
1560     info->nz_unneeded  = irecv[2];
1561     info->memory       = irecv[3];
1562     info->mallocs      = irecv[4];
1563   } else if (flag == MAT_GLOBAL_SUM) {
1564     ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1565 
1566     info->nz_used      = irecv[0];
1567     info->nz_allocated = irecv[1];
1568     info->nz_unneeded  = irecv[2];
1569     info->memory       = irecv[3];
1570     info->mallocs      = irecv[4];
1571   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1572   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1573   info->fill_ratio_needed = 0;
1574   info->factor_mallocs    = 0;
1575   PetscFunctionReturn(0);
1576 }
1577 
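/*
   A sketch of querying the statistics gathered above (hypothetical, not part
   of this file): MAT_LOCAL returns this process's counts, while
   MAT_GLOBAL_SUM and MAT_GLOBAL_MAX reduce them over the matrix communicator.

     MatInfo info;
     ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"nz used %g, nz allocated %g\n",
                        (double)info.nz_used,(double)info.nz_allocated);CHKERRQ(ierr);
*/
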
1578 #undef __FUNCT__
1579 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1580 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1581 {
1582   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1583   PetscErrorCode ierr;
1584 
1585   PetscFunctionBegin;
1586   switch (op) {
1587   case MAT_NEW_NONZERO_LOCATIONS:
1588   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1589   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1590   case MAT_KEEP_NONZERO_PATTERN:
1591   case MAT_NEW_NONZERO_LOCATION_ERR:
1592     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1593     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1594     break;
1595   case MAT_ROW_ORIENTED:
1596     a->roworiented = flg;
1597 
1598     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1599     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1600     break;
1601   case MAT_NEW_DIAGONALS:
1602     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1603     break;
1604   case MAT_IGNORE_OFF_PROC_ENTRIES:
1605     a->donotstash = flg;
1606     break;
1607   case MAT_USE_HASH_TABLE:
1608     a->ht_flag = flg;
1609     break;
1610   case MAT_SYMMETRIC:
1611   case MAT_STRUCTURALLY_SYMMETRIC:
1612   case MAT_HERMITIAN:
1613   case MAT_SYMMETRY_ETERNAL:
1614     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1615     break;
1616   default:
1617     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1618   }
1619   PetscFunctionReturn(0);
1620 }
1621 
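/*
   Options handled above are forwarded to the sequential diagonal (a->A) and
   off-diagonal (a->B) blocks where appropriate; a few, such as
   MAT_IGNORE_OFF_PROC_ENTRIES and MAT_USE_HASH_TABLE, only set flags on the
   parallel object itself. A hypothetical sketch:

     ierr = MatSetOption(A,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatSetOption(A,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
*/
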
1622 #undef __FUNCT__
1623 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1624 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1625 {
1626   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1627   Mat_SeqBAIJ    *Aloc;
1628   Mat            B;
1629   PetscErrorCode ierr;
1630   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1631   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1632   MatScalar      *a;
1633 
1634   PetscFunctionBegin;
1635   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1636   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1637     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1638     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1639     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1640     /* Do not know preallocation information, but must set block size */
1641     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1642   } else {
1643     B = *matout;
1644   }
1645 
1646   /* copy over the A part */
1647   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1648   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1649   ierr = PetscMalloc(bs*sizeof(PetscInt),&rvals);CHKERRQ(ierr);
1650 
1651   for (i=0; i<mbs; i++) {
1652     rvals[0] = bs*(baij->rstartbs + i);
1653     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1654     for (j=ai[i]; j<ai[i+1]; j++) {
1655       col = (baij->cstartbs+aj[j])*bs;
1656       for (k=0; k<bs; k++) {
1657         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1658 
1659         col++; a += bs;
1660       }
1661     }
1662   }
1663   /* copy over the B part */
1664   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1665   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1666   for (i=0; i<mbs; i++) {
1667     rvals[0] = bs*(baij->rstartbs + i);
1668     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1669     for (j=ai[i]; j<ai[i+1]; j++) {
1670       col = baij->garray[aj[j]]*bs;
1671       for (k=0; k<bs; k++) {
1672         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1673         col++;
1674         a += bs;
1675       }
1676     }
1677   }
1678   ierr = PetscFree(rvals);CHKERRQ(ierr);
1679   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1680   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1681 
1682   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1683   else {
1684     ierr = MatHeaderMerge(A,B);CHKERRQ(ierr);
1685   }
1686   PetscFunctionReturn(0);
1687 }
1688 
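/*
   A hypothetical usage sketch for the transpose above; in-place transposition
   (passing the matrix itself with MAT_REUSE_MATRIX) is only permitted for
   square matrices, as checked at the top of the routine:

     Mat At;
     ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
     ierr = MatDestroy(&At);CHKERRQ(ierr);
*/
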
1689 #undef __FUNCT__
1690 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1691 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1692 {
1693   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1694   Mat            a     = baij->A,b = baij->B;
1695   PetscErrorCode ierr;
1696   PetscInt       s1,s2,s3;
1697 
1698   PetscFunctionBegin;
1699   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1700   if (rr) {
1701     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1702     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1703     /* Overlap communication with computation. */
1704     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1705   }
1706   if (ll) {
1707     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1708     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1709     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1710   }
1711   /* scale the diagonal block */
1712   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1713 
1714   if (rr) {
1715     /* Do a scatter end and then right scale the off-diagonal block */
1716     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1717     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1718   }
1719   PetscFunctionReturn(0);
1720 }
1721 
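/*
   MatDiagonalScale_MPIBAIJ above computes A <- diag(ll)*A*diag(rr). The left
   scaling needs only local data, while right-scaling the off-diagonal block
   requires the ghost entries of rr, so the scatter of rr is overlapped with
   the scaling of the diagonal block. A hypothetical sketch (l and r conform
   to the row and column layouts of A; either argument may be NULL for
   one-sided scaling):

     ierr = MatDiagonalScale(A,l,r);CHKERRQ(ierr);
*/
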
1722 #undef __FUNCT__
1723 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1724 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1725 {
1726   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1727   PetscErrorCode    ierr;
1728   PetscMPIInt       imdex,size = l->size,n,rank = l->rank;
1729   PetscInt          i,*owners = A->rmap->range;
1730   PetscInt          *nprocs,j,idx,nsends,row;
1731   PetscInt          nmax,*svalues,*starts,*owner,nrecvs;
1732   PetscInt          *rvalues,tag = ((PetscObject)A)->tag,count,base,slen,*source,lastidx = -1;
1733   PetscInt          *lens,*lrows,*values,rstart_bs=A->rmap->rstart;
1734   MPI_Comm          comm;
1735   MPI_Request       *send_waits,*recv_waits;
1736   MPI_Status        recv_status,*send_status;
1737   const PetscScalar *xx;
1738   PetscScalar       *bb;
1739 #if defined(PETSC_DEBUG)
1740   PetscBool         found = PETSC_FALSE;
1741 #endif
1742 
1743   PetscFunctionBegin;
1744   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1745   /*  first count number of contributors to each processor */
1746   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
1747   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
1748   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr);  /* see note*/
1749   j    = 0;
1750   for (i=0; i<N; i++) {
1751     if (lastidx > (idx = rows[i])) j = 0;
1752     lastidx = idx;
1753     for (; j<size; j++) {
1754       if (idx >= owners[j] && idx < owners[j+1]) {
1755         nprocs[2*j]++;
1756         nprocs[2*j+1] = 1;
1757         owner[i]      = j;
1758 #if defined(PETSC_DEBUG)
1759         found = PETSC_TRUE;
1760 #endif
1761         break;
1762       }
1763     }
1764 #if defined(PETSC_DEBUG)
1765     if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
1766     found = PETSC_FALSE;
1767 #endif
1768   }
1769   nsends = 0;  for (i=0; i<size; i++) nsends += nprocs[2*i+1];
1770 
1771   if (A->nooffproczerorows) {
1772     if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but set an off process zero row");
1773     nrecvs = nsends;
1774     nmax   = N;
1775   } else {
1776     /* inform other processors of number of messages and max length*/
1777     ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
1778   }
1779 
1780   /* post receives:   */
1781   ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
1782   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
1783   for (i=0; i<nrecvs; i++) {
1784     ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
1785   }
1786 
1787   /* do sends:
1788      1) starts[i] gives the starting index in svalues for stuff going to
1789      the ith processor
1790   */
1791   ierr      = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
1792   ierr      = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
1793   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
1794   starts[0] = 0;
1795   for (i=1; i<size; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
1796   for (i=0; i<N; i++) {
1797     svalues[starts[owner[i]]++] = rows[i];
1798   }
1799 
1800   starts[0] = 0;
1801   for (i=1; i<size+1; i++) starts[i] = starts[i-1] + nprocs[2*i-2];
1802   count = 0;
1803   for (i=0; i<size; i++) {
1804     if (nprocs[2*i+1]) {
1805       ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
1806     }
1807   }
1808   ierr = PetscFree(starts);CHKERRQ(ierr);
1809 
1810   base = owners[rank];
1811 
1812   /*  wait on receives */
1813   ierr  = PetscMalloc2(nrecvs+1,PetscInt,&lens,nrecvs+1,PetscInt,&source);CHKERRQ(ierr);
1814   count = nrecvs;
1815   slen  = 0;
1816   while (count) {
1817     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
1818     /* unpack receives into our local space */
1819     ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
1820 
1821     source[imdex] = recv_status.MPI_SOURCE;
1822     lens[imdex]   = n;
1823     slen         += n;
1824     count--;
1825   }
1826   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1827 
1828   /* move the data into the send scatter */
1829   ierr  = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
1830   count = 0;
1831   for (i=0; i<nrecvs; i++) {
1832     values = rvalues + i*nmax;
1833     for (j=0; j<lens[i]; j++) {
1834       lrows[count++] = values[j] - base;
1835     }
1836   }
1837   ierr = PetscFree(rvalues);CHKERRQ(ierr);
1838   ierr = PetscFree2(lens,source);CHKERRQ(ierr);
1839   ierr = PetscFree(owner);CHKERRQ(ierr);
1840   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1841 
1842   /* fix right hand side if needed */
1843   if (x && b) {
1844     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1845     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1846     for (i=0; i<slen; i++) {
1847       bb[lrows[i]] = diag*xx[lrows[i]];
1848     }
1849     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1850     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1851   }
1852 
1853   /* actually zap the local rows */
1854   /*
1855         Zero the required rows. If the "diagonal block" of the matrix
1856      is square and the user wishes to set the diagonal, we use separate
1857      code so that MatSetValues() is not called for each diagonal entry,
1858      which would allocate new memory, triggering many mallocs and slowing
1859      things down.
1860   */
1861   /* must zero l->B before l->A because the (diag) case below may put values into l->B */
1862   ierr = MatZeroRows_SeqBAIJ(l->B,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1863   if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
1864     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr);
1865   } else if (diag != 0.0) {
1866     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1867     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1868        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1869     for (i=0; i<slen; i++) {
1870       row  = lrows[i] + rstart_bs;
1871       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1872     }
1873     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1874     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1875   } else {
1876     ierr = MatZeroRows_SeqBAIJ(l->A,slen,lrows,0.0,0,0);CHKERRQ(ierr);
1877   }
1878 
1879   ierr = PetscFree(lrows);CHKERRQ(ierr);
1880 
1881   /* wait on sends */
1882   if (nsends) {
1883     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
1884     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
1885     ierr = PetscFree(send_status);CHKERRQ(ierr);
1886   }
1887   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1888   ierr = PetscFree(svalues);CHKERRQ(ierr);
1889   PetscFunctionReturn(0);
1890 }
1891 
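/*
   A sketch of the public entry point for the row zeroing above (hypothetical,
   not part of this file). The rows are global indices and may be owned by
   other processes; when x and b are supplied the right-hand side is adjusted
   so that the zeroed rows keep the prescribed solution values:

     PetscInt rows[2] = {0,5};
     ierr = MatZeroRows(A,2,rows,1.0,x,b);CHKERRQ(ierr);
*/
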
1892 #undef __FUNCT__
1893 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
1894 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1895 {
1896   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1897   PetscErrorCode ierr;
1898 
1899   PetscFunctionBegin;
1900   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1901   PetscFunctionReturn(0);
1902 }
1903 
1904 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
1905 
1906 #undef __FUNCT__
1907 #define __FUNCT__ "MatEqual_MPIBAIJ"
1908 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
1909 {
1910   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
1911   Mat            a,b,c,d;
1912   PetscBool      flg;
1913   PetscErrorCode ierr;
1914 
1915   PetscFunctionBegin;
1916   a = matA->A; b = matA->B;
1917   c = matB->A; d = matB->B;
1918 
1919   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
1920   if (flg) {
1921     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
1922   }
1923   ierr = MPI_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1924   PetscFunctionReturn(0);
1925 }
1926 
1927 #undef __FUNCT__
1928 #define __FUNCT__ "MatCopy_MPIBAIJ"
1929 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
1930 {
1931   PetscErrorCode ierr;
1932   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1933   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
1934 
1935   PetscFunctionBegin;
1936   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1937   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1938     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
1939   } else {
1940     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
1941     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
1942   }
1943   PetscFunctionReturn(0);
1944 }
1945 
1946 #undef __FUNCT__
1947 #define __FUNCT__ "MatSetUp_MPIBAIJ"
1948 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
1949 {
1950   PetscErrorCode ierr;
1951 
1952   PetscFunctionBegin;
1953   ierr =  MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
1954   PetscFunctionReturn(0);
1955 }
1956 
1957 #undef __FUNCT__
1958 #define __FUNCT__ "MatAXPY_MPIBAIJ"
1959 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
1960 {
1961   PetscErrorCode ierr;
1962   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
1963   PetscBLASInt   bnz,one=1;
1964   Mat_SeqBAIJ    *x,*y;
1965 
1966   PetscFunctionBegin;
1967   if (str == SAME_NONZERO_PATTERN) {
1968     PetscScalar alpha = a;
1969     x    = (Mat_SeqBAIJ*)xx->A->data;
1970     y    = (Mat_SeqBAIJ*)yy->A->data;
1971     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
1972     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
1973     x    = (Mat_SeqBAIJ*)xx->B->data;
1974     y    = (Mat_SeqBAIJ*)yy->B->data;
1975     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
1976     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
1977   } else {
1978     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
1979   }
1980   PetscFunctionReturn(0);
1981 }
1982 
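/*
   When the nonzero patterns match, the AXPY above reduces to two raw BLAS
   axpy calls on the stored values of the diagonal and off-diagonal blocks
   (y->a += a*x->a); otherwise the slower entry-by-entry MatAXPY_Basic() path
   is taken. A hypothetical sketch computing Y <- Y + 2*X:

     ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
*/
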
1983 #undef __FUNCT__
1984 #define __FUNCT__ "MatRealPart_MPIBAIJ"
1985 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
1986 {
1987   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1988   PetscErrorCode ierr;
1989 
1990   PetscFunctionBegin;
1991   ierr = MatRealPart(a->A);CHKERRQ(ierr);
1992   ierr = MatRealPart(a->B);CHKERRQ(ierr);
1993   PetscFunctionReturn(0);
1994 }
1995 
1996 #undef __FUNCT__
1997 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
1998 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
1999 {
2000   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2001   PetscErrorCode ierr;
2002 
2003   PetscFunctionBegin;
2004   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2005   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2006   PetscFunctionReturn(0);
2007 }
2008 
2009 #undef __FUNCT__
2010 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2011 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2012 {
2013   PetscErrorCode ierr;
2014   IS             iscol_local;
2015   PetscInt       csize;
2016 
2017   PetscFunctionBegin;
2018   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2019   if (call == MAT_REUSE_MATRIX) {
2020     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2021     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2022   } else {
2023     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2024   }
2025   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2026   if (call == MAT_INITIAL_MATRIX) {
2027     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2028     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2029   }
2030   PetscFunctionReturn(0);
2031 }
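
/*
   A sketch of extracting a parallel submatrix via the routine above
   (hypothetical, not part of this file). Each process supplies the rows it
   is to own; the column IS is gathered onto all processes internally. Here
   nloc and first are assumed local sizes and offsets that respect the block
   size:

     IS  isrow,iscol;
     Mat S;
     ierr = ISCreateStride(PETSC_COMM_WORLD,nloc,first,1,&isrow);CHKERRQ(ierr);
     ierr = ISCreateStride(PETSC_COMM_WORLD,nloc,first,1,&iscol);CHKERRQ(ierr);
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&S);CHKERRQ(ierr);
     ierr = ISDestroy(&isrow);CHKERRQ(ierr);
     ierr = ISDestroy(&iscol);CHKERRQ(ierr);
*/
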
2032 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2033 #undef __FUNCT__
2034 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2035 /*
2036   Not great since it makes two copies of the submatrix: first a SeqBAIJ
2037   locally, and then the end result by concatenating the local matrices.
2038   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ()
2039 */
2040 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2041 {
2042   PetscErrorCode ierr;
2043   PetscMPIInt    rank,size;
2044   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2045   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2046   Mat            M,Mreuse;
2047   MatScalar      *vwork,*aa;
2048   MPI_Comm       comm;
2049   IS             isrow_new, iscol_new;
2050   PetscBool      idflag,allrows, allcols;
2051   Mat_SeqBAIJ    *aij;
2052 
2053   PetscFunctionBegin;
2054   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2055   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2056   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2057   /* The compression and expansion should be avoided. It does not report
2058      errors and might change the indices, hence it is bug-prone */
2059   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2060   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2061 
2062   /* Check for special case: each processor gets entire matrix columns */
2063   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2064   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2065   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2066   else allcols = PETSC_FALSE;
2067 
2068   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2069   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2070   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2071   else allrows = PETSC_FALSE;
2072 
2073   if (call ==  MAT_REUSE_MATRIX) {
2074     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2075     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2076     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2077   } else {
2078     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2079   }
2080   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2081   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2082   /*
2083       m - number of local rows
2084       n - number of columns (same on all processors)
2085       rstart - first row in new global matrix generated
2086   */
2087   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2088   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2089   m    = m/bs;
2090   n    = n/bs;
2091 
2092   if (call == MAT_INITIAL_MATRIX) {
2093     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2094     ii  = aij->i;
2095     jj  = aij->j;
2096 
2097     /*
2098         Determine the number of non-zeros in the diagonal and off-diagonal
2099         portions of the matrix in order to do correct preallocation
2100     */
2101 
2102     /* first get start and end of "diagonal" columns */
2103     if (csize == PETSC_DECIDE) {
2104       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2105       if (mglobal == n*bs) { /* square matrix */
2106         nlocal = m;
2107       } else {
2108         nlocal = n/size + ((n % size) > rank);
2109       }
2110     } else {
2111       nlocal = csize/bs;
2112     }
2113     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2114     rstart = rend - nlocal;
2115     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2116 
2117     /* next, compute all the lengths */
2118     ierr  = PetscMalloc2(m+1,PetscInt,&dlens,m+1,PetscInt,&olens);CHKERRQ(ierr);
2119     for (i=0; i<m; i++) {
2120       jend = ii[i+1] - ii[i];
2121       olen = 0;
2122       dlen = 0;
2123       for (j=0; j<jend; j++) {
2124         if (*jj < rstart || *jj >= rend) olen++;
2125         else dlen++;
2126         jj++;
2127       }
2128       olens[i] = olen;
2129       dlens[i] = dlen;
2130     }
2131     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2132     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2133     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2134     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2135     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2136   } else {
2137     PetscInt ml,nl;
2138 
2139     M    = *newmat;
2140     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2141     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2142     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2143     /*
2144          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2145        rather than the slower MatSetValues().
2146     */
2147     M->was_assembled = PETSC_TRUE;
2148     M->assembled     = PETSC_FALSE;
2149   }
2150   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2151   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2152   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2153   ii   = aij->i;
2154   jj   = aij->j;
2155   aa   = aij->a;
2156   for (i=0; i<m; i++) {
2157     row   = rstart/bs + i;
2158     nz    = ii[i+1] - ii[i];
2159     cwork = jj;     jj += nz;
2160     vwork = aa;     aa += nz*bs*bs;
2161     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2162   }
2163 
2164   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2165   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2166   *newmat = M;
2167 
2168   /* save submatrix used in processor for next request */
2169   if (call ==  MAT_INITIAL_MATRIX) {
2170     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2171     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2172   }
2173   PetscFunctionReturn(0);
2174 }
2175 
2176 #undef __FUNCT__
2177 #define __FUNCT__ "MatPermute_MPIBAIJ"
2178 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2179 {
2180   MPI_Comm       comm,pcomm;
2181   PetscInt       clocal_size,nrows;
2182   const PetscInt *rows;
2183   PetscMPIInt    size;
2184   IS             crowp,lcolp;
2185   PetscErrorCode ierr;
2186 
2187   PetscFunctionBegin;
2188   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2189   /* make a collective version of 'rowp' */
2190   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2191   if (pcomm==comm) {
2192     crowp = rowp;
2193   } else {
2194     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2195     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2196     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2197     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2198   }
2199   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2200   /* make a local version of 'colp' */
2201   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2202   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2203   if (size==1) {
2204     lcolp = colp;
2205   } else {
2206     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2207   }
2208   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2209   /* now we just get the submatrix */
2210   ierr = MatGetLocalSize(A,PETSC_NULL,&clocal_size);CHKERRQ(ierr);
2211   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2212   /* clean up */
2213   if (pcomm!=comm) {
2214     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2215   }
2216   if (size>1) {
2217     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2218   }
2219   PetscFunctionReturn(0);
2220 }
2221 
2222 #undef __FUNCT__
2223 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2224 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2225 {
2226   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2227   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2228 
2229   PetscFunctionBegin;
2230   if (nghosts) *nghosts = B->nbs;
2231   if (ghosts) *ghosts = baij->garray;
2232   PetscFunctionReturn(0);
2233 }
2234 
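/*
   The ghosts returned above are the global block column indices of the
   off-diagonal part (baij->garray). A hypothetical sketch of retrieving them
   (the array is owned by the matrix and must not be freed by the caller):

     PetscInt       nghosts;
     const PetscInt *ghosts;
     ierr = MatGetGhosts(A,&nghosts,&ghosts);CHKERRQ(ierr);
*/
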
2235 extern PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat);
2236 
2237 #undef __FUNCT__
2238 #define __FUNCT__ "MatFDColoringCreate_MPIBAIJ"
2239 /*
2240     This routine is almost identical to MatFDColoringCreate_MPIAIJ()!
2241 */
2242 PetscErrorCode MatFDColoringCreate_MPIBAIJ(Mat mat,ISColoring iscoloring,MatFDColoring c)
2243 {
2244   Mat_MPIBAIJ            *baij = (Mat_MPIBAIJ*)mat->data;
2245   PetscErrorCode         ierr;
2246   PetscMPIInt            size,*ncolsonproc,*disp,nn;
2247   PetscInt               bs,i,n,nrows,j,k,m,ncols,col;
2248   const PetscInt         *is,*rows = 0,*A_ci,*A_cj,*B_ci,*B_cj,*ltog;
2249   PetscInt               nis = iscoloring->n,nctot,*cols;
2250   PetscInt               *rowhit,M,cstart,cend,colb;
2251   PetscInt               *columnsforrow,l;
2252   IS                     *isa;
2253   PetscBool              done,flg;
2254   ISLocalToGlobalMapping map = mat->cmap->bmapping;
2255   PetscInt               ctype=c->ctype;
2256 
2257   PetscFunctionBegin;
2258   if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be assembled first; MatAssemblyBegin/End();");
2259   if (ctype == IS_COLORING_GHOSTED && !map) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_INCOMP,"When using ghosted differencing matrix must have local to global mapping provided with MatSetLocalToGlobalMappingBlock");
2260 
2261   if (map) {ierr = ISLocalToGlobalMappingGetIndices(map,&ltog);CHKERRQ(ierr);}
2262   else     ltog = NULL;
2263   ierr = ISColoringGetIS(iscoloring,PETSC_IGNORE,&isa);CHKERRQ(ierr);
2264   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2265 
2266   M         = mat->rmap->n/bs;
2267   cstart    = mat->cmap->rstart/bs;
2268   cend      = mat->cmap->rend/bs;
2269   c->M      = mat->rmap->N/bs;         /* set the global rows and columns and local rows */
2270   c->N      = mat->cmap->N/bs;
2271   c->m      = mat->rmap->n/bs;
2272   c->rstart = mat->rmap->rstart/bs;
2273 
2274   c->ncolors = nis;
2275   ierr       = PetscMalloc(nis*sizeof(PetscInt),&c->ncolumns);CHKERRQ(ierr);
2276   ierr       = PetscMalloc(nis*sizeof(PetscInt*),&c->columns);CHKERRQ(ierr);
2277   ierr       = PetscMalloc(nis*sizeof(PetscInt),&c->nrows);CHKERRQ(ierr);
2278   ierr       = PetscMalloc(nis*sizeof(PetscInt*),&c->rows);CHKERRQ(ierr);
2279   ierr       = PetscMalloc(nis*sizeof(PetscInt*),&c->columnsforrow);CHKERRQ(ierr);
2280   ierr       = PetscLogObjectMemory((PetscObject)c,5*nis*sizeof(PetscInt));CHKERRQ(ierr);
2281 
2282   /* Allow access to data structures of local part of matrix */
2283   if (!baij->colmap) {
2284     ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
2285   }
2286   ierr = MatGetColumnIJ(baij->A,0,PETSC_FALSE,PETSC_FALSE,&ncols,&A_ci,&A_cj,&done);CHKERRQ(ierr);
2287   ierr = MatGetColumnIJ(baij->B,0,PETSC_FALSE,PETSC_FALSE,&ncols,&B_ci,&B_cj,&done);CHKERRQ(ierr);
2288 
2289   ierr = PetscMalloc((M+1)*sizeof(PetscInt),&rowhit);CHKERRQ(ierr);
2290   ierr = PetscMalloc((M+1)*sizeof(PetscInt),&columnsforrow);CHKERRQ(ierr);
2291 
2292   for (i=0; i<nis; i++) {
2293     ierr = ISGetLocalSize(isa[i],&n);CHKERRQ(ierr);
2294     ierr = ISGetIndices(isa[i],&is);CHKERRQ(ierr);
2295 
2296     c->ncolumns[i] = n;
2297     if (n) {
2298       ierr = PetscMalloc(n*sizeof(PetscInt),&c->columns[i]);CHKERRQ(ierr);
2299       ierr = PetscLogObjectMemory((PetscObject)c,n*sizeof(PetscInt));CHKERRQ(ierr);
2300       ierr = PetscMemcpy(c->columns[i],is,n*sizeof(PetscInt));CHKERRQ(ierr);
2301     } else {
2302       c->columns[i] = 0;
2303     }
2304 
2305     if (ctype == IS_COLORING_GLOBAL) {
2306       /* Determine the total (parallel) number of columns of this color */
2307       ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
2308       ierr = PetscMalloc2(size,PetscMPIInt,&ncolsonproc,size,PetscMPIInt,&disp);CHKERRQ(ierr);
2309 
2310       ierr  = PetscMPIIntCast(n,&nn);CHKERRQ(ierr);
2311       ierr  = MPI_Allgather(&nn,1,MPI_INT,ncolsonproc,1,MPI_INT,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
2312       nctot = 0; for (j=0; j<size; j++) nctot += ncolsonproc[j];
2313       if (!nctot) {
2314         ierr = PetscInfo(mat,"Coloring of matrix has some unneeded colors with no corresponding rows\n");CHKERRQ(ierr);
2315       }
2316 
2317       disp[0] = 0;
2318       for (j=1; j<size; j++) {
2319         disp[j] = disp[j-1] + ncolsonproc[j-1];
2320       }
2321 
2322       /* Get complete list of columns for color on each processor */
2323       ierr = PetscMalloc((nctot+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
2324       ierr = MPI_Allgatherv((void*)is,n,MPIU_INT,cols,ncolsonproc,disp,MPIU_INT,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
2325       ierr = PetscFree2(ncolsonproc,disp);CHKERRQ(ierr);
2326     } else if (ctype == IS_COLORING_GHOSTED) {
2327       /* Determine local number of columns of this color on this process, including ghost points */
2328       nctot = n;
2329       ierr  = PetscMalloc((nctot+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
2330       ierr  = PetscMemcpy(cols,is,n*sizeof(PetscInt));CHKERRQ(ierr);
2331     } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not provided for this MatFDColoring type");
2332 
2333     /*
2334        Mark all rows affected by these columns
2335     */
2336     /* Temporary option to allow for debugging/testing */
2337     flg  = PETSC_FALSE;
2338     ierr = PetscOptionsGetBool(NULL,"-matfdcoloring_slow",&flg,NULL);CHKERRQ(ierr);
2339     if (!flg) { /*-----------------------------------------------------------------------------*/
2340       /* crude, fast version */
2341       ierr = PetscMemzero(rowhit,M*sizeof(PetscInt));CHKERRQ(ierr);
2342       /* loop over columns */
2343       for (j=0; j<nctot; j++) {
2344         if (ctype == IS_COLORING_GHOSTED) {
2345           col = ltog[cols[j]];
2346         } else {
2347           col = cols[j];
2348         }
2349         if (col >= cstart && col < cend) {
2350           /* column is in diagonal block of matrix */
2351           rows = A_cj + A_ci[col-cstart];
2352           m    = A_ci[col-cstart+1] - A_ci[col-cstart];
2353         } else {
2354 #if defined(PETSC_USE_CTABLE)
2355           ierr = PetscTableFind(baij->colmap,col+1,&colb);CHKERRQ(ierr);
2356           colb--;
2357 #else
2358           colb = baij->colmap[col] - 1;
2359 #endif
2360           if (colb == -1) {
2361             m = 0;
2362           } else {
2363             colb = colb/bs;
2364             rows = B_cj + B_ci[colb];
2365             m    = B_ci[colb+1] - B_ci[colb];
2366           }
2367         }
2368         /* loop over the rows of this column, marking them in rowhit */
2369         for (k=0; k<m; k++) {
2370           rowhit[*rows++] = col + 1;
2371         }
2372       }
2373 
2374       /* count the number of hits */
2375       nrows = 0;
2376       for (j=0; j<M; j++) {
2377         if (rowhit[j]) nrows++;
2378       }
2379       c->nrows[i] = nrows;
2380       ierr        = PetscMalloc((nrows+1)*sizeof(PetscInt),&c->rows[i]);CHKERRQ(ierr);
2381       ierr        = PetscMalloc((nrows+1)*sizeof(PetscInt),&c->columnsforrow[i]);CHKERRQ(ierr);
2382       ierr        = PetscLogObjectMemory((PetscObject)c,2*(nrows+1)*sizeof(PetscInt));CHKERRQ(ierr);
2383       nrows       = 0;
2384       for (j=0; j<M; j++) {
2385         if (rowhit[j]) {
2386           c->rows[i][nrows]          = j;
2387           c->columnsforrow[i][nrows] = rowhit[j] - 1;
2388           nrows++;
2389         }
2390       }
2391     } else { /*-------------------------------------------------------------------------------*/
2392       /* slow version, using rowhit as a linked list */
2393       PetscInt currentcol,fm,mfm;
2394       rowhit[M] = M;
2395       nrows     = 0;
2396       /* loop over columns */
2397       for (j=0; j<nctot; j++) {
2398         if (ctype == IS_COLORING_GHOSTED) {
2399           col = ltog[cols[j]];
2400         } else {
2401           col = cols[j];
2402         }
2403         if (col >= cstart && col < cend) {
2404           /* column is in diagonal block of matrix */
2405           rows = A_cj + A_ci[col-cstart];
2406           m    = A_ci[col-cstart+1] - A_ci[col-cstart];
2407         } else {
2408 #if defined(PETSC_USE_CTABLE)
2409           ierr = PetscTableFind(baij->colmap,col+1,&colb);CHKERRQ(ierr);
2410           colb--;
2411 #else
2412           colb = baij->colmap[col] - 1;
2413 #endif
2414           if (colb == -1) {
2415             m = 0;
2416           } else {
2417             colb = colb/bs;
2418             rows = B_cj + B_ci[colb];
2419             m    = B_ci[colb+1] - B_ci[colb];
2420           }
2421         }
2422 
2423         /* loop over the rows of this column, inserting them into the rowhit linked list */
2424         fm = M;    /* fm points to first entry in linked list */
2425         for (k=0; k<m; k++) {
2426           currentcol = *rows++;
2427           /* is it already in the list? */
2428           do {
2429             mfm = fm;
2430             fm  = rowhit[fm];
2431           } while (fm < currentcol);
2432           /* not in list so add it */
2433           if (fm != currentcol) {
2434             nrows++;
2435             columnsforrow[currentcol] = col;
2436             /* next three lines insert new entry into linked list */
2437             rowhit[mfm]        = currentcol;
2438             rowhit[currentcol] = fm;
2439             fm                 = currentcol;
2440             /* fm points to present position in list since we know the columns are sorted */
2441           } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Invalid coloring of matrix detected");
2442         }
2443       }
2444       c->nrows[i] = nrows;
2445       ierr        = PetscMalloc((nrows+1)*sizeof(PetscInt),&c->rows[i]);CHKERRQ(ierr);
2446       ierr        = PetscMalloc((nrows+1)*sizeof(PetscInt),&c->columnsforrow[i]);CHKERRQ(ierr);
2447       ierr        = PetscLogObjectMemory((PetscObject)c,(nrows+1)*sizeof(PetscInt));CHKERRQ(ierr);
2448       /* now store the linked list of rows into c->rows[i] */
2449       nrows = 0;
2450       fm    = rowhit[M];
2451       do {
2452         c->rows[i][nrows]            = fm;
2453         c->columnsforrow[i][nrows++] = columnsforrow[fm];
2454         fm                           = rowhit[fm];
2455       } while (fm < M);
2456     } /* ---------------------------------------------------------------------------------------*/
2457     ierr = PetscFree(cols);CHKERRQ(ierr);
2458   }
2459 
2460   /* Optimize by adding the vscale and vscaleforrow[][] fields */
2461   /*
2462        vscale will contain the on-process ("diagonal") scalings followed by the off-process ones
2463   */
2464   if (ctype == IS_COLORING_GLOBAL) {
2465     PetscInt *garray;
2466     ierr = PetscMalloc(baij->B->cmap->n*sizeof(PetscInt),&garray);CHKERRQ(ierr);
2467     for (i=0; i<baij->B->cmap->n/bs; i++) {
2468       for (j=0; j<bs; j++) {
2469         garray[i*bs+j] = bs*baij->garray[i]+j;
2470       }
2471     }
2472     ierr = VecCreateGhost(PetscObjectComm((PetscObject)mat),baij->A->rmap->n,PETSC_DETERMINE,baij->B->cmap->n,garray,&c->vscale);CHKERRQ(ierr);
2473     ierr = PetscFree(garray);CHKERRQ(ierr);
2474     ierr = PetscMalloc(c->ncolors*sizeof(PetscInt*),&c->vscaleforrow);CHKERRQ(ierr);
2475     for (k=0; k<c->ncolors; k++) {
2476       ierr = PetscMalloc((c->nrows[k]+1)*sizeof(PetscInt),&c->vscaleforrow[k]);CHKERRQ(ierr);
2477       for (l=0; l<c->nrows[k]; l++) {
2478         col = c->columnsforrow[k][l];
2479         if (col >= cstart && col < cend) {
2480           /* column is in diagonal block of matrix */
2481           colb = col - cstart;
2482         } else {
2483           /* column  is in "off-processor" part */
2484 #if defined(PETSC_USE_CTABLE)
2485           ierr = PetscTableFind(baij->colmap,col+1,&colb);CHKERRQ(ierr);
2486           colb--;
2487 #else
2488           colb = baij->colmap[col] - 1;
2489 #endif
2490           colb  = colb/bs;
2491           colb += cend - cstart;
2492         }
2493         c->vscaleforrow[k][l] = colb;
2494       }
2495     }
2496   } else if (ctype == IS_COLORING_GHOSTED) {
2497     /* Get gtol mapping */
2498     PetscInt N = mat->cmap->N,nlocal,*gtol;
2499     ierr = PetscMalloc((N+1)*sizeof(PetscInt),&gtol);CHKERRQ(ierr);
2500     for (i=0; i<N; i++) gtol[i] = -1;
2501     ierr = ISLocalToGlobalMappingGetSize(map,&nlocal);CHKERRQ(ierr);
2502     for (i=0; i<nlocal; i++) gtol[ltog[i]] = i;
2503 
2504     c->vscale = 0; /* will be created in MatFDColoringApply() */
2505     ierr      = PetscMalloc(c->ncolors*sizeof(PetscInt*),&c->vscaleforrow);CHKERRQ(ierr);
2506     for (k=0; k<c->ncolors; k++) {
2507       ierr = PetscMalloc((c->nrows[k]+1)*sizeof(PetscInt),&c->vscaleforrow[k]);CHKERRQ(ierr);
2508       for (l=0; l<c->nrows[k]; l++) {
2509         col = c->columnsforrow[k][l];      /* global column index */
2510 
2511         c->vscaleforrow[k][l] = gtol[col]; /* local column index */
2512       }
2513     }
2514     ierr = PetscFree(gtol);CHKERRQ(ierr);
2515   }
2516   ierr = ISColoringRestoreIS(iscoloring,&isa);CHKERRQ(ierr);
2517 
2518   ierr = PetscFree(rowhit);CHKERRQ(ierr);
2519   ierr = PetscFree(columnsforrow);CHKERRQ(ierr);
2520   ierr = MatRestoreColumnIJ(baij->A,0,PETSC_FALSE,PETSC_FALSE,&ncols,&A_ci,&A_cj,&done);CHKERRQ(ierr);
2521   ierr = MatRestoreColumnIJ(baij->B,0,PETSC_FALSE,PETSC_FALSE,&ncols,&B_ci,&B_cj,&done);CHKERRQ(ierr);
2522   if (map) {ierr = ISLocalToGlobalMappingRestoreIndices(map,&ltog);CHKERRQ(ierr);}
2523   PetscFunctionReturn(0);
2524 }
2525 
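/*
   For each color i the structure built above records c->columns[i], the
   (block) columns perturbed together; c->rows[i], the local rows whose
   entries are touched by those columns; and c->columnsforrow[i], the column
   responsible for each such row. MatFDColoringApply() later reconstructs one
   group of Jacobian columns per color from a single function evaluation.
*/
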
2526 #undef __FUNCT__
2527 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2528 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2529 {
2530   Mat            B;
2531   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2532   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2533   Mat_SeqAIJ     *b;
2534   PetscErrorCode ierr;
2535   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2536   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2537   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2538 
2539   PetscFunctionBegin;
2540   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2541   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2542 
2543   /* ----------------------------------------------------------------
2544      Tell every processor the number of nonzeros per row
2545   */
2546   ierr = PetscMalloc((A->rmap->N/bs)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
2547   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2548     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2549   }
2550   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2551   ierr      = PetscMalloc(2*size*sizeof(PetscMPIInt),&recvcounts);CHKERRQ(ierr);
2552   displs    = recvcounts + size;
2553   for (i=0; i<size; i++) {
2554     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2555     displs[i]     = A->rmap->range[i]/bs;
2556   }
2557 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2558   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2559 #else
2560   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2561 #endif
2562   /* ---------------------------------------------------------------
2563      Create a sequential AIJ matrix that holds the complete (block) nonzero structure
2564   */
2565   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2566   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2567   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2568   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2569   b    = (Mat_SeqAIJ*)B->data;
2570 
2571   /*--------------------------------------------------------------------
2572     Copy my part of matrix column indices over
2573   */
2574   sendcount  = ad->nz + bd->nz;
2575   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2576   a_jsendbuf = ad->j;
2577   b_jsendbuf = bd->j;
2578   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2579   cnt        = 0;
2580   for (i=0; i<n; i++) {
2581 
2582     /* put in lower diagonal portion */
2583     m = bd->i[i+1] - bd->i[i];
2584     while (m > 0) {
2585       /* is it above diagonal (in bd (compressed) numbering) */
2586       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2587       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2588       m--;
2589     }
2590 
2591     /* put in diagonal portion */
2592     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2593       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2594     }
2595 
2596     /* put in upper diagonal portion */
2597     while (m-- > 0) {
2598       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2599     }
2600   }
2601   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2602 
2603   /*--------------------------------------------------------------------
2604     Gather all column indices to all processors
2605   */
2606   for (i=0; i<size; i++) {
2607     recvcounts[i] = 0;
2608     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2609       recvcounts[i] += lens[j];
2610     }
2611   }
2612   displs[0] = 0;
2613   for (i=1; i<size; i++) {
2614     displs[i] = displs[i-1] + recvcounts[i-1];
2615   }
2616 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2617   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2618 #else
2619   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2620 #endif
2621   /*--------------------------------------------------------------------
2622     Assemble the matrix into usable form (note the numerical values are not yet set)
2623   */
2624   /* set the b->ilen (length of each row) values */
2625   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2626   /* set the b->i indices */
2627   b->i[0] = 0;
2628   for (i=1; i<=A->rmap->N/bs; i++) {
2629     b->i[i] = b->i[i-1] + lens[i-1];
2630   }
2631   ierr = PetscFree(lens);CHKERRQ(ierr);
2632   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2633   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2634   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2635 
2636   if (A->symmetric) {
2637     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2638   } else if (A->hermitian) {
2639     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2640   } else if (A->structurally_symmetric) {
2641     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2642   }
2643   *newmat = B;
2644   PetscFunctionReturn(0);
2645 }
2646 
2647 #undef __FUNCT__
2648 #define __FUNCT__ "MatSOR_MPIBAIJ"
2649 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2650 {
2651   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2652   PetscErrorCode ierr;
2653   Vec            bb1 = 0;
2654 
2655   PetscFunctionBegin;
2656   if (flag == SOR_APPLY_UPPER) {
2657     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2658     PetscFunctionReturn(0);
2659   }
2660 
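  /* bb1 holds the updated right-hand side; it is needed whenever a previous iterate contributes,
     i.e. for multiple iterations or a nonzero initial guess.  Note that ~flag & SOR_ZERO_INITIAL_GUESS
     parses as (~flag) & SOR_ZERO_INITIAL_GUESS and is nonzero exactly when the bit is NOT set. */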
2661   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS) {
2662     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2663   }
2664 
2665   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2666     if (flag & SOR_ZERO_INITIAL_GUESS) {
2667       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2668       its--;
2669     }
2670 
2671     while (its--) {
2672       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2673       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2674 
2675       /* update rhs: bb1 = bb - B*x */
2676       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2677       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2678 
2679       /* local sweep */
2680       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2681     }
2682   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2683     if (flag & SOR_ZERO_INITIAL_GUESS) {
2684       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2685       its--;
2686     }
2687     while (its--) {
2688       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2689       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2690 
2691       /* update rhs: bb1 = bb - B*x */
2692       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2693       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2694 
2695       /* local sweep */
2696       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2697     }
2698   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2699     if (flag & SOR_ZERO_INITIAL_GUESS) {
2700       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2701       its--;
2702     }
2703     while (its--) {
2704       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2705       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2706 
2707       /* update rhs: bb1 = bb - B*x */
2708       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2709       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2710 
2711       /* local sweep */
2712       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2713     }
2714   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Parallel SOR not supported for the requested MatSORType");
2715 
2716   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2717   PetscFunctionReturn(0);
2718 }
2719 
2720 extern PetscErrorCode  MatFDColoringApply_BAIJ(Mat,MatFDColoring,Vec,MatStructure*,void*);
2721 
2722 #undef __FUNCT__
2723 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2724 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2725 {
2726   PetscErrorCode ierr;
2727   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2728   PetscInt       N,i,*garray = aij->garray;
2729   PetscInt       ib,jb,bs = A->rmap->bs;
2730   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2731   MatScalar      *a_val = a_aij->a;
2732   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2733   MatScalar      *b_val = b_aij->a;
2734   PetscReal      *work;
2735 
2736   PetscFunctionBegin;
2737   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2738   ierr = PetscMalloc(N*sizeof(PetscReal),&work);CHKERRQ(ierr);
2739   ierr = PetscMemzero(work,N*sizeof(PetscReal));CHKERRQ(ierr);
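  /* each bs-by-bs block is stored contiguously in column-major order, so for block entry i the loops
     below advance over block columns (jb) with block rows (ib) varying fastest */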
2740   if (type == NORM_2) {
2741     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2742       for (jb=0; jb<bs; jb++) {
2743         for (ib=0; ib<bs; ib++) {
2744           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2745           a_val++;
2746         }
2747       }
2748     }
2749     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2750       for (jb=0; jb<bs; jb++) {
2751         for (ib=0; ib<bs; ib++) {
2752           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2753           b_val++;
2754         }
2755       }
2756     }
2757   } else if (type == NORM_1) {
2758     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2759       for (jb=0; jb<bs; jb++) {
2760         for (ib=0; ib<bs; ib++) {
2761           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2762           a_val++;
2763         }
2764       }
2765     }
2766     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2767       for (jb=0; jb<bs; jb++) {
2768        for (ib=0; ib<bs; ib++) {
2769           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2770           b_val++;
2771         }
2772       }
2773     }
2774   } else if (type == NORM_INFINITY) {
2775     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2776       for (jb=0; jb<bs; jb++) {
2777         for (ib=0; ib<bs; ib++) {
2778           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2779           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2780           a_val++;
2781         }
2782       }
2783     }
2784     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2785       for (jb=0; jb<bs; jb++) {
2786         for (ib=0; ib<bs; ib++) {
2787           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2788           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2789           b_val++;
2790         }
2791       }
2792     }
2793   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2794   if (type == NORM_INFINITY) {
2795     ierr = MPI_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2796   } else {
2797     ierr = MPI_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2798   }
2799   ierr = PetscFree(work);CHKERRQ(ierr);
2800   if (type == NORM_2) {
2801     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2802   }
2803   PetscFunctionReturn(0);
2804 }
2805 
2806 #undef __FUNCT__
2807 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2808 PetscErrorCode  MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2809 {
2810   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2811   PetscErrorCode ierr;
2812 
2813   PetscFunctionBegin;
2814   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2815   PetscFunctionReturn(0);
2816 }
2817 
2818 
2819 /* -------------------------------------------------------------------*/
2820 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2821                                        MatGetRow_MPIBAIJ,
2822                                        MatRestoreRow_MPIBAIJ,
2823                                        MatMult_MPIBAIJ,
2824                                 /* 4*/ MatMultAdd_MPIBAIJ,
2825                                        MatMultTranspose_MPIBAIJ,
2826                                        MatMultTransposeAdd_MPIBAIJ,
2827                                        0,
2828                                        0,
2829                                        0,
2830                                 /*10*/ 0,
2831                                        0,
2832                                        0,
2833                                        MatSOR_MPIBAIJ,
2834                                        MatTranspose_MPIBAIJ,
2835                                 /*15*/ MatGetInfo_MPIBAIJ,
2836                                        MatEqual_MPIBAIJ,
2837                                        MatGetDiagonal_MPIBAIJ,
2838                                        MatDiagonalScale_MPIBAIJ,
2839                                        MatNorm_MPIBAIJ,
2840                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2841                                        MatAssemblyEnd_MPIBAIJ,
2842                                        MatSetOption_MPIBAIJ,
2843                                        MatZeroEntries_MPIBAIJ,
2844                                 /*24*/ MatZeroRows_MPIBAIJ,
2845                                        0,
2846                                        0,
2847                                        0,
2848                                        0,
2849                                 /*29*/ MatSetUp_MPIBAIJ,
2850                                        0,
2851                                        0,
2852                                        0,
2853                                        0,
2854                                 /*34*/ MatDuplicate_MPIBAIJ,
2855                                        0,
2856                                        0,
2857                                        0,
2858                                        0,
2859                                 /*39*/ MatAXPY_MPIBAIJ,
2860                                        MatGetSubMatrices_MPIBAIJ,
2861                                        MatIncreaseOverlap_MPIBAIJ,
2862                                        MatGetValues_MPIBAIJ,
2863                                        MatCopy_MPIBAIJ,
2864                                 /*44*/ 0,
2865                                        MatScale_MPIBAIJ,
2866                                        0,
2867                                        0,
2868                                        0,
2869                                 /*49*/ 0,
2870                                        0,
2871                                        0,
2872                                        0,
2873                                        0,
2874                                 /*54*/ MatFDColoringCreate_MPIBAIJ,
2875                                        0,
2876                                        MatSetUnfactored_MPIBAIJ,
2877                                        MatPermute_MPIBAIJ,
2878                                        MatSetValuesBlocked_MPIBAIJ,
2879                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2880                                        MatDestroy_MPIBAIJ,
2881                                        MatView_MPIBAIJ,
2882                                        0,
2883                                        0,
2884                                 /*64*/ 0,
2885                                        0,
2886                                        0,
2887                                        0,
2888                                        0,
2889                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2890                                        0,
2891                                        0,
2892                                        0,
2893                                        0,
2894                                 /*74*/ 0,
2895                                        MatFDColoringApply_BAIJ,
2896                                        0,
2897                                        0,
2898                                        0,
2899                                 /*79*/ 0,
2900                                        0,
2901                                        0,
2902                                        0,
2903                                        MatLoad_MPIBAIJ,
2904                                 /*84*/ 0,
2905                                        0,
2906                                        0,
2907                                        0,
2908                                        0,
2909                                 /*89*/ 0,
2910                                        0,
2911                                        0,
2912                                        0,
2913                                        0,
2914                                 /*94*/ 0,
2915                                        0,
2916                                        0,
2917                                        0,
2918                                        0,
2919                                 /*99*/ 0,
2920                                        0,
2921                                        0,
2922                                        0,
2923                                        0,
2924                                 /*104*/0,
2925                                        MatRealPart_MPIBAIJ,
2926                                        MatImaginaryPart_MPIBAIJ,
2927                                        0,
2928                                        0,
2929                                 /*109*/0,
2930                                        0,
2931                                        0,
2932                                        0,
2933                                        0,
2934                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2935                                        0,
2936                                        MatGetGhosts_MPIBAIJ,
2937                                        0,
2938                                        0,
2939                                 /*119*/0,
2940                                        0,
2941                                        0,
2942                                        0,
2943                                        MatGetMultiProcBlock_MPIBAIJ,
2944                                 /*124*/0,
2945                                        MatGetColumnNorms_MPIBAIJ,
2946                                        MatInvertBlockDiagonal_MPIBAIJ,
2947                                        0,
2948                                        0,
2949                                /*129*/ 0,
2950                                        0,
2951                                        0,
2952                                        0,
2953                                        0,
2954                                /*134*/ 0,
2955                                        0,
2956                                        0,
2957                                        0,
2958                                        0,
2959                                /*139*/ 0,
2960                                        0,
2961                                        0
2962 };
2963 
2964 #undef __FUNCT__
2965 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2966 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2967 {
2968   PetscFunctionBegin;
2969   *a = ((Mat_MPIBAIJ*)A->data)->A;
2970   PetscFunctionReturn(0);
2971 }
2972 
2973 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2974 
2975 #undef __FUNCT__
2976 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2977 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2978 {
2979   PetscInt       m,rstart,cstart,cend;
2980   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2981   const PetscInt *JJ    =0;
2982   PetscScalar    *values=0;
2983   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2984   PetscErrorCode ierr;
2985 
2986   PetscFunctionBegin;
2987   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2988   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2989   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2990   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2991   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2992   m      = B->rmap->n/bs;
2993   rstart = B->rmap->rstart/bs;
2994   cstart = B->cmap->rstart/bs;
2995   cend   = B->cmap->rend/bs;
2996 
2997   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2998   ierr = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr);
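  /* for each local block row, count the block columns that fall inside [cstart,cend) (diagonal portion)
     versus outside it (off-diagonal portion); jj is sorted within each row */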
2999   for (i=0; i<m; i++) {
3000     nz = ii[i+1] - ii[i];
3001     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
3002     nz_max = PetscMax(nz_max,nz);
3003     JJ     = jj + ii[i];
3004     for (j=0; j<nz; j++) {
3005       if (*JJ >= cstart) break;
3006       JJ++;
3007     }
3008     d = 0;
3009     for (; j<nz; j++) {
3010       if (*JJ++ >= cend) break;
3011       d++;
3012     }
3013     d_nnz[i] = d;
3014     o_nnz[i] = nz - d;
3015   }
3016   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
3017   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
3018 
3019   values = (PetscScalar*)V;
3020   if (!values) {
3021     ierr = PetscMalloc(bs*bs*nz_max*sizeof(PetscScalar),&values);CHKERRQ(ierr);
3022     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
3023   }
3024   for (i=0; i<m; i++) {
3025     PetscInt          row    = i + rstart;
3026     PetscInt          ncols  = ii[i+1] - ii[i];
3027     const PetscInt    *icols = jj + ii[i];
3028     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
3029       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
3030       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
3031     } else {                    /* block ordering does not match so we can only insert one block at a time. */
3032       PetscInt j;
3033       for (j=0; j<ncols; j++) {
3034         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
3035         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
3036       }
3037     }
3038   }
3039 
3040   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
3041   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3042   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3043   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
3044   PetscFunctionReturn(0);
3045 }
3046 
3047 #undef __FUNCT__
3048 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
3049 /*@C
3050    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
3051    (block compressed row).
3052 
3053    Collective on MPI_Comm
3054 
3055    Input Parameters:
3056 +  A - the matrix
3057 .  bs - the block size
3058 .  i - the indices into j for the start of each local row (starts with zero)
3059 .  j - the column indices for each local row (starts with zero); these must be sorted within each row
3060 -  v - optional values in the matrix
3061 
3062    Level: developer
3063 
3064    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
3065    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index is
3066    over rows within a block and the last index is over columns within a block.  Fortran programs will likely set
3067    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index is over rows within a
3068    block and the second index is over columns within a block.
3069 
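   Example:
   A minimal sketch (the array names ii, jj, vv and the sizes are illustrative, not part of the API) with bs=2,
   two local block rows, block row 0 holding blocks in block columns 0 and 1, and block row 1 holding one
   block in block column 1:

.vb
   Mat            A;
   PetscErrorCode ierr;
   PetscInt       bs   = 2;
   PetscInt       ii[] = {0,2,3};       /* offsets into jj for each local block row */
   PetscInt       jj[] = {0,1,1};       /* sorted global block column indices */
   PetscScalar    vv[3*2*2] = {0.0};    /* bs*bs values per block, filled by the application */
   ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
   ierr = MatSetSizes(A,2*bs,2*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
   ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetPreallocationCSR(A,bs,ii,jj,vv);CHKERRQ(ierr);
.ve
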
3070 .keywords: matrix, block, aij, compressed row, sparse, parallel
3071 
3072 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MPIAIJ, MatCreateMPIBAIJWithArrays(), MPIBAIJ
3073 @*/
3074 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
3075 {
3076   PetscErrorCode ierr;
3077 
3078   PetscFunctionBegin;
3079   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3080   PetscValidType(B,1);
3081   PetscValidLogicalCollectiveInt(B,bs,2);
3082   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
3083   PetscFunctionReturn(0);
3084 }
3085 
3086 #undef __FUNCT__
3087 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
3088 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
3089 {
3090   Mat_MPIBAIJ    *b;
3091   PetscErrorCode ierr;
3092   PetscInt       i;
3093 
3094   PetscFunctionBegin;
3095   ierr = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
3096   ierr = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
3097   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
3098   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
3099   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
3100 
3101   if (d_nnz) {
3102     for (i=0; i<B->rmap->n/bs; i++) {
3103       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
3104     }
3105   }
3106   if (o_nnz) {
3107     for (i=0; i<B->rmap->n/bs; i++) {
3108       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
3109     }
3110   }
3111 
3112   b      = (Mat_MPIBAIJ*)B->data;
3113   b->bs2 = bs*bs;
3114   b->mbs = B->rmap->n/bs;
3115   b->nbs = B->cmap->n/bs;
3116   b->Mbs = B->rmap->N/bs;
3117   b->Nbs = B->cmap->N/bs;
3118 
3119   for (i=0; i<=b->size; i++) {
3120     b->rangebs[i] = B->rmap->range[i]/bs;
3121   }
3122   b->rstartbs = B->rmap->rstart/bs;
3123   b->rendbs   = B->rmap->rend/bs;
3124   b->cstartbs = B->cmap->rstart/bs;
3125   b->cendbs   = B->cmap->rend/bs;
3126 
3127   if (!B->preallocated) {
3128     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3129     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3130     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
3131     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
3132     ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3133     ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3134     ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3135     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
3136     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
3137   }
3138 
3139   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3140   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3141   B->preallocated = PETSC_TRUE;
3142   PetscFunctionReturn(0);
3143 }
3144 
3145 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3146 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3147 
3148 #undef __FUNCT__
3149 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3150 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3151 {
3152   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3153   PetscErrorCode ierr;
3154   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3155   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3156   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3157 
3158   PetscFunctionBegin;
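  /* MPIAdj stores the adjacency graph without diagonal entries, so the row counts below exclude them */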
3159   ierr  = PetscMalloc((M+1)*sizeof(PetscInt),&ii);CHKERRQ(ierr);
3160   ii[0] = 0;
3161   for (i=0; i<M; i++) {
3162     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3163     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3164     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3165     /* remove one from the count if this row of the matrix has a diagonal entry */
3166     for (j=id[i]; j<id[i+1]; j++) {
3167       if (jd[j] == i) {ii[i+1]--;break;}
3168     }
3169   }
3170   ierr = PetscMalloc(ii[M]*sizeof(PetscInt),&jj);CHKERRQ(ierr);
3171   cnt  = 0;
3172   for (i=0; i<M; i++) {
3173     for (j=io[i]; j<io[i+1]; j++) {
3174       if (garray[jo[j]] > rstart) break;
3175       jj[cnt++] = garray[jo[j]];
3176     }
3177     for (k=id[i]; k<id[i+1]; k++) {
3178       if (jd[k] != i) {
3179         jj[cnt++] = rstart + jd[k];
3180       }
3181     }
3182     for (; j<io[i+1]; j++) {
3183       jj[cnt++] = garray[jo[j]];
3184     }
3185   }
3186   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
3187   PetscFunctionReturn(0);
3188 }
3189 
3190 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3191 
3192 PETSC_EXTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3193 
3194 #undef __FUNCT__
3195 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3196 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3197 {
3198   PetscErrorCode ierr;
3199   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3200   Mat            B;
3201   Mat_MPIAIJ     *b;
3202 
3203   PetscFunctionBegin;
3204   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
3205 
3206   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
3207   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3208   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3209   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
3210   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
3211   b    = (Mat_MPIAIJ*) B->data;
3212 
3213   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3214   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3215   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3216   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3217   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3218   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3219   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3220   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3221   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3222   if (reuse == MAT_REUSE_MATRIX) {
3223     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
3224   } else {
3225     *newmat = B;
3226   }
3227   PetscFunctionReturn(0);
3228 }
3229 
3230 #if defined(PETSC_HAVE_MUMPS)
3231 PETSC_EXTERN PetscErrorCode MatGetFactor_baij_mumps(Mat,MatFactorType,Mat*);
3232 #endif
3233 
3234 /*MC
3235    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3236 
3237    Options Database Keys:
3238 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3239 . -mat_block_size <bs> - set the blocksize used to store the matrix
3240 - -mat_use_hash_table <fact> - use a hash table during assembly to save memory; fact controls the table size
3241 
3242   Level: beginner
3243 
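  Example:
  A minimal sketch of selecting this type from the options database (A is a Mat; m, n, M, N stand for the
  desired local and global sizes):

.vb
   ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
   ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr);
   ierr = MatSetFromOptions(A);CHKERRQ(ierr);   /* then run with -mat_type mpibaij -mat_block_size 3 */
.ve
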
3244 .seealso: MatCreateBAIJ(), MATSEQBAIJ, MATBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3245 M*/
3246 
3247 PETSC_EXTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3248 
3249 #undef __FUNCT__
3250 #define __FUNCT__ "MatCreate_MPIBAIJ"
3251 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3252 {
3253   Mat_MPIBAIJ    *b;
3254   PetscErrorCode ierr;
3255   PetscBool      flg;
3256 
3257   PetscFunctionBegin;
3258   ierr    = PetscNewLog(B,Mat_MPIBAIJ,&b);CHKERRQ(ierr);
3259   B->data = (void*)b;
3260 
3261   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3262   B->assembled = PETSC_FALSE;
3263 
3264   B->insertmode = NOT_SET_VALUES;
3265   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3266   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3267 
3268   /* build local table of row and column ownerships */
3269   ierr = PetscMalloc((b->size+1)*sizeof(PetscInt),&b->rangebs);CHKERRQ(ierr);
3270 
3271   /* build cache for off array entries formed */
3272   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3273 
3274   b->donotstash  = PETSC_FALSE;
3275   b->colmap      = NULL;
3276   b->garray      = NULL;
3277   b->roworiented = PETSC_TRUE;
3278 
3279   /* stuff used in block assembly */
3280   b->barray = 0;
3281 
3282   /* stuff used for matrix vector multiply */
3283   b->lvec  = 0;
3284   b->Mvctx = 0;
3285 
3286   /* stuff for MatGetRow() */
3287   b->rowindices   = 0;
3288   b->rowvalues    = 0;
3289   b->getrowactive = PETSC_FALSE;
3290 
3291   /* hash table stuff */
3292   b->ht           = 0;
3293   b->hd           = 0;
3294   b->ht_size      = 0;
3295   b->ht_flag      = PETSC_FALSE;
3296   b->ht_fact      = 0;
3297   b->ht_total_ct  = 0;
3298   b->ht_insert_ct = 0;
3299 
3300   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3301   b->ijonly = PETSC_FALSE;
3302 
3303   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for MPIBAIJ matrix","Mat");CHKERRQ(ierr);
3304   ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",PETSC_FALSE,&flg,NULL);CHKERRQ(ierr);
3305   if (flg) {
3306     PetscReal fact = 1.39;
3307     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3308     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3309     if (fact <= 1.0) fact = 1.39;
3310     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3311     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3312   }
3313   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3314 
3315 #if defined(PETSC_HAVE_MUMPS)
3316   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetFactor_mumps_C",MatGetFactor_baij_mumps);CHKERRQ(ierr);
3317 #endif
3318   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3319   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3320   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3321   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3322   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3323   ierr = PetscObjectComposeFunction((PetscObject)B,"MatGetDiagonalBlock_C",MatGetDiagonalBlock_MPIBAIJ);CHKERRQ(ierr);
3324   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3325   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3326   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3327   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3328   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpibstrm_C",MatConvert_MPIBAIJ_MPIBSTRM);CHKERRQ(ierr);
3329   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3330   PetscFunctionReturn(0);
3331 }
3332 
3333 /*MC
3334    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3335 
3336    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3337    and MATMPIBAIJ otherwise.
3338 
3339    Options Database Keys:
3340 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3341 
3342   Level: beginner
3343 
3344 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3345 M*/
3346 
3347 #undef __FUNCT__
3348 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3349 /*@C
3350    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3351    (block compressed row).  For good matrix assembly performance
3352    the user should preallocate the matrix storage by setting the parameters
3353    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3354    performance can be increased by more than a factor of 50.
3355 
3356    Collective on Mat
3357 
3358    Input Parameters:
3359 +  A - the matrix
3360 .  bs   - size of block
3361 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3362            submatrix  (same for all local rows)
3363 .  d_nnz - array containing the number of block nonzeros in the various block rows
3364            of the diagonal portion of the local submatrix (possibly different for each block
3365            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3366            set it even if it is zero.
3367 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3368            submatrix (same for all local rows).
3369 -  o_nnz - array containing the number of block nonzeros in the various block rows of the
3370            off-diagonal portion of the local submatrix (possibly different for
3371            each block row) or NULL.
3372 
3373    If the *_nnz parameter is given then the *_nz parameter is ignored
3374 
3375    Options Database Keys:
3376 +   -mat_block_size - size of the blocks to use
3377 -   -mat_use_hash_table <fact> - use a hash table during assembly to save memory; fact controls the table size
3378 
3379    Notes:
3380    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3381    then it must be used on all processors that share the object for that argument.
3382 
3383    Storage Information:
3384    For a square global matrix we define each processor's diagonal portion
3385    to be its local rows and the corresponding columns (a square submatrix);
3386    each processor's off-diagonal portion encompasses the remainder of the
3387    local matrix (a rectangular submatrix).
3388 
3389    The user can specify preallocated storage for the diagonal part of
3390    the local submatrix with either d_nz or d_nnz (not both).  Set
3391    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3392    memory allocation.  Likewise, specify preallocated storage for the
3393    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3394 
3395    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3396    the figure below we depict these three local rows and all columns (0-11).
3397 
3398 .vb
3399            0 1 2 3 4 5 6 7 8 9 10 11
3400           --------------------------
3401    row 3  |o o o d d d o o o o  o  o
3402    row 4  |o o o d d d o o o o  o  o
3403    row 5  |o o o d d d o o o o  o  o
3404           --------------------------
3405 .ve
3406 
3407    Thus, any entries in the d locations are stored in the d (diagonal)
3408    submatrix, and any entries in the o locations are stored in the
3409    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3410    stored simply in the MATSEQBAIJ format for compressed row storage.
3411 
3412    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3413    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3414    In general, for PDE problems in which most nonzeros are near the diagonal,
3415    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3416    or you will get TERRIBLE performance; see the users' manual chapter on
3417    matrices.
3418 
3419    You can call MatGetInfo() to get information on how effective the preallocation was;
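   For instance, treating the three rows depicted above as a single block row with bs=3 (block columns 0-3,
   diagonal block in block column 1), a sketch of the corresponding preallocation, assuming all depicted
   blocks are nonzero, is:

.vb
   PetscInt d_nnz[] = {1};   /* one diagonal block (block column 1) in the single local block row */
   PetscInt o_nnz[] = {3};   /* off-diagonal blocks in block columns 0, 2 and 3 */
   ierr = MatMPIBAIJSetPreallocation(A,3,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
.ve
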
3420    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3421    You can also run with the option -info and look for messages with the string
3422    malloc in them to see if additional memory allocation was needed.
3423 
3424    Level: intermediate
3425 
3426 .keywords: matrix, block, aij, compressed row, sparse, parallel
3427 
3428 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3429 @*/
3430 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3431 {
3432   PetscErrorCode ierr;
3433 
3434   PetscFunctionBegin;
3435   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3436   PetscValidType(B,1);
3437   PetscValidLogicalCollectiveInt(B,bs,2);
3438   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3439   PetscFunctionReturn(0);
3440 }
3441 
3442 #undef __FUNCT__
3443 #define __FUNCT__ "MatCreateBAIJ"
3444 /*@C
3445    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3446    (block compressed row).  For good matrix assembly performance
3447    the user should preallocate the matrix storage by setting the parameters
3448    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3449    performance can be increased by more than a factor of 50.
3450 
3451    Collective on MPI_Comm
3452 
3453    Input Parameters:
3454 +  comm - MPI communicator
3455 .  bs   - size of block
3456 .  m - number of local rows (or PETSC_DECIDE to have it calculated if M is given)
3457            This value should be the same as the local size used in creating the
3458            y vector for the matrix-vector product y = Ax.
3459 .  n - number of local columns (or PETSC_DECIDE to have it calculated if N is given)
3460            This value should be the same as the local size used in creating the
3461            x vector for the matrix-vector product y = Ax.
3462 .  M - number of global rows (or PETSC_DETERMINE to have it calculated if m is given)
3463 .  N - number of global columns (or PETSC_DETERMINE to have it calculated if n is given)
3464 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3465            submatrix  (same for all local rows)
3466 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3467            of the diagonal portion of the local submatrix (possibly different for each block
3468            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3469            and set it even if it is zero.
3470 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3471            submatrix (same for all local rows).
3472 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3473            off-diagonal portion of the local submatrix (possibly different for
3474            each block row) or NULL.
3475 
3476    Output Parameter:
3477 .  A - the matrix
3478 
3479    Options Database Keys:
3480 +   -mat_block_size - size of the blocks to use
3481 -   -mat_use_hash_table <fact> - use a hash table during assembly to save memory; fact controls the table size
3482 
3483    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3484    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3485    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3486 
3487    Notes:
3488    If the *_nnz parameter is given then the *_nz parameter is ignored
3489 
3490    A nonzero block is any block that has 1 or more nonzeros in it
3491 
3492    The user MUST specify either the local or global matrix dimensions
3493    (possibly both).
3494 
3495    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3496    then it must be used on all processors that share the object for that argument.
3497 
3498    Storage Information:
3499    For a square global matrix we define each processor's diagonal portion
3500    to be its local rows and the corresponding columns (a square submatrix);
3501    each processor's off-diagonal portion encompasses the remainder of the
3502    local matrix (a rectangular submatrix).
3503 
3504    The user can specify preallocated storage for the diagonal part of
3505    the local submatrix with either d_nz or d_nnz (not both).  Set
3506    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3507    memory allocation.  Likewise, specify preallocated storage for the
3508    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3509 
3510    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3511    the figure below we depict these three local rows and all columns (0-11).
3512 
3513 .vb
3514            0 1 2 3 4 5 6 7 8 9 10 11
3515           --------------------------
3516    row 3  |o o o d d d o o o o  o  o
3517    row 4  |o o o d d d o o o o  o  o
3518    row 5  |o o o d d d o o o o  o  o
3519           --------------------------
3520 .ve
3521 
3522    Thus, any entries in the d locations are stored in the d (diagonal)
3523    submatrix, and any entries in the o locations are stored in the
3524    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3525    stored simply in the MATSEQBAIJ format for compressed row storage.
3526 
3527    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3528    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3529    In general, for PDE problems in which most nonzeros are near the diagonal,
3530    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3531    or you will get TERRIBLE performance; see the users' manual chapter on
3532    matrices.
3533 
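   Example:
   A minimal sketch creating a square matrix with bs=2 and uniform preallocation estimates (M, the global
   row count, and the block counts 5 and 2 are illustrative):

.vb
   Mat A;
   ierr = MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,M,M,5,NULL,2,NULL,&A);CHKERRQ(ierr);
.ve
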
3534    Level: intermediate
3535 
3536 .keywords: matrix, block, aij, compressed row, sparse, parallel
3537 
3538 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3539 @*/
3540 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3541 {
3542   PetscErrorCode ierr;
3543   PetscMPIInt    size;
3544 
3545   PetscFunctionBegin;
3546   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3547   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3548   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3549   if (size > 1) {
3550     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3551     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3552   } else {
3553     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3554     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3555   }
3556   PetscFunctionReturn(0);
3557 }
3558 
3559 #undef __FUNCT__
3560 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3561 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3562 {
3563   Mat            mat;
3564   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3565   PetscErrorCode ierr;
3566   PetscInt       len=0;
3567 
3568   PetscFunctionBegin;
3569   *newmat = 0;
3570   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3571   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3572   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3573   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3574 
3575   mat->factortype   = matin->factortype;
3576   mat->preallocated = PETSC_TRUE;
3577   mat->assembled    = PETSC_TRUE;
3578   mat->insertmode   = NOT_SET_VALUES;
3579 
3580   a             = (Mat_MPIBAIJ*)mat->data;
3581   mat->rmap->bs = matin->rmap->bs;
3582   a->bs2        = oldmat->bs2;
3583   a->mbs        = oldmat->mbs;
3584   a->nbs        = oldmat->nbs;
3585   a->Mbs        = oldmat->Mbs;
3586   a->Nbs        = oldmat->Nbs;
3587 
3588   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3589   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3590 
3591   a->size         = oldmat->size;
3592   a->rank         = oldmat->rank;
3593   a->donotstash   = oldmat->donotstash;
3594   a->roworiented  = oldmat->roworiented;
3595   a->rowindices   = 0;
3596   a->rowvalues    = 0;
3597   a->getrowactive = PETSC_FALSE;
3598   a->barray       = 0;
3599   a->rstartbs     = oldmat->rstartbs;
3600   a->rendbs       = oldmat->rendbs;
3601   a->cstartbs     = oldmat->cstartbs;
3602   a->cendbs       = oldmat->cendbs;
3603 
3604   /* hash table stuff */
3605   a->ht           = 0;
3606   a->hd           = 0;
3607   a->ht_size      = 0;
3608   a->ht_flag      = oldmat->ht_flag;
3609   a->ht_fact      = oldmat->ht_fact;
3610   a->ht_total_ct  = 0;
3611   a->ht_insert_ct = 0;
3612 
3613   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3614   if (oldmat->colmap) {
3615 #if defined(PETSC_USE_CTABLE)
3616     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3617 #else
3618     ierr = PetscMalloc((a->Nbs)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
3619     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3620     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3621 #endif
3622   } else a->colmap = 0;
3623 
3624   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3625     ierr = PetscMalloc(len*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
3626     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3627     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3628   } else a->garray = 0;
3629 
3630   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3631   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3632   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3633   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3634   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3635 
3636   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3637   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3638   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3639   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3640   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3641   *newmat = mat;
3642   PetscFunctionReturn(0);
3643 }
3644 
3645 #undef __FUNCT__
3646 #define __FUNCT__ "MatLoad_MPIBAIJ"
3647 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3648 {
3649   PetscErrorCode ierr;
3650   int            fd;
3651   PetscInt       i,nz,j,rstart,rend;
3652   PetscScalar    *vals,*buf;
3653   MPI_Comm       comm;
3654   MPI_Status     status;
3655   PetscMPIInt    rank,size,maxnz;
3656   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3657   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3658   PetscInt       jj,*mycols,*ibuf,bs=1,Mbs,mbs,extra_rows,mmax;
3659   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3660   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3661   PetscInt       dcount,kmax,k,nzcount,tmp,mend,sizesset=1,grows,gcols;
3662 
3663   PetscFunctionBegin;
3664   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3665   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix 2","Mat");CHKERRQ(ierr);
3666   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3667   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3668 
3669   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3670   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3671   if (!rank) {
3672     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3673     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3674     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
3675   }
3676 
3677   if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) sizesset = 0;
3678 
3679   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3680   M    = header[1]; N = header[2];
3681 
3682   /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
3683   if (sizesset && newmat->rmap->N < 0) newmat->rmap->N = M;
3684   if (sizesset && newmat->cmap->N < 0) newmat->cmap->N = N;
3685 
3686   /* If global sizes are set, check if they are consistent with that given in the file */
3687   if (sizesset) {
3688     ierr = MatGetSize(newmat,&grows,&gcols);CHKERRQ(ierr);
3689   }
3690   if (sizesset && M != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows: Matrix in file has (%D) and input matrix has (%D)",M,grows);
3691   if (sizesset && N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols: Matrix in file has (%D) and input matrix has (%D)",N,gcols);
3692 
3693   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3694 
3695   /*
3696      This code adds extra rows to make sure the number of rows is
3697      divisible by the blocksize
3698   */
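  /* Example: M = 10 with bs = 4 gives Mbs = 2 and extra_rows = 4 - 10 + 8 = 2, so Mbs becomes 3 and
     two padding rows (given unit diagonal entries below) are appended */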
3699   Mbs        = M/bs;
3700   extra_rows = bs - M + bs*Mbs;
3701   if (extra_rows == bs) extra_rows = 0;
3702   else                  Mbs++;
3703   if (extra_rows && !rank) {
3704     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3705   }
3706 
3707   /* determine ownership of all rows */
3708   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
3709     mbs = Mbs/size + ((Mbs % size) > rank);
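    /* e.g. Mbs = 10 block rows on 4 ranks yields mbs = 3,3,2,2: the first Mbs % size ranks get one extra */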
3710     m   = mbs*bs;
3711   } else { /* User set */
3712     m   = newmat->rmap->n;
3713     mbs = m/bs;
3714   }
3715   ierr = PetscMalloc2(size+1,PetscInt,&rowners,size+1,PetscInt,&browners);CHKERRQ(ierr);
3716   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3717 
3718   /* process 0 needs enough room for process with most rows */
3719   if (!rank) {
3720     mmax = rowners[1];
3721     for (i=2; i<=size; i++) {
3722       mmax = PetscMax(mmax,rowners[i]);
3723     }
3724     mmax*=bs;
3725   } else mmax = -1;             /* unused, but compiler warns anyway */
3726 
3727   rowners[0] = 0;
3728   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3729   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3730   rstart = rowners[rank];
3731   rend   = rowners[rank+1];
3732 
3733   /* distribute row lengths to all processors */
3734   ierr = PetscMalloc(m*sizeof(PetscInt),&locrowlens);CHKERRQ(ierr);
3735   if (!rank) {
3736     mend = m;
3737     if (size == 1) mend = mend - extra_rows;
3738     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3739     for (j=mend; j<m; j++) locrowlens[j] = 1;
3740     ierr = PetscMalloc(mmax*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
3741     ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
3742     ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
3743     for (j=0; j<m; j++) {
3744       procsnz[0] += locrowlens[j];
3745     }
3746     for (i=1; i<size; i++) {
3747       mend = browners[i+1] - browners[i];
3748       if (i == size-1) mend = mend - extra_rows;
3749       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3750       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3751       /* calculate the number of nonzeros on each processor */
3752       for (j=0; j<browners[i+1]-browners[i]; j++) {
3753         procsnz[i] += rowlengths[j];
3754       }
3755       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3756     }
3757     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3758   } else {
3759     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3760   }
3761 
3762   if (!rank) {
3763     /* determine max buffer needed and allocate it */
3764     maxnz = procsnz[0];
3765     for (i=1; i<size; i++) {
3766       maxnz = PetscMax(maxnz,procsnz[i]);
3767     }
3768     ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);
3769 
3770     /* read in my part of the matrix column indices  */
3771     nz     = procsnz[0];
3772     ierr   = PetscMalloc((nz+1)*sizeof(PetscInt),&ibuf);CHKERRQ(ierr);
3773     mycols = ibuf;
3774     if (size == 1) nz -= extra_rows;
3775     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3776     if (size == 1) {
3777       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3778     }
3779 
3780     /* read in everyone else's column indices (except the last process's) and ship them off */
3781     for (i=1; i<size-1; i++) {
3782       nz   = procsnz[i];
3783       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3784       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3785     }
3786     /* read in the stuff for the last proc */
3787     if (size != 1) {
3788       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3789       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3790       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3791       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3792     }
3793     ierr = PetscFree(cols);CHKERRQ(ierr);
3794   } else {
3795     /* determine buffer space needed for message */
3796     nz = 0;
3797     for (i=0; i<m; i++) {
3798       nz += locrowlens[i];
3799     }
3800     ierr   = PetscMalloc((nz+1)*sizeof(PetscInt),&ibuf);CHKERRQ(ierr);
3801     mycols = ibuf;
3802     /* receive message of column indices */
3803     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3804     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3805     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Number of column indices received does not match the expected count; the file may be corrupt");
3806   }
3807 
3808   /* loop over local rows, determining number of off diagonal entries */
3809   ierr     = PetscMalloc2(rend-rstart,PetscInt,&dlens,rend-rstart,PetscInt,&odlens);CHKERRQ(ierr);
3810   ierr     = PetscMalloc3(Mbs,PetscInt,&mask,Mbs,PetscInt,&masked1,Mbs,PetscInt,&masked2);CHKERRQ(ierr);
3811   ierr     = PetscMemzero(mask,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3812   ierr     = PetscMemzero(masked1,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3813   ierr     = PetscMemzero(masked2,Mbs*sizeof(PetscInt));CHKERRQ(ierr);
3814   rowcount = 0; nzcount = 0;
3815   for (i=0; i<mbs; i++) {
3816     dcount  = 0;
3817     odcount = 0;
3818     for (j=0; j<bs; j++) {
3819       kmax = locrowlens[rowcount];
3820       for (k=0; k<kmax; k++) {
3821         tmp = mycols[nzcount++]/bs;
3822         if (!mask[tmp]) {
3823           mask[tmp] = 1;
3824           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3825           else masked1[dcount++] = tmp;
3826         }
3827       }
3828       rowcount++;
3829     }
3830 
3831     dlens[i]  = dcount;
3832     odlens[i] = odcount;
3833 
3834     /* zero out the mask elements we set */
3835     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3836     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3837   }
3838 
3839 
3840   if (!sizesset) {
3841     ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3842   }
3843   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3844 
3845   if (!rank) {
3846     ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
3847     /* read in my part of the matrix numerical values  */
3848     nz     = procsnz[0];
3849     vals   = buf;
3850     mycols = ibuf;
3851     if (size == 1) nz -= extra_rows;
3852     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3853     if (size == 1) {
3854       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3855     }
3856 
3857     /* insert into matrix */
3858     jj = rstart*bs;
3859     for (i=0; i<m; i++) {
3860       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3861       mycols += locrowlens[i];
3862       vals   += locrowlens[i];
3863       jj++;
3864     }
3865     /* read in other processors (except the last one) and ship out */
3866     for (i=1; i<size-1; i++) {
3867       nz   = procsnz[i];
3868       vals = buf;
3869       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3870       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3871     }
3872     /* read in the values for the last processor, padding the extra rows */
3873     if (size != 1) {
3874       nz   = procsnz[size-1] - extra_rows;
3875       vals = buf;
3876       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3877       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3878       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3879     }
3880     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3881   } else {
3882     /* receive numeric values */
3883     ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&buf);CHKERRQ(ierr);
3884 
3885     /* receive message of values */
3886     vals   = buf;
3887     mycols = ibuf;
3888     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3889 
3890     /* insert into matrix */
3891     jj = rstart*bs;
3892     for (i=0; i<m; i++) {
3893       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3894       mycols += locrowlens[i];
3895       vals   += locrowlens[i];
3896       jj++;
3897     }
3898   }
3899   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3900   ierr = PetscFree(buf);CHKERRQ(ierr);
3901   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3902   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3903   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3904   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3905   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3906   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3907   PetscFunctionReturn(0);
3908 }
3909 
3910 #undef __FUNCT__
3911 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3912 /*@
3913    MatMPIBAIJSetHashTableFactor - Sets the factor used to compute the size of the hash table.
3914 
3915    Not Collective, each process can use a different factor
3916 
3917    Input Parameters:
3918 +  mat  - the matrix
3919 -  fact - the factor used when sizing the hash table
3920 
3921    Level: advanced
3922 
3923   Notes:
3924    This can also be set by the command line option: -mat_use_hash_table <fact>
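
   A minimal usage sketch (the factor value is hypothetical; error checking omitted,
   and the matrix is assumed to already be of type MATMPIBAIJ):
.vb
   MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);   /* enable hash-table assembly */
   MatMPIBAIJSetHashTableFactor(mat,1.99);            /* oversize the table by ~2x */
.ve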
3925 
3926 .keywords: matrix, hashtable, factor, HT
3927 
3928 .seealso: MatSetOption()
3929 @*/
3930 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3931 {
3932   PetscErrorCode ierr;
3933 
3934   PetscFunctionBegin;
3935   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3936   PetscFunctionReturn(0);
3937 }
3938 
3939 #undef __FUNCT__
3940 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3941 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3942 {
3943   Mat_MPIBAIJ *baij;
3944 
3945   PetscFunctionBegin;
3946   baij          = (Mat_MPIBAIJ*)mat->data;
3947   baij->ht_fact = fact;
3948   PetscFunctionReturn(0);
3949 }
3950 
3951 #undef __FUNCT__
3952 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
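/*
   MatMPIBAIJGetSeqBAIJ - returns the "diagonal" (Ad) and "off-diagonal" (Ao) sequential
   BAIJ parts of this parallel matrix, along with the map (colmap) from the local block
   columns of Ao to global block columns.
*/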
3953 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3954 {
3955   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3956 
3957   PetscFunctionBegin;
3958   *Ad     = a->A;
3959   *Ao     = a->B;
3960   *colmap = a->garray;
3961   PetscFunctionReturn(0);
3962 }
3963 
3964 /*
3965     Special version for direct calls from Fortran (to eliminate two function-call overheads)
3966 */
3967 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3968 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3969 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3970 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3971 #endif
3972 
3973 #undef __FUNCT__
3974 #define __FUNCT__ "matmpibaijsetvaluesblocked_"
3975 /*@C
3976   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3977 
3978   Collective on Mat
3979 
3980   Input Parameters:
3981 + mat - the matrix
3982 . min - number of input rows
3983 . im - input rows
3984 . nin - number of input columns
3985 . in - input columns
3986 . v - numerical values input
3987 - addvin - INSERT_VALUES or ADD_VALUES
3988 
3989   Notes: This contains a complete copy of the body of MatSetValuesBlocked_MPIBAIJ(); duplicating it (rather than reusing the code) is unfortunate, but it eliminates the extra function-call overhead.
3990 
3991   Level: advanced
3992 
3993 .seealso:   MatSetValuesBlocked()
3994 @*/
3995 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3996 {
3997   /* convert input arguments to C version */
3998   Mat        mat  = *matin;
3999   PetscInt   m    = *min, n = *nin;
4000   InsertMode addv = *addvin;
4001 
4002   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
4003   const MatScalar *value;
4004   MatScalar       *barray     = baij->barray;
4005   PetscBool       roworiented = baij->roworiented;
4006   PetscErrorCode  ierr;
4007   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
4008   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
4009   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
4010 
4011   PetscFunctionBegin;
4012   /* tasks normally handled by MatSetValuesBlocked() */
4013   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
4014 #if defined(PETSC_USE_DEBUG)
4015   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
4016   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4017 #endif
4018   if (mat->assembled) {
4019     mat->was_assembled = PETSC_TRUE;
4020     mat->assembled     = PETSC_FALSE;
4021   }
4022   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
4023 
4024 
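  /* barray is a persistent bs*bs scratch block, reused to gather one block of v into
     contiguous storage before it is handed to MatSetValuesBlocked_SeqBAIJ() */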
4025   if (!barray) {
4026     ierr         = PetscMalloc(bs2*sizeof(MatScalar),&barray);CHKERRQ(ierr);
4027     baij->barray = barray;
4028   }
4029 
4030   if (roworiented) stepval = (n-1)*bs;
4031   else stepval = (m-1)*bs;
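  /* stepval is the gap (in scalars) between consecutive rows of a bs x bs block inside
     the input array v: row-oriented input stores a full logical row of n blocks
     contiguously, so successive rows of one block lie stepval+bs = n*bs scalars apart
     (column-oriented input is analogous with m block rows); the copy loop below
     advances value by stepval after each run of bs copies */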
4032 
4033   for (i=0; i<m; i++) {
4034     if (im[i] < 0) continue;
4035 #if defined(PETSC_USE_DEBUG)
4036     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
4037 #endif
4038     if (im[i] >= rstart && im[i] < rend) {
4039       row = im[i] - rstart;
4040       for (j=0; j<n; j++) {
4041         /* if there is a single block column (row-oriented) or a single block row (column-oriented), the block is already contiguous in v and no copy is required */
4042         if ((roworiented) && (n == 1)) {
4043           barray = (MatScalar*)v + i*bs2;
4044         } else if ((!roworiented) && (m == 1)) {
4045           barray = (MatScalar*)v + j*bs2;
4046         } else { /* Here a copy is required */
4047           if (roworiented) {
4048             value = v + i*(stepval+bs)*bs + j*bs;
4049           } else {
4050             value = v + j*(stepval+bs)*bs + i*bs;
4051           }
4052           for (ii=0; ii<bs; ii++,value+=stepval) {
4053             for (jj=0; jj<bs; jj++) {
4054               *barray++ = *value++;
4055             }
4056           }
4057           barray -= bs2;  /* rewind to the start of the copied block */
4058         }
4059 
4060         if (in[j] >= cstart && in[j] < cend) {
4061           col  = in[j] - cstart;
4062           ierr = MatSetValuesBlocked_SeqBAIJ(baij->A,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
4063         } else if (in[j] < 0) continue;
4064 #if defined(PETSC_USE_DEBUG)
4065         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
4066 #endif
4067         else {
4068           if (mat->was_assembled) {
4069             if (!baij->colmap) {
4070               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
4071             }
4072 
4073 #if defined(PETSC_USE_DEBUG)
4074 #if defined(PETSC_USE_CTABLE)
4075             { PetscInt data;
4076               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
4077               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
4078             }
4079 #else
4080             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
4081 #endif
4082 #endif
4083 #if defined(PETSC_USE_CTABLE)
4084             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
4085             col  = (col - 1)/bs;
4086 #else
4087             col = (baij->colmap[in[j]] - 1)/bs;
4088 #endif
4089             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
4090               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
4091               col  =  in[j];
4092             }
4093           } else col = in[j];
4094           ierr = MatSetValuesBlocked_SeqBAIJ(baij->B,1,&row,1,&col,barray,addv);CHKERRQ(ierr);
4095         }
4096       }
4097     } else {
4098       if (!baij->donotstash) {
4099         if (roworiented) {
4100           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
4101         } else {
4102           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
4103         }
4104       }
4105     }
4106   }
4107 
4108   /* task normally handled by MatSetValuesBlocked() */
4109   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
4110   PetscFunctionReturn(0);
4111 }
4112 
4113 #undef __FUNCT__
4114 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
4115 /*@
4116      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local
4117          rows in standard CSR format.
4118 
4119    Collective on MPI_Comm
4120 
4121    Input Parameters:
4122 +  comm - MPI communicator
4123 .  bs - the block size, only a block size of 1 is supported
4124 .  m - number of local rows (Cannot be PETSC_DECIDE)
4125 .  n - This value should be the same as the local size used in creating the
4126        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
4127        calculated if N is given). For square matrices n is almost always m.
4128 .  M - number of global rows (or PETSC_DETERMINE to have it calculated if m is given)
4129 .  N - number of global columns (or PETSC_DETERMINE to have it calculated if n is given)
4130 .   i - row indices
4131 .   j - column indices
4132 -   a - matrix values
4133 
4134    Output Parameter:
4135 .   mat - the matrix
4136 
4137    Level: intermediate
4138 
4139    Notes:
4140        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
4141      thus you CANNOT change the matrix entries by changing the values of a[] after you have
4142      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
4143 
4144      The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
4145      the same as a three dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
4146      block, followed by the second column of the first block etc etc.  That is, the blocks are contiguous in memory
4147      with column-major ordering within blocks.
4148 
4149        The i and j indices are 0 based, with i[] holding the offsets into the local j[] (and a[]) arrays at which each row begins.
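
   A minimal sketch (run on one process so global and local column indices coincide;
   the 2x2 matrix and its values are hypothetical):
.vb
   PetscInt    i[] = {0,2,4};              /* offsets: each of the 2 local rows has 2 entries */
   PetscInt    j[] = {0,1,0,1};            /* 0-based column indices */
   PetscScalar a[] = {2.0,-1.0,-1.0,2.0};  /* values, row by row */
   Mat         A;
   MatCreateMPIBAIJWithArrays(PETSC_COMM_WORLD,1,2,2,PETSC_DETERMINE,PETSC_DETERMINE,i,j,a,&A);
.ve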
4150 
4151 .keywords: matrix, baij, block compressed row, sparse, parallel
4152 
4153 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR(),
4154           MPIBAIJ, MatCreateBAIJ(), MatCreateMPIAIJWithSplitArrays()
4155 @*/
4156 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
4157 {
4158   PetscErrorCode ierr;
4159 
4160   PetscFunctionBegin;
4161   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
4162   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
4163   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
4164   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
4165   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
4166   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
4167   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
4168   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
4169   PetscFunctionReturn(0);
4170 }
4171