/* xref: /petsc/src/mat/impls/baij/mpi/mpibaij.c (revision 26e8aace4f063ad23964f515bdc4b39acb1d85a7) */

#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/

#include <petscblaslapack.h>
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc1(A->rmap->n,&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);

  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIBAIJ"
PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
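
/*
   Usage sketch (illustrative): the store/retrieve pair lets a caller save the
   numerical values of a matrix whose nonzero pattern is frozen and restore
   them later, e.g.

     ierr = MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatStoreValues(mat);CHKERRQ(ierr);
     ... modify the values, for example by an in-place factorization ...
     ierr = MatRetrieveValues(mat);CHKERRQ(ierr);

   For MPIBAIJ the two routines above simply forward to the sequential
   store/retrieve on the diagonal (A) and off-diagonal (B) parts.
*/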

/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix.  This is done in a non-scalable way since the
   length of colmap equals the global matrix length.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc1(baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}
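
/*
   Illustrative sketch of the convention used here: colmap stores 1 + bs times
   the local block index, so a stored value of 0 can mean "this block column is
   not present on this process".  A lookup in the array (non-CTABLE) case is

     PetscInt lcol = baij->colmap[gcol/bs] - 1;
     if (lcol < 0) ... block column gcol/bs has no entries on this process ...
     else          ... lcol is the first local point column of that block ...

   where gcol is a hypothetical global point column; MatSetValues_MPIBAIJ()
   below then adds gcol%bs to reach the individual point column.  The CTABLE
   branch applies the same +1/-1 shift through PetscTableFind().
*/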

#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr);}  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }
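
/*
   The two macros above are identical except that the _A_ variant updates the
   diagonal block (A) and the _B_ variant the off-diagonal block (B).  Both use
   the same search strategy as the inlined routine below: a short binary search
   narrows [low,high) while the window is larger than a few entries, then a
   linear scan finds the column (or the insertion point).  A minimal sketch of
   that strategy on a plain sorted array, for illustration only:

     low = 0; high = n;
     while (high - low > 3) {
       t = (low + high)/2;
       if (arr[t] > key) high = t;
       else              low  = t;
     }
     for (i = low; i < high; i++) {
       if (arr[i] == key) break;
     }
*/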

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ"
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some variables required by the macros above */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
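
/*
   Usage sketch (illustrative): callers reach this routine through the public
   MatSetValues() interface with global point indices; entries destined for
   rows owned by other processes are stashed and communicated during assembly:

     ierr = MatSetValues(mat,1,&grow,1,&gcol,&val,ADD_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

   Here grow/gcol/val are hypothetical names for a global row, a global column,
   and a value.
*/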

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqBAIJ_Inlined"
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  value = v;
  low = 0;
  high = nrow;
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new global block indexed nonzero block (%D, %D) in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
/*
    This routine should be optimized so that the block copy at ** Here a copy is required ** below is not needed
    by passing additional stride information into the MatSetValuesBlocked_SeqBAIJ_Inlined() routine
*/
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If n == 1 (row-oriented) or m == 1 (column-oriented) the block is already contiguous, so a copy is not required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  =  in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new blocked indexed nonzero block (%D, %D) into matrix",im[i],in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
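
/*
   Usage sketch (illustrative): MatSetValuesBlocked() takes *block* indices and
   a dense bs*bs array of values per block.  For bs = 2, inserting one block at
   block row br and block column bc might look like

     PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};
     ierr = MatSetValuesBlocked(mat,1,&br,1,&bc,vals,INSERT_VALUES);CHKERRQ(ierr);

   br and bc are hypothetical names; whether vals is read row- or
   column-oriented follows the MAT_ROW_ORIENTED option, as handled above.
*/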

#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
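
/*
   The HASH macro is a multiplicative hash: the key is multiplied by the
   fractional golden ratio and the fractional part of the product is scaled to
   the table size.  A worked example, for illustration: with size = 100 and
   key = 7, tmp = 7*0.6180339887 = 4.3262..., so HASH returns
   (PetscInt)(100*0.3262...) = 32.  Collisions are resolved by the linear
   probing loops in the routines below.
*/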
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up in the hash table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A hash table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =baij->rendbs,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up in the hash table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  = total_ct;
  baij->ht_insert_ct = insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIBAIJ"
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIBAIJ"
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr =  MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->cstartbs;
      ierr = PetscMalloc2(mat->cmap->N,&tmp,mat->cmap->N,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v);  v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPIU_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc1(bs,&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}
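
/*
   For reference: NORM_1 above is the maximum column sum, max_j sum_i |a_ij|,
   which is why the per-column sums are summed across processes before taking
   the max; NORM_INFINITY is the maximum row sum, max_i sum_j |a_ij|, and since
   rows are not split across processes only the final max is reduced;
   NORM_FROBENIUS accumulates |a_ij|^2 locally and reduces once before the
   square root.
*/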

/*
  Creates and populates the hash table.
  This table is created only once.
  If new entries need to be added to the matrix
  then the hash table has to be destroyed and
  recreated.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate Memory for Hash Table */
  ierr = PetscCalloc2(ht_size,&baij->hd,ht_size,&baij->ht);CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop Over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }
  /* Loop Over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k> max) max = k;
#endif
    }
  }

  /* Print Summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average Search = %5.2f,max search = %D\n",(!j)? 0.0:((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
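
/*
   Sizing note: the table gets factor*nz slots for nz nonzero blocks, so the
   factor must exceed 1.0 if the linear probing above is to leave free slots;
   larger factors trade memory for shorter probe chains.  The factor arrives
   as baij->ht_fact (see MatAssemblyEnd_MPIBAIJ() below) and is adjustable
   through the composed "MatSetHashTableFactor_C" method.
*/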

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
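
/*
   Usage note: MatAssemblyBegin()/MatAssemblyEnd() must be called as a pair on
   all processes once values have been set.  Begin only initiates the scatters
   of the stashed point and block entries destined for other processes; all
   receiving, unpacking and insertion happens in MatAssemblyEnd_MPIBAIJ()
   below.
*/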

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the roworiented flag to column-oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPIU_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in the matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPIU_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatView_SeqBAIJ(Mat,PetscViewer);
#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto first processor. */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr      = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqBAIJ(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_Binary"
static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(rlen/bs,&crow_lens);CHKERRQ(ierr);
  /* compute lengths of each row  */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc1(rlen,&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* load up the local column indices. Include them for every row, not just once per block row,
     since process 0 does not have the information needed to expand a block row into its
     individual rows. This does require more communication, but still no more than the
     communication needed for the nonzero values */
  nzmax = nz; /* space the largest processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc1(nzmax,&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc1(nzmax,&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}
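
/*
   Layout of the binary file written above, for reference: a four-entry header
   {MAT_FILE_CLASSID, M, N, total nz}, then one length per point row, then all
   column indices, then all numerical values.  The block size itself is not
   part of the binary stream; it is recorded in the companion info file as a
   -matload_block_size option, as the fprintf() above shows.
*/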

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ"
PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1351 
1352 #undef __FUNCT__
1353 #define __FUNCT__ "MatDestroy_MPIBAIJ"
1354 PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
1355 {
1356   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1357   PetscErrorCode ierr;
1358 
1359   PetscFunctionBegin;
1360 #if defined(PETSC_USE_LOG)
1361   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
1362 #endif
1363   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
1364   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
1365   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
1366   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
1367 #if defined(PETSC_USE_CTABLE)
1368   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
1369 #else
1370   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
1371 #endif
1372   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
1373   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
1374   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
1375   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
1376   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
1377   ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
1378   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
1379   ierr = PetscFree(mat->data);CHKERRQ(ierr);
1380 
1381   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
1382   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
1383   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
1384   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
1385   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
1386   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
1387   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
1388   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
1389   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 #undef __FUNCT__
1394 #define __FUNCT__ "MatMult_MPIBAIJ"
1395 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1396 {
1397   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1398   PetscErrorCode ierr;
1399   PetscInt       nt;
1400 
1401   PetscFunctionBegin;
1402   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1403   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1404   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1405   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1406   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1407   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1408   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1409   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1410   PetscFunctionReturn(0);
1411 }
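
/*
   MatMult_MPIBAIJ() above overlaps communication with computation: on each
   process y = A_d*x_local + B_o*x_ghost, and the scatter filling the ghost
   values (a->lvec) is in flight while the diagonal-block product runs.
   A hedged usage sketch (x and y are placeholder vectors obtained from A):

     Vec x,y;
     ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);
     ierr = MatMult(A,x,y);CHKERRQ(ierr);
*/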
1412 
1413 #undef __FUNCT__
1414 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1415 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1416 {
1417   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1418   PetscErrorCode ierr;
1419 
1420   PetscFunctionBegin;
1421   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1422   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1423   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1424   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1425   PetscFunctionReturn(0);
1426 }
1427 
1428 #undef __FUNCT__
1429 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1430 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1431 {
1432   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1433   PetscErrorCode ierr;
1434   PetscBool      merged;
1435 
1436   PetscFunctionBegin;
1437   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1438   /* do nondiagonal part */
1439   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1440   if (!merged) {
1441     /* send it on its way */
1442     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1443     /* do local part */
1444     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1445     /* receive remote parts: note this assumes the values are not actually */
1446     /* inserted in yy until the next line */
1447     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1448   } else {
1449     /* do local part */
1450     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1451     /* send it on its way */
1452     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1453     /* the values were actually received in the Begin() above, but we still need to call this no-op End() */
1454     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1455   }
1456   PetscFunctionReturn(0);
1457 }
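
/*
   The ordering above matters: for an unmerged scatter, the reverse scatter of
   the off-diagonal contribution is started first so that it overlaps the local
   transpose product; for a merged scatter, the Begin() already moves the data,
   so the local product into yy must be completed before Begin() is called, and
   the End() is only a required no-op.
*/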
1458 
1459 #undef __FUNCT__
1460 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1461 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1462 {
1463   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1464   PetscErrorCode ierr;
1465 
1466   PetscFunctionBegin;
1467   /* do nondiagonal part */
1468   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1469   /* send it on its way */
1470   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1471   /* do local part */
1472   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1473   /* receive remote parts: note this assumes the values are not actually */
1474   /* inserted in zz until the VecScatterEnd() below, which is true for this */
1475   /* implementation but may not hold for every scatter implementation. */
1476   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1477   PetscFunctionReturn(0);
1478 }
1479 
1480 /*
1481   This only works correctly for square matrices where the subblock A->A is the
1482   diagonal block.
1483 */
1484 #undef __FUNCT__
1485 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1486 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1487 {
1488   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1489   PetscErrorCode ierr;
1490 
1491   PetscFunctionBegin;
1492   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block");
1493   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1494   PetscFunctionReturn(0);
1495 }
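
/*
   A hedged usage sketch for the routine above, assuming A is a square,
   assembled MATMPIBAIJ matrix:

     Vec d;
     ierr = MatCreateVecs(A,NULL,&d);CHKERRQ(ierr);
     ierr = MatGetDiagonal(A,d);CHKERRQ(ierr);
*/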
1496 
1497 #undef __FUNCT__
1498 #define __FUNCT__ "MatScale_MPIBAIJ"
1499 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1500 {
1501   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1502   PetscErrorCode ierr;
1503 
1504   PetscFunctionBegin;
1505   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1506   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1507   PetscFunctionReturn(0);
1508 }
1509 
1510 #undef __FUNCT__
1511 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1512 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1513 {
1514   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1515   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1516   PetscErrorCode ierr;
1517   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1518   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1519   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1520 
1521   PetscFunctionBegin;
1522   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1523   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1524   mat->getrowactive = PETSC_TRUE;
1525 
1526   if (!mat->rowvalues && (idx || v)) {
1527     /*
1528         allocate enough space to hold information from the longest row.
1529     */
1530     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1531     PetscInt    max = 1,mbs = mat->mbs,tmp;
1532     for (i=0; i<mbs; i++) {
1533       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1534       if (max < tmp) max = tmp;
1535     }
1536     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1537   }
1538   lrow = row - brstart;
1539 
1540   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1541   if (!v)   {pvA = 0; pvB = 0;}
1542   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1543   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1544   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1545   nztot = nzA + nzB;
1546 
1547   cmap = mat->garray;
1548   if (v  || idx) {
1549     if (nztot) {
1550       /* Sort by increasing column numbers, assuming A and B already sorted */
1551       PetscInt imark = -1;
1552       if (v) {
1553         *v = v_p = mat->rowvalues;
1554         for (i=0; i<nzB; i++) {
1555           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1556           else break;
1557         }
1558         imark = i;
1559         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1560         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1561       }
1562       if (idx) {
1563         *idx = idx_p = mat->rowindices;
1564         if (imark > -1) {
1565           for (i=0; i<imark; i++) {
1566             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1567           }
1568         } else {
1569           for (i=0; i<nzB; i++) {
1570             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1571             else break;
1572           }
1573           imark = i;
1574         }
1575         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1576         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1577       }
1578     } else {
1579       if (idx) *idx = 0;
1580       if (v)   *v   = 0;
1581     }
1582   }
1583   *nz  = nztot;
1584   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1585   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1586   PetscFunctionReturn(0);
1587 }
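
/*
   The getrow above merges the already sorted diagonal (A) and off-diagonal (B)
   parts of a row into one globally sorted row. A hedged usage sketch of the
   public interface it implements (row is a locally owned global row index):

     PetscInt          ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     ierr = MatGetRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
     ... read cols[0..ncols-1] and vals[0..ncols-1] (owned by the matrix) ...
     ierr = MatRestoreRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
*/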
1588 
1589 #undef __FUNCT__
1590 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1591 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1592 {
1593   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1594 
1595   PetscFunctionBegin;
1596   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1597   baij->getrowactive = PETSC_FALSE;
1598   PetscFunctionReturn(0);
1599 }
1600 
1601 #undef __FUNCT__
1602 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1603 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1604 {
1605   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1606   PetscErrorCode ierr;
1607 
1608   PetscFunctionBegin;
1609   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1610   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1611   PetscFunctionReturn(0);
1612 }
1613 
1614 #undef __FUNCT__
1615 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1616 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1617 {
1618   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1619   Mat            A  = a->A,B = a->B;
1620   PetscErrorCode ierr;
1621   PetscReal      isend[5],irecv[5];
1622 
1623   PetscFunctionBegin;
1624   info->block_size = (PetscReal)matin->rmap->bs;
1625 
1626   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1627 
1628   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1629   isend[3] = info->memory;  isend[4] = info->mallocs;
1630 
1631   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1632 
1633   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1634   isend[3] += info->memory;  isend[4] += info->mallocs;
1635 
1636   if (flag == MAT_LOCAL) {
1637     info->nz_used      = isend[0];
1638     info->nz_allocated = isend[1];
1639     info->nz_unneeded  = isend[2];
1640     info->memory       = isend[3];
1641     info->mallocs      = isend[4];
1642   } else if (flag == MAT_GLOBAL_MAX) {
1643     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1644 
1645     info->nz_used      = irecv[0];
1646     info->nz_allocated = irecv[1];
1647     info->nz_unneeded  = irecv[2];
1648     info->memory       = irecv[3];
1649     info->mallocs      = irecv[4];
1650   } else if (flag == MAT_GLOBAL_SUM) {
1651     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1652 
1653     info->nz_used      = irecv[0];
1654     info->nz_allocated = irecv[1];
1655     info->nz_unneeded  = irecv[2];
1656     info->memory       = irecv[3];
1657     info->mallocs      = irecv[4];
1658   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1659   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1660   info->fill_ratio_needed = 0;
1661   info->factor_mallocs    = 0;
1662   PetscFunctionReturn(0);
1663 }
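
/*
   A hedged usage sketch for the routine above; MAT_GLOBAL_SUM reduces the
   per-process counts with MPIU_SUM, as coded:

     MatInfo info;
     ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"nonzeros used %g\n",(double)info.nz_used);CHKERRQ(ierr);
*/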
1664 
1665 #undef __FUNCT__
1666 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1667 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1668 {
1669   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1670   PetscErrorCode ierr;
1671 
1672   PetscFunctionBegin;
1673   switch (op) {
1674   case MAT_NEW_NONZERO_LOCATIONS:
1675   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1676   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1677   case MAT_KEEP_NONZERO_PATTERN:
1678   case MAT_NEW_NONZERO_LOCATION_ERR:
1679     MatCheckPreallocated(A,1);
1680     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1681     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1682     break;
1683   case MAT_ROW_ORIENTED:
1684     MatCheckPreallocated(A,1);
1685     a->roworiented = flg;
1686 
1687     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1688     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1689     break;
1690   case MAT_NEW_DIAGONALS:
1691     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1692     break;
1693   case MAT_IGNORE_OFF_PROC_ENTRIES:
1694     a->donotstash = flg;
1695     break;
1696   case MAT_USE_HASH_TABLE:
1697     a->ht_flag = flg;
1698     break;
1699   case MAT_SYMMETRIC:
1700   case MAT_STRUCTURALLY_SYMMETRIC:
1701   case MAT_HERMITIAN:
1702   case MAT_SYMMETRY_ETERNAL:
1703     MatCheckPreallocated(A,1);
1704     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1705     break;
1706   default:
1707     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1708   }
1709   PetscFunctionReturn(0);
1710 }
1711 
1712 #undef __FUNCT__
1713 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1714 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1715 {
1716   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1717   Mat_SeqBAIJ    *Aloc;
1718   Mat            B;
1719   PetscErrorCode ierr;
1720   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1721   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1722   MatScalar      *a;
1723 
1724   PetscFunctionBegin;
1725   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1726   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1727     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1728     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1729     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1730     /* Do not know preallocation information, but must set block size */
1731     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1732   } else {
1733     B = *matout;
1734   }
1735 
1736   /* copy over the A part */
1737   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1738   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1739   ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
1740 
1741   for (i=0; i<mbs; i++) {
1742     rvals[0] = bs*(baij->rstartbs + i);
1743     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1744     for (j=ai[i]; j<ai[i+1]; j++) {
1745       col = (baij->cstartbs+aj[j])*bs;
1746       for (k=0; k<bs; k++) {
1747         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1748 
1749         col++; a += bs;
1750       }
1751     }
1752   }
1753   /* copy over the B part */
1754   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1755   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1756   for (i=0; i<mbs; i++) {
1757     rvals[0] = bs*(baij->rstartbs + i);
1758     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1759     for (j=ai[i]; j<ai[i+1]; j++) {
1760       col = baij->garray[aj[j]]*bs;
1761       for (k=0; k<bs; k++) {
1762         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1763         col++;
1764         a += bs;
1765       }
1766     }
1767   }
1768   ierr = PetscFree(rvals);CHKERRQ(ierr);
1769   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1770   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1771 
1772   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1773   else {
1774     ierr = MatHeaderMerge(A,&B);CHKERRQ(ierr);
1775   }
1776   PetscFunctionReturn(0);
1777 }
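
/*
   A hedged usage sketch for the routine above; in-place transposition
   (MAT_REUSE_MATRIX with *matout == A) is only supported for square matrices,
   as checked at entry:

     Mat At;
     ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
*/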
1778 
1779 #undef __FUNCT__
1780 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1781 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1782 {
1783   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1784   Mat            a     = baij->A,b = baij->B;
1785   PetscErrorCode ierr;
1786   PetscInt       s1,s2,s3;
1787 
1788   PetscFunctionBegin;
1789   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1790   if (rr) {
1791     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1792     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1793     /* Overlap communication with computation. */
1794     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1795   }
1796   if (ll) {
1797     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1798     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1799     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1800   }
1801   /* scale the diagonal block */
1802   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1803 
1804   if (rr) {
1805     /* Do a scatter end and then right scale the off-diagonal block */
1806     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1807     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1808   }
1809   PetscFunctionReturn(0);
1810 }
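
/*
   The routine above computes mat = diag(ll)*mat*diag(rr). Left scaling only
   touches locally owned rows, so it proceeds immediately; right scaling of the
   off-diagonal block needs the ghost entries of rr, so the scatter into
   baij->lvec is overlapped with scaling the diagonal block.
*/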
1811 
1812 #undef __FUNCT__
1813 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1814 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1815 {
1816   Mat_MPIBAIJ   *l      = (Mat_MPIBAIJ *) A->data;
1817   PetscInt      *lrows;
1818   PetscInt       r, len;
1819   PetscErrorCode ierr;
1820 
1821   PetscFunctionBegin;
1822   /* get locally owned rows */
1823   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1824   /* fix right hand side if needed */
1825   if (x && b) {
1826     const PetscScalar *xx;
1827     PetscScalar       *bb;
1828 
1829     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1830     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1831     for (r = 0; r < len; ++r) bb[lrows[r]] = diag*xx[lrows[r]];
1832     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1833     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1834   }
1835 
1836   /* actually zap the local rows */
1837   /*
1838         Zero the required rows. If the "diagonal block" of the matrix
1839      is square and the user wishes to set the diagonal, we use separate
1840      code so that MatSetValues() is not called for each diagonal entry;
1841      that would allocate new memory with many mallocs and slow things
1842      down.
1843   */
1844   /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
1845   ierr = MatZeroRows_SeqBAIJ(l->B,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1846   if (A->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
1847     PetscBool cong;
1848     ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
1849     if (cong) A->congruentlayouts = 1;
1850     else      A->congruentlayouts = 0;
1851   }
1852   if ((diag != 0.0) && A->congruentlayouts) {
1853     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,diag,NULL,NULL);CHKERRQ(ierr);
1854   } else if (diag != 0.0) {
1855     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1856     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1857        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1858     for (r = 0; r < len; ++r) {
1859       const PetscInt row = lrows[r] + A->rmap->rstart;
1860       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1861     }
1862     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1863     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1864   } else {
1865     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1866   }
1867   ierr = PetscFree(lrows);CHKERRQ(ierr);
1868 
1869   /* only change matrix nonzero state if pattern was allowed to be changed */
1870   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1871     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1872     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1873   }
1874   PetscFunctionReturn(0);
1875 }
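
/*
   A hedged usage sketch for the routine above: rows[] holds global row indices
   (any process may list any row), and passing x and b additionally sets
   b[r] = diag*x[r] for each zeroed row r:

     ierr = MatZeroRows(A,nrows,rows,diag,x,b);CHKERRQ(ierr);
*/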
1876 
1877 #undef __FUNCT__
1878 #define __FUNCT__ "MatZeroRowsColumns_MPIBAIJ"
1879 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1880 {
1881   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1882   PetscErrorCode    ierr;
1883   PetscMPIInt       n = A->rmap->n;
1884   PetscInt          i,j,k,r,p = 0,len = 0,row,col,count;
1885   PetscInt          *lrows,*owners = A->rmap->range;
1886   PetscSFNode       *rrows;
1887   PetscSF           sf;
1888   const PetscScalar *xx;
1889   PetscScalar       *bb,*mask;
1890   Vec               xmask,lmask;
1891   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ*)l->B->data;
1892   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1893   PetscScalar       *aa;
1894 
1895   PetscFunctionBegin;
1896   /* Create SF where leaves are input rows and roots are owned rows */
1897   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1898   for (r = 0; r < n; ++r) lrows[r] = -1;
1899   ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);
1900   for (r = 0; r < N; ++r) {
1901     const PetscInt idx   = rows[r];
1902     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1903     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1904       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1905     }
1906     rrows[r].rank  = p;
1907     rrows[r].index = rows[r] - owners[p];
1908   }
1909   ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1910   ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1911   /* Collect flags for rows to be zeroed */
1912   ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1913   ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1914   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1915   /* Compress and put in row numbers */
1916   for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1917   /* zero diagonal part of matrix */
1918   ierr = MatZeroRowsColumns(l->A,len,lrows,diag,x,b);CHKERRQ(ierr);
1919   /* handle off diagonal part of matrix */
1920   ierr = MatCreateVecs(A,&xmask,NULL);CHKERRQ(ierr);
1921   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
1922   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
1923   for (i=0; i<len; i++) bb[lrows[i]] = 1;
1924   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1925   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1926   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1927   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1928   if (x) {
1929     ierr = VecScatterBegin(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1930     ierr = VecScatterEnd(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1931     ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1932     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1933   }
1934   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1935   /* remove zeroed rows of off diagonal matrix */
1936   for (i = 0; i < len; ++i) {
1937     row   = lrows[i];
1938     count = (baij->i[row/bs +1] - baij->i[row/bs])*bs;
1939     aa    = ((MatScalar*)(baij->a)) + baij->i[row/bs]*bs2 + (row%bs);
1940     for (k = 0; k < count; ++k) {
1941       aa[0] = 0.0;
1942       aa   += bs;
1943     }
1944   }
1945   /* loop over all elements of the off-process part of the matrix, zeroing the removed columns */
1946   for (i = 0; i < l->B->rmap->N; ++i) {
1947     row = i/bs;
1948     for (j = baij->i[row]; j < baij->i[row+1]; ++j) {
1949       for (k = 0; k < bs; ++k) {
1950         col = bs*baij->j[j] + k;
1951         if (PetscAbsScalar(mask[col])) {
1952           aa = ((MatScalar*)(baij->a)) + j*bs2 + (i%bs) + bs*k;
1953           if (x) bb[i] -= aa[0]*xx[col];
1954           aa[0] = 0.0;
1955         }
1956       }
1957     }
1958   }
1959   if (x) {
1960     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1961     ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1962   }
1963   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1964   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
1965   ierr = PetscFree(lrows);CHKERRQ(ierr);
1966 
1967   /* only change matrix nonzero state if pattern was allowed to be changed */
1968   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1969     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1970     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1971   }
1972   PetscFunctionReturn(0);
1973 }
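
/*
   Technique note for the routine above: a 0/1 mask vector marking the zeroed
   rows is scattered forward through l->Mvctx, so each process learns which of
   its ghost columns are being eliminated and can zero the matching entries of
   the off-diagonal block (and fold them into b when x is given) without any
   further communication of the row lists themselves.
*/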
1974 
1975 #undef __FUNCT__
1976 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
1977 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1978 {
1979   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1980   PetscErrorCode ierr;
1981 
1982   PetscFunctionBegin;
1983   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1984   PetscFunctionReturn(0);
1985 }
1986 
1987 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
1988 
1989 #undef __FUNCT__
1990 #define __FUNCT__ "MatEqual_MPIBAIJ"
1991 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
1992 {
1993   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
1994   Mat            a,b,c,d;
1995   PetscBool      flg;
1996   PetscErrorCode ierr;
1997 
1998   PetscFunctionBegin;
1999   a = matA->A; b = matA->B;
2000   c = matB->A; d = matB->B;
2001 
2002   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
2003   if (flg) {
2004     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
2005   }
2006   ierr = MPIU_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2007   PetscFunctionReturn(0);
2008 }
2009 
2010 #undef __FUNCT__
2011 #define __FUNCT__ "MatCopy_MPIBAIJ"
2012 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
2013 {
2014   PetscErrorCode ierr;
2015   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2016   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2017 
2018   PetscFunctionBegin;
2019   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
2020   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
2021     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2022   } else {
2023     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
2024     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
2025   }
2026   PetscFunctionReturn(0);
2027 }
2028 
2029 #undef __FUNCT__
2030 #define __FUNCT__ "MatSetUp_MPIBAIJ"
2031 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
2032 {
2033   PetscErrorCode ierr;
2034 
2035   PetscFunctionBegin;
2036   ierr = MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
2037   PetscFunctionReturn(0);
2038 }
2039 
2040 #undef __FUNCT__
2041 #define __FUNCT__ "MatAXPYGetPreallocation_MPIBAIJ"
2042 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
2043 {
2044   PetscErrorCode ierr;
2045   PetscInt       bs = Y->rmap->bs,m = Y->rmap->N/bs;
2046   Mat_SeqBAIJ    *x = (Mat_SeqBAIJ*)X->data;
2047   Mat_SeqBAIJ    *y = (Mat_SeqBAIJ*)Y->data;
2048 
2049   PetscFunctionBegin;
2050   ierr = MatAXPYGetPreallocation_MPIX_private(m,x->i,x->j,xltog,y->i,y->j,yltog,nnz);CHKERRQ(ierr);
2051   PetscFunctionReturn(0);
2052 }
2053 
2054 #undef __FUNCT__
2055 #define __FUNCT__ "MatAXPY_MPIBAIJ"
2056 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
2057 {
2058   PetscErrorCode ierr;
2059   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
2060   PetscBLASInt   bnz,one=1;
2061   Mat_SeqBAIJ    *x,*y;
2062 
2063   PetscFunctionBegin;
2064   if (str == SAME_NONZERO_PATTERN) {
2065     PetscScalar alpha = a;
2066     x    = (Mat_SeqBAIJ*)xx->A->data;
2067     y    = (Mat_SeqBAIJ*)yy->A->data;
2068     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2069     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2070     x    = (Mat_SeqBAIJ*)xx->B->data;
2071     y    = (Mat_SeqBAIJ*)yy->B->data;
2072     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2073     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2074     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
2075   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
2076     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2077   } else {
2078     Mat      B;
2079     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
2080     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
2081     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
2082     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
2083     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
2084     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
2085     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
2086     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
2087     ierr = MatAXPYGetPreallocation_SeqBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
2088     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
2089     ierr = MatMPIBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
2090     /* MatAXPY_BasicWithPreallocation() for a BAIJ matrix is much slower than for AIJ, even for bs=1! */
2091     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2092     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2093     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
2094     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2095   }
2096   PetscFunctionReturn(0);
2097 }
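
/*
   A hedged usage sketch for the routine above, computing Y = Y + a*X; pass
   SAME_NONZERO_PATTERN only when X's pattern really matches Y's, since the
   BLAS fast path assumes identical nonzero layouts:

     ierr = MatAXPY(Y,alpha,X,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
*/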
2098 
2099 #undef __FUNCT__
2100 #define __FUNCT__ "MatRealPart_MPIBAIJ"
2101 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
2102 {
2103   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2104   PetscErrorCode ierr;
2105 
2106   PetscFunctionBegin;
2107   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2108   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2109   PetscFunctionReturn(0);
2110 }
2111 
2112 #undef __FUNCT__
2113 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
2114 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2115 {
2116   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2117   PetscErrorCode ierr;
2118 
2119   PetscFunctionBegin;
2120   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2121   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2122   PetscFunctionReturn(0);
2123 }
2124 
2125 #undef __FUNCT__
2126 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2127 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2128 {
2129   PetscErrorCode ierr;
2130   IS             iscol_local;
2131   PetscInt       csize;
2132 
2133   PetscFunctionBegin;
2134   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2135   if (call == MAT_REUSE_MATRIX) {
2136     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2137     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2138   } else {
2139     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2140   }
2141   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2142   if (call == MAT_INITIAL_MATRIX) {
2143     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2144     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2145   }
2146   PetscFunctionReturn(0);
2147 }
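
/*
   A hedged reuse sketch for the routine above; the gathered column IS is
   composed with the submatrix so that a later MAT_REUSE_MATRIX call can find it:

     Mat S;
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&S);CHKERRQ(ierr);
     ... update the values of A, keeping the same nonzero pattern ...
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_REUSE_MATRIX,&S);CHKERRQ(ierr);
*/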
2148 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2149 #undef __FUNCT__
2150 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2151 /*
2152   Not great since it makes two copies of the submatrix: first a SeqBAIJ
2153   locally, and then the end result by concatenating the local matrices.
2154   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2155   This routine is used for BAIJ and SBAIJ matrices (unfortunate dependency).
2156 */
2157 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2158 {
2159   PetscErrorCode ierr;
2160   PetscMPIInt    rank,size;
2161   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2162   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2163   Mat            M,Mreuse;
2164   MatScalar      *vwork,*aa;
2165   MPI_Comm       comm;
2166   IS             isrow_new, iscol_new;
2167   PetscBool      idflag,allrows, allcols;
2168   Mat_SeqBAIJ    *aij;
2169 
2170   PetscFunctionBegin;
2171   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2172   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2173   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2174   /* The compression and expansion should be avoided: it does not report
2175      errors and might change the indices, hence it is buggy */
2176   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2177   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2178 
2179   /* Check for special case: each processor gets entire matrix columns */
2180   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2181   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2182   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2183   else allcols = PETSC_FALSE;
2184 
2185   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2186   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2187   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2188   else allrows = PETSC_FALSE;
2189 
2190   if (call ==  MAT_REUSE_MATRIX) {
2191     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2192     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2193     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2194   } else {
2195     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2196   }
2197   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2198   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2199   /*
2200       m - number of local rows
2201       n - number of columns (same on all processors)
2202       rstart - first row in new global matrix generated
2203   */
2204   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2205   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2206   m    = m/bs;
2207   n    = n/bs;
2208 
2209   if (call == MAT_INITIAL_MATRIX) {
2210     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2211     ii  = aij->i;
2212     jj  = aij->j;
2213 
2214     /*
2215         Determine the number of non-zeros in the diagonal and off-diagonal
2216         portions of the matrix in order to do correct preallocation
2217     */
2218 
2219     /* first get start and end of "diagonal" columns */
2220     if (csize == PETSC_DECIDE) {
2221       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2222       if (mglobal == n*bs) { /* square matrix */
2223         nlocal = m;
2224       } else {
2225         nlocal = n/size + ((n % size) > rank);
2226       }
2227     } else {
2228       nlocal = csize/bs;
2229     }
2230     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2231     rstart = rend - nlocal;
2232     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2233 
2234     /* next, compute all the lengths */
2235     ierr  = PetscMalloc2(m+1,&dlens,m+1,&olens);CHKERRQ(ierr);
2236     for (i=0; i<m; i++) {
2237       jend = ii[i+1] - ii[i];
2238       olen = 0;
2239       dlen = 0;
2240       for (j=0; j<jend; j++) {
2241         if (*jj < rstart || *jj >= rend) olen++;
2242         else dlen++;
2243         jj++;
2244       }
2245       olens[i] = olen;
2246       dlens[i] = dlen;
2247     }
2248     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2249     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2250     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2251     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2252     ierr = MatMPISBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2253     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2254   } else {
2255     PetscInt ml,nl;
2256 
2257     M    = *newmat;
2258     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2259     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2260     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2261     /*
2262          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2263        rather than the slower MatSetValues().
2264     */
2265     M->was_assembled = PETSC_TRUE;
2266     M->assembled     = PETSC_FALSE;
2267   }
2268   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2269   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2270   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2271   ii   = aij->i;
2272   jj   = aij->j;
2273   aa   = aij->a;
2274   for (i=0; i<m; i++) {
2275     row   = rstart/bs + i;
2276     nz    = ii[i+1] - ii[i];
2277     cwork = jj;     jj += nz;
2278     vwork = aa;     aa += nz*bs*bs;
2279     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2280   }
2281 
2282   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2283   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2284   *newmat = M;
2285 
2286   /* save submatrix used in processor for next request */
2287   if (call ==  MAT_INITIAL_MATRIX) {
2288     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2289     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2290   }
2291   PetscFunctionReturn(0);
2292 }
2293 
2294 #undef __FUNCT__
2295 #define __FUNCT__ "MatPermute_MPIBAIJ"
2296 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2297 {
2298   MPI_Comm       comm,pcomm;
2299   PetscInt       clocal_size,nrows;
2300   const PetscInt *rows;
2301   PetscMPIInt    size;
2302   IS             crowp,lcolp;
2303   PetscErrorCode ierr;
2304 
2305   PetscFunctionBegin;
2306   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2307   /* make a collective version of 'rowp' */
2308   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2309   if (pcomm==comm) {
2310     crowp = rowp;
2311   } else {
2312     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2313     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2314     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2315     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2316   }
2317   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2318   /* make a local version of 'colp' */
2319   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2320   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2321   if (size==1) {
2322     lcolp = colp;
2323   } else {
2324     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2325   }
2326   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2327   /* now we just get the submatrix */
2328   ierr = MatGetLocalSize(A,NULL,&clocal_size);CHKERRQ(ierr);
2329   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2330   /* clean up */
2331   if (pcomm!=comm) {
2332     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2333   }
2334   if (size>1) {
2335     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2336   }
2337   PetscFunctionReturn(0);
2338 }
2339 
2340 #undef __FUNCT__
2341 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2342 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2343 {
2344   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2345   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2346 
2347   PetscFunctionBegin;
2348   if (nghosts) *nghosts = B->nbs;
2349   if (ghosts) *ghosts = baij->garray;
2350   PetscFunctionReturn(0);
2351 }
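
/*
   A hedged usage sketch for the routine above; the returned array is
   baij->garray (the block column indices of the ghost blocks) and must not be
   freed by the caller:

     PetscInt       nghost;
     const PetscInt *ghost;
     ierr = MatGetGhosts(mat,&nghost,&ghost);CHKERRQ(ierr);
*/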
2352 
2353 #undef __FUNCT__
2354 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2355 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2356 {
2357   Mat            B;
2358   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2359   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2360   Mat_SeqAIJ     *b;
2361   PetscErrorCode ierr;
2362   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2363   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2364   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2365 
2366   PetscFunctionBegin;
2367   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2368   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2369 
2370   /* ----------------------------------------------------------------
2371      Tell every processor the number of nonzeros per row
2372   */
2373   ierr = PetscMalloc1(A->rmap->N/bs,&lens);CHKERRQ(ierr);
2374   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2375     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2376   }
2377   ierr      = PetscMalloc1(2*size,&recvcounts);CHKERRQ(ierr);
2378   displs    = recvcounts + size;
2379   for (i=0; i<size; i++) {
2380     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2381     displs[i]     = A->rmap->range[i]/bs;
2382   }
2383 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2384   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2385 #else
2386   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2387   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2388 #endif
2389   /* ---------------------------------------------------------------
2390      Create the sequential matrix of the same type as the local block diagonal
2391   */
2392   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2393   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2394   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2395   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2396   b    = (Mat_SeqAIJ*)B->data;
2397 
2398   /*--------------------------------------------------------------------
2399     Copy my part of matrix column indices over
2400   */
2401   sendcount  = ad->nz + bd->nz;
2402   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2403   a_jsendbuf = ad->j;
2404   b_jsendbuf = bd->j;
2405   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2406   cnt        = 0;
2407   for (i=0; i<n; i++) {
2408 
2409     /* put in lower diagonal portion */
2410     m = bd->i[i+1] - bd->i[i];
2411     while (m > 0) {
2412       /* is it above diagonal (in bd (compressed) numbering) */
2413       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2414       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2415       m--;
2416     }
2417 
2418     /* put in diagonal portion */
2419     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2420       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2421     }
2422 
2423     /* put in upper diagonal portion */
2424     while (m-- > 0) {
2425       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2426     }
2427   }
2428   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2429 
2430   /*--------------------------------------------------------------------
2431     Gather all column indices to all processors
2432   */
2433   for (i=0; i<size; i++) {
2434     recvcounts[i] = 0;
2435     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2436       recvcounts[i] += lens[j];
2437     }
2438   }
2439   displs[0] = 0;
2440   for (i=1; i<size; i++) {
2441     displs[i] = displs[i-1] + recvcounts[i-1];
2442   }
2443 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2444   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2445 #else
2446   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2447 #endif
2448   /*--------------------------------------------------------------------
2449     Assemble the matrix into usable form (note the numerical values are not yet set)
2450   */
2451   /* set the b->ilen (length of each row) values */
2452   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2453   /* set the b->i indices */
2454   b->i[0] = 0;
2455   for (i=1; i<=A->rmap->N/bs; i++) {
2456     b->i[i] = b->i[i-1] + lens[i-1];
2457   }
2458   ierr = PetscFree(lens);CHKERRQ(ierr);
2459   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2460   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2461   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2462 
2463   if (A->symmetric) {
2464     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2465   } else if (A->hermitian) {
2466     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2467   } else if (A->structurally_symmetric) {
2468     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2469   }
2470   *newmat = B;
2471   PetscFunctionReturn(0);
2472 }
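
/*
   Communication pattern above, summarized: two MPI_Allgatherv calls (row
   lengths first, then column indices) replicate the nonzero structure of the
   whole parallel matrix as one sequential AIJ matrix on every process; only
   the pattern is meaningful, the values array is left unset.
*/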
2473 
2474 #undef __FUNCT__
2475 #define __FUNCT__ "MatSOR_MPIBAIJ"
2476 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2477 {
2478   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2479   PetscErrorCode ierr;
2480   Vec            bb1 = 0;
2481 
2482   PetscFunctionBegin;
2483   if (flag == SOR_APPLY_UPPER) {
2484     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2485     PetscFunctionReturn(0);
2486   }
2487 
2488   if (its > 1 || !(flag & SOR_ZERO_INITIAL_GUESS)) {
2489     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2490   }
2491 
2492   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2493     if (flag & SOR_ZERO_INITIAL_GUESS) {
2494       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2495       its--;
2496     }
2497 
2498     while (its--) {
2499       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2500       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2501 
2502       /* update rhs: bb1 = bb - B*x */
2503       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2504       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2505 
2506       /* local sweep */
2507       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2508     }
2509   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2510     if (flag & SOR_ZERO_INITIAL_GUESS) {
2511       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2512       its--;
2513     }
2514     while (its--) {
2515       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2516       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2517 
2518       /* update rhs: bb1 = bb - B*x */
2519       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2520       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2521 
2522       /* local sweep */
2523       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2524     }
2525   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2526     if (flag & SOR_ZERO_INITIAL_GUESS) {
2527       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2528       its--;
2529     }
2530     while (its--) {
2531       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2532       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2533 
2534       /* update rhs: bb1 = bb - B*x */
2535       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2536       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2537 
2538       /* local sweep */
2539       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2540     }
2541   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Requested parallel SOR version not supported");
2542 
2543   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2544   PetscFunctionReturn(0);
2545 }
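
/*
   Each local sweep above first folds the off-process coupling into the right
   hand side, bb1 = bb - B*x_ghost, and then runs the requested sequential
   sweep on the diagonal block, i.e. it relaxes A_d*x = bb1 process by process
   (block Jacobi between processes, SOR within each process).
*/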
2546 
2547 #undef __FUNCT__
2548 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2549 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2550 {
2551   PetscErrorCode ierr;
2552   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2553   PetscInt       N,i,*garray = aij->garray;
2554   PetscInt       ib,jb,bs = A->rmap->bs;
2555   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2556   MatScalar      *a_val = a_aij->a;
2557   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2558   MatScalar      *b_val = b_aij->a;
2559   PetscReal      *work;
2560 
2561   PetscFunctionBegin;
2562   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2563   ierr = PetscCalloc1(N,&work);CHKERRQ(ierr);
2564   if (type == NORM_2) {
2565     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2566       for (jb=0; jb<bs; jb++) {
2567         for (ib=0; ib<bs; ib++) {
2568           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2569           a_val++;
2570         }
2571       }
2572     }
2573     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2574       for (jb=0; jb<bs; jb++) {
2575         for (ib=0; ib<bs; ib++) {
2576           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2577           b_val++;
2578         }
2579       }
2580     }
2581   } else if (type == NORM_1) {
2582     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2583       for (jb=0; jb<bs; jb++) {
2584         for (ib=0; ib<bs; ib++) {
2585           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2586           a_val++;
2587         }
2588       }
2589     }
2590     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2591       for (jb=0; jb<bs; jb++) {
2592        for (ib=0; ib<bs; ib++) {
2593           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2594           b_val++;
2595         }
2596       }
2597     }
2598   } else if (type == NORM_INFINITY) {
2599     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2600       for (jb=0; jb<bs; jb++) {
2601         for (ib=0; ib<bs; ib++) {
2602           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2603           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2604           a_val++;
2605         }
2606       }
2607     }
2608     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2609       for (jb=0; jb<bs; jb++) {
2610         for (ib=0; ib<bs; ib++) {
2611           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2612           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2613           b_val++;
2614         }
2615       }
2616     }
2617   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2618   if (type == NORM_INFINITY) {
2619     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2620   } else {
2621     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2622   }
2623   ierr = PetscFree(work);CHKERRQ(ierr);
2624   if (type == NORM_2) {
2625     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2626   }
2627   PetscFunctionReturn(0);
2628 }
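
/*
   Reduction strategy above, as a sketch: every process accumulates its
   contribution into a full-length work[] indexed by global column (diagonal
   block shifted by A->cmap->rstart, off-diagonal mapped through garray), then
   a single MPIU_Allreduce with MPIU_SUM (or MPIU_MAX for NORM_INFINITY)
   combines the contributions, and NORM_2 takes square roots at the end.
*/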
2629 
2630 #undef __FUNCT__
2631 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2632 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2633 {
2634   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2635   PetscErrorCode ierr;
2636 
2637   PetscFunctionBegin;
2638   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2639   A->factorerrortype             = a->A->factorerrortype;
2640   A->factorerror_zeropivot_value = a->A->factorerror_zeropivot_value;
2641   A->factorerror_zeropivot_row   = a->A->factorerror_zeropivot_row;
2642   PetscFunctionReturn(0);
2643 }
2644 
2645 #undef __FUNCT__
2646 #define __FUNCT__ "MatShift_MPIBAIJ"
2647 PetscErrorCode MatShift_MPIBAIJ(Mat Y,PetscScalar a)
2648 {
2649   PetscErrorCode ierr;
2650   Mat_MPIBAIJ    *maij = (Mat_MPIBAIJ*)Y->data;
2651   Mat_SeqBAIJ    *aij = (Mat_SeqBAIJ*)maij->A->data;
2652 
2653   PetscFunctionBegin;
2654   if (!Y->preallocated) {
2655     ierr = MatMPIBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
2656   } else if (!aij->nz) {
2657     PetscInt nonew = aij->nonew;
2658     ierr = MatSeqBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
2659     aij->nonew = nonew;
2660   }
2661   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
2662   PetscFunctionReturn(0);
2663 }
2664 
2665 #undef __FUNCT__
2666 #define __FUNCT__ "MatMissingDiagonal_MPIBAIJ"
2667 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A,PetscBool  *missing,PetscInt *d)
2668 {
2669   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2670   PetscErrorCode ierr;
2671 
2672   PetscFunctionBegin;
2673   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only works for square matrices");
2674   ierr = MatMissingDiagonal(a->A,missing,d);CHKERRQ(ierr);
2675   if (d) {
2676     PetscInt rstart;
2677     ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
2678     *d += rstart/A->rmap->bs;
2679 
2680   }
2681   PetscFunctionReturn(0);
2682 }
2683 
2684 #undef __FUNCT__
2685 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2686 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2687 {
2688   PetscFunctionBegin;
2689   *a = ((Mat_MPIBAIJ*)A->data)->A;
2690   PetscFunctionReturn(0);
2691 }
2692 
2693 /* -------------------------------------------------------------------*/
2694 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2695                                        MatGetRow_MPIBAIJ,
2696                                        MatRestoreRow_MPIBAIJ,
2697                                        MatMult_MPIBAIJ,
2698                                 /* 4*/ MatMultAdd_MPIBAIJ,
2699                                        MatMultTranspose_MPIBAIJ,
2700                                        MatMultTransposeAdd_MPIBAIJ,
2701                                        0,
2702                                        0,
2703                                        0,
2704                                 /*10*/ 0,
2705                                        0,
2706                                        0,
2707                                        MatSOR_MPIBAIJ,
2708                                        MatTranspose_MPIBAIJ,
2709                                 /*15*/ MatGetInfo_MPIBAIJ,
2710                                        MatEqual_MPIBAIJ,
2711                                        MatGetDiagonal_MPIBAIJ,
2712                                        MatDiagonalScale_MPIBAIJ,
2713                                        MatNorm_MPIBAIJ,
2714                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2715                                        MatAssemblyEnd_MPIBAIJ,
2716                                        MatSetOption_MPIBAIJ,
2717                                        MatZeroEntries_MPIBAIJ,
2718                                 /*24*/ MatZeroRows_MPIBAIJ,
2719                                        0,
2720                                        0,
2721                                        0,
2722                                        0,
2723                                 /*29*/ MatSetUp_MPIBAIJ,
2724                                        0,
2725                                        0,
2726                                        MatGetDiagonalBlock_MPIBAIJ,
2727                                        0,
2728                                 /*34*/ MatDuplicate_MPIBAIJ,
2729                                        0,
2730                                        0,
2731                                        0,
2732                                        0,
2733                                 /*39*/ MatAXPY_MPIBAIJ,
2734                                        MatGetSubMatrices_MPIBAIJ,
2735                                        MatIncreaseOverlap_MPIBAIJ,
2736                                        MatGetValues_MPIBAIJ,
2737                                        MatCopy_MPIBAIJ,
2738                                 /*44*/ 0,
2739                                        MatScale_MPIBAIJ,
2740                                        MatShift_MPIBAIJ,
2741                                        0,
2742                                        MatZeroRowsColumns_MPIBAIJ,
2743                                 /*49*/ 0,
2744                                        0,
2745                                        0,
2746                                        0,
2747                                        0,
2748                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2749                                        0,
2750                                        MatSetUnfactored_MPIBAIJ,
2751                                        MatPermute_MPIBAIJ,
2752                                        MatSetValuesBlocked_MPIBAIJ,
2753                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2754                                        MatDestroy_MPIBAIJ,
2755                                        MatView_MPIBAIJ,
2756                                        0,
2757                                        0,
2758                                 /*64*/ 0,
2759                                        0,
2760                                        0,
2761                                        0,
2762                                        0,
2763                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2764                                        0,
2765                                        0,
2766                                        0,
2767                                        0,
2768                                 /*74*/ 0,
2769                                        MatFDColoringApply_BAIJ,
2770                                        0,
2771                                        0,
2772                                        0,
2773                                 /*79*/ 0,
2774                                        0,
2775                                        0,
2776                                        0,
2777                                        MatLoad_MPIBAIJ,
2778                                 /*84*/ 0,
2779                                        0,
2780                                        0,
2781                                        0,
2782                                        0,
2783                                 /*89*/ 0,
2784                                        0,
2785                                        0,
2786                                        0,
2787                                        0,
2788                                 /*94*/ 0,
2789                                        0,
2790                                        0,
2791                                        0,
2792                                        0,
2793                                 /*99*/ 0,
2794                                        0,
2795                                        0,
2796                                        0,
2797                                        0,
2798                                 /*104*/0,
2799                                        MatRealPart_MPIBAIJ,
2800                                        MatImaginaryPart_MPIBAIJ,
2801                                        0,
2802                                        0,
2803                                 /*109*/0,
2804                                        0,
2805                                        0,
2806                                        0,
2807                                        MatMissingDiagonal_MPIBAIJ,
2808                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2809                                        0,
2810                                        MatGetGhosts_MPIBAIJ,
2811                                        0,
2812                                        0,
2813                                 /*119*/0,
2814                                        0,
2815                                        0,
2816                                        0,
2817                                        MatGetMultiProcBlock_MPIBAIJ,
2818                                 /*124*/0,
2819                                        MatGetColumnNorms_MPIBAIJ,
2820                                        MatInvertBlockDiagonal_MPIBAIJ,
2821                                        0,
2822                                        0,
2823                                /*129*/ 0,
2824                                        0,
2825                                        0,
2826                                        0,
2827                                        0,
2828                                /*134*/ 0,
2829                                        0,
2830                                        0,
2831                                        0,
2832                                        0,
2833                                /*139*/ 0,
2834                                        0,
2835                                        0,
2836                                        MatFDColoringSetUp_MPIXAIJ,
2837                                        0,
2838                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPIBAIJ
2839 };
2840 
2841 
2842 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2843 
2844 #undef __FUNCT__
2845 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2846 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2847 {
2848   PetscInt       m,rstart,cstart,cend;
2849   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2850   const PetscInt *JJ    =0;
2851   PetscScalar    *values=0;
2852   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2853   PetscErrorCode ierr;
2854 
2855   PetscFunctionBegin;
2856   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2857   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2858   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2859   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2860   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2861   m      = B->rmap->n/bs;
2862   rstart = B->rmap->rstart/bs;
2863   cstart = B->cmap->rstart/bs;
2864   cend   = B->cmap->rend/bs;
2865 
2866   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2867   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
2868   for (i=0; i<m; i++) {
2869     nz = ii[i+1] - ii[i];
2870     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2871     nz_max = PetscMax(nz_max,nz);
2872     JJ     = jj + ii[i];
2873     for (j=0; j<nz; j++) {
2874       if (*JJ >= cstart) break;
2875       JJ++;
2876     }
2877     d = 0;
2878     for (; j<nz; j++) {
2879       if (*JJ++ >= cend) break;
2880       d++;
2881     }
2882     d_nnz[i] = d;
2883     o_nnz[i] = nz - d;
2884   }
2885   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2886   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2887 
2888   values = (PetscScalar*)V;
2889   if (!values) {
2890     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2891     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2892   }
2893   for (i=0; i<m; i++) {
2894     PetscInt          row    = i + rstart;
2895     PetscInt          ncols  = ii[i+1] - ii[i];
2896     const PetscInt    *icols = jj + ii[i];
2897     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2898       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2899       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2900     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2901       PetscInt j;
2902       for (j=0; j<ncols; j++) {
2903         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2904         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2905       }
2906     }
2907   }
2908 
2909   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2910   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2911   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2912   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2913   PetscFunctionReturn(0);
2914 }
2915 
2916 #undef __FUNCT__
2917 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2918 /*@C
2919    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2920    (block compressed row).
2921 
2922    Collective on MPI_Comm
2923 
2924    Input Parameters:
2925 +  B - the matrix
2926 .  bs - the block size
2927 .  i - the indices into j for the start of each local block row (starts with zero)
2928 .  j - the block column indices for each local block row (starts with zero); these must be sorted within each row
2929 -  v - optional values in the matrix
2930 
2931    Level: developer
2932 
2933    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2934    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs], where the second index is
2935    over rows within a block and the last index is over columns within a block.  Fortran programs will likely set
2936    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz), in which the first index is over rows within a
2937    block and the second index is over columns within a block.
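
   As a minimal sketch (the sizes and values are illustrative only, not from any
   particular application), the CSR input for bs=2 with two local block rows, where
   block row 0 has blocks in block columns 0 and 1 and block row 1 has a single
   block in block column 1, would look like

.vb
   PetscInt    bs   = 2;
   PetscInt    ii[] = {0,2,3};  /* block row r owns entries ii[r]..ii[r+1]-1 of jj */
   PetscInt    jj[] = {0,1,1};  /* block column indices, sorted within each block row */
   PetscScalar vv[] = {1,2,3,4, 5,6,7,8, 9,10,11,12};  /* 3 blocks of bs*bs values each */
   ierr = MatMPIBAIJSetPreallocationCSR(B,bs,ii,jj,vv);CHKERRQ(ierr);
.ve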
2938 
2939 .keywords: matrix, baij, block, compressed row, sparse, parallel
2940 
2941 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateBAIJ(), MatCreateMPIBAIJWithArrays(), MPIBAIJ
2942 @*/
2943 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2944 {
2945   PetscErrorCode ierr;
2946 
2947   PetscFunctionBegin;
2948   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2949   PetscValidType(B,1);
2950   PetscValidLogicalCollectiveInt(B,bs,2);
2951   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2952   PetscFunctionReturn(0);
2953 }
2954 
2955 #undef __FUNCT__
2956 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2957 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2958 {
2959   Mat_MPIBAIJ    *b;
2960   PetscErrorCode ierr;
2961   PetscInt       i;
2962 
2963   PetscFunctionBegin;
2964   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
2965   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2966   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2967   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2968 
2969   if (d_nnz) {
2970     for (i=0; i<B->rmap->n/bs; i++) {
2971       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]);
2972     }
2973   }
2974   if (o_nnz) {
2975     for (i=0; i<B->rmap->n/bs; i++) {
2976       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]);
2977     }
2978   }
2979 
2980   b      = (Mat_MPIBAIJ*)B->data;
2981   b->bs2 = bs*bs;
2982   b->mbs = B->rmap->n/bs;
2983   b->nbs = B->cmap->n/bs;
2984   b->Mbs = B->rmap->N/bs;
2985   b->Nbs = B->cmap->N/bs;
2986 
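  /* convert the point-row ownership ranges and bounds into their block-row equivalents */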
2987   for (i=0; i<=b->size; i++) {
2988     b->rangebs[i] = B->rmap->range[i]/bs;
2989   }
2990   b->rstartbs = B->rmap->rstart/bs;
2991   b->rendbs   = B->rmap->rend/bs;
2992   b->cstartbs = B->cmap->rstart/bs;
2993   b->cendbs   = B->cmap->rend/bs;
2994 
2995 #if defined(PETSC_USE_CTABLE)
2996   ierr = PetscTableDestroy(&b->colmap);CHKERRQ(ierr);
2997 #else
2998   ierr = PetscFree(b->colmap);CHKERRQ(ierr);
2999 #endif
3000   ierr = PetscFree(b->garray);CHKERRQ(ierr);
3001   ierr = VecDestroy(&b->lvec);CHKERRQ(ierr);
3002   ierr = VecScatterDestroy(&b->Mvctx);CHKERRQ(ierr);
3003 
3004   /* Because b->B may have been resized we simply destroy it and create a new one each time */
3005   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3006   ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3007   ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3008   ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3009   ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
3010 
3011   if (!B->preallocated) {
3012     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3013     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3014     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
3015     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
3016     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
3017   }
3018 
3019   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3020   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3021   B->preallocated  = PETSC_TRUE;
3022   B->was_assembled = PETSC_FALSE;
3023   B->assembled     = PETSC_FALSE;
3024   PetscFunctionReturn(0);
3025 }
3026 
3027 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3028 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3029 
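/*
    Builds a MATMPIADJ adjacency matrix from the block nonzero structure of a MATMPIBAIJ
  matrix; diagonal blocks are dropped, since the adjacency graph carries no self edges.
*/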
3030 #undef __FUNCT__
3031 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3032 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3033 {
3034   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3035   PetscErrorCode ierr;
3036   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3037   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3038   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3039 
3040   PetscFunctionBegin;
3041   ierr  = PetscMalloc1(M+1,&ii);CHKERRQ(ierr);
3042   ii[0] = 0;
3043   for (i=0; i<M; i++) {
3044     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3045     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3046     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3047     /* remove one from the count if this block row contains a diagonal block */
3048     for (j=id[i]; j<id[i+1]; j++) {
3049       if (jd[j] == i) {ii[i+1]--;break;}
3050     }
3051   }
3052   ierr = PetscMalloc1(ii[M],&jj);CHKERRQ(ierr);
3053   cnt  = 0;
3054   for (i=0; i<M; i++) {
3055     for (j=io[i]; j<io[i+1]; j++) {
3056       if (garray[jo[j]] > rstart) break;
3057       jj[cnt++] = garray[jo[j]];
3058     }
3059     for (k=id[i]; k<id[i+1]; k++) {
3060       if (jd[k] != i) {
3061         jj[cnt++] = rstart + jd[k];
3062       }
3063     }
3064     for (; j<io[i+1]; j++) {
3065       jj[cnt++] = garray[jo[j]];
3066     }
3067   }
3068   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
3069   PetscFunctionReturn(0);
3070 }
3071 
3072 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3073 
3074 PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3075 
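/*
    Converts a MATMPIBAIJ matrix to MATMPIAIJ by disassembling the parallel matrix and
  converting its diagonal and off-diagonal SeqBAIJ pieces individually.
*/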
3076 #undef __FUNCT__
3077 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3078 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3079 {
3080   PetscErrorCode ierr;
3081   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3082   Mat            B;
3083   Mat_MPIAIJ     *b;
3084 
3085   PetscFunctionBegin;
3086   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
3087 
3088   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
3089   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3090   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3091   ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr);
3092   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
3093   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
3094   b    = (Mat_MPIAIJ*) B->data;
3095 
3096   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3097   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3098   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3099   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3100   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3101   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3102   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3103   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3104   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3105   if (reuse == MAT_INPLACE_MATRIX) {
3106     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
3107   } else {
3108    *newmat = B;
3109   }
3110   PetscFunctionReturn(0);
3111 }
3112 
3113 /*MC
3114    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3115 
3116    Options Database Keys:
3117 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3118 . -mat_block_size <bs> - set the blocksize used to store the matrix
3119 - -mat_use_hash_table <fact> - use a hash table during matrix assembly to save memory; <fact> is the hash table size factor
3120 
3121   Level: beginner
3122 
3123 .seealso: MatCreateBAIJ(), MATSEQBAIJ, MATBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3124 M*/
3125 
3126 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3127 
3128 #undef __FUNCT__
3129 #define __FUNCT__ "MatCreate_MPIBAIJ"
3130 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3131 {
3132   Mat_MPIBAIJ    *b;
3133   PetscErrorCode ierr;
3134   PetscBool      flg = PETSC_FALSE;
3135 
3136   PetscFunctionBegin;
3137   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
3138   B->data = (void*)b;
3139 
3140   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3141   B->assembled = PETSC_FALSE;
3142 
3143   B->insertmode = NOT_SET_VALUES;
3144   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3145   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3146 
3147   /* build local table of row and column ownerships */
3148   ierr = PetscMalloc1(b->size+1,&b->rangebs);CHKERRQ(ierr);
3149 
3150   /* build cache for off array entries formed */
3151   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3152 
3153   b->donotstash  = PETSC_FALSE;
3154   b->colmap      = NULL;
3155   b->garray      = NULL;
3156   b->roworiented = PETSC_TRUE;
3157 
3158   /* stuff used in block assembly */
3159   b->barray = 0;
3160 
3161   /* stuff used for matrix vector multiply */
3162   b->lvec  = 0;
3163   b->Mvctx = 0;
3164 
3165   /* stuff for MatGetRow() */
3166   b->rowindices   = 0;
3167   b->rowvalues    = 0;
3168   b->getrowactive = PETSC_FALSE;
3169 
3170   /* hash table stuff */
3171   b->ht           = 0;
3172   b->hd           = 0;
3173   b->ht_size      = 0;
3174   b->ht_flag      = PETSC_FALSE;
3175   b->ht_fact      = 0;
3176   b->ht_total_ct  = 0;
3177   b->ht_insert_ct = 0;
3178 
3179   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3180   b->ijonly = PETSC_FALSE;
3181 
3182 
3183   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3184   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3185   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3186   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3187   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3188   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3189   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3190   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3191   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3192   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3193 
3194   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPIBAIJ matrix 1","Mat");CHKERRQ(ierr);
3195   ierr = PetscOptionsBool("-mat_use_hash_table","Use hash table to save memory in constructing matrix","MatSetOption",flg,&flg,NULL);CHKERRQ(ierr);
3196   if (flg) {
3197     PetscReal fact = 1.39;
3198     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3199     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3200     if (fact <= 1.0) fact = 1.39;
3201     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3202     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3203   }
3204   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3205   PetscFunctionReturn(0);
3206 }
3207 
3208 /*MC
3209    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3210 
3211    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3212    and MATMPIBAIJ otherwise.
3213 
3214    Options Database Keys:
3215 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3216 
3217   Level: beginner
3218 
3219 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3220 M*/
3221 
3222 #undef __FUNCT__
3223 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3224 /*@C
3225    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3226    (block compressed row).  For good matrix assembly performance
3227    the user should preallocate the matrix storage by setting the parameters
3228    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3229    performance can be increased by more than a factor of 50.
3230 
3231    Collective on Mat
3232 
3233    Input Parameters:
3234 +  B - the matrix
3235 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3236           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3237 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3238            submatrix  (same for all local rows)
3239 .  d_nnz - array containing the number of block nonzeros in the various block rows
3240            in the diagonal portion of the local submatrix (possibly different for each block
3241            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3242            set it even if it is zero.
3243 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3244            submatrix (same for all local rows).
3245 -  o_nnz - array containing the number of nonzeros in the various block rows of the
3246            off-diagonal portion of the local submatrix (possibly different for
3247            each block row) or NULL.
3248 
3249    If the *_nnz parameter is given then the *_nz parameter is ignored
3250 
3251    Options Database Keys:
3252 +   -mat_block_size - size of the blocks to use
3253 -   -mat_use_hash_table <fact> - use a hash table during matrix assembly to save memory; <fact> is the hash table size factor
3254 
3255    Notes:
3256    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3257    then it must be used on all processors that share the object for that argument.
3258 
3259    Storage Information:
3260    For a square global matrix we define each processor's diagonal portion
3261    to be its local rows and the corresponding columns (a square submatrix);
3262    each processor's off-diagonal portion encompasses the remainder of the
3263    local matrix (a rectangular submatrix).
3264 
3265    The user can specify preallocated storage for the diagonal part of
3266    the local submatrix with either d_nz or d_nnz (not both).  Set
3267    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3268    memory allocation.  Likewise, specify preallocated storage for the
3269    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3270 
3271    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3272    the figure below we depict these three local rows and all columns (0-11).
3273 
3274 .vb
3275            0 1 2 3 4 5 6 7 8 9 10 11
3276           --------------------------
3277    row 3  |o o o d d d o o o o  o  o
3278    row 4  |o o o d d d o o o o  o  o
3279    row 5  |o o o d d d o o o o  o  o
3280           --------------------------
3281 .ve
3282 
3283    Thus, any entries in the d locations are stored in the d (diagonal)
3284    submatrix, and any entries in the o locations are stored in the
3285    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3286    stored simply in the MATSEQBAIJ format for compressed row storage.
3287 
3288    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3289    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3290    In general, for PDE problems in which most nonzeros are near the diagonal,
3291    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3292    or you will get TERRIBLE performance; see the users' manual chapter on
3293    matrices.
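
   For the three rows depicted above with bs=3 (a single block row on this process, the
   12 global columns forming 4 block columns), one would set d_nz=1 and o_nz=3: the
   diagonal portion holds one block (columns 3-5) and the off-diagonal portion holds three.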
3294 
3295    You can call MatGetInfo() to get information on how effective the preallocation was;
3296    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3297    You can also run with the option -info and look for messages with the string
3298    malloc in them to see if additional memory allocation was needed.
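
   A typical call sequence (a sketch only; m, n, M, N, bs and the preallocation arguments
   are whatever is appropriate for the application) is

.vb
   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
   ierr = MatSetSizes(B,m,n,M,N);CHKERRQ(ierr);
   ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
   ierr = MatSetFromOptions(B);CHKERRQ(ierr);
   ierr = MatMPIBAIJSetPreallocation(B,bs,d_nz,NULL,o_nz,NULL);CHKERRQ(ierr);
.ve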
3299 
3300    Level: intermediate
3301 
3302 .keywords: matrix, block, aij, compressed row, sparse, parallel
3303 
3304 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3305 @*/
3306 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3307 {
3308   PetscErrorCode ierr;
3309 
3310   PetscFunctionBegin;
3311   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3312   PetscValidType(B,1);
3313   PetscValidLogicalCollectiveInt(B,bs,2);
3314   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3315   PetscFunctionReturn(0);
3316 }
3317 
3318 #undef __FUNCT__
3319 #define __FUNCT__ "MatCreateBAIJ"
3320 /*@C
3321    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3322    (block compressed row).  For good matrix assembly performance
3323    the user should preallocate the matrix storage by setting the parameters
3324    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3325    performance can be increased by more than a factor of 50.
3326 
3327    Collective on MPI_Comm
3328 
3329    Input Parameters:
3330 +  comm - MPI communicator
3331 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3332           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3333 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3334            This value should be the same as the local size used in creating the
3335            y vector for the matrix-vector product y = Ax.
3336 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3337            This value should be the same as the local size used in creating the
3338            x vector for the matrix-vector product y = Ax.
3339 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3340 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3341 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3342            submatrix  (same for all local rows)
3343 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3344            in the diagonal portion of the local submatrix (possibly different for each block
3345            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3346            and set it even if it is zero.
3347 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3348            submatrix (same for all local rows).
3349 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3350            off-diagonal portion of the local submatrix (possibly different for
3351            each block row) or NULL.
3352 
3353    Output Parameter:
3354 .  A - the matrix
3355 
3356    Options Database Keys:
3357 +   -mat_block_size - size of the blocks to use
3358 -   -mat_use_hash_table <fact> - use a hash table during matrix assembly to save memory; <fact> is the hash table size factor
3359 
3360    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3361    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3362    [MatXXXXSetPreallocation() is, for example, MatMPIBAIJSetPreallocation()]
3363 
3364    Notes:
3365    If the *_nnz parameter is given then the *_nz parameter is ignored
3366 
3367    A nonzero block is any block that has 1 or more nonzeros in it
3368 
3369    The user MUST specify either the local or global matrix dimensions
3370    (possibly both).
3371 
3372    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3373    then it must be used on all processors that share the object for that argument.
3374 
3375    Storage Information:
3376    For a square global matrix we define each processor's diagonal portion
3377    to be its local rows and the corresponding columns (a square submatrix);
3378    each processor's off-diagonal portion encompasses the remainder of the
3379    local matrix (a rectangular submatrix).
3380 
3381    The user can specify preallocated storage for the diagonal part of
3382    the local submatrix with either d_nz or d_nnz (not both).  Set
3383    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3384    memory allocation.  Likewise, specify preallocated storage for the
3385    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3386 
3387    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3388    the figure below we depict these three local rows and all columns (0-11).
3389 
3390 .vb
3391            0 1 2 3 4 5 6 7 8 9 10 11
3392           --------------------------
3393    row 3  |o o o d d d o o o o  o  o
3394    row 4  |o o o d d d o o o o  o  o
3395    row 5  |o o o d d d o o o o  o  o
3396           --------------------------
3397 .ve
3398 
3399    Thus, any entries in the d locations are stored in the d (diagonal)
3400    submatrix, and any entries in the o locations are stored in the
3401    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3402    stored simply in the MATSEQBAIJ format for compressed row storage.
3403 
3404    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3405    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3406    In general, for PDE problems in which most nonzeros are near the diagonal,
3407    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3408    or you will get TERRIBLE performance; see the users' manual chapter on
3409    matrices.
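
   As a sketch (the sizes are illustrative only), the matrix depicted above could be
   created across four processes, each owning one block row, with

.vb
   ierr = MatCreateBAIJ(comm,3,PETSC_DECIDE,PETSC_DECIDE,12,12,1,NULL,3,NULL,&A);CHKERRQ(ierr);
.ve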
3410 
3411    Level: intermediate
3412 
3413 .keywords: matrix, block, aij, compressed row, sparse, parallel
3414 
3415 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3416 @*/
3417 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3418 {
3419   PetscErrorCode ierr;
3420   PetscMPIInt    size;
3421 
3422   PetscFunctionBegin;
3423   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3424   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3425   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3426   if (size > 1) {
3427     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3428     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3429   } else {
3430     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3431     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3432   }
3433   PetscFunctionReturn(0);
3434 }
3435 
3436 #undef __FUNCT__
3437 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3438 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3439 {
3440   Mat            mat;
3441   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3442   PetscErrorCode ierr;
3443   PetscInt       len=0;
3444 
3445   PetscFunctionBegin;
3446   *newmat = 0;
3447   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3448   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3449   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3450   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3451 
3452   mat->factortype   = matin->factortype;
3453   mat->preallocated = PETSC_TRUE;
3454   mat->assembled    = PETSC_TRUE;
3455   mat->insertmode   = NOT_SET_VALUES;
3456 
3457   a             = (Mat_MPIBAIJ*)mat->data;
3458   mat->rmap->bs = matin->rmap->bs;
3459   a->bs2        = oldmat->bs2;
3460   a->mbs        = oldmat->mbs;
3461   a->nbs        = oldmat->nbs;
3462   a->Mbs        = oldmat->Mbs;
3463   a->Nbs        = oldmat->Nbs;
3464 
3465   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3466   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3467 
3468   a->size         = oldmat->size;
3469   a->rank         = oldmat->rank;
3470   a->donotstash   = oldmat->donotstash;
3471   a->roworiented  = oldmat->roworiented;
3472   a->rowindices   = 0;
3473   a->rowvalues    = 0;
3474   a->getrowactive = PETSC_FALSE;
3475   a->barray       = 0;
3476   a->rstartbs     = oldmat->rstartbs;
3477   a->rendbs       = oldmat->rendbs;
3478   a->cstartbs     = oldmat->cstartbs;
3479   a->cendbs       = oldmat->cendbs;
3480 
3481   /* hash table stuff */
3482   a->ht           = 0;
3483   a->hd           = 0;
3484   a->ht_size      = 0;
3485   a->ht_flag      = oldmat->ht_flag;
3486   a->ht_fact      = oldmat->ht_fact;
3487   a->ht_total_ct  = 0;
3488   a->ht_insert_ct = 0;
3489 
3490   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3491   if (oldmat->colmap) {
3492 #if defined(PETSC_USE_CTABLE)
3493     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3494 #else
3495     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
3496     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3497     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3498 #endif
3499   } else a->colmap = 0;
3500 
3501   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3502     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
3503     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3504     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3505   } else a->garray = 0;
3506 
3507   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3508   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3509   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3510   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3511   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3512 
3513   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3514   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3515   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3516   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3517   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3518   *newmat = mat;
3519   PetscFunctionReturn(0);
3520 }
3521 
3522 #undef __FUNCT__
3523 #define __FUNCT__ "MatLoad_MPIBAIJ"
3524 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3525 {
3526   PetscErrorCode ierr;
3527   int            fd;
3528   PetscInt       i,nz,j,rstart,rend;
3529   PetscScalar    *vals,*buf;
3530   MPI_Comm       comm;
3531   MPI_Status     status;
3532   PetscMPIInt    rank,size,maxnz;
3533   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3534   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3535   PetscInt       jj,*mycols,*ibuf,bs = newmat->rmap->bs,Mbs,mbs,extra_rows,mmax;
3536   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3537   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3538   PetscInt       dcount,kmax,k,nzcount,tmp,mend;
3539 
3540   PetscFunctionBegin;
3541   /* force binary viewer to load .info file if it has not yet done so */
3542   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
3543   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3544   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix 2","Mat");CHKERRQ(ierr);
3545   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3546   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3547   if (bs < 0) bs = 1;
3548 
3549   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3550   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3551   ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3552   if (!rank) {
3553     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3554     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
3555     if (header[3] < 0) SETERRQ(PetscObjectComm((PetscObject)newmat),PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format on disk, cannot load as MPIAIJ");
3556   }
3557   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3558   M    = header[1]; N = header[2];
3559 
3560   /* If global sizes are set, check if they are consistent with that given in the file */
3561   if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows: Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
3562   if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols: Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);
3563 
3564   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3565 
3566   /*
3567      This code adds extra rows to make sure the number of rows is
3568      divisible by the blocksize
3569   */
3570   Mbs        = M/bs;
3571   extra_rows = bs - M + bs*Mbs;
3572   if (extra_rows == bs) extra_rows = 0;
3573   else                  Mbs++;
3574   if (extra_rows && !rank) {
3575     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3576   }
3577 
3578   /* determine ownership of all rows */
3579   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
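    /* split the block rows as evenly as possible; the first Mbs % size processes each get one extra block row */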
3580     mbs = Mbs/size + ((Mbs % size) > rank);
3581     m   = mbs*bs;
3582   } else { /* User set */
3583     m   = newmat->rmap->n;
3584     mbs = m/bs;
3585   }
3586   ierr = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
3587   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3588 
3589   /* process 0 needs enough room for process with most rows */
3590   if (!rank) {
3591     mmax = rowners[1];
3592     for (i=2; i<=size; i++) {
3593       mmax = PetscMax(mmax,rowners[i]);
3594     }
3595     mmax*=bs;
3596   } else mmax = -1;             /* unused, but compiler warns anyway */
3597 
3598   rowners[0] = 0;
3599   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3600   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3601   rstart = rowners[rank];
3602   rend   = rowners[rank+1];
3603 
3604   /* distribute row lengths to all processors */
3605   ierr = PetscMalloc1(m,&locrowlens);CHKERRQ(ierr);
3606   if (!rank) {
3607     mend = m;
3608     if (size == 1) mend = mend - extra_rows;
3609     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3610     for (j=mend; j<m; j++) locrowlens[j] = 1;
3611     ierr = PetscMalloc1(mmax,&rowlengths);CHKERRQ(ierr);
3612     ierr = PetscCalloc1(size,&procsnz);CHKERRQ(ierr);
3613     for (j=0; j<m; j++) {
3614       procsnz[0] += locrowlens[j];
3615     }
3616     for (i=1; i<size; i++) {
3617       mend = browners[i+1] - browners[i];
3618       if (i == size-1) mend = mend - extra_rows;
3619       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3620       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3621       /* calculate the number of nonzeros on each processor */
3622       for (j=0; j<browners[i+1]-browners[i]; j++) {
3623         procsnz[i] += rowlengths[j];
3624       }
3625       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3626     }
3627     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3628   } else {
3629     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3630   }
3631 
3632   if (!rank) {
3633     /* determine max buffer needed and allocate it */
3634     maxnz = procsnz[0];
3635     for (i=1; i<size; i++) {
3636       maxnz = PetscMax(maxnz,procsnz[i]);
3637     }
3638     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
3639 
3640     /* read in my part of the matrix column indices  */
3641     nz     = procsnz[0];
3642     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3643     mycols = ibuf;
3644     if (size == 1) nz -= extra_rows;
3645     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3646     if (size == 1) {
3647       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3648     }
3649 
3650     /* read in the column indices for every other process (except the last) and ship them off */
3651     for (i=1; i<size-1; i++) {
3652       nz   = procsnz[i];
3653       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3654       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3655     }
3656     /* read in the column indices for the last process, padding with the extra rows */
3657     if (size != 1) {
3658       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3659       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3660       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3661       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3662     }
3663     ierr = PetscFree(cols);CHKERRQ(ierr);
3664   } else {
3665     /* determine buffer space needed for message */
3666     nz = 0;
3667     for (i=0; i<m; i++) {
3668       nz += locrowlens[i];
3669     }
3670     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3671     mycols = ibuf;
3672     /* receive message of column indices */
3673     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3674     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3675     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3676   }
3677 
3678   /* loop over local rows, determining number of off diagonal entries */
3679   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
3680   ierr     = PetscCalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
3681   rowcount = 0; nzcount = 0;
3682   for (i=0; i<mbs; i++) {
3683     dcount  = 0;
3684     odcount = 0;
3685     for (j=0; j<bs; j++) {
3686       kmax = locrowlens[rowcount];
3687       for (k=0; k<kmax; k++) {
3688         tmp = mycols[nzcount++]/bs;
3689         if (!mask[tmp]) {
3690           mask[tmp] = 1;
3691           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3692           else masked1[dcount++] = tmp;
3693         }
3694       }
3695       rowcount++;
3696     }
3697 
3698     dlens[i]  = dcount;
3699     odlens[i] = odcount;
3700 
3701     /* zero out the mask elements we set */
3702     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3703     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3704   }
3705 
3706   ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3707   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3708 
3709   if (!rank) {
3710     ierr = PetscMalloc1(maxnz+1,&buf);CHKERRQ(ierr);
3711     /* read in my part of the matrix numerical values  */
3712     nz     = procsnz[0];
3713     vals   = buf;
3714     mycols = ibuf;
3715     if (size == 1) nz -= extra_rows;
3716     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3717     if (size == 1) {
3718       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3719     }
3720 
3721     /* insert into matrix */
3722     jj = rstart*bs;
3723     for (i=0; i<m; i++) {
3724       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3725       mycols += locrowlens[i];
3726       vals   += locrowlens[i];
3727       jj++;
3728     }
3729     /* read in other processors (except the last one) and ship out */
3730     for (i=1; i<size-1; i++) {
3731       nz   = procsnz[i];
3732       vals = buf;
3733       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3734       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3735     }
3736     /* the last proc */
3737     if (size != 1) {
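      /* note: i equals size-1 here, left over from the loop above */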
3738       nz   = procsnz[i] - extra_rows;
3739       vals = buf;
3740       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3741       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3742       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3743     }
3744     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3745   } else {
3746     /* receive numeric values */
3747     ierr = PetscMalloc1(nz+1,&buf);CHKERRQ(ierr);
3748 
3749     /* receive message of values */
3750     vals   = buf;
3751     mycols = ibuf;
3752     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3753 
3754     /* insert into matrix */
3755     jj = rstart*bs;
3756     for (i=0; i<m; i++) {
3757       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3758       mycols += locrowlens[i];
3759       vals   += locrowlens[i];
3760       jj++;
3761     }
3762   }
3763   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3764   ierr = PetscFree(buf);CHKERRQ(ierr);
3765   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3766   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3767   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3768   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3769   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3770   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3771   PetscFunctionReturn(0);
3772 }
3773 
3774 #undef __FUNCT__
3775 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3776 /*@
3777    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3778 
3779    Input Parameters:
3780 +  mat  - the matrix
3781 -  fact - factor
3782 
3783    Not Collective, each process can use a different factor
3784 
3785    Level: advanced
3786 
3787   Notes:
3788    This can also be set by the command line option: -mat_use_hash_table <fact>
3789 
3790 .keywords: matrix, hashtable, factor, HT
3791 
3792 .seealso: MatSetOption()
3793 @*/
3794 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3795 {
3796   PetscErrorCode ierr;
3797 
3798   PetscFunctionBegin;
3799   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3800   PetscFunctionReturn(0);
3801 }
3802 
3803 #undef __FUNCT__
3804 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3805 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3806 {
3807   Mat_MPIBAIJ *baij;
3808 
3809   PetscFunctionBegin;
3810   baij          = (Mat_MPIBAIJ*)mat->data;
3811   baij->ht_fact = fact;
3812   PetscFunctionReturn(0);
3813 }
3814 
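/*
    MatMPIBAIJGetSeqBAIJ - gives access to the SeqBAIJ matrices that store the diagonal
  (Ad) and off-diagonal (Ao) blocks of this process's rows, and to the map (colmap) from
  the local block columns of Ao to global block columns; any output argument may be NULL.
*/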
3815 #undef __FUNCT__
3816 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3817 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3818 {
3819   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3820 
3821   PetscFunctionBegin;
3822   if (Ad)     *Ad     = a->A;
3823   if (Ao)     *Ao     = a->B;
3824   if (colmap) *colmap = a->garray;
3825   PetscFunctionReturn(0);
3826 }
3827 
3828 /*
3829     Special version for direct calls from Fortran (to eliminate two function call overheads
3830 */
3831 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3832 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3833 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3834 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3835 #endif
3836 
3837 #undef __FUNCT__
3838 #define __FUNCT__ "matmpibaijsetvaluesblocked_"
3839 /*@C
3840   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3841 
3842   Collective on Mat
3843 
3844   Input Parameters:
3845 + mat - the matrix
3846 . min - number of input rows
3847 . im - input rows
3848 . nin - number of input columns
3849 . in - input columns
3850 . v - numerical values input
3851 - addvin - INSERT_VALUES or ADD_VALUES
3852 
3853   Notes: This has a complete copy of MatSetValuesBlocked_MPIBAIJ() which is terrible code un-reuse.
3854 
3855   Level: advanced
3856 
3857 .seealso:   MatSetValuesBlocked()
3858 @*/
3859 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3860 {
3861   /* convert input arguments to C version */
3862   Mat        mat  = *matin;
3863   PetscInt   m    = *min, n = *nin;
3864   InsertMode addv = *addvin;
3865 
3866   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3867   const MatScalar *value;
3868   MatScalar       *barray     = baij->barray;
3869   PetscBool       roworiented = baij->roworiented;
3870   PetscErrorCode  ierr;
3871   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3872   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3873   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3874 
3875   PetscFunctionBegin;
3876   /* tasks normally handled by MatSetValuesBlocked() */
3877   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3878 #if defined(PETSC_USE_DEBUG)
3879   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3880   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3881 #endif
3882   if (mat->assembled) {
3883     mat->was_assembled = PETSC_TRUE;
3884     mat->assembled     = PETSC_FALSE;
3885   }
3886   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3887 
3888 
3889   if (!barray) {
3890     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
3891     baij->barray = barray;
3892   }
3893 
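  /* stepval = number of entries to skip, after copying one bs-long row (or column) of a
     block, to reach the next row (or column) of that block in the input array v */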
3894   if (roworiented) stepval = (n-1)*bs;
3895   else stepval = (m-1)*bs;
3896 
3897   for (i=0; i<m; i++) {
3898     if (im[i] < 0) continue;
3899 #if defined(PETSC_USE_DEBUG)
3900     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3901 #endif
3902     if (im[i] >= rstart && im[i] < rend) {
3903       row = im[i] - rstart;
3904       for (j=0; j<n; j++) {
3905         /* a copy is not required when there is a single block column (row oriented) or a single block row (column oriented) */
3906         if ((roworiented) && (n == 1)) {
3907           barray = (MatScalar*)v + i*bs2;
3908         } else if ((!roworiented) && (m == 1)) {
3909           barray = (MatScalar*)v + j*bs2;
3910         } else { /* Here a copy is required */
3911           if (roworiented) {
3912             value = v + i*(stepval+bs)*bs + j*bs;
3913           } else {
3914             value = v + j*(stepval+bs)*bs + i*bs;
3915           }
3916           for (ii=0; ii<bs; ii++,value+=stepval) {
3917             for (jj=0; jj<bs; jj++) {
3918               *barray++ = *value++;
3919             }
3920           }
3921           barray -=bs2;
3922         }
3923 
3924         if (in[j] >= cstart && in[j] < cend) {
3925           col  = in[j] - cstart;
3926           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3927         } else if (in[j] < 0) continue;
3928 #if defined(PETSC_USE_DEBUG)
3929         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3930 #endif
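        /* in[j] lies outside the diagonal block: translate the global block column into its
           local index in the off-diagonal part B via colmap, disassembling the matrix if the
           block is not yet present and new nonzeros are allowed */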
3931         else {
3932           if (mat->was_assembled) {
3933             if (!baij->colmap) {
3934               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3935             }
3936 
3937 #if defined(PETSC_USE_DEBUG)
3938 #if defined(PETSC_USE_CTABLE)
3939             { PetscInt data;
3940               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3941               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3942             }
3943 #else
3944             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3945 #endif
3946 #endif
3947 #if defined(PETSC_USE_CTABLE)
3948             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3949             col  = (col - 1)/bs;
3950 #else
3951             col = (baij->colmap[in[j]] - 1)/bs;
3952 #endif
3953             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
3954               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3955               col  =  in[j];
3956             }
3957           } else col = in[j];
3958           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3959         }
3960       }
3961     } else {
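      /* im[i] belongs to another process: stash the row values for communication at assembly time */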
3962       if (!baij->donotstash) {
3963         if (roworiented) {
3964           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3965         } else {
3966           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3967         }
3968       }
3969     }
3970   }
3971 
3972   /* task normally handled by MatSetValuesBlocked() */
3973   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3974   PetscFunctionReturn(0);
3975 }
3976 
3977 #undef __FUNCT__
3978 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
3979 /*@
3980      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local
3981          rows in standard CSR format.
3982 
3983    Collective on MPI_Comm
3984 
3985    Input Parameters:
3986 +  comm - MPI communicator
3987 .  bs - the block size; only a block size of 1 is supported
3988 .  m - number of local rows (cannot be PETSC_DECIDE)
3989 .  n - this value should be the same as the local size used in creating the
3990        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
3991        calculated if N is given); for square matrices n is almost always m
3992 .  M - number of global rows (or PETSC_DETERMINE to have it calculated if m is given)
3993 .  N - number of global columns (or PETSC_DETERMINE to have it calculated if n is given)
3994 .   i - row indices
3995 .   j - column indices
3996 -   a - matrix values
3997 
3998    Output Parameter:
3999 .   mat - the matrix
4000 
4001    Level: intermediate
4002 
4003    Notes:
4004        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
4005      thus you CANNOT change the matrix entries by changing the values of a[] after you have
4006      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
4007 
4008      The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
4009      the same as a three dimensional array in Fortran, values(bs,bs,nnz), that contains the first column of the first
4010      block, followed by the second column of the first block, and so on. That is, the blocks are contiguous in memory
4011      with column-major ordering within blocks.
4012 
4013        The i and j indices are 0 based, and the i offsets index into the local j (and a) arrays.
4014 
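   Example (a minimal sketch, assuming bs = 1 and a 2x2 matrix owned entirely by one process):

.vb
      PetscInt    i[3] = {0,2,3};         /* row r occupies j[i[r]] .. j[i[r+1]-1] */
      PetscInt    j[3] = {0,1,1};         /* column indices, 0 based               */
      PetscScalar a[3] = {2.0,-1.0,3.0};  /* one value per entry                   */
      Mat         A;
      ierr = MatCreateMPIBAIJWithArrays(PETSC_COMM_SELF,1,2,2,PETSC_DETERMINE,PETSC_DETERMINE,i,j,a,&A);CHKERRQ(ierr);
.ve
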
4015 .keywords: matrix, baij, compressed row, sparse, parallel
4016 
4017 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
4018           MPIAIJ, MatCreateAIJ(), MatCreateMPIAIJWithSplitArrays()
4019 @*/
4020 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
4021 {
4022   PetscErrorCode ierr;
4023 
4024   PetscFunctionBegin;
4025   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
4026   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE or negative");
4027   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
4028   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
4029   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
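  /* the values within each block of a[] are stored column major (see the Notes above), so
     switch to column-oriented insertion while the CSR arrays are copied in */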
4030   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
4031   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
4032   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
4033   PetscFunctionReturn(0);
4034 }
4035 
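/*
     Utility routine that builds a parallel MPIBAIJ matrix by stacking, one below the other,
   the sequential BAIJ matrices contributed by the processes of comm. Here n is the number of
   local block columns of the result (or PETSC_DECIDE to have it chosen by PetscSplitOwnership()).
*/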
4036 #undef __FUNCT__
4037 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPIBAIJ"
4038 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
4039 {
4040   PetscErrorCode ierr;
4041   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
4042   PetscInt       *indx;
4043   PetscScalar    *values;
4044 
4045   PetscFunctionBegin;
4046   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
4047   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
4048     Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)inmat->data;
4049     PetscInt       *dnz,*onz,sum,mbs,Nbs;
4050     PetscInt       *bindx,rmax=a->rmax,j;
4051 
4052     ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4053     mbs = m/bs; Nbs = N/cbs;
4054     if (n == PETSC_DECIDE) {
4055       ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
4056     }
4057     /* Check sum(n) = Nbs */
4058     ierr = MPIU_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4059     if (sum != Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local block columns %D != global block columns %D",sum,Nbs);
4060 
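    /* the prefix sum of the local block row counts, minus our own count, gives this process's first block row */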
4061     ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4062     rstart -= mbs;
4063 
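    /* MatGetRow_SeqBAIJ returns point (non-blocked) columns; compress them into block columns in bindx for preallocation */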
4064     ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
4065     ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
4066     for (i=0; i<mbs; i++) {
4067       ierr = MatGetRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
4068       nnz = nnz/bs;
4069       for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
4070       ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
4071       ierr = MatRestoreRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
4072     }
4073     ierr = PetscFree(bindx);CHKERRQ(ierr);
4074 
4075     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
4076     ierr = MatSetSizes(*outmat,m,n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4077     ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
4078     ierr = MatSetType(*outmat,MATMPIBAIJ);CHKERRQ(ierr);
4079     ierr = MatMPIBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
4080     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
4081   }
4082 
4083   /* numeric phase */
4084   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4085   ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);
4086 
4087   for (i=0; i<m; i++) {
4088     ierr = MatGetRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4089     Ii   = i + rstart;
4090     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
4091     ierr = MatRestoreRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4092   }
4093   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4094   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4095   PetscFunctionReturn(0);
4096 }
4097