
#include <../src/mat/impls/baij/mpi/mpibaij.h>   /*I  "petscmat.h"  I*/

#include <petscblaslapack.h>
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatGetRowMaxAbs_MPIBAIJ"
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A,Vec v,PetscInt idx[])
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,*idxb = 0;
  PetscScalar    *va,*vb;
  Vec            vtmp;

  PetscFunctionBegin;
  ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr);
  ierr = VecGetArray(v,&va);CHKERRQ(ierr);
  if (idx) {
    for (i=0; i<A->rmap->n; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr);
  if (idx) {ierr = PetscMalloc1(A->rmap->n,&idxb);CHKERRQ(ierr);}
  ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr);
  ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr);

  for (i=0; i<A->rmap->n; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      va[i] = vb[i];
      if (idx) idx[i] = A->cmap->bs*a->garray[idxb[i]/A->cmap->bs] + (idxb[i] % A->cmap->bs);
    }
  }

  ierr = VecRestoreArray(v,&va);CHKERRQ(ierr);
  ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr);
  ierr = PetscFree(idxb);CHKERRQ(ierr);
  ierr = VecDestroy(&vtmp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

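/*
   Caller-side sketch (illustrative, not part of the library build): using the
   public MatGetRowMaxAbs(), which dispatches to the routine above for a
   MATMPIBAIJ matrix A.  The names A, rowmax, loc and nlocalrows are
   assumptions for the example.

     Vec      rowmax;
     PetscInt *loc;
     ierr = MatCreateVecs(A,NULL,&rowmax);CHKERRQ(ierr);   // left vector: matches the rows of A
     ierr = PetscMalloc1(nlocalrows,&loc);CHKERRQ(ierr);   // global column of the max, per local row
     ierr = MatGetRowMaxAbs(A,rowmax,loc);CHKERRQ(ierr);
     ierr = PetscFree(loc);CHKERRQ(ierr);
     ierr = VecDestroy(&rowmax);CHKERRQ(ierr);
*/
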
#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIBAIJ"
PetscErrorCode  MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIBAIJ"
PetscErrorCode  MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

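/*
   Typical calling sequence for the two routines above (a sketch following the
   MatStoreValues() manual page; the nonzero pattern must be frozen first):

     ierr = MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);CHKERRQ(ierr);
     ierr = MatStoreValues(mat);CHKERRQ(ierr);      // save the current numerical values
     ...                                            // overwrite the values, same pattern
     ierr = MatRetrieveValues(mat);CHKERRQ(ierr);   // restore the saved values
*/
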
/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix.  This is done in a non-scalable way, since the
   length of colmap equals the global number of block columns.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateColmap_MPIBAIJ_Private"
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *B    = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       nbs = B->nbs,i,bs=mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  ierr = PetscTableCreate(baij->nbs,baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  for (i=0; i<nbs; i++) {
    ierr = PetscTableAdd(baij->colmap,baij->garray[i]+1,i*bs+1,INSERT_VALUES);CHKERRQ(ierr);
  }
#else
  ierr = PetscMalloc1(baij->Nbs+1,&baij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)mat,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(baij->colmap,baij->Nbs*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<nbs; i++) baij->colmap[baij->garray[i]] = i*bs+1;
#endif
  PetscFunctionReturn(0);
}

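/*
   Lookup convention (sketch): colmap stores 1 + bs*(local block column), so a
   stored value of 0 means the global block column has no entries in B.  A
   lookup of a global point column gcol therefore reads

     PetscInt lcol;
   #if defined(PETSC_USE_CTABLE)
     ierr = PetscTableFind(baij->colmap,gcol/bs + 1,&lcol);CHKERRQ(ierr);
     lcol--;                                 // -1 if absent
   #else
     lcol = baij->colmap[gcol/bs] - 1;       // -1 if absent
   #endif
     if (lcol >= 0) lcol += gcol % bs;       // local point column within B

   which is exactly the pattern used in MatSetValues_MPIBAIJ() below.
*/
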
#define  MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = aj + ai[brow]; ap = aa + bs2*ai[brow]; \
    rmax = aimax[brow]; nrow = ailen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    if (a->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,brow,bcol,rmax,aa,ai,aj,rp,ap,aimax,a->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define  MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,orow,ocol)       \
  { \
    brow = row/bs;  \
    rp   = bj + bi[brow]; ap = ba + bs2*bi[brow]; \
    rmax = bimax[brow]; nrow = bilen[brow]; \
    bcol = col/bs; \
    ridx = row % bs; cidx = col % bs; \
    low  = 0; high = nrow; \
    while (high-low > 3) { \
      t = (low+high)/2; \
      if (rp[t] > bcol) high = t; \
      else              low  = t; \
    } \
    for (_i=low; _i<high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap +  bs2*_i + bs*cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value;  \
        else                    *bap  = value;  \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    if (b->nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero at global row/column (%D, %D) into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B,b->mbs,bs2,nrow,brow,bcol,rmax,ba,bi,bj,rp,ap,bimax,b->nonew,MatScalar); \
    N = nrow++ - 1;  \
    /* shift up all the later entries in this row */ \
    for (ii=N; ii>=_i; ii--) { \
      rp[ii+1] = rp[ii]; \
      ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr); \
    } \
    if (N>=_i) { ierr = PetscMemzero(ap+bs2*_i,bs2*sizeof(MatScalar));CHKERRQ(ierr); }  \
    rp[_i]                      = bcol;  \
    ap[bs2*_i + bs*cidx + ridx] = value;  \
b_noinsert:; \
    bilen[brow] = nrow; \
  }

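/*
   Both macros above locate a block column within a sorted row by the same
   strategy: a binary search that stops once the window holds at most three
   candidates, finished by a short linear scan.  A standalone sketch (the
   helper name FindBlockCol is hypothetical):

     static PetscInt FindBlockCol(const PetscInt *rp,PetscInt nrow,PetscInt bcol)
     {
       PetscInt low = 0,high = nrow,t,i;
       while (high-low > 3) {
         t = (low+high)/2;
         if (rp[t] > bcol) high = t;
         else              low  = t;
       }
       for (i=low; i<high; i++) {
         if (rp[i] == bcol) return i;   // found: index of the block in the row
         if (rp[i] > bcol) break;
       }
       return -1;                       // not found; i is the insertion point
     }
*/
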
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ"
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  MatScalar      value;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,cstart_orig=mat->cmap->rstart;
  PetscInt       cend_orig  =mat->cmap->rend,bs=mat->rmap->bs;

  /* Some variables required in the macros */
  Mat         A     = baij->A;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ*)(A)->data;
  PetscInt    *aimax=a->imax,*ai=a->i,*ailen=a->ilen,*aj=a->j;
  MatScalar   *aa   =a->a;

  Mat         B     = baij->B;
  Mat_SeqBAIJ *b    = (Mat_SeqBAIJ*)(B)->data;
  PetscInt    *bimax=b->imax,*bi=b->i,*bilen=b->ilen,*bj=b->j;
  MatScalar   *ba   =b->a;

  PetscInt  *rp,ii,nrow,_i,rmax,N,brow,bcol;
  PetscInt  low,high,t,ridx,cidx,bs2=a->bs2;
  MatScalar *ap,*bap;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j=0; j<n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_A_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]/bs + 1,&col);CHKERRQ(ierr);
            col  = col - 1;
#else
            col = baij->colmap[in[j]/bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B    = baij->B;
              b    = (Mat_SeqBAIJ*)(B)->data;
              bimax=b->imax;bi=b->i;bilen=b->ilen;bj=b->j;
              ba   =b->a;
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", im[i], in[j]);
            else col += in[j]%bs;
          } else col = in[j];
          if (roworiented) value = v[i*n+j];
          else             value = v[i+j*m];
          MatSetValues_SeqBAIJ_B_Private(row,col,value,addv,im[i],in[j]);
          /* ierr = MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv);CHKERRQ(ierr); */
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_SeqBAIJ_Inlined"
PETSC_STATIC_INLINE PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A,PetscInt row,PetscInt col,const PetscScalar v[],InsertMode is,PetscInt orow,PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ*)A->data;
  PetscInt          *rp,low,high,t,ii,jj,nrow,i,rmax,N;
  PetscInt          *imax=a->imax,*ai=a->i,*ailen=a->ilen;
  PetscErrorCode    ierr;
  PetscInt          *aj        =a->j,nonew=a->nonew,bs2=a->bs2,bs=A->rmap->bs;
  PetscBool         roworiented=a->roworiented;
  const PetscScalar *value     = v;
  MatScalar         *ap,*aa = a->a,*bap;

  PetscFunctionBegin;
  rp   = aj + ai[row];
  ap   = aa + bs2*ai[row];
  rmax = imax[row];
  nrow = ailen[row];
  low  = 0;
  high = nrow;
  while (high-low > 7) {
    t = (low+high)/2;
    if (rp[t] > col) high = t;
    else             low  = t;
  }
  for (i=low; i<high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap +  bs2*i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] += *value++;
            }
          }
        } else {
          for (ii=0; ii<bs; ii++) {
            for (jj=ii; jj<bs2; jj+=bs) {
              bap[jj] = *value++;
            }
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj] += value[jj];
            }
            bap += bs;
          }
        } else {
          for (ii=0; ii<bs; ii++,value+=bs) {
            for (jj=0; jj<bs; jj++) {
              bap[jj]  = value[jj];
            }
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new global block indexed nonzero block (%D, %D) in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A,a->mbs,bs2,nrow,row,col,rmax,aa,ai,aj,rp,ap,imax,nonew,MatScalar);
  N = nrow++ - 1; high++;
  /* shift up all the later entries in this row */
  for (ii=N; ii>=i; ii--) {
    rp[ii+1] = rp[ii];
    ierr     = PetscMemcpy(ap+bs2*(ii+1),ap+bs2*(ii),bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  if (N >= i) {
    ierr = PetscMemzero(ap+bs2*i,bs2*sizeof(MatScalar));CHKERRQ(ierr);
  }
  rp[i] = col;
  bap   = ap +  bs2*i;
  if (roworiented) {
    for (ii=0; ii<bs; ii++) {
      for (jj=ii; jj<bs2; jj+=bs) {
        bap[jj] = *value++;
      }
    }
  } else {
    for (ii=0; ii<bs; ii++) {
      for (jj=0; jj<bs; jj++) {
        *bap++ = *value++;
      }
    }
  }
  noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(0);
}

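/*
   Block storage note (a sketch of why the row-oriented branches above stride
   by bs): SeqBAIJ keeps each bs x bs block in column-major order.  For bs = 2
   the block
       [a b]
       [c d]
   is stored as {a,c,b,d}, so row-oriented input {a,b,c,d} is transposed on
   the fly by writing *value++ into bap[jj] with jj advancing by bs, while
   column-oriented input is copied contiguously.
*/
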
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ"
/*
    This routine should be optimized so that the block copy at ** Here a copy is required ** below is not needed
    by passing additional stride information into the MatSetValuesBlocked_SeqBAIJ_Inlined() routine
*/
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  const PetscScalar *value;
  MatScalar         *barray     = baij->barray;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col,rstart=baij->rstartbs;
  PetscInt          rend=baij->rendbs,cstart=baij->cstartbs,stepval;
  PetscInt          cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
    baij->barray = barray;
  }

  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed row too large %D max %D",im[i],baij->Mbs-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j=0; j<n; j++) {
        /* If n == 1 (row-oriented) or m == 1 (column-oriented) the input block is already contiguous, so a copy is not required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar*)v + i*bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar*)v + j*bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i*(stepval+bs) + j)*bs;
          } else {
            value = v + (j*(stepval+bs) + i)*bs;
          }
          for (ii=0; ii<bs; ii++,value+=bs+stepval) {
            for (jj=0; jj<bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col  = in[j] - cstart;
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Block indexed column too large %D max %D",in[j],baij->Nbs-1);
#endif
        else {
          if (mat->was_assembled) {
            if (!baij->colmap) {
              ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
            }

#if defined(PETSC_USE_DEBUG)
#if defined(PETSC_USE_CTABLE)
            { PetscInt data;
              ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
              if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
            }
#else
            if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
#endif
#endif
#if defined(PETSC_USE_CTABLE)
            ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col  = (col - 1)/bs;
#else
            col = (baij->colmap[in[j]] - 1)/bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ*)(baij->B->data))->nonew) {
              ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
            } else if (col < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new blocked indexed nonzero block (%D, %D) into matrix",im[i],in[j]);
          } else col = in[j];
          ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
        }
      }
    } else {
      if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process block indexed row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#define HASH_KEY 0.6180339887
#define HASH(size,key,tmp) (tmp = (key)*HASH_KEY,(PetscInt)((size)*(tmp-(PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
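/*
   HASH() is multiplicative hashing with the golden ratio (Knuth): the key is
   scaled by 0.6180339887, the integer part is dropped, and the fractional
   part is mapped onto [0,size).  A worked example for size = 11, key = 37:

     tmp  = 37*0.6180339887 = 22.867...     // fractional part 0.867...
     slot = (PetscInt)(11*0.867...) = 9     // collisions are then probed linearly
*/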
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIBAIJ_HT"
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ    *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool      roworiented = baij->roworiented;
  PetscErrorCode ierr;
  PetscInt       i,j,row,col;
  PetscInt       rstart_orig=mat->rmap->rstart;
  PetscInt       rend_orig  =mat->rmap->rend,Nbs=baij->Nbs;
  PetscInt       h1,key,size=baij->ht_size,bs=mat->rmap->bs,*HT=baij->ht,idx;
  PetscReal      tmp;
  MatScalar      **HD = baij->hd,value;
#if defined(PETSC_USE_DEBUG)
  PetscInt       total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row");
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j=0; j<n; j++) {
        col = in[j];
        if (roworiented) value = v[i*n+j];
        else             value = v[i+j*m];
        /* Look up into the hash table */
        key = (row/bs)*Nbs+(col/bs)+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        insert_ct++;
        total_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        /* A hash table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx]+ (col % bs)*bs + (row % bs)) += value;
        else                    *(HD[idx]+ (col % bs)*bs + (row % bs))  = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,PETSC_FALSE);CHKERRQ(ierr);
      } else {
        ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,PETSC_FALSE);CHKERRQ(ierr);
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  += total_ct;
  baij->ht_insert_ct += insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_MPIBAIJ_HT"
PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIBAIJ       *baij       = (Mat_MPIBAIJ*)mat->data;
  PetscBool         roworiented = baij->roworiented;
  PetscErrorCode    ierr;
  PetscInt          i,j,ii,jj,row,col;
  PetscInt          rstart=baij->rstartbs;
  PetscInt          rend  =mat->rmap->rend,stepval,bs=mat->rmap->bs,bs2=baij->bs2,nbs2=n*bs2;
  PetscInt          h1,key,size=baij->ht_size,idx,*HT=baij->ht,Nbs=baij->Nbs;
  PetscReal         tmp;
  MatScalar         **HD = baij->hd,*baij_a;
  const PetscScalar *v_t,*value;
#if defined(PETSC_USE_DEBUG)
  PetscInt          total_ct=baij->ht_total_ct,insert_ct=baij->ht_insert_ct;
#endif

  PetscFunctionBegin;
  if (roworiented) stepval = (n-1)*bs;
  else stepval = (m-1)*bs;

  for (i=0; i<m; i++) {
#if defined(PETSC_USE_DEBUG)
    if (im[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",im[i]);
    if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],baij->Mbs-1);
#endif
    row = im[i];
    v_t = v + i*nbs2;
    if (row >= rstart && row < rend) {
      for (j=0; j<n; j++) {
        col = in[j];

        /* Look up into the hash table */
        key = row*Nbs+col+1;
        h1  = HASH(size,key,tmp);

        idx = h1;
#if defined(PETSC_USE_DEBUG)
        total_ct++;
        insert_ct++;
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++,total_ct++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++,total_ct++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#else
        if (HT[idx] != key) {
          for (idx=h1; (idx<size) && (HT[idx]!=key); idx++) ;
          if (idx == size) {
            for (idx=0; (idx<h1) && (HT[idx]!=key); idx++) ;
            if (idx == h1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"(%D,%D) has no entry in the hash table", row, col);
          }
        }
#endif
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t  += bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval) {
              for (jj=ii; jj<bs2; jj+=bs) {
                baij_a[jj] = *value++;
              }
            }
          }
        } else {
          value = v + j*(stepval+bs)*bs + i*bs;
          if (addv == ADD_VALUES) {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] += *value++;
              }
            }
          } else {
            for (ii=0; ii<bs; ii++,value+=stepval,baij_a+=bs) {
              for (jj=0; jj<bs; jj++) {
                baij_a[jj] = *value++;
              }
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
        }
      }
    }
  }
#if defined(PETSC_USE_DEBUG)
  baij->ht_total_ct  += total_ct;
  baij->ht_insert_ct += insert_ct;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIBAIJ"
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       bs       = mat->rmap->bs,i,j,bsrstart = mat->rmap->rstart,bsrend = mat->rmap->rend;
  PetscInt       bscstart = mat->cmap->rstart,bscend = mat->cmap->rend,row,col,data;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]); */
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col  = idxn[j] - bscstart;
          ierr = MatGetValues_SeqBAIJ(baij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          if (!baij->colmap) {
            ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined(PETSC_USE_CTABLE)
          ierr = PetscTableFind(baij->colmap,idxn[j]/bs+1,&data);CHKERRQ(ierr);
          data--;
#else
          data = baij->colmap[idxn[j]/bs]-1;
#endif
          if ((data < 0) || (baij->garray[data/bs] != idxn[j]/bs)) *(v+i*n+j) = 0.0;
          else {
            col  = data + idxn[j]%bs;
            ierr = MatGetValues_SeqBAIJ(baij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported");
  }
  PetscFunctionReturn(0);
}

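/*
   Caller-side sketch: for MATMPIBAIJ, MatGetValues() may only request rows
   owned by the calling process (see the SETERRQ above).  With rstart taken
   from MatGetOwnershipRange(), a minimal use is

     PetscInt    row = rstart,col = 0;
     PetscScalar val;
     ierr = MatGetValues(mat,1,&row,1,&col,&val);CHKERRQ(ierr);
*/
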
#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIBAIJ"
PetscErrorCode MatNorm_MPIBAIJ(Mat mat,NormType type,PetscReal *nrm)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *amat = (Mat_SeqBAIJ*)baij->A->data,*bmat = (Mat_SeqBAIJ*)baij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,bs2=baij->bs2,bs=baij->A->rmap->bs,nz,row,col;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    ierr = MatNorm(baij->A,type,nrm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      v  = bmat->a;
      nz = bmat->nz*bs2;
      for (i=0; i<nz; i++) {
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray=baij->garray,cstart=baij->rstartbs;
      ierr = PetscMalloc2(mat->cmap->N,&tmp,mat->cmap->N,&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      v    = amat->a; jj = amat->j;
      for (i=0; i<amat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*(cstart + *jj) + j; /* column index */
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      v = bmat->a; jj = bmat->j;
      for (i=0; i<bmat->nz; i++) {
        for (j=0; j<bs; j++) {
          col = bs*garray[*jj] + j;
          for (row=0; row<bs; row++) {
            tmp[col] += PetscAbsScalar(*v); v++;
          }
        }
        jj++;
      }
      ierr = MPIU_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      *nrm = 0.0;
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      ierr = PetscFree2(tmp,tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      ierr = PetscMalloc1(bs,&sums);CHKERRQ(ierr);
      sum  = 0.0;
      for (j=0; j<amat->mbs; j++) {
        for (row=0; row<bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2*amat->i[j];
        nz = amat->i[j+1]-amat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        v  = bmat->a + bs2*bmat->i[j];
        nz = bmat->i[j+1]-bmat->i[j];
        for (i=0; i<nz; i++) {
          for (col=0; col<bs; col++) {
            for (row=0; row<bs; row++) {
              sums[row] += PetscAbsScalar(*v); v++;
            }
          }
        }
        for (row=0; row<bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      ierr = MPIU_Allreduce(&sum,nrm,1,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
      ierr = PetscFree(sums);CHKERRQ(ierr);
    } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No support for this norm yet");
  }
  PetscFunctionReturn(0);
}

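/*
   Norms computed above, each from local contributions combined by
   MPIU_Allreduce():

     NORM_FROBENIUS:  sqrt( sum_ij |a_ij|^2 )    local sums, MPI_SUM, then sqrt
     NORM_1:          max_j sum_i |a_ij|         column sums, MPI_SUM, then max
     NORM_INFINITY:   max_i sum_j |a_ij|         row sums, MPI_MAX
*/
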
/*
  Creates and fills the hash table.
  The table is created only once.
  If new entries need to be added to the matrix
  then the hash table has to be destroyed and
  recreated.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCreateHashTable_MPIBAIJ_Private"
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat,PetscReal factor)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  Mat            A     = baij->A,B=baij->B;
  Mat_SeqBAIJ    *a    = (Mat_SeqBAIJ*)A->data,*b=(Mat_SeqBAIJ*)B->data;
  PetscInt       i,j,k,nz=a->nz+b->nz,h1,*ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j;
  PetscErrorCode ierr;
  PetscInt       ht_size,bs2=baij->bs2,rstart=baij->rstartbs;
  PetscInt       cstart=baij->cstartbs,*garray=baij->garray,row,col,Nbs=baij->Nbs;
  PetscInt       *HT,key;
  MatScalar      **HD;
  PetscReal      tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct=0,max=0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(0);

  baij->ht_size = (PetscInt)(factor*nz);
  ht_size       = baij->ht_size;

  /* Allocate memory for the hash table */
  ierr = PetscCalloc2(ht_size,&baij->hd,ht_size,&baij->ht);CHKERRQ(ierr);
  HD   = baij->hd;
  HT   = baij->ht;

  /* Loop over A */
  for (i=0; i<a->mbs; i++) {
    for (j=ai[i]; j<ai[i+1]; j++) {
      row = i+rstart;
      col = aj[j]+cstart;

      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = a->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }
  /* Loop over B */
  for (i=0; i<b->mbs; i++) {
    for (j=bi[i]; j<bi[i+1]; j++) {
      row = i+rstart;
      col = garray[bj[j]];
      key = row*Nbs + col + 1;
      h1  = HASH(ht_size,key,tmp);
      for (k=0; k<ht_size; k++) {
        if (!HT[(h1+k)%ht_size]) {
          HT[(h1+k)%ht_size] = key;
          HD[(h1+k)%ht_size] = b->a + j*bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }

  /* Print summary */
#if defined(PETSC_USE_INFO)
  for (i=0,j=0; i<ht_size; i++) {
    if (HT[i]) j++;
  }
  ierr = PetscInfo2(mat,"Average Search = %5.2f, max search = %D\n",(!j) ? 0.0 : ((PetscReal)(ct+j))/j,max);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}

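/*
   How the hash-table insertion path is enabled from user code (a sketch; the
   factor sizes the table as ht_size = factor*nz, cf. the code above):

     ierr = MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatMPIBAIJSetHashTableFactor(mat,1.99);CHKERRQ(ierr);
*/
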
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIBAIJ"
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(0);

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashScatterBegin_Private(mat,&mat->bstash,baij->rangebs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->bstash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(mat,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIBAIJ"
PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIBAIJ    *baij=(Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *a   =(Mat_SeqBAIJ*)baij->A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart,ncols,flg,bs2=baij->bs2;
  PetscInt       *row,*col;
  PetscBool      r1,r2,r3,other_disassembled;
  MatScalar      *val;
  PetscMPIInt    n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the roworiented flag to column-oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ*)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ*)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->bstash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        ierr = MatSetValuesBlocked_MPIBAIJ(mat,1,row+i,ncols,col+i,val+i*bs2,mat->insertmode);CHKERRQ(ierr);
        i    = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->bstash);CHKERRQ(ierr);

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ*)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  ierr = MatAssemblyBegin(baij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if the nonzero structure of submatrix B cannot change then we know that
     no processor disassembled, thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ*)baij->B->data)->nonew) {
    ierr = MPIU_Allreduce(&mat->was_assembled,&other_disassembled,1,MPIU_BOOL,MPI_PROD,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
    }
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIBAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(baij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(baij->B,mode);CHKERRQ(ierr);

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = PetscInfo1(mat,"Average Hash Table Search in MatSetValues = %5.2f\n",(double)((PetscReal)baij->ht_total_ct)/baij->ht_insert_ct);CHKERRQ(ierr);

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatCreateHashTable_MPIBAIJ_Private(mat,baij->ht_fact);CHKERRQ(ierr);

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);

  baij->rowvalues = 0;

  /* if no new nonzero locations are allowed in the matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    ierr = MPIU_Allreduce(&state,&mat->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

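/*
   Standard assembly sequence from user code (sketch); the stash traffic
   processed above is generated by MatSetValues() calls on off-process rows:

     ierr = MatSetValues(mat,m,rows,n,cols,vals,INSERT_VALUES);CHKERRQ(ierr);  // any process, any rows
     ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/
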
extern PetscErrorCode MatView_SeqBAIJ(Mat,PetscViewer);
#include <petscdraw.h>
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_ASCIIorDraworSocket"
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ*)mat->data;
  PetscErrorCode    ierr;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii,isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D bs %D mem %D\n",
                                                rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,mat->rmap->bs,(PetscInt)info.memory);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(baij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(baij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      ierr = PetscViewerASCIIPrintf(viewer,"  block size is %D\n",bs);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(0);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr);
    if (isnull) PetscFunctionReturn(0);
  }

  {
    /* assemble the entire matrix onto the first processor. */
    Mat         A;
    Mat_SeqBAIJ *Aloc;
    PetscInt    M = mat->rmap->N,N = mat->cmap->N,*ai,*aj,col,i,j,k,*rvals,mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so we will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    ierr = MatSetType(A,MATMPIBAIJ);CHKERRQ(ierr);
    ierr = MatMPIBAIJSetPreallocation(A,mat->rmap->bs,0,NULL,0,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ*)baij->A->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);

    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = (baij->cstartbs+aj[j])*bs;
        for (k=0; k<bs; k++) {
          ierr = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ*)baij->B->data;
    ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
    for (i=0; i<mbs; i++) {
      rvals[0] = bs*(baij->rstartbs + i);
      for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
      for (j=ai[i]; j<ai[i+1]; j++) {
        col = baij->garray[aj[j]]*bs;
        for (k=0; k<bs; k++) {
          ierr = MatSetValues_MPIBAIJ(A,bs,rvals,1,&col,a,INSERT_VALUES);CHKERRQ(ierr);
          col++; a += bs;
        }
      }
    }
    ierr = PetscFree(rvals);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject)mat,&matname);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIBAIJ*)(A->data))->A,matname);CHKERRQ(ierr);
      ierr = MatView_SeqBAIJ(((Mat_MPIBAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ_Binary"
static PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)mat->data;
  Mat_SeqBAIJ    *A = (Mat_SeqBAIJ*)a->A->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       i,*row_lens,*crow_lens,bs = mat->rmap->bs,j,k,bs2=a->bs2,header[4],nz,rlen;
  PetscInt       *range=0,nzmax,*column_indices,cnt,col,*garray = a->garray,cstart = mat->cmap->rstart/bs,len,pcnt,l,ll;
  int            fd;
  PetscScalar    *column_values;
  FILE           *file;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt       message_count,flowcontrolcount;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
  nz   = bs2*(A->nz + B->nz);
  rlen = mat->rmap->n;
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  if (!rank) {
    header[0] = MAT_FILE_CLASSID;
    header[1] = mat->rmap->N;
    header[2] = mat->cmap->N;

    ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    /* get the largest number of rows any processor has */
    range = mat->rmap->range;
    for (i=1; i<size; i++) {
      rlen = PetscMax(rlen,range[i+1] - range[i]);
    }
  } else {
    ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  }

  ierr = PetscMalloc1(rlen/bs,&crow_lens);CHKERRQ(ierr);
  /* compute lengths of each row */
  for (i=0; i<a->mbs; i++) {
    crow_lens[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i];
  }
  /* store the row lengths to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscMalloc1(rlen,&row_lens);CHKERRQ(ierr);
    rlen = (range[1] - range[0])/bs;
    for (i=0; i<rlen; i++) {
      for (j=0; j<bs; j++) {
        row_lens[i*bs+j] = bs*crow_lens[i];
      }
    }
    ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      rlen = (range[i+1] - range[i])/bs;
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(crow_lens,rlen,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      for (k=0; k<rlen; k++) {
        for (j=0; j<bs; j++) {
          row_lens[k*bs+j] = bs*crow_lens[k];
        }
      }
      ierr = PetscBinaryWrite(fd,row_lens,bs*rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(row_lens);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(crow_lens,mat->rmap->n/bs,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(crow_lens);CHKERRQ(ierr);

  /* load up the local column indices. Include entries for every row, not just one per block row,
     since process 0 does not have the information needed to expand a block row into its rows.
     This does require more communication, but still no more than the communication needed for
     the nonzero values */
  nzmax = nz; /* space the largest processor needs */
  ierr  = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  ierr  = PetscMalloc1(nzmax,&column_indices);CHKERRQ(ierr);
  cnt   = 0;
  for (i=0; i<a->mbs; i++) {
    pcnt = cnt;
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if ((col = garray[B->j[j]]) > cstart) break;
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*col+l;
      }
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*(A->j[k] + cstart)+l;
      }
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        column_indices[cnt++] = bs*garray[B->j[j]]+l;
      }
    }
    len = cnt - pcnt;
    for (k=1; k<bs; k++) {
      ierr = PetscMemcpy(&column_indices[cnt],&column_indices[pcnt],len*sizeof(PetscInt));CHKERRQ(ierr);
      cnt += len;
    }
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the columns to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_indices,cnt,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_indices,cnt,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&cnt,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_indices,cnt,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_indices);CHKERRQ(ierr);

  /* load up the numerical values */
  ierr = PetscMalloc1(nzmax,&column_values);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<a->mbs; i++) {
    rlen = bs*(B->i[i+1] - B->i[i] + A->i[i+1] - A->i[i]);
    for (j=B->i[i]; j<B->i[i+1]; j++) {
      if (garray[B->j[j]] > cstart) break;
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (k=A->i[i]; k<A->i[i+1]; k++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = A->a[bs2*k+l+bs*ll];
        }
      }
      cnt += bs;
    }
    for (; j<B->i[i+1]; j++) {
      for (l=0; l<bs; l++) {
        for (ll=0; ll<bs; ll++) {
          column_values[cnt + l*rlen + ll] = B->a[bs2*j+l+bs*ll];
        }
      }
      cnt += bs;
    }
    cnt += (bs-1)*rlen;
  }
  if (cnt != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,nz);

  /* store the column values to the file */
  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    MPI_Status status;
    ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    for (i=1; i<size; i++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,i,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(&cnt,1,MPIU_INT,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = MPI_Recv(column_values,cnt,MPIU_SCALAR,i,tag,PetscObjectComm((PetscObject)mat),&status);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fd,column_values,cnt,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  ierr = PetscFree(column_values);CHKERRQ(ierr);

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    fprintf(file,"-matload_block_size %d\n",(int)mat->rmap->bs);
  }
  PetscFunctionReturn(0);
}

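/*
   Layout of the binary file written above (integers are PetscInt, values are
   PetscScalar):

     header[4]       = {MAT_FILE_CLASSID, M, N, total nonzeros}
     row lengths     M entries, expanded from the block-row lengths
     column indices  nnz entries, row by row
     values          nnz entries, in the same ordering

   plus a "-matload_block_size <bs>" line in the companion .info file so that
   MatLoad() can recover the block size.
*/
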
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIBAIJ"
PetscErrorCode MatView_MPIBAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (iascii || isdraw || issocket) {
    ierr = MatView_MPIBAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else if (isbinary) {
    ierr = MatView_MPIBAIJ_Binary(mat,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

1352 #undef __FUNCT__
1353 #define __FUNCT__ "MatDestroy_MPIBAIJ"
1354 PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
1355 {
1356   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1357   PetscErrorCode ierr;
1358 
1359   PetscFunctionBegin;
1360 #if defined(PETSC_USE_LOG)
1361   PetscLogObjectState((PetscObject)mat,"Rows=%D,Cols=%D",mat->rmap->N,mat->cmap->N);
1362 #endif
1363   ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
1364   ierr = MatStashDestroy_Private(&mat->bstash);CHKERRQ(ierr);
1365   ierr = MatDestroy(&baij->A);CHKERRQ(ierr);
1366   ierr = MatDestroy(&baij->B);CHKERRQ(ierr);
1367 #if defined(PETSC_USE_CTABLE)
1368   ierr = PetscTableDestroy(&baij->colmap);CHKERRQ(ierr);
1369 #else
1370   ierr = PetscFree(baij->colmap);CHKERRQ(ierr);
1371 #endif
1372   ierr = PetscFree(baij->garray);CHKERRQ(ierr);
1373   ierr = VecDestroy(&baij->lvec);CHKERRQ(ierr);
1374   ierr = VecScatterDestroy(&baij->Mvctx);CHKERRQ(ierr);
1375   ierr = PetscFree2(baij->rowvalues,baij->rowindices);CHKERRQ(ierr);
1376   ierr = PetscFree(baij->barray);CHKERRQ(ierr);
1377   ierr = PetscFree2(baij->hd,baij->ht);CHKERRQ(ierr);
1378   ierr = PetscFree(baij->rangebs);CHKERRQ(ierr);
1379   ierr = PetscFree(mat->data);CHKERRQ(ierr);
1380 
1381   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
1382   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C",NULL);CHKERRQ(ierr);
1383   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C",NULL);CHKERRQ(ierr);
1384   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocation_C",NULL);CHKERRQ(ierr);
1385   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIBAIJSetPreallocationCSR_C",NULL);CHKERRQ(ierr);
1386   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C",NULL);CHKERRQ(ierr);
1387   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatSetHashTableFactor_C",NULL);CHKERRQ(ierr);
1388   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpisbaij_C",NULL);CHKERRQ(ierr);
1389   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpibaij_mpibstrm_C",NULL);CHKERRQ(ierr);
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 #undef __FUNCT__
1394 #define __FUNCT__ "MatMult_MPIBAIJ"
1395 PetscErrorCode MatMult_MPIBAIJ(Mat A,Vec xx,Vec yy)
1396 {
1397   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1398   PetscErrorCode ierr;
1399   PetscInt       nt;
1400 
1401   PetscFunctionBegin;
1402   ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
1403   if (nt != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and xx");
1404   ierr = VecGetLocalSize(yy,&nt);CHKERRQ(ierr);
1405   if (nt != A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A and yy");
1406   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1407   ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
1408   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1409   ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
1410   PetscFunctionReturn(0);
1411 }
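
/*
   Illustrative usage sketch (assumes A is an assembled MATMPIBAIJ matrix):
   the routine above overlaps the ghost-value scatter with the diagonal-block
   product, computing yy = A_diag*xx + B_offdiag*lvec.

     Vec x,y;
     ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = MatMult(A,x,y);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/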
1412 
1413 #undef __FUNCT__
1414 #define __FUNCT__ "MatMultAdd_MPIBAIJ"
1415 PetscErrorCode MatMultAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1416 {
1417   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1418   PetscErrorCode ierr;
1419 
1420   PetscFunctionBegin;
1421   ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1422   ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1423   ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1424   ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
1425   PetscFunctionReturn(0);
1426 }
1427 
1428 #undef __FUNCT__
1429 #define __FUNCT__ "MatMultTranspose_MPIBAIJ"
1430 PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A,Vec xx,Vec yy)
1431 {
1432   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1433   PetscErrorCode ierr;
1434   PetscBool      merged;
1435 
1436   PetscFunctionBegin;
1437   ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
1438   /* do nondiagonal part */
1439   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1440   if (!merged) {
1441     /* send it on its way */
1442     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1443     /* do local part */
1444     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1445     /* receive remote parts: note this assumes the values are not actually */
1446     /* inserted in yy until the next line */
1447     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1448   } else {
1449     /* do local part */
1450     ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
1451     /* send it on its way */
1452     ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1453     /* values were actually received in the Begin() but we still must call this no-op End() */
1454     ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1455   }
1456   PetscFunctionReturn(0);
1457 }
1458 
1459 #undef __FUNCT__
1460 #define __FUNCT__ "MatMultTransposeAdd_MPIBAIJ"
1461 PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A,Vec xx,Vec yy,Vec zz)
1462 {
1463   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1464   PetscErrorCode ierr;
1465 
1466   PetscFunctionBegin;
1467   /* do nondiagonal part */
1468   ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
1469   /* send it on its way */
1470   ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1471   /* do local part */
1472   ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
1473   /* receive remote parts: note this assumes the values are not actually */
1474   /* inserted in zz until the next line, which holds for this */
1475   /* implementation but may not hold in general. */
1476   ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1477   PetscFunctionReturn(0);
1478 }
1479 
1480 /*
1481   This only works correctly for square matrices where the subblock A->A is the
1482    diagonal block
1483 */
1484 #undef __FUNCT__
1485 #define __FUNCT__ "MatGetDiagonal_MPIBAIJ"
1486 PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A,Vec v)
1487 {
1488   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1489   PetscErrorCode ierr;
1490 
1491   PetscFunctionBegin;
1492   if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrices where A->A is the diagonal block");
1493   ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr);
1494   PetscFunctionReturn(0);
1495 }
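
/*
   Illustrative usage sketch (assumes a square assembled MATMPIBAIJ matrix A):

     Vec d;
     ierr = MatCreateVecs(A,NULL,&d);CHKERRQ(ierr);
     ierr = MatGetDiagonal(A,d);CHKERRQ(ierr);
     ierr = VecDestroy(&d);CHKERRQ(ierr);
*/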
1496 
1497 #undef __FUNCT__
1498 #define __FUNCT__ "MatScale_MPIBAIJ"
1499 PetscErrorCode MatScale_MPIBAIJ(Mat A,PetscScalar aa)
1500 {
1501   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1502   PetscErrorCode ierr;
1503 
1504   PetscFunctionBegin;
1505   ierr = MatScale(a->A,aa);CHKERRQ(ierr);
1506   ierr = MatScale(a->B,aa);CHKERRQ(ierr);
1507   PetscFunctionReturn(0);
1508 }
1509 
1510 #undef __FUNCT__
1511 #define __FUNCT__ "MatGetRow_MPIBAIJ"
1512 PetscErrorCode MatGetRow_MPIBAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1513 {
1514   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
1515   PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
1516   PetscErrorCode ierr;
1517   PetscInt       bs = matin->rmap->bs,bs2 = mat->bs2,i,*cworkA,*cworkB,**pcA,**pcB;
1518   PetscInt       nztot,nzA,nzB,lrow,brstart = matin->rmap->rstart,brend = matin->rmap->rend;
1519   PetscInt       *cmap,*idx_p,cstart = mat->cstartbs;
1520 
1521   PetscFunctionBegin;
1522   if (row < brstart || row >= brend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local rows");
1523   if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active");
1524   mat->getrowactive = PETSC_TRUE;
1525 
1526   if (!mat->rowvalues && (idx || v)) {
1527     /*
1528         allocate enough space to hold information from the longest row.
1529     */
1530     Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ*)mat->A->data,*Ba = (Mat_SeqBAIJ*)mat->B->data;
1531     PetscInt    max = 1,mbs = mat->mbs,tmp;
1532     for (i=0; i<mbs; i++) {
1533       tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
1534       if (max < tmp) max = tmp;
1535     }
1536     ierr = PetscMalloc2(max*bs2,&mat->rowvalues,max*bs2,&mat->rowindices);CHKERRQ(ierr);
1537   }
1538   lrow = row - brstart;
1539 
1540   pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
1541   if (!v)   {pvA = 0; pvB = 0;}
1542   if (!idx) {pcA = 0; if (!v) pcB = 0;}
1543   ierr  = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1544   ierr  = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1545   nztot = nzA + nzB;
1546 
1547   cmap = mat->garray;
1548   if (v  || idx) {
1549     if (nztot) {
1550       /* Sort by increasing column numbers, assuming A and B already sorted */
1551       PetscInt imark = -1;
1552       if (v) {
1553         *v = v_p = mat->rowvalues;
1554         for (i=0; i<nzB; i++) {
1555           if (cmap[cworkB[i]/bs] < cstart) v_p[i] = vworkB[i];
1556           else break;
1557         }
1558         imark = i;
1559         for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
1560         for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
1561       }
1562       if (idx) {
1563         *idx = idx_p = mat->rowindices;
1564         if (imark > -1) {
1565           for (i=0; i<imark; i++) {
1566             idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1567           }
1568         } else {
1569           for (i=0; i<nzB; i++) {
1570             if (cmap[cworkB[i]/bs] < cstart) idx_p[i] = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1571             else break;
1572           }
1573           imark = i;
1574         }
1575         for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart*bs + cworkA[i];
1576         for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]/bs]*bs + cworkB[i]%bs;
1577       }
1578     } else {
1579       if (idx) *idx = 0;
1580       if (v)   *v   = 0;
1581     }
1582   }
1583   *nz  = nztot;
1584   ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
1585   ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
1586   PetscFunctionReturn(0);
1587 }
1588 
1589 #undef __FUNCT__
1590 #define __FUNCT__ "MatRestoreRow_MPIBAIJ"
1591 PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
1592 {
1593   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*)mat->data;
1594 
1595   PetscFunctionBegin;
1596   if (!baij->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow not called");
1597   baij->getrowactive = PETSC_FALSE;
1598   PetscFunctionReturn(0);
1599 }
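
/*
   Illustrative usage sketch: MatGetRow()/MatRestoreRow() must be called in
   pairs, one row at a time, and only for locally owned rows (A is an
   assumed assembled matrix):

     PetscInt          row,rstart,rend,ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
     for (row=rstart; row<rend; row++) {
       ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
       ... inspect cols[] and vals[] here ...
       ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
     }
*/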
1600 
1601 #undef __FUNCT__
1602 #define __FUNCT__ "MatZeroEntries_MPIBAIJ"
1603 PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
1604 {
1605   Mat_MPIBAIJ    *l = (Mat_MPIBAIJ*)A->data;
1606   PetscErrorCode ierr;
1607 
1608   PetscFunctionBegin;
1609   ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
1610   ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
1611   PetscFunctionReturn(0);
1612 }
1613 
1614 #undef __FUNCT__
1615 #define __FUNCT__ "MatGetInfo_MPIBAIJ"
1616 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin,MatInfoType flag,MatInfo *info)
1617 {
1618   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)matin->data;
1619   Mat            A  = a->A,B = a->B;
1620   PetscErrorCode ierr;
1621   PetscReal      isend[5],irecv[5];
1622 
1623   PetscFunctionBegin;
1624   info->block_size = (PetscReal)matin->rmap->bs;
1625 
1626   ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
1627 
1628   isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
1629   isend[3] = info->memory;  isend[4] = info->mallocs;
1630 
1631   ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
1632 
1633   isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
1634   isend[3] += info->memory;  isend[4] += info->mallocs;
1635 
1636   if (flag == MAT_LOCAL) {
1637     info->nz_used      = isend[0];
1638     info->nz_allocated = isend[1];
1639     info->nz_unneeded  = isend[2];
1640     info->memory       = isend[3];
1641     info->mallocs      = isend[4];
1642   } else if (flag == MAT_GLOBAL_MAX) {
1643     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1644 
1645     info->nz_used      = irecv[0];
1646     info->nz_allocated = irecv[1];
1647     info->nz_unneeded  = irecv[2];
1648     info->memory       = irecv[3];
1649     info->mallocs      = irecv[4];
1650   } else if (flag == MAT_GLOBAL_SUM) {
1651     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
1652 
1653     info->nz_used      = irecv[0];
1654     info->nz_allocated = irecv[1];
1655     info->nz_unneeded  = irecv[2];
1656     info->memory       = irecv[3];
1657     info->mallocs      = irecv[4];
1658   } else SETERRQ1(PetscObjectComm((PetscObject)matin),PETSC_ERR_ARG_WRONG,"Unknown MatInfoType argument %d",(int)flag);
1659   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1660   info->fill_ratio_needed = 0;
1661   info->factor_mallocs    = 0;
1662   PetscFunctionReturn(0);
1663 }
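
/*
   Illustrative usage sketch: query storage statistics summed over the
   communicator (A is an assumed assembled matrix):

     MatInfo info;
     ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"nz used %g, allocated %g\n",info.nz_used,info.nz_allocated);CHKERRQ(ierr);
*/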
1664 
1665 #undef __FUNCT__
1666 #define __FUNCT__ "MatSetOption_MPIBAIJ"
1667 PetscErrorCode MatSetOption_MPIBAIJ(Mat A,MatOption op,PetscBool flg)
1668 {
1669   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1670   PetscErrorCode ierr;
1671 
1672   PetscFunctionBegin;
1673   switch (op) {
1674   case MAT_NEW_NONZERO_LOCATIONS:
1675   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1676   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1677   case MAT_KEEP_NONZERO_PATTERN:
1678   case MAT_NEW_NONZERO_LOCATION_ERR:
1679     MatCheckPreallocated(A,1);
1680     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1681     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1682     break;
1683   case MAT_ROW_ORIENTED:
1684     MatCheckPreallocated(A,1);
1685     a->roworiented = flg;
1686 
1687     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1688     ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
1689     break;
1690   case MAT_NEW_DIAGONALS:
1691     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
1692     break;
1693   case MAT_IGNORE_OFF_PROC_ENTRIES:
1694     a->donotstash = flg;
1695     break;
1696   case MAT_USE_HASH_TABLE:
1697     a->ht_flag = flg;
1698     a->ht_fact = 1.39;
1699     break;
1700   case MAT_SYMMETRIC:
1701   case MAT_STRUCTURALLY_SYMMETRIC:
1702   case MAT_HERMITIAN:
1703   case MAT_SYMMETRY_ETERNAL:
1704     MatCheckPreallocated(A,1);
1705     ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1706     break;
1707   default:
1708     SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"unknown option %d",op);
1709   }
1710   PetscFunctionReturn(0);
1711 }
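
/*
   Illustrative usage sketch: the options handled above are set before
   assembly, e.g. to enable the hash-table insertion path and to drop
   off-process entries (A is an assumed MATMPIBAIJ matrix):

     ierr = MatSetOption(A,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatSetOption(A,MAT_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
*/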
1712 
1713 #undef __FUNCT__
1714 #define __FUNCT__ "MatTranspose_MPIBAIJ"
1715 PetscErrorCode MatTranspose_MPIBAIJ(Mat A,MatReuse reuse,Mat *matout)
1716 {
1717   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)A->data;
1718   Mat_SeqBAIJ    *Aloc;
1719   Mat            B;
1720   PetscErrorCode ierr;
1721   PetscInt       M =A->rmap->N,N=A->cmap->N,*ai,*aj,i,*rvals,j,k,col;
1722   PetscInt       bs=A->rmap->bs,mbs=baij->mbs;
1723   MatScalar      *a;
1724 
1725   PetscFunctionBegin;
1726   if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");
1727   if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
1728     ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
1729     ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
1730     ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
1731     /* Do not know preallocation information, but must set block size */
1732     ierr = MatMPIBAIJSetPreallocation(B,A->rmap->bs,PETSC_DECIDE,NULL,PETSC_DECIDE,NULL);CHKERRQ(ierr);
1733   } else {
1734     B = *matout;
1735   }
1736 
1737   /* copy over the A part */
1738   Aloc = (Mat_SeqBAIJ*)baij->A->data;
1739   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1740   ierr = PetscMalloc1(bs,&rvals);CHKERRQ(ierr);
1741 
1742   for (i=0; i<mbs; i++) {
1743     rvals[0] = bs*(baij->rstartbs + i);
1744     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1745     for (j=ai[i]; j<ai[i+1]; j++) {
1746       col = (baij->cstartbs+aj[j])*bs;
1747       for (k=0; k<bs; k++) {
1748         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1749 
1750         col++; a += bs;
1751       }
1752     }
1753   }
1754   /* copy over the B part */
1755   Aloc = (Mat_SeqBAIJ*)baij->B->data;
1756   ai   = Aloc->i; aj = Aloc->j; a = Aloc->a;
1757   for (i=0; i<mbs; i++) {
1758     rvals[0] = bs*(baij->rstartbs + i);
1759     for (j=1; j<bs; j++) rvals[j] = rvals[j-1] + 1;
1760     for (j=ai[i]; j<ai[i+1]; j++) {
1761       col = baij->garray[aj[j]]*bs;
1762       for (k=0; k<bs; k++) {
1763         ierr = MatSetValues_MPIBAIJ(B,1,&col,bs,rvals,a,INSERT_VALUES);CHKERRQ(ierr);
1764         col++;
1765         a += bs;
1766       }
1767     }
1768   }
1769   ierr = PetscFree(rvals);CHKERRQ(ierr);
1770   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1771   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1772 
1773   if (reuse == MAT_INITIAL_MATRIX || *matout != A) *matout = B;
1774   else {
1775     ierr = MatHeaderMerge(A,&B);CHKERRQ(ierr);
1776   }
1777   PetscFunctionReturn(0);
1778 }
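
/*
   Illustrative usage sketch: out-of-place and in-place transposes of an
   assumed matrix A (the in-place form requires a square matrix, as checked
   above):

     Mat At;
     ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr);
     ierr = MatTranspose(A,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
*/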
1779 
1780 #undef __FUNCT__
1781 #define __FUNCT__ "MatDiagonalScale_MPIBAIJ"
1782 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat,Vec ll,Vec rr)
1783 {
1784   Mat_MPIBAIJ    *baij = (Mat_MPIBAIJ*)mat->data;
1785   Mat            a     = baij->A,b = baij->B;
1786   PetscErrorCode ierr;
1787   PetscInt       s1,s2,s3;
1788 
1789   PetscFunctionBegin;
1790   ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
1791   if (rr) {
1792     ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
1793     if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
1794     /* Overlap communication with computation. */
1795     ierr = VecScatterBegin(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1796   }
1797   if (ll) {
1798     ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
1799     if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
1800     ierr = (*b->ops->diagonalscale)(b,ll,NULL);CHKERRQ(ierr);
1801   }
1802   /* scale the diagonal block */
1803   ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);
1804 
1805   if (rr) {
1806     /* Do a scatter end and then right scale the off-diagonal block */
1807     ierr = VecScatterEnd(baij->Mvctx,rr,baij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1808     ierr = (*b->ops->diagonalscale)(b,NULL,baij->lvec);CHKERRQ(ierr);
1809   }
1810   PetscFunctionReturn(0);
1811 }
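
/*
   Illustrative usage sketch: A <- diag(l) A diag(r); l is distributed like
   the rows of A and r like the columns (A is an assumed assembled matrix):

     Vec l,r;
     ierr = MatCreateVecs(A,&r,&l);CHKERRQ(ierr);
     ierr = VecSet(l,2.0);CHKERRQ(ierr);
     ierr = VecSet(r,0.5);CHKERRQ(ierr);
     ierr = MatDiagonalScale(A,l,r);CHKERRQ(ierr);
     ierr = VecDestroy(&l);CHKERRQ(ierr);
     ierr = VecDestroy(&r);CHKERRQ(ierr);
*/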
1812 
1813 #undef __FUNCT__
1814 #define __FUNCT__ "MatZeroRows_MPIBAIJ"
1815 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1816 {
1817   Mat_MPIBAIJ   *l      = (Mat_MPIBAIJ *) A->data;
1818   PetscInt      *lrows;
1819   PetscInt       r, len;
1820   PetscErrorCode ierr;
1821 
1822   PetscFunctionBegin;
1823   /* get locally owned rows */
1824   ierr = MatZeroRowsMapLocal_Private(A,N,rows,&len,&lrows);CHKERRQ(ierr);
1825   /* fix right hand side if needed */
1826   if (x && b) {
1827     const PetscScalar *xx;
1828     PetscScalar       *bb;
1829 
1830     ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
1831     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1832     for (r = 0; r < len; ++r) bb[lrows[r]] = diag*xx[lrows[r]];
1833     ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
1834     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1835   }
1836 
1837   /* actually zap the local rows */
1838   /*
1839         Zero the required rows. If the "diagonal block" of the matrix
1840      is square and the user wishes to set the diagonal, we use separate
1841      code so that MatSetValues() is not called for each diagonal entry;
1842      each such call would allocate new memory (many mallocs) and slow
1843      things down.
1844   */
1845   /* must zero l->B before l->A because the (diag) case below may put values into l->B */
1846   ierr = MatZeroRows_SeqBAIJ(l->B,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1847   if (A->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
1848     PetscBool cong;
1849     ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
1850     if (cong) A->congruentlayouts = 1;
1851     else      A->congruentlayouts = 0;
1852   }
1853   if ((diag != 0.0) && A->congruentlayouts) {
1854     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,diag,NULL,NULL);CHKERRQ(ierr);
1855   } else if (diag != 0.0) {
1856     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,0,0);CHKERRQ(ierr);
1857     if (((Mat_SeqBAIJ*)l->A->data)->nonew) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1858        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1859     for (r = 0; r < len; ++r) {
1860       const PetscInt row = lrows[r] + A->rmap->rstart;
1861       ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
1862     }
1863     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1864     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1865   } else {
1866     ierr = MatZeroRows_SeqBAIJ(l->A,len,lrows,0.0,NULL,NULL);CHKERRQ(ierr);
1867   }
1868   ierr = PetscFree(lrows);CHKERRQ(ierr);
1869 
1870   /* only change matrix nonzero state if pattern was allowed to be changed */
1871   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1872     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1873     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1874   }
1875   PetscFunctionReturn(0);
1876 }
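
/*
   Illustrative usage sketch: zero two global rows, put 1.0 on the diagonal,
   and fix up the right-hand side b so the solution keeps the values in x
   (x and b are assumed caller-created vectors; the rows listed may include
   rows owned by other processes):

     PetscInt zrows[2] = {0,1};
     ierr = MatZeroRows(A,2,zrows,1.0,x,b);CHKERRQ(ierr);
*/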
1877 
1878 #undef __FUNCT__
1879 #define __FUNCT__ "MatZeroRowsColumns_MPIBAIJ"
1880 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1881 {
1882   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ*)A->data;
1883   PetscErrorCode    ierr;
1884   PetscMPIInt       n = A->rmap->n;
1885   PetscInt          i,j,k,r,p = 0,len = 0,row,col,count;
1886   PetscInt          *lrows,*owners = A->rmap->range;
1887   PetscSFNode       *rrows;
1888   PetscSF           sf;
1889   const PetscScalar *xx;
1890   PetscScalar       *bb,*mask;
1891   Vec               xmask,lmask;
1892   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ*)l->B->data;
1893   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1894   PetscScalar       *aa;
1895 
1896   PetscFunctionBegin;
1897   /* Create SF where leaves are input rows and roots are owned rows */
1898   ierr = PetscMalloc1(n, &lrows);CHKERRQ(ierr);
1899   for (r = 0; r < n; ++r) lrows[r] = -1;
1900   ierr = PetscMalloc1(N, &rrows);CHKERRQ(ierr);
1901   for (r = 0; r < N; ++r) {
1902     const PetscInt idx   = rows[r];
1903     if (idx < 0 || A->rmap->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row %D out of range [0,%D)",idx,A->rmap->N);
1904     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this row too */
1905       ierr = PetscLayoutFindOwner(A->rmap,idx,&p);CHKERRQ(ierr);
1906     }
1907     rrows[r].rank  = p;
1908     rrows[r].index = rows[r] - owners[p];
1909   }
1910   ierr = PetscSFCreate(PetscObjectComm((PetscObject) A), &sf);CHKERRQ(ierr);
1911   ierr = PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER);CHKERRQ(ierr);
1912   /* Collect flags for rows to be zeroed */
1913   ierr = PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1914   ierr = PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *) rows, lrows, MPI_LOR);CHKERRQ(ierr);
1915   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
1916   /* Compress and put in row numbers */
1917   for (r = 0; r < n; ++r) if (lrows[r] >= 0) lrows[len++] = r;
1918   /* zero diagonal part of matrix */
1919   ierr = MatZeroRowsColumns(l->A,len,lrows,diag,x,b);CHKERRQ(ierr);
1920   /* handle off diagonal part of matrix */
1921   ierr = MatCreateVecs(A,&xmask,NULL);CHKERRQ(ierr);
1922   ierr = VecDuplicate(l->lvec,&lmask);CHKERRQ(ierr);
1923   ierr = VecGetArray(xmask,&bb);CHKERRQ(ierr);
1924   for (i=0; i<len; i++) bb[lrows[i]] = 1;
1925   ierr = VecRestoreArray(xmask,&bb);CHKERRQ(ierr);
1926   ierr = VecScatterBegin(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1927   ierr = VecScatterEnd(l->Mvctx,xmask,lmask,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1928   ierr = VecDestroy(&xmask);CHKERRQ(ierr);
1929   if (x) {
1930     ierr = VecScatterBegin(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1931     ierr = VecScatterEnd(l->Mvctx,x,l->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1932     ierr = VecGetArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1933     ierr = VecGetArray(b,&bb);CHKERRQ(ierr);
1934   }
1935   ierr = VecGetArray(lmask,&mask);CHKERRQ(ierr);
1936   /* remove zeroed rows of off diagonal matrix */
1937   for (i = 0; i < len; ++i) {
1938     row   = lrows[i];
1939     count = (baij->i[row/bs+1] - baij->i[row/bs])*bs;
1940     aa    = ((MatScalar*)(baij->a)) + baij->i[row/bs]*bs2 + (row%bs);
1941     for (k = 0; k < count; ++k) {
1942       aa[0] = 0.0;
1943       aa   += bs;
1944     }
1945   }
1946   /* loop over all elements of off process part of matrix zeroing removed columns */
1947   for (i = 0; i < l->B->rmap->N; ++i) {
1948     row = i/bs;
1949     for (j = baij->i[row]; j < baij->i[row+1]; ++j) {
1950       for (k = 0; k < bs; ++k) {
1951         col = bs*baij->j[j] + k;
1952         if (PetscAbsScalar(mask[col])) {
1953           aa = ((MatScalar*)(baij->a)) + j*bs2 + (i%bs) + bs*k;
1954           if (x) bb[i] -= aa[0]*xx[col];
1955           aa[0] = 0.0;
1956         }
1957       }
1958     }
1959   }
1960   if (x) {
1961     ierr = VecRestoreArray(b,&bb);CHKERRQ(ierr);
1962     ierr = VecRestoreArrayRead(l->lvec,&xx);CHKERRQ(ierr);
1963   }
1964   ierr = VecRestoreArray(lmask,&mask);CHKERRQ(ierr);
1965   ierr = VecDestroy(&lmask);CHKERRQ(ierr);
1966   ierr = PetscFree(lrows);CHKERRQ(ierr);
1967 
1968   /* only change matrix nonzero state if pattern was allowed to be changed */
1969   if (!((Mat_SeqBAIJ*)(l->A->data))->keepnonzeropattern) {
1970     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1971     ierr = MPIU_Allreduce(&state,&A->nonzerostate,1,MPIU_INT64,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
1972   }
1973   PetscFunctionReturn(0);
1974 }
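
/*
   Illustrative usage sketch: like MatZeroRows() but the corresponding
   columns are zeroed as well, so a symmetric matrix stays symmetric
   (nzr, zrows, x, and b are assumed caller-supplied):

     ierr = MatZeroRowsColumns(A,nzr,zrows,1.0,x,b);CHKERRQ(ierr);
*/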
1975 
1976 #undef __FUNCT__
1977 #define __FUNCT__ "MatSetUnfactored_MPIBAIJ"
1978 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1979 {
1980   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
1981   PetscErrorCode ierr;
1982 
1983   PetscFunctionBegin;
1984   ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
1985   PetscFunctionReturn(0);
1986 }
1987 
1988 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat,MatDuplicateOption,Mat*);
1989 
1990 #undef __FUNCT__
1991 #define __FUNCT__ "MatEqual_MPIBAIJ"
1992 PetscErrorCode MatEqual_MPIBAIJ(Mat A,Mat B,PetscBool  *flag)
1993 {
1994   Mat_MPIBAIJ    *matB = (Mat_MPIBAIJ*)B->data,*matA = (Mat_MPIBAIJ*)A->data;
1995   Mat            a,b,c,d;
1996   PetscBool      flg;
1997   PetscErrorCode ierr;
1998 
1999   PetscFunctionBegin;
2000   a = matA->A; b = matA->B;
2001   c = matB->A; d = matB->B;
2002 
2003   ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
2004   if (flg) {
2005     ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
2006   }
2007   ierr = MPIU_Allreduce(&flg,flag,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2008   PetscFunctionReturn(0);
2009 }
2010 
2011 #undef __FUNCT__
2012 #define __FUNCT__ "MatCopy_MPIBAIJ"
2013 PetscErrorCode MatCopy_MPIBAIJ(Mat A,Mat B,MatStructure str)
2014 {
2015   PetscErrorCode ierr;
2016   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2017   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
2018 
2019   PetscFunctionBegin;
2020   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
2021   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
2022     ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
2023   } else {
2024     ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
2025     ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
2026   }
2027   PetscFunctionReturn(0);
2028 }
2029 
2030 #undef __FUNCT__
2031 #define __FUNCT__ "MatSetUp_MPIBAIJ"
2032 PetscErrorCode MatSetUp_MPIBAIJ(Mat A)
2033 {
2034   PetscErrorCode ierr;
2035 
2036   PetscFunctionBegin;
2037   ierr = MatMPIBAIJSetPreallocation(A,A->rmap->bs,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
2038   PetscFunctionReturn(0);
2039 }
2040 
2041 #undef __FUNCT__
2042 #define __FUNCT__ "MatAXPYGetPreallocation_MPIBAIJ"
2043 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y,const PetscInt *yltog,Mat X,const PetscInt *xltog,PetscInt *nnz)
2044 {
2045   PetscErrorCode ierr;
2046   PetscInt       bs = Y->rmap->bs,m = Y->rmap->N/bs;
2047   Mat_SeqBAIJ    *x = (Mat_SeqBAIJ*)X->data;
2048   Mat_SeqBAIJ    *y = (Mat_SeqBAIJ*)Y->data;
2049 
2050   PetscFunctionBegin;
2051   ierr = MatAXPYGetPreallocation_MPIX_private(m,x->i,x->j,xltog,y->i,y->j,yltog,nnz);CHKERRQ(ierr);
2052   PetscFunctionReturn(0);
2053 }
2054 
2055 #undef __FUNCT__
2056 #define __FUNCT__ "MatAXPY_MPIBAIJ"
2057 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
2058 {
2059   PetscErrorCode ierr;
2060   Mat_MPIBAIJ    *xx=(Mat_MPIBAIJ*)X->data,*yy=(Mat_MPIBAIJ*)Y->data;
2061   PetscBLASInt   bnz,one=1;
2062   Mat_SeqBAIJ    *x,*y;
2063 
2064   PetscFunctionBegin;
2065   if (str == SAME_NONZERO_PATTERN) {
2066     PetscScalar alpha = a;
2067     x    = (Mat_SeqBAIJ*)xx->A->data;
2068     y    = (Mat_SeqBAIJ*)yy->A->data;
2069     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2070     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2071     x    = (Mat_SeqBAIJ*)xx->B->data;
2072     y    = (Mat_SeqBAIJ*)yy->B->data;
2073     ierr = PetscBLASIntCast(x->nz,&bnz);CHKERRQ(ierr);
2074     PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one));
2075     ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
2076   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
2077     ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
2078   } else {
2079     Mat      B;
2080     PetscInt *nnz_d,*nnz_o,bs=Y->rmap->bs;
2081     ierr = PetscMalloc1(yy->A->rmap->N,&nnz_d);CHKERRQ(ierr);
2082     ierr = PetscMalloc1(yy->B->rmap->N,&nnz_o);CHKERRQ(ierr);
2083     ierr = MatCreate(PetscObjectComm((PetscObject)Y),&B);CHKERRQ(ierr);
2084     ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr);
2085     ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr);
2086     ierr = MatSetBlockSizesFromMats(B,Y,Y);CHKERRQ(ierr);
2087     ierr = MatSetType(B,MATMPIBAIJ);CHKERRQ(ierr);
2088     ierr = MatAXPYGetPreallocation_SeqBAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr);
2089     ierr = MatAXPYGetPreallocation_MPIBAIJ(yy->B,yy->garray,xx->B,xx->garray,nnz_o);CHKERRQ(ierr);
2090     ierr = MatMPIBAIJSetPreallocation(B,bs,0,nnz_d,0,nnz_o);CHKERRQ(ierr);
2091     /* MatAXPY_BasicWithPreallocation() for BAIJ matrices is much slower than for AIJ, even for bs=1! */
2092     ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr);
2093     ierr = MatHeaderReplace(Y,&B);CHKERRQ(ierr);
2094     ierr = PetscFree(nnz_d);CHKERRQ(ierr);
2095     ierr = PetscFree(nnz_o);CHKERRQ(ierr);
2096   }
2097   PetscFunctionReturn(0);
2098 }
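
/*
   Illustrative usage sketch: Y <- a*X + Y.  SAME_NONZERO_PATTERN takes the
   fast BLAS path above; DIFFERENT_NONZERO_PATTERN rebuilds Y with a correct
   preallocation (X and Y are assumed matrices with identical layouts):

     ierr = MatAXPY(Y,2.0,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
*/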
2099 
2100 #undef __FUNCT__
2101 #define __FUNCT__ "MatRealPart_MPIBAIJ"
2102 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
2103 {
2104   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2105   PetscErrorCode ierr;
2106 
2107   PetscFunctionBegin;
2108   ierr = MatRealPart(a->A);CHKERRQ(ierr);
2109   ierr = MatRealPart(a->B);CHKERRQ(ierr);
2110   PetscFunctionReturn(0);
2111 }
2112 
2113 #undef __FUNCT__
2114 #define __FUNCT__ "MatImaginaryPart_MPIBAIJ"
2115 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
2116 {
2117   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2118   PetscErrorCode ierr;
2119 
2120   PetscFunctionBegin;
2121   ierr = MatImaginaryPart(a->A);CHKERRQ(ierr);
2122   ierr = MatImaginaryPart(a->B);CHKERRQ(ierr);
2123   PetscFunctionReturn(0);
2124 }
2125 
2126 #undef __FUNCT__
2127 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ"
2128 PetscErrorCode MatGetSubMatrix_MPIBAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat)
2129 {
2130   PetscErrorCode ierr;
2131   IS             iscol_local;
2132   PetscInt       csize;
2133 
2134   PetscFunctionBegin;
2135   ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr);
2136   if (call == MAT_REUSE_MATRIX) {
2137     ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr);
2138     if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2139   } else {
2140     ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr);
2141   }
2142   ierr = MatGetSubMatrix_MPIBAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr);
2143   if (call == MAT_INITIAL_MATRIX) {
2144     ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr);
2145     ierr = ISDestroy(&iscol_local);CHKERRQ(ierr);
2146   }
2147   PetscFunctionReturn(0);
2148 }
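
/*
   Illustrative usage sketch: each process supplies the rows and columns it
   should own in the extracted submatrix (here simply its own diagonal
   block; rstart and rend are assumed taken from MatGetOwnershipRange()):

     IS  isrow,iscol;
     Mat S;
     ierr = ISCreateStride(PETSC_COMM_WORLD,rend-rstart,rstart,1,&isrow);CHKERRQ(ierr);
     ierr = ISCreateStride(PETSC_COMM_WORLD,rend-rstart,rstart,1,&iscol);CHKERRQ(ierr);
     ierr = MatGetSubMatrix(A,isrow,iscol,MAT_INITIAL_MATRIX,&S);CHKERRQ(ierr);
     ierr = ISDestroy(&isrow);CHKERRQ(ierr);
     ierr = ISDestroy(&iscol);CHKERRQ(ierr);
*/
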
2149 extern PetscErrorCode MatGetSubMatrices_MPIBAIJ_local(Mat,PetscInt,const IS[],const IS[],MatReuse,PetscBool*,PetscBool*,Mat*);
2150 #undef __FUNCT__
2151 #define __FUNCT__ "MatGetSubMatrix_MPIBAIJ_Private"
2152 /*
2153   Not ideal, since it makes two copies of the submatrix: first a SeqBAIJ
2154   in local storage, then the end result by concatenating the local matrices.
2155   Writing it directly would be much like MatGetSubMatrices_MPIBAIJ().
2156   This routine is used for BAIJ and SBAIJ matrices (unfortunate dependency).
2157 */
2158 PetscErrorCode MatGetSubMatrix_MPIBAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
2159 {
2160   PetscErrorCode ierr;
2161   PetscMPIInt    rank,size;
2162   PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j,bs;
2163   PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal,ncol,nrow;
2164   Mat            M,Mreuse;
2165   MatScalar      *vwork,*aa;
2166   MPI_Comm       comm;
2167   IS             isrow_new, iscol_new;
2168   PetscBool      idflag,allrows, allcols;
2169   Mat_SeqBAIJ    *aij;
2170 
2171   PetscFunctionBegin;
2172   ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
2173   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
2174   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
2175   /* The compression and expansion should be avoided. It does not report
2176      errors and might change the indices, hence it is buggy */
2177   ierr = ISCompressIndicesGeneral(mat->rmap->N,mat->rmap->n,mat->rmap->bs,1,&isrow,&isrow_new);CHKERRQ(ierr);
2178   ierr = ISCompressIndicesGeneral(mat->cmap->N,mat->cmap->n,mat->cmap->bs,1,&iscol,&iscol_new);CHKERRQ(ierr);
2179 
2180   /* Check for special case: each processor gets entire matrix columns */
2181   ierr = ISIdentity(iscol,&idflag);CHKERRQ(ierr);
2182   ierr = ISGetLocalSize(iscol,&ncol);CHKERRQ(ierr);
2183   if (idflag && ncol == mat->cmap->N) allcols = PETSC_TRUE;
2184   else allcols = PETSC_FALSE;
2185 
2186   ierr = ISIdentity(isrow,&idflag);CHKERRQ(ierr);
2187   ierr = ISGetLocalSize(isrow,&nrow);CHKERRQ(ierr);
2188   if (idflag && nrow == mat->rmap->N) allrows = PETSC_TRUE;
2189   else allrows = PETSC_FALSE;
2190 
2191   if (call ==  MAT_REUSE_MATRIX) {
2192     ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject*)&Mreuse);CHKERRQ(ierr);
2193     if (!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
2194     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_REUSE_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2195   } else {
2196     ierr = MatGetSubMatrices_MPIBAIJ_local(mat,1,&isrow_new,&iscol_new,MAT_INITIAL_MATRIX,&allrows,&allcols,&Mreuse);CHKERRQ(ierr);
2197   }
2198   ierr = ISDestroy(&isrow_new);CHKERRQ(ierr);
2199   ierr = ISDestroy(&iscol_new);CHKERRQ(ierr);
2200   /*
2201       m - number of local rows
2202       n - number of columns (same on all processors)
2203       rstart - first row in new global matrix generated
2204   */
2205   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
2206   ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
2207   m    = m/bs;
2208   n    = n/bs;
2209 
2210   if (call == MAT_INITIAL_MATRIX) {
2211     aij = (Mat_SeqBAIJ*)(Mreuse)->data;
2212     ii  = aij->i;
2213     jj  = aij->j;
2214 
2215     /*
2216         Determine the number of non-zeros in the diagonal and off-diagonal
2217         portions of the matrix in order to do correct preallocation
2218     */
2219 
2220     /* first get start and end of "diagonal" columns */
2221     if (csize == PETSC_DECIDE) {
2222       ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
2223       if (mglobal == n*bs) { /* square matrix */
2224         nlocal = m;
2225       } else {
2226         nlocal = n/size + ((n % size) > rank);
2227       }
2228     } else {
2229       nlocal = csize/bs;
2230     }
2231     ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
2232     rstart = rend - nlocal;
2233     if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
2234 
2235     /* next, compute all the lengths */
2236     ierr  = PetscMalloc2(m+1,&dlens,m+1,&olens);CHKERRQ(ierr);
2237     for (i=0; i<m; i++) {
2238       jend = ii[i+1] - ii[i];
2239       olen = 0;
2240       dlen = 0;
2241       for (j=0; j<jend; j++) {
2242         if (*jj < rstart || *jj >= rend) olen++;
2243         else dlen++;
2244         jj++;
2245       }
2246       olens[i] = olen;
2247       dlens[i] = dlen;
2248     }
2249     ierr = MatCreate(comm,&M);CHKERRQ(ierr);
2250     ierr = MatSetSizes(M,bs*m,bs*nlocal,PETSC_DECIDE,bs*n);CHKERRQ(ierr);
2251     ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
2252     ierr = MatMPIBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2253     ierr = MatMPISBAIJSetPreallocation(M,bs,0,dlens,0,olens);CHKERRQ(ierr);
2254     ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
2255   } else {
2256     PetscInt ml,nl;
2257 
2258     M    = *newmat;
2259     ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
2260     if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
2261     ierr = MatZeroEntries(M);CHKERRQ(ierr);
2262     /*
2263          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2264        rather than the slower MatSetValues().
2265     */
2266     M->was_assembled = PETSC_TRUE;
2267     M->assembled     = PETSC_FALSE;
2268   }
2269   ierr = MatSetOption(M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
2270   ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
2271   aij  = (Mat_SeqBAIJ*)(Mreuse)->data;
2272   ii   = aij->i;
2273   jj   = aij->j;
2274   aa   = aij->a;
2275   for (i=0; i<m; i++) {
2276     row   = rstart/bs + i;
2277     nz    = ii[i+1] - ii[i];
2278     cwork = jj;     jj += nz;
2279     vwork = aa;     aa += nz*bs*bs;
2280     ierr  = MatSetValuesBlocked_MPIBAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
2281   }
2282 
2283   ierr    = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2284   ierr    = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2285   *newmat = M;
2286 
2287   /* save submatrix used in processor for next request */
2288   if (call ==  MAT_INITIAL_MATRIX) {
2289     ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
2290     ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
2291   }
2292   PetscFunctionReturn(0);
2293 }
2294 
2295 #undef __FUNCT__
2296 #define __FUNCT__ "MatPermute_MPIBAIJ"
2297 PetscErrorCode MatPermute_MPIBAIJ(Mat A,IS rowp,IS colp,Mat *B)
2298 {
2299   MPI_Comm       comm,pcomm;
2300   PetscInt       clocal_size,nrows;
2301   const PetscInt *rows;
2302   PetscMPIInt    size;
2303   IS             crowp,lcolp;
2304   PetscErrorCode ierr;
2305 
2306   PetscFunctionBegin;
2307   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
2308   /* make a collective version of 'rowp' */
2309   ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr);
2310   if (pcomm==comm) {
2311     crowp = rowp;
2312   } else {
2313     ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr);
2314     ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr);
2315     ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr);
2316     ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr);
2317   }
2318   ierr = ISSetPermutation(crowp);CHKERRQ(ierr);
2319   /* make a local version of 'colp' */
2320   ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr);
2321   ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr);
2322   if (size==1) {
2323     lcolp = colp;
2324   } else {
2325     ierr = ISAllGather(colp,&lcolp);CHKERRQ(ierr);
2326   }
2327   ierr = ISSetPermutation(lcolp);CHKERRQ(ierr);
2328   /* now we just get the submatrix */
2329   ierr = MatGetLocalSize(A,NULL,&clocal_size);CHKERRQ(ierr);
2330   ierr = MatGetSubMatrix_MPIBAIJ_Private(A,crowp,lcolp,clocal_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr);
2331   /* clean up */
2332   if (pcomm!=comm) {
2333     ierr = ISDestroy(&crowp);CHKERRQ(ierr);
2334   }
2335   if (size>1) {
2336     ierr = ISDestroy(&lcolp);CHKERRQ(ierr);
2337   }
2338   PetscFunctionReturn(0);
2339 }
2340 
2341 #undef __FUNCT__
2342 #define __FUNCT__ "MatGetGhosts_MPIBAIJ"
2343 PetscErrorCode  MatGetGhosts_MPIBAIJ(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
2344 {
2345   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ*) mat->data;
2346   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ*)baij->B->data;
2347 
2348   PetscFunctionBegin;
2349   if (nghosts) *nghosts = B->nbs;
2350   if (ghosts) *ghosts = baij->garray;
2351   PetscFunctionReturn(0);
2352 }
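
/*
   Illustrative usage sketch: retrieve the global block indices of the ghost
   (off-process) columns; the returned array is owned by the matrix and must
   not be freed (A is an assumed assembled MATMPIBAIJ matrix):

     PetscInt       nghosts;
     const PetscInt *ghosts;
     ierr = MatGetGhosts(A,&nghosts,&ghosts);CHKERRQ(ierr);
*/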
2353 
2354 #undef __FUNCT__
2355 #define __FUNCT__ "MatGetSeqNonzeroStructure_MPIBAIJ"
2356 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A,Mat *newmat)
2357 {
2358   Mat            B;
2359   Mat_MPIBAIJ    *a  = (Mat_MPIBAIJ*)A->data;
2360   Mat_SeqBAIJ    *ad = (Mat_SeqBAIJ*)a->A->data,*bd = (Mat_SeqBAIJ*)a->B->data;
2361   Mat_SeqAIJ     *b;
2362   PetscErrorCode ierr;
2363   PetscMPIInt    size,rank,*recvcounts = 0,*displs = 0;
2364   PetscInt       sendcount,i,*rstarts = A->rmap->range,n,cnt,j,bs = A->rmap->bs;
2365   PetscInt       m,*garray = a->garray,*lens,*jsendbuf,*a_jsendbuf,*b_jsendbuf;
2366 
2367   PetscFunctionBegin;
2368   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
2369   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)A),&rank);CHKERRQ(ierr);
2370 
2371   /* ----------------------------------------------------------------
2372      Tell every processor the number of nonzeros per row
2373   */
2374   ierr = PetscMalloc1(A->rmap->N/bs,&lens);CHKERRQ(ierr);
2375   for (i=A->rmap->rstart/bs; i<A->rmap->rend/bs; i++) {
2376     lens[i] = ad->i[i-A->rmap->rstart/bs+1] - ad->i[i-A->rmap->rstart/bs] + bd->i[i-A->rmap->rstart/bs+1] - bd->i[i-A->rmap->rstart/bs];
2377   }
2378   ierr      = PetscMalloc1(2*size,&recvcounts);CHKERRQ(ierr);
2379   displs    = recvcounts + size;
2380   for (i=0; i<size; i++) {
2381     recvcounts[i] = A->rmap->range[i+1]/bs - A->rmap->range[i]/bs;
2382     displs[i]     = A->rmap->range[i]/bs;
2383   }
2384 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2385   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2386 #else
2387   sendcount = A->rmap->rend/bs - A->rmap->rstart/bs;
2388   ierr = MPI_Allgatherv(lens+A->rmap->rstart/bs,sendcount,MPIU_INT,lens,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2389 #endif
2390   /* ---------------------------------------------------------------
2391      Create the sequential matrix of the same type as the local block diagonal
2392   */
2393   ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
2394   ierr = MatSetSizes(B,A->rmap->N/bs,A->cmap->N/bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2395   ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr);
2396   ierr = MatSeqAIJSetPreallocation(B,0,lens);CHKERRQ(ierr);
2397   b    = (Mat_SeqAIJ*)B->data;
2398 
2399   /*--------------------------------------------------------------------
2400     Copy my part of matrix column indices over
2401   */
2402   sendcount  = ad->nz + bd->nz;
2403   jsendbuf   = b->j + b->i[rstarts[rank]/bs];
2404   a_jsendbuf = ad->j;
2405   b_jsendbuf = bd->j;
2406   n          = A->rmap->rend/bs - A->rmap->rstart/bs;
2407   cnt        = 0;
2408   for (i=0; i<n; i++) {
2409 
2410     /* put in lower diagonal portion */
2411     m = bd->i[i+1] - bd->i[i];
2412     while (m > 0) {
2413       /* is it above diagonal (in bd (compressed) numbering) */
2414       if (garray[*b_jsendbuf] > A->rmap->rstart/bs + i) break;
2415       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2416       m--;
2417     }
2418 
2419     /* put in diagonal portion */
2420     for (j=ad->i[i]; j<ad->i[i+1]; j++) {
2421       jsendbuf[cnt++] = A->rmap->rstart/bs + *a_jsendbuf++;
2422     }
2423 
2424     /* put in upper diagonal portion */
2425     while (m-- > 0) {
2426       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2427     }
2428   }
2429   if (cnt != sendcount) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Corrupted PETSc matrix: nz given %D actual nz %D",sendcount,cnt);
2430 
2431   /*--------------------------------------------------------------------
2432     Gather all column indices to all processors
2433   */
2434   for (i=0; i<size; i++) {
2435     recvcounts[i] = 0;
2436     for (j=A->rmap->range[i]/bs; j<A->rmap->range[i+1]/bs; j++) {
2437       recvcounts[i] += lens[j];
2438     }
2439   }
2440   displs[0] = 0;
2441   for (i=1; i<size; i++) {
2442     displs[i] = displs[i-1] + recvcounts[i-1];
2443   }
2444 #if defined(PETSC_HAVE_MPI_IN_PLACE)
2445   ierr = MPI_Allgatherv(MPI_IN_PLACE,0,MPI_DATATYPE_NULL,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2446 #else
2447   ierr = MPI_Allgatherv(jsendbuf,sendcount,MPIU_INT,b->j,recvcounts,displs,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2448 #endif
2449   /*--------------------------------------------------------------------
2450     Assemble the matrix into usable form (note the numerical values are not yet set)
2451   */
2452   /* set the b->ilen (length of each row) values */
2453   ierr = PetscMemcpy(b->ilen,lens,(A->rmap->N/bs)*sizeof(PetscInt));CHKERRQ(ierr);
2454   /* set the b->i indices */
2455   b->i[0] = 0;
2456   for (i=1; i<=A->rmap->N/bs; i++) {
2457     b->i[i] = b->i[i-1] + lens[i-1];
2458   }
2459   ierr = PetscFree(lens);CHKERRQ(ierr);
2460   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2461   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2462   ierr = PetscFree(recvcounts);CHKERRQ(ierr);
2463 
2464   if (A->symmetric) {
2465     ierr = MatSetOption(B,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2466   } else if (A->hermitian) {
2467     ierr = MatSetOption(B,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
2468   } else if (A->structurally_symmetric) {
2469     ierr = MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
2470   }
2471   *newmat = B;
2472   PetscFunctionReturn(0);
2473 }
2474 
2475 #undef __FUNCT__
2476 #define __FUNCT__ "MatSOR_MPIBAIJ"
2477 PetscErrorCode MatSOR_MPIBAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx)
2478 {
2479   Mat_MPIBAIJ    *mat = (Mat_MPIBAIJ*)matin->data;
2480   PetscErrorCode ierr;
2481   Vec            bb1 = 0;
2482 
2483   PetscFunctionBegin;
2484   if (flag == SOR_APPLY_UPPER) {
2485     ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2486     PetscFunctionReturn(0);
2487   }
2488 
2489   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS) {
2490     ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr);
2491   }
2492 
2493   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2494     if (flag & SOR_ZERO_INITIAL_GUESS) {
2495       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2496       its--;
2497     }
2498 
2499     while (its--) {
2500       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2501       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2502 
2503       /* update rhs: bb1 = bb - B*x */
2504       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2505       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2506 
2507       /* local sweep */
2508       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2509     }
2510   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2511     if (flag & SOR_ZERO_INITIAL_GUESS) {
2512       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2513       its--;
2514     }
2515     while (its--) {
2516       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2517       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2518 
2519       /* update rhs: bb1 = bb - B*x */
2520       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2521       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2522 
2523       /* local sweep */
2524       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2525     }
2526   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2527     if (flag & SOR_ZERO_INITIAL_GUESS) {
2528       ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr);
2529       its--;
2530     }
2531     while (its--) {
2532       ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2533       ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
2534 
2535       /* update rhs: bb1 = bb - B*x */
2536       ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr);
2537       ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr);
2538 
2539       /* local sweep */
2540       ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr);
2541     }
2542   } else SETERRQ(PetscObjectComm((PetscObject)matin),PETSC_ERR_SUP,"Requested parallel SOR variant is not supported");
2543 
2544   ierr = VecDestroy(&bb1);CHKERRQ(ierr);
2545   PetscFunctionReturn(0);
2546 }
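
/*
   Illustrative usage sketch: MatSOR() is normally reached through the SOR
   preconditioner; the processor-local sweeps implemented above correspond
   to the options "-pc_type sor -pc_sor_local_symmetric", or in code (ksp is
   an assumed KSP object):

     PC pc;
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCSOR);CHKERRQ(ierr);
     ierr = PCSORSetSymmetric(pc,SOR_LOCAL_SYMMETRIC_SWEEP);CHKERRQ(ierr);
*/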
2547 
2548 #undef __FUNCT__
2549 #define __FUNCT__ "MatGetColumnNorms_MPIBAIJ"
2550 PetscErrorCode MatGetColumnNorms_MPIBAIJ(Mat A,NormType type,PetscReal *norms)
2551 {
2552   PetscErrorCode ierr;
2553   Mat_MPIBAIJ    *aij = (Mat_MPIBAIJ*)A->data;
2554   PetscInt       N,i,*garray = aij->garray;
2555   PetscInt       ib,jb,bs = A->rmap->bs;
2556   Mat_SeqBAIJ    *a_aij = (Mat_SeqBAIJ*) aij->A->data;
2557   MatScalar      *a_val = a_aij->a;
2558   Mat_SeqBAIJ    *b_aij = (Mat_SeqBAIJ*) aij->B->data;
2559   MatScalar      *b_val = b_aij->a;
2560   PetscReal      *work;
2561 
2562   PetscFunctionBegin;
2563   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
2564   ierr = PetscCalloc1(N,&work);CHKERRQ(ierr);
2565   if (type == NORM_2) {
2566     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2567       for (jb=0; jb<bs; jb++) {
2568         for (ib=0; ib<bs; ib++) {
2569           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2570           a_val++;
2571         }
2572       }
2573     }
2574     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2575       for (jb=0; jb<bs; jb++) {
2576         for (ib=0; ib<bs; ib++) {
2577           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2578           b_val++;
2579         }
2580       }
2581     }
2582   } else if (type == NORM_1) {
2583     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2584       for (jb=0; jb<bs; jb++) {
2585         for (ib=0; ib<bs; ib++) {
2586           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2587           a_val++;
2588         }
2589       }
2590     }
2591     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2592       for (jb=0; jb<bs; jb++) {
2593        for (ib=0; ib<bs; ib++) {
2594           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2595           b_val++;
2596         }
2597       }
2598     }
2599   } else if (type == NORM_INFINITY) {
2600     for (i=a_aij->i[0]; i<a_aij->i[aij->A->rmap->n/bs]; i++) {
2601       for (jb=0; jb<bs; jb++) {
2602         for (ib=0; ib<bs; ib++) {
2603           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2604           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2605           a_val++;
2606         }
2607       }
2608     }
2609     for (i=b_aij->i[0]; i<b_aij->i[aij->B->rmap->n/bs]; i++) {
2610       for (jb=0; jb<bs; jb++) {
2611         for (ib=0; ib<bs; ib++) {
2612           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2613           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2614           b_val++;
2615         }
2616       }
2617     }
2618   } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Unknown NormType");
2619   if (type == NORM_INFINITY) {
2620     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2621   } else {
2622     ierr = MPIU_Allreduce(work,norms,N,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
2623   }
2624   ierr = PetscFree(work);CHKERRQ(ierr);
2625   if (type == NORM_2) {
2626     for (i=0; i<N; i++) norms[i] = PetscSqrtReal(norms[i]);
2627   }
2628   PetscFunctionReturn(0);
2629 }
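
/*
   Illustrative usage sketch: compute the 2-norm of every global column; the
   norms array must have length equal to the global number of columns (A is
   an assumed assembled matrix):

     PetscInt  N;
     PetscReal *norms;
     ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
     ierr = PetscMalloc1(N,&norms);CHKERRQ(ierr);
     ierr = MatGetColumnNorms(A,NORM_2,norms);CHKERRQ(ierr);
     ierr = PetscFree(norms);CHKERRQ(ierr);
*/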
2630 
2631 #undef __FUNCT__
2632 #define __FUNCT__ "MatInvertBlockDiagonal_MPIBAIJ"
2633 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A,const PetscScalar **values)
2634 {
2635   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*) A->data;
2636   PetscErrorCode ierr;
2637 
2638   PetscFunctionBegin;
2639   ierr = MatInvertBlockDiagonal(a->A,values);CHKERRQ(ierr);
2640   A->factorerrortype             = a->A->factorerrortype;
2641   A->factorerror_zeropivot_value = a->A->factorerror_zeropivot_value;
2642   A->factorerror_zeropivot_row   = a->A->factorerror_zeropivot_row;
2643   PetscFunctionReturn(0);
2644 }
2645 
2646 #undef __FUNCT__
2647 #define __FUNCT__ "MatShift_MPIBAIJ"
2648 PetscErrorCode MatShift_MPIBAIJ(Mat Y,PetscScalar a)
2649 {
2650   PetscErrorCode ierr;
2651   Mat_MPIBAIJ    *maij = (Mat_MPIBAIJ*)Y->data;
2652   Mat_SeqBAIJ    *aij = (Mat_SeqBAIJ*)maij->A->data;
2653 
2654   PetscFunctionBegin;
2655   if (!Y->preallocated) {
2656     ierr = MatMPIBAIJSetPreallocation(Y,Y->rmap->bs,1,NULL,0,NULL);CHKERRQ(ierr);
2657   } else if (!aij->nz) {
2658     PetscInt nonew = aij->nonew;
2659     ierr = MatSeqBAIJSetPreallocation(maij->A,Y->rmap->bs,1,NULL);CHKERRQ(ierr);
2660     aij->nonew = nonew;
2661   }
2662   ierr = MatShift_Basic(Y,a);CHKERRQ(ierr);
2663   PetscFunctionReturn(0);
2664 }
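
/*
   Illustrative usage sketch: Y <- Y + a*I; the preallocation fix-ups above
   guarantee the diagonal exists even for an empty matrix (A is an assumed
   square matrix):

     ierr = MatShift(A,1.0);CHKERRQ(ierr);
*/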
2665 
2666 #undef __FUNCT__
2667 #define __FUNCT__ "MatMissingDiagonal_MPIBAIJ"
2668 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A,PetscBool  *missing,PetscInt *d)
2669 {
2670   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
2671   PetscErrorCode ierr;
2672 
2673   PetscFunctionBegin;
2674   if (A->rmap->n != A->cmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only works for square matrices");
2675   ierr = MatMissingDiagonal(a->A,missing,d);CHKERRQ(ierr);
2676   if (d) {
2677     PetscInt rstart;
2678     ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
2679     *d += rstart/A->rmap->bs;
2680 
2681   }
2682   PetscFunctionReturn(0);
2683 }
2684 
2685 #undef __FUNCT__
2686 #define __FUNCT__ "MatGetDiagonalBlock_MPIBAIJ"
2687 PetscErrorCode  MatGetDiagonalBlock_MPIBAIJ(Mat A,Mat *a)
2688 {
2689   PetscFunctionBegin;
2690   *a = ((Mat_MPIBAIJ*)A->data)->A;
2691   PetscFunctionReturn(0);
2692 }
2693 
2694 /* -------------------------------------------------------------------*/
2695 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2696                                        MatGetRow_MPIBAIJ,
2697                                        MatRestoreRow_MPIBAIJ,
2698                                        MatMult_MPIBAIJ,
2699                                 /* 4*/ MatMultAdd_MPIBAIJ,
2700                                        MatMultTranspose_MPIBAIJ,
2701                                        MatMultTransposeAdd_MPIBAIJ,
2702                                        0,
2703                                        0,
2704                                        0,
2705                                 /*10*/ 0,
2706                                        0,
2707                                        0,
2708                                        MatSOR_MPIBAIJ,
2709                                        MatTranspose_MPIBAIJ,
2710                                 /*15*/ MatGetInfo_MPIBAIJ,
2711                                        MatEqual_MPIBAIJ,
2712                                        MatGetDiagonal_MPIBAIJ,
2713                                        MatDiagonalScale_MPIBAIJ,
2714                                        MatNorm_MPIBAIJ,
2715                                 /*20*/ MatAssemblyBegin_MPIBAIJ,
2716                                        MatAssemblyEnd_MPIBAIJ,
2717                                        MatSetOption_MPIBAIJ,
2718                                        MatZeroEntries_MPIBAIJ,
2719                                 /*24*/ MatZeroRows_MPIBAIJ,
2720                                        0,
2721                                        0,
2722                                        0,
2723                                        0,
2724                                 /*29*/ MatSetUp_MPIBAIJ,
2725                                        0,
2726                                        0,
2727                                        MatGetDiagonalBlock_MPIBAIJ,
2728                                        0,
2729                                 /*34*/ MatDuplicate_MPIBAIJ,
2730                                        0,
2731                                        0,
2732                                        0,
2733                                        0,
2734                                 /*39*/ MatAXPY_MPIBAIJ,
2735                                        MatGetSubMatrices_MPIBAIJ,
2736                                        MatIncreaseOverlap_MPIBAIJ,
2737                                        MatGetValues_MPIBAIJ,
2738                                        MatCopy_MPIBAIJ,
2739                                 /*44*/ 0,
2740                                        MatScale_MPIBAIJ,
2741                                        MatShift_MPIBAIJ,
2742                                        0,
2743                                        MatZeroRowsColumns_MPIBAIJ,
2744                                 /*49*/ 0,
2745                                        0,
2746                                        0,
2747                                        0,
2748                                        0,
2749                                 /*54*/ MatFDColoringCreate_MPIXAIJ,
2750                                        0,
2751                                        MatSetUnfactored_MPIBAIJ,
2752                                        MatPermute_MPIBAIJ,
2753                                        MatSetValuesBlocked_MPIBAIJ,
2754                                 /*59*/ MatGetSubMatrix_MPIBAIJ,
2755                                        MatDestroy_MPIBAIJ,
2756                                        MatView_MPIBAIJ,
2757                                        0,
2758                                        0,
2759                                 /*64*/ 0,
2760                                        0,
2761                                        0,
2762                                        0,
2763                                        0,
2764                                 /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2765                                        0,
2766                                        0,
2767                                        0,
2768                                        0,
2769                                 /*74*/ 0,
2770                                        MatFDColoringApply_BAIJ,
2771                                        0,
2772                                        0,
2773                                        0,
2774                                 /*79*/ 0,
2775                                        0,
2776                                        0,
2777                                        0,
2778                                        MatLoad_MPIBAIJ,
2779                                 /*84*/ 0,
2780                                        0,
2781                                        0,
2782                                        0,
2783                                        0,
2784                                 /*89*/ 0,
2785                                        0,
2786                                        0,
2787                                        0,
2788                                        0,
2789                                 /*94*/ 0,
2790                                        0,
2791                                        0,
2792                                        0,
2793                                        0,
2794                                 /*99*/ 0,
2795                                        0,
2796                                        0,
2797                                        0,
2798                                        0,
2799                                 /*104*/0,
2800                                        MatRealPart_MPIBAIJ,
2801                                        MatImaginaryPart_MPIBAIJ,
2802                                        0,
2803                                        0,
2804                                 /*109*/0,
2805                                        0,
2806                                        0,
2807                                        0,
2808                                        MatMissingDiagonal_MPIBAIJ,
2809                                 /*114*/MatGetSeqNonzeroStructure_MPIBAIJ,
2810                                        0,
2811                                        MatGetGhosts_MPIBAIJ,
2812                                        0,
2813                                        0,
2814                                 /*119*/0,
2815                                        0,
2816                                        0,
2817                                        0,
2818                                        MatGetMultiProcBlock_MPIBAIJ,
2819                                 /*124*/0,
2820                                        MatGetColumnNorms_MPIBAIJ,
2821                                        MatInvertBlockDiagonal_MPIBAIJ,
2822                                        0,
2823                                        0,
2824                                /*129*/ 0,
2825                                        0,
2826                                        0,
2827                                        0,
2828                                        0,
2829                                /*134*/ 0,
2830                                        0,
2831                                        0,
2832                                        0,
2833                                        0,
2834                                /*139*/ 0,
2835                                        0,
2836                                        0,
2837                                        MatFDColoringSetUp_MPIXAIJ,
2838                                        0,
2839                                 /*144*/MatCreateMPIMatConcatenateSeqMat_MPIBAIJ
2840 };
2841 
2842 
2843 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType,MatReuse,Mat*);
2844 
2845 #undef __FUNCT__
2846 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR_MPIBAIJ"
2847 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B,PetscInt bs,const PetscInt ii[],const PetscInt jj[],const PetscScalar V[])
2848 {
2849   PetscInt       m,rstart,cstart,cend;
2850   PetscInt       i,j,d,nz,nz_max=0,*d_nnz=0,*o_nnz=0;
2851   const PetscInt *JJ    =0;
2852   PetscScalar    *values=0;
2853   PetscBool      roworiented = ((Mat_MPIBAIJ*)B->data)->roworiented;
2854   PetscErrorCode ierr;
2855 
2856   PetscFunctionBegin;
2857   ierr   = PetscLayoutSetBlockSize(B->rmap,bs);CHKERRQ(ierr);
2858   ierr   = PetscLayoutSetBlockSize(B->cmap,bs);CHKERRQ(ierr);
2859   ierr   = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2860   ierr   = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2861   ierr   = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2862   m      = B->rmap->n/bs;
2863   rstart = B->rmap->rstart/bs;
2864   cstart = B->cmap->rstart/bs;
2865   cend   = B->cmap->rend/bs;
2866 
2867   if (ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"ii[0] must be 0 but it is %D",ii[0]);
2868   ierr = PetscMalloc2(m,&d_nnz,m,&o_nnz);CHKERRQ(ierr);
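  /* split each block row's count between the diagonal part and the off-diagonal
     part; jj is sorted within each row, so the columns in [cstart,cend) form one
     contiguous run */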
2869   for (i=0; i<m; i++) {
2870     nz = ii[i+1] - ii[i];
2871     if (nz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative number of columns %D",i,nz);
2872     nz_max = PetscMax(nz_max,nz);
2873     JJ     = jj + ii[i];
2874     for (j=0; j<nz; j++) {
2875       if (*JJ >= cstart) break;
2876       JJ++;
2877     }
2878     d = 0;
2879     for (; j<nz; j++) {
2880       if (*JJ++ >= cend) break;
2881       d++;
2882     }
2883     d_nnz[i] = d;
2884     o_nnz[i] = nz - d;
2885   }
2886   ierr = MatMPIBAIJSetPreallocation(B,bs,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
2887   ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);
2888 
2889   values = (PetscScalar*)V;
2890   if (!values) {
2891     ierr = PetscMalloc1(bs*bs*nz_max,&values);CHKERRQ(ierr);
2892     ierr = PetscMemzero(values,bs*bs*nz_max*sizeof(PetscScalar));CHKERRQ(ierr);
2893   }
2894   for (i=0; i<m; i++) {
2895     PetscInt          row    = i + rstart;
2896     PetscInt          ncols  = ii[i+1] - ii[i];
2897     const PetscInt    *icols = jj + ii[i];
2898     if (!roworiented) {         /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2899       const PetscScalar *svals = values + (V ? (bs*bs*ii[i]) : 0);
2900       ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,ncols,icols,svals,INSERT_VALUES);CHKERRQ(ierr);
2901     } else {                    /* block ordering does not match so we can only insert one block at a time. */
2902       PetscInt j;
2903       for (j=0; j<ncols; j++) {
2904         const PetscScalar *svals = values + (V ? (bs*bs*(ii[i]+j)) : 0);
2905         ierr = MatSetValuesBlocked_MPIBAIJ(B,1,&row,1,&icols[j],svals,INSERT_VALUES);CHKERRQ(ierr);
2906       }
2907     }
2908   }
2909 
2910   if (!V) { ierr = PetscFree(values);CHKERRQ(ierr); }
2911   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2912   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2913   ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
2914   PetscFunctionReturn(0);
2915 }
2916 
2917 #undef __FUNCT__
2918 #define __FUNCT__ "MatMPIBAIJSetPreallocationCSR"
2919 /*@C
2920    MatMPIBAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in BAIJ format
2921    (block compressed row).
2922 
2923    Collective on MPI_Comm
2924 
2925    Input Parameters:
2926 +  B - the matrix
2927 .  bs - the block size
2928 .  i - the indices into j for the start of each local row (starts with zero)
2929 .  j - the column indices for each local row (starts with zero); these must be sorted for each row
2930 -  v - optional values in the matrix
2931 
2932    Level: developer
2933 
2934    Notes: The order of the entries in values is specified by the MatOption MAT_ROW_ORIENTED.  For example, C programs
2935    may want to use the default MAT_ROW_ORIENTED=PETSC_TRUE and use an array v[nnz][bs][bs] where the second index is
2936    over rows within a block and the last index is over columns within a block.  Fortran programs will likely set
2937    MAT_ROW_ORIENTED=PETSC_FALSE and use a Fortran array v(bs,bs,nnz) in which the first index is over rows within a
2938    block and the second index is over columns within a block.
2939 
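   A minimal usage sketch (hypothetical; the index arrays below assume the global
   matrix has at least three block columns, and v is passed as NULL so only the
   nonzero structure is set):
.vb
   Mat      B;
   PetscInt ii[] = {0,2,4};      /* 2 local block rows               */
   PetscInt jj[] = {0,1,1,2};    /* sorted block columns, row by row */

   MatCreate(comm,&B);
   MatSetSizes(B,4,4,PETSC_DETERMINE,PETSC_DETERMINE);  /* 4 local rows = 2 block rows at bs = 2 */
   MatSetType(B,MATMPIBAIJ);
   MatMPIBAIJSetPreallocationCSR(B,2,ii,jj,NULL);
.ve
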
2940 .keywords: matrix, block, aij, compressed row, sparse, parallel
2941 
2942 .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatCreateAIJ(), MATMPIAIJ, MatCreateMPIBAIJWithArrays(), MATMPIBAIJ
2943 @*/
2944 PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat B,PetscInt bs,const PetscInt i[],const PetscInt j[], const PetscScalar v[])
2945 {
2946   PetscErrorCode ierr;
2947 
2948   PetscFunctionBegin;
2949   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
2950   PetscValidType(B,1);
2951   PetscValidLogicalCollectiveInt(B,bs,2);
2952   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocationCSR_C",(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,bs,i,j,v));CHKERRQ(ierr);
2953   PetscFunctionReturn(0);
2954 }
2955 
2956 #undef __FUNCT__
2957 #define __FUNCT__ "MatMPIBAIJSetPreallocation_MPIBAIJ"
2958 PetscErrorCode  MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt *d_nnz,PetscInt o_nz,const PetscInt *o_nnz)
2959 {
2960   Mat_MPIBAIJ    *b;
2961   PetscErrorCode ierr;
2962   PetscInt       i;
2963 
2964   PetscFunctionBegin;
2965   ierr = MatSetBlockSize(B,PetscAbs(bs));CHKERRQ(ierr);
2966   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
2967   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
2968   ierr = PetscLayoutGetBlockSize(B->rmap,&bs);CHKERRQ(ierr);
2969 
2970   if (d_nnz) {
2971     for (i=0; i<B->rmap->n/bs; i++) {
2972       if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than -1: local row %D value %D",i,d_nnz[i]);
2973     }
2974   }
2975   if (o_nnz) {
2976     for (i=0; i<B->rmap->n/bs; i++) {
2977       if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than -1: local row %D value %D",i,o_nnz[i]);
2978     }
2979   }
2980 
2981   b      = (Mat_MPIBAIJ*)B->data;
2982   b->bs2 = bs*bs;
2983   b->mbs = B->rmap->n/bs;
2984   b->nbs = B->cmap->n/bs;
2985   b->Mbs = B->rmap->N/bs;
2986   b->Nbs = B->cmap->N/bs;
2987 
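  /* convert the point-index ownership ranges into block-index ranges */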
2988   for (i=0; i<=b->size; i++) {
2989     b->rangebs[i] = B->rmap->range[i]/bs;
2990   }
2991   b->rstartbs = B->rmap->rstart/bs;
2992   b->rendbs   = B->rmap->rend/bs;
2993   b->cstartbs = B->cmap->rstart/bs;
2994   b->cendbs   = B->cmap->rend/bs;
2995 
2996 #if defined(PETSC_USE_CTABLE)
2997   ierr = PetscTableDestroy(&b->colmap);CHKERRQ(ierr);
2998 #else
2999   ierr = PetscFree(b->colmap);CHKERRQ(ierr);
3000 #endif
3001   ierr = PetscFree(b->garray);CHKERRQ(ierr);
3002   ierr = VecDestroy(&b->lvec);CHKERRQ(ierr);
3003   ierr = VecScatterDestroy(&b->Mvctx);CHKERRQ(ierr);
3004 
3005   /* Because the off-diagonal matrix B may have been resized we simply destroy it and create a new one each time */
3006   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3007   ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
3008   ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
3009   ierr = MatSetType(b->B,MATSEQBAIJ);CHKERRQ(ierr);
3010   ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->B);CHKERRQ(ierr);
3011 
3012   if (!B->preallocated) {
3013     ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
3014     ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
3015     ierr = MatSetType(b->A,MATSEQBAIJ);CHKERRQ(ierr);
3016     ierr = PetscLogObjectParent((PetscObject)B,(PetscObject)b->A);CHKERRQ(ierr);
3017     ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),bs,&B->bstash);CHKERRQ(ierr);
3018   }
3019 
3020   ierr = MatSeqBAIJSetPreallocation(b->A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3021   ierr = MatSeqBAIJSetPreallocation(b->B,bs,o_nz,o_nnz);CHKERRQ(ierr);
3022   B->preallocated  = PETSC_TRUE;
3023   B->was_assembled = PETSC_FALSE;
3024   B->assembled     = PETSC_FALSE;
3025   PetscFunctionReturn(0);
3026 }
3027 
3028 extern PetscErrorCode  MatDiagonalScaleLocal_MPIBAIJ(Mat,Vec);
3029 extern PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat,PetscReal);
3030 
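/*
   Conversion routines such as the one below are composed on the matrix in
   MatCreate_MPIBAIJ() and are normally reached through MatConvert(), e.g.
   MatConvert(B,MATMPIADJ,MAT_INITIAL_MATRIX,&adj).
*/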
3031 #undef __FUNCT__
3032 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAdj"
3033 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype,MatReuse reuse,Mat *adj)
3034 {
3035   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ*)B->data;
3036   PetscErrorCode ierr;
3037   Mat_SeqBAIJ    *d  = (Mat_SeqBAIJ*) b->A->data,*o = (Mat_SeqBAIJ*) b->B->data;
3038   PetscInt       M   = B->rmap->n/B->rmap->bs,i,*ii,*jj,cnt,j,k,rstart = B->rmap->rstart/B->rmap->bs;
3039   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
3040 
3041   PetscFunctionBegin;
3042   ierr  = PetscMalloc1(M+1,&ii);CHKERRQ(ierr);
3043   ii[0] = 0;
3044   for (i=0; i<M; i++) {
3045     if ((id[i+1] - id[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,id[i],id[i+1]);
3046     if ((io[i+1] - io[i]) < 0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Indices wrong %D %D %D",i,io[i],io[i+1]);
3047     ii[i+1] = ii[i] + id[i+1] - id[i] + io[i+1] - io[i];
3048     /* remove one from the count if the row has a diagonal entry */
3049     for (j=id[i]; j<id[i+1]; j++) {
3050       if (jd[j] == i) {ii[i+1]--;break;}
3051     }
3052   }
3053   ierr = PetscMalloc1(ii[M],&jj);CHKERRQ(ierr);
3054   cnt  = 0;
3055   for (i=0; i<M; i++) {
3056     for (j=io[i]; j<io[i+1]; j++) {
3057       if (garray[jo[j]] > rstart) break;
3058       jj[cnt++] = garray[jo[j]];
3059     }
3060     for (k=id[i]; k<id[i+1]; k++) {
3061       if (jd[k] != i) {
3062         jj[cnt++] = rstart + jd[k];
3063       }
3064     }
3065     for (; j<io[i+1]; j++) {
3066       jj[cnt++] = garray[jo[j]];
3067     }
3068   }
3069   ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)B),M,B->cmap->N/B->rmap->bs,ii,jj,NULL,adj);CHKERRQ(ierr);
3070   PetscFunctionReturn(0);
3071 }
3072 
3073 #include <../src/mat/impls/aij/mpi/mpiaij.h>
3074 
3075 PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat,MatType,MatReuse,Mat*);
3076 
3077 #undef __FUNCT__
3078 #define __FUNCT__ "MatConvert_MPIBAIJ_MPIAIJ"
3079 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A,MatType newtype,MatReuse reuse,Mat *newmat)
3080 {
3081   PetscErrorCode ierr;
3082   Mat_MPIBAIJ    *a = (Mat_MPIBAIJ*)A->data;
3083   Mat            B;
3084   Mat_MPIAIJ     *b;
3085 
3086   PetscFunctionBegin;
3087   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix must be assembled");
3088 
3089   ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
3090   ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr);
3091   ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
3092   ierr = MatSetBlockSizes(B,A->rmap->bs,A->cmap->bs);CHKERRQ(ierr);
3093   ierr = MatSeqAIJSetPreallocation(B,0,NULL);CHKERRQ(ierr);
3094   ierr = MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);CHKERRQ(ierr);
3095   b    = (Mat_MPIAIJ*) B->data;
3096 
3097   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
3098   ierr = MatDestroy(&b->B);CHKERRQ(ierr);
3099   ierr = MatDisAssemble_MPIBAIJ(A);CHKERRQ(ierr);
3100   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A);CHKERRQ(ierr);
3101   ierr = MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B);CHKERRQ(ierr);
3102   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3103   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3104   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3105   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3106   if (reuse == MAT_INPLACE_MATRIX) {
3107     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
3108   } else {
3109    *newmat = B;
3110   }
3111   PetscFunctionReturn(0);
3112 }
3113 
3114 /*MC
3115    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
3116 
3117    Options Database Keys:
3118 + -mat_type mpibaij - sets the matrix type to "mpibaij" during a call to MatSetFromOptions()
3119 . -mat_block_size <bs> - set the blocksize used to store the matrix
3120 - -mat_use_hash_table <fact> - use a hash table during matrix assembly, with its size scaled by <fact>
3121 
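   For example, the type can be selected at run time (program name hypothetical):
.vb
   ./myapp -mat_type mpibaij -mat_block_size 3
.ve
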
3122   Level: beginner
3123 
3124 .seealso: MatCreateBAIJ(), MATSEQBAIJ, MATBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3125 M*/
3126 
3127 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat,MatType,MatReuse,Mat*);
3128 
3129 #undef __FUNCT__
3130 #define __FUNCT__ "MatCreate_MPIBAIJ"
3131 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
3132 {
3133   Mat_MPIBAIJ    *b;
3134   PetscErrorCode ierr;
3135   PetscBool      flg = PETSC_FALSE;
3136 
3137   PetscFunctionBegin;
3138   ierr    = PetscNewLog(B,&b);CHKERRQ(ierr);
3139   B->data = (void*)b;
3140 
3141   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
3142   B->assembled = PETSC_FALSE;
3143 
3144   B->insertmode = NOT_SET_VALUES;
3145   ierr          = MPI_Comm_rank(PetscObjectComm((PetscObject)B),&b->rank);CHKERRQ(ierr);
3146   ierr          = MPI_Comm_size(PetscObjectComm((PetscObject)B),&b->size);CHKERRQ(ierr);
3147 
3148   /* build local table of row and column ownerships */
3149   ierr = PetscMalloc1(b->size+1,&b->rangebs);CHKERRQ(ierr);
3150 
3151   /* build cache for off array entries formed */
3152   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)B),1,&B->stash);CHKERRQ(ierr);
3153 
3154   b->donotstash  = PETSC_FALSE;
3155   b->colmap      = NULL;
3156   b->garray      = NULL;
3157   b->roworiented = PETSC_TRUE;
3158 
3159   /* stuff used in block assembly */
3160   b->barray = 0;
3161 
3162   /* stuff used for matrix vector multiply */
3163   b->lvec  = 0;
3164   b->Mvctx = 0;
3165 
3166   /* stuff for MatGetRow() */
3167   b->rowindices   = 0;
3168   b->rowvalues    = 0;
3169   b->getrowactive = PETSC_FALSE;
3170 
3171   /* hash table stuff */
3172   b->ht           = 0;
3173   b->hd           = 0;
3174   b->ht_size      = 0;
3175   b->ht_flag      = PETSC_FALSE;
3176   b->ht_fact      = 0;
3177   b->ht_total_ct  = 0;
3178   b->ht_insert_ct = 0;
3179 
3180   /* stuff for MatGetSubMatrices_MPIBAIJ_local() */
3181   b->ijonly = PETSC_FALSE;
3182 
3183 
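  /* compose the type-specific implementations under the names that the generic
     interfaces (MatConvert(), MatMPIBAIJSetPreallocation(), ...) look up via
     PetscTryMethod()/PetscUseMethod() */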
3184   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiadj_C",MatConvert_MPIBAIJ_MPIAdj);CHKERRQ(ierr);
3185   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpiaij_C",MatConvert_MPIBAIJ_MPIAIJ);CHKERRQ(ierr);
3186   ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_mpisbaij_C",MatConvert_MPIBAIJ_MPISBAIJ);CHKERRQ(ierr);
3187   ierr = PetscObjectComposeFunction((PetscObject)B,"MatStoreValues_C",MatStoreValues_MPIBAIJ);CHKERRQ(ierr);
3188   ierr = PetscObjectComposeFunction((PetscObject)B,"MatRetrieveValues_C",MatRetrieveValues_MPIBAIJ);CHKERRQ(ierr);
3189   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",MatMPIBAIJSetPreallocation_MPIBAIJ);CHKERRQ(ierr);
3190   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocationCSR_C",MatMPIBAIJSetPreallocationCSR_MPIBAIJ);CHKERRQ(ierr);
3191   ierr = PetscObjectComposeFunction((PetscObject)B,"MatDiagonalScaleLocal_C",MatDiagonalScaleLocal_MPIBAIJ);CHKERRQ(ierr);
3192   ierr = PetscObjectComposeFunction((PetscObject)B,"MatSetHashTableFactor_C",MatSetHashTableFactor_MPIBAIJ);CHKERRQ(ierr);
3193   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIBAIJ);CHKERRQ(ierr);
3194 
3195   ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)B),NULL,"Options for loading MPIBAIJ matrix 1","Mat");CHKERRQ(ierr);
3196   ierr = PetscOptionsName("-mat_use_hash_table","Use hash table to save time in constructing matrix","MatSetOption",&flg);CHKERRQ(ierr);
3197   if (flg) {
3198     PetscReal fact = 1.39;
3199     ierr = MatSetOption(B,MAT_USE_HASH_TABLE,PETSC_TRUE);CHKERRQ(ierr);
3200     ierr = PetscOptionsReal("-mat_use_hash_table","Use hash table factor","MatMPIBAIJSetHashTableFactor",fact,&fact,NULL);CHKERRQ(ierr);
3201     if (fact <= 1.0) fact = 1.39;
3202     ierr = MatMPIBAIJSetHashTableFactor(B,fact);CHKERRQ(ierr);
3203     ierr = PetscInfo1(B,"Hash table Factor used %5.2f\n",fact);CHKERRQ(ierr);
3204   }
3205   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3206   PetscFunctionReturn(0);
3207 }
3208 
3209 /*MC
3210    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
3211 
3212    This matrix type is identical to MATSEQBAIJ when constructed with a single process communicator,
3213    and MATMPIBAIJ otherwise.
3214 
3215    Options Database Keys:
3216 . -mat_type baij - sets the matrix type to "baij" during a call to MatSetFromOptions()
3217 
3218   Level: beginner
3219 
3220 .seealso: MatCreateBAIJ(),MATSEQBAIJ,MATMPIBAIJ, MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3221 M*/
3222 
3223 #undef __FUNCT__
3224 #define __FUNCT__ "MatMPIBAIJSetPreallocation"
3225 /*@C
3226    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in block AIJ format
3227    (block compressed row).  For good matrix assembly performance
3228    the user should preallocate the matrix storage by setting the parameters
3229    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3230    performance can be increased by more than a factor of 50.
3231 
3232    Collective on Mat
3233 
3234    Input Parameters:
3235 +  B - the matrix
3236 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3237           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3238 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
3239            submatrix  (same for all local rows)
3240 .  d_nnz - array containing the number of block nonzeros in the various block rows
3241            of the diagonal portion of the local submatrix (possibly different for each block
3242            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry and
3243            set it even if it is zero.
3244 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
3245            submatrix (same for all local rows).
3246 -  o_nnz - array containing the number of nonzeros in the various block rows of the
3247            off-diagonal portion of the local submatrix (possibly different for
3248            each block row) or NULL.
3249 
3250    If the *_nnz parameter is given then the *_nz parameter is ignored
3251 
3252    Options Database Keys:
3253 +   -mat_block_size - size of the blocks to use
3254 -   -mat_use_hash_table <fact> - use a hash table during matrix assembly, with its size scaled by <fact>
3255 
3256    Notes:
3257    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3258    then it must be used on all processors that share the object for that argument.
3259 
3260    Storage Information:
3261    For a square global matrix we define each processor's diagonal portion
3262    to be its local rows and the corresponding columns (a square submatrix);
3263    each processor's off-diagonal portion encompasses the remainder of the
3264    local matrix (a rectangular submatrix).
3265 
3266    The user can specify preallocated storage for the diagonal part of
3267    the local submatrix with either d_nz or d_nnz (not both).  Set
3268    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3269    memory allocation.  Likewise, specify preallocated storage for the
3270    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3271 
3272    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3273    the figure below we depict these three local rows and all columns (0-11).
3274 
3275 .vb
3276            0 1 2 3 4 5 6 7 8 9 10 11
3277           --------------------------
3278    row 3  |o o o d d d o o o o  o  o
3279    row 4  |o o o d d d o o o o  o  o
3280    row 5  |o o o d d d o o o o  o  o
3281           --------------------------
3282 .ve
3283 
3284    Thus, any entries in the d locations are stored in the d (diagonal)
3285    submatrix, and any entries in the o locations are stored in the
3286    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3287    stored simply in the MATSEQBAIJ format for compressed row storage.
3288 
3289    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3290    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3291    In general, for PDE problems in which most nonzeros are near the diagonal,
3292    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3293    or you will get TERRIBLE performance; see the users' manual chapter on
3294    matrices.
3295 
3296    for example the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
3297    for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
3298    You can also run with the option -info and look for messages with the string
3299    malloc in them to see if additional memory allocation was needed.
3300 
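   For example, a minimal sketch (communicator and sizes hypothetical) that
   reserves room for 5 diagonal-part and 2 off-diagonal-part block nonzeros in
   every block row:
.vb
   MatCreate(comm,&B);
   MatSetSizes(B,m,n,M,N);
   MatSetType(B,MATMPIBAIJ);
   MatMPIBAIJSetPreallocation(B,bs,5,NULL,2,NULL);
.ve
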
3301    Level: intermediate
3302 
3303 .keywords: matrix, block, aij, compressed row, sparse, parallel
3304 
3305 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocationCSR(), PetscSplitOwnership()
3306 @*/
3307 PetscErrorCode  MatMPIBAIJSetPreallocation(Mat B,PetscInt bs,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
3308 {
3309   PetscErrorCode ierr;
3310 
3311   PetscFunctionBegin;
3312   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
3313   PetscValidType(B,1);
3314   PetscValidLogicalCollectiveInt(B,bs,2);
3315   ierr = PetscTryMethod(B,"MatMPIBAIJSetPreallocation_C",(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,bs,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
3316   PetscFunctionReturn(0);
3317 }
3318 
3319 #undef __FUNCT__
3320 #define __FUNCT__ "MatCreateBAIJ"
3321 /*@C
3322    MatCreateBAIJ - Creates a sparse parallel matrix in block AIJ format
3323    (block compressed row).  For good matrix assembly performance
3324    the user should preallocate the matrix storage by setting the parameters
3325    d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
3326    performance can be increased by more than a factor of 50.
3327 
3328    Collective on MPI_Comm
3329 
3330    Input Parameters:
3331 +  comm - MPI communicator
3332 .  bs   - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
3333           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
3334 .  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
3335            This value should be the same as the local size used in creating the
3336            y vector for the matrix-vector product y = Ax.
3337 .  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
3338            This value should be the same as the local size used in creating the
3339            x vector for the matrix-vector product y = Ax.
3340 .  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
3341 .  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
3342 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3343            submatrix  (same for all local rows)
3344 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3345            of the diagonal portion of the local submatrix (possibly different for each block
3346            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3347            and set it even if it is zero.
3348 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3349            submatrix (same for all local rows).
3350 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3351            off-diagonal portion of the local submatrix (possibly different for
3352            each block row) or NULL.
3353 
3354    Output Parameter:
3355 .  A - the matrix
3356 
3357    Options Database Keys:
3358 +   -mat_block_size - size of the blocks to use
3359 -   -mat_use_hash_table <fact> - use a hash table during matrix assembly, with its size scaled by <fact>
3360 
3361    It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
3362    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3363    [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]
3364 
3365    Notes:
3366    If the *_nnz parameter is given then the *_nz parameter is ignored
3367 
3368    A nonzero block is any block that has 1 or more nonzeros in it
3369 
3370    The user MUST specify either the local or global matrix dimensions
3371    (possibly both).
3372 
3373    If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one processor
3374    then it must be used on all processors that share the object for that argument.
3375 
3376    Storage Information:
3377    For a square global matrix we define each processor's diagonal portion
3378    to be its local rows and the corresponding columns (a square submatrix);
3379    each processor's off-diagonal portion encompasses the remainder of the
3380    local matrix (a rectangular submatrix).
3381 
3382    The user can specify preallocated storage for the diagonal part of
3383    the local submatrix with either d_nz or d_nnz (not both).  Set
3384    d_nz=PETSC_DEFAULT and d_nnz=NULL for PETSc to control dynamic
3385    memory allocation.  Likewise, specify preallocated storage for the
3386    off-diagonal part of the local submatrix with o_nz or o_nnz (not both).
3387 
3388    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3389    the figure below we depict these three local rows and all columns (0-11).
3390 
3391 .vb
3392            0 1 2 3 4 5 6 7 8 9 10 11
3393           --------------------------
3394    row 3  |o o o d d d o o o o  o  o
3395    row 4  |o o o d d d o o o o  o  o
3396    row 5  |o o o d d d o o o o  o  o
3397           --------------------------
3398 .ve
3399 
3400    Thus, any entries in the d locations are stored in the d (diagonal)
3401    submatrix, and any entries in the o locations are stored in the
3402    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3403    stored simply in the MATSEQBAIJ format for compressed row storage.
3404 
3405    Now d_nz should indicate the number of block nonzeros per row in the d matrix,
3406    and o_nz should indicate the number of block nonzeros per row in the o matrix.
3407    In general, for PDE problems in which most nonzeros are near the diagonal,
3408    one expects d_nz >> o_nz.   For large problems you MUST preallocate memory
3409    or you will get TERRIBLE performance; see the users' manual chapter on
3410    matrices.
3411 
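   A minimal sketch (global dimensions M and N hypothetical; block size 2 with 5
   diagonal-part and 2 off-diagonal-part block nonzeros per block row):
.vb
   Mat A;
   MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,M,N,5,NULL,2,NULL,&A);
.ve
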
3412    Level: intermediate
3413 
3414 .keywords: matrix, block, aij, compressed row, sparse, parallel
3415 
3416 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatCreateBAIJ(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR()
3417 @*/
3418 PetscErrorCode  MatCreateBAIJ(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
3419 {
3420   PetscErrorCode ierr;
3421   PetscMPIInt    size;
3422 
3423   PetscFunctionBegin;
3424   ierr = MatCreate(comm,A);CHKERRQ(ierr);
3425   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
3426   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3427   if (size > 1) {
3428     ierr = MatSetType(*A,MATMPIBAIJ);CHKERRQ(ierr);
3429     ierr = MatMPIBAIJSetPreallocation(*A,bs,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
3430   } else {
3431     ierr = MatSetType(*A,MATSEQBAIJ);CHKERRQ(ierr);
3432     ierr = MatSeqBAIJSetPreallocation(*A,bs,d_nz,d_nnz);CHKERRQ(ierr);
3433   }
3434   PetscFunctionReturn(0);
3435 }
3436 
3437 #undef __FUNCT__
3438 #define __FUNCT__ "MatDuplicate_MPIBAIJ"
3439 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
3440 {
3441   Mat            mat;
3442   Mat_MPIBAIJ    *a,*oldmat = (Mat_MPIBAIJ*)matin->data;
3443   PetscErrorCode ierr;
3444   PetscInt       len=0;
3445 
3446   PetscFunctionBegin;
3447   *newmat = 0;
3448   ierr    = MatCreate(PetscObjectComm((PetscObject)matin),&mat);CHKERRQ(ierr);
3449   ierr    = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
3450   ierr    = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
3451   ierr    = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
3452 
3453   mat->factortype   = matin->factortype;
3454   mat->preallocated = PETSC_TRUE;
3455   mat->assembled    = PETSC_TRUE;
3456   mat->insertmode   = NOT_SET_VALUES;
3457 
3458   a             = (Mat_MPIBAIJ*)mat->data;
3459   mat->rmap->bs = matin->rmap->bs;
3460   a->bs2        = oldmat->bs2;
3461   a->mbs        = oldmat->mbs;
3462   a->nbs        = oldmat->nbs;
3463   a->Mbs        = oldmat->Mbs;
3464   a->Nbs        = oldmat->Nbs;
3465 
3466   ierr = PetscLayoutReference(matin->rmap,&mat->rmap);CHKERRQ(ierr);
3467   ierr = PetscLayoutReference(matin->cmap,&mat->cmap);CHKERRQ(ierr);
3468 
3469   a->size         = oldmat->size;
3470   a->rank         = oldmat->rank;
3471   a->donotstash   = oldmat->donotstash;
3472   a->roworiented  = oldmat->roworiented;
3473   a->rowindices   = 0;
3474   a->rowvalues    = 0;
3475   a->getrowactive = PETSC_FALSE;
3476   a->barray       = 0;
3477   a->rstartbs     = oldmat->rstartbs;
3478   a->rendbs       = oldmat->rendbs;
3479   a->cstartbs     = oldmat->cstartbs;
3480   a->cendbs       = oldmat->cendbs;
3481 
3482   /* hash table stuff */
3483   a->ht           = 0;
3484   a->hd           = 0;
3485   a->ht_size      = 0;
3486   a->ht_flag      = oldmat->ht_flag;
3487   a->ht_fact      = oldmat->ht_fact;
3488   a->ht_total_ct  = 0;
3489   a->ht_insert_ct = 0;
3490 
3491   ierr = PetscMemcpy(a->rangebs,oldmat->rangebs,(a->size+1)*sizeof(PetscInt));CHKERRQ(ierr);
3492   if (oldmat->colmap) {
3493 #if defined(PETSC_USE_CTABLE)
3494     ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
3495 #else
3496     ierr = PetscMalloc1(a->Nbs,&a->colmap);CHKERRQ(ierr);
3497     ierr = PetscLogObjectMemory((PetscObject)mat,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3498     ierr = PetscMemcpy(a->colmap,oldmat->colmap,(a->Nbs)*sizeof(PetscInt));CHKERRQ(ierr);
3499 #endif
3500   } else a->colmap = 0;
3501 
3502   if (oldmat->garray && (len = ((Mat_SeqBAIJ*)(oldmat->B->data))->nbs)) {
3503     ierr = PetscMalloc1(len,&a->garray);CHKERRQ(ierr);
3504     ierr = PetscLogObjectMemory((PetscObject)mat,len*sizeof(PetscInt));CHKERRQ(ierr);
3505     ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr);
3506   } else a->garray = 0;
3507 
3508   ierr = MatStashCreate_Private(PetscObjectComm((PetscObject)matin),matin->rmap->bs,&mat->bstash);CHKERRQ(ierr);
3509   ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
3510   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->lvec);CHKERRQ(ierr);
3511   ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
3512   ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->Mvctx);CHKERRQ(ierr);
3513 
3514   ierr    = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
3515   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->A);CHKERRQ(ierr);
3516   ierr    = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
3517   ierr    = PetscLogObjectParent((PetscObject)mat,(PetscObject)a->B);CHKERRQ(ierr);
3518   ierr    = PetscFunctionListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
3519   *newmat = mat;
3520   PetscFunctionReturn(0);
3521 }
3522 
3523 #undef __FUNCT__
3524 #define __FUNCT__ "MatLoad_MPIBAIJ"
3525 PetscErrorCode MatLoad_MPIBAIJ(Mat newmat,PetscViewer viewer)
3526 {
3527   PetscErrorCode ierr;
3528   int            fd;
3529   PetscInt       i,nz,j,rstart,rend;
3530   PetscScalar    *vals,*buf;
3531   MPI_Comm       comm;
3532   MPI_Status     status;
3533   PetscMPIInt    rank,size,maxnz;
3534   PetscInt       header[4],*rowlengths = 0,M,N,m,*rowners,*cols;
3535   PetscInt       *locrowlens = NULL,*procsnz = NULL,*browners = NULL;
3536   PetscInt       jj,*mycols,*ibuf,bs = newmat->rmap->bs,Mbs,mbs,extra_rows,mmax;
3537   PetscMPIInt    tag    = ((PetscObject)viewer)->tag;
3538   PetscInt       *dlens = NULL,*odlens = NULL,*mask = NULL,*masked1 = NULL,*masked2 = NULL,rowcount,odcount;
3539   PetscInt       dcount,kmax,k,nzcount,tmp,mend;
3540 
3541   PetscFunctionBegin;
3542   /* force binary viewer to load .info file if it has not yet done so */
3543   ierr = PetscViewerSetUp(viewer);CHKERRQ(ierr);
3544   ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
3545   ierr = PetscOptionsBegin(comm,NULL,"Options for loading MPIBAIJ matrix 2","Mat");CHKERRQ(ierr);
3546   ierr = PetscOptionsInt("-matload_block_size","Set the blocksize used to store the matrix","MatLoad",bs,&bs,NULL);CHKERRQ(ierr);
3547   ierr = PetscOptionsEnd();CHKERRQ(ierr);
3548   if (bs < 0) bs = 1;
3549 
3550   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
3551   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
3552   ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
3553   if (!rank) {
3554     ierr = PetscBinaryRead(fd,(char*)header,4,PETSC_INT);CHKERRQ(ierr);
3555     if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Not a matrix object");
3556     if (header[3] < 0) SETERRQ(PetscObjectComm((PetscObject)newmat),PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format on disk, cannot load as MPIBAIJ");
3557   }
3558   ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
3559   M    = header[1]; N = header[2];
3560 
3561   /* If global sizes are set, check if they are consistent with that given in the file */
3562   if (newmat->rmap->N >= 0 && newmat->rmap->N != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of rows:Matrix in file has (%D) and input matrix has (%D)",newmat->rmap->N,M);
3563   if (newmat->cmap->N >= 0 && newmat->cmap->N != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Inconsistent # of cols:Matrix in file has (%D) and input matrix has (%D)",newmat->cmap->N,N);
3564 
3565   if (M != N) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Can only do square matrices");
3566 
3567   /*
3568      This code adds extra rows to make sure the number of rows is
3569      divisible by the blocksize
3570   */
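  /* e.g. M=10, bs=4: Mbs=10/4=2 and extra_rows=4-10+8=2, so Mbs becomes 3 and two
     padded rows (given 1.0 on the diagonal below) bring the row count to 12 */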
3571   Mbs        = M/bs;
3572   extra_rows = bs - M + bs*Mbs;
3573   if (extra_rows == bs) extra_rows = 0;
3574   else                  Mbs++;
3575   if (extra_rows && !rank) {
3576     ierr = PetscInfo(viewer,"Padding loaded matrix to match blocksize\n");CHKERRQ(ierr);
3577   }
3578 
3579   /* determine ownership of all rows */
3580   if (newmat->rmap->n < 0) { /* PETSC_DECIDE */
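    /* e.g. Mbs=10 on 4 processes: ranks 0..3 own 3,3,2,2 block rows */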
3581     mbs = Mbs/size + ((Mbs % size) > rank);
3582     m   = mbs*bs;
3583   } else { /* User set */
3584     m   = newmat->rmap->n;
3585     mbs = m/bs;
3586   }
3587   ierr = PetscMalloc2(size+1,&rowners,size+1,&browners);CHKERRQ(ierr);
3588   ierr = MPI_Allgather(&mbs,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
3589 
3590   /* process 0 needs enough room for process with most rows */
3591   if (!rank) {
3592     mmax = rowners[1];
3593     for (i=2; i<=size; i++) {
3594       mmax = PetscMax(mmax,rowners[i]);
3595     }
3596     mmax*=bs;
3597   } else mmax = -1;             /* unused, but compiler warns anyway */
3598 
3599   rowners[0] = 0;
3600   for (i=2; i<=size; i++) rowners[i] += rowners[i-1];
3601   for (i=0; i<=size; i++) browners[i] = rowners[i]*bs;
3602   rstart = rowners[rank];
3603   rend   = rowners[rank+1];
3604 
3605   /* distribute row lengths to all processors */
3606   ierr = PetscMalloc1(m,&locrowlens);CHKERRQ(ierr);
3607   if (!rank) {
3608     mend = m;
3609     if (size == 1) mend = mend - extra_rows;
3610     ierr = PetscBinaryRead(fd,locrowlens,mend,PETSC_INT);CHKERRQ(ierr);
3611     for (j=mend; j<m; j++) locrowlens[j] = 1;
3612     ierr = PetscMalloc1(mmax,&rowlengths);CHKERRQ(ierr);
3613     ierr = PetscCalloc1(size,&procsnz);CHKERRQ(ierr);
3614     for (j=0; j<m; j++) {
3615       procsnz[0] += locrowlens[j];
3616     }
3617     for (i=1; i<size; i++) {
3618       mend = browners[i+1] - browners[i];
3619       if (i == size-1) mend = mend - extra_rows;
3620       ierr = PetscBinaryRead(fd,rowlengths,mend,PETSC_INT);CHKERRQ(ierr);
3621       for (j=mend; j<browners[i+1] - browners[i]; j++) rowlengths[j] = 1;
3622       /* calculate the number of nonzeros on each processor */
3623       for (j=0; j<browners[i+1]-browners[i]; j++) {
3624         procsnz[i] += rowlengths[j];
3625       }
3626       ierr = MPI_Send(rowlengths,browners[i+1]-browners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3627     }
3628     ierr = PetscFree(rowlengths);CHKERRQ(ierr);
3629   } else {
3630     ierr = MPI_Recv(locrowlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3631   }
3632 
3633   if (!rank) {
3634     /* determine max buffer needed and allocate it */
3635     maxnz = procsnz[0];
3636     for (i=1; i<size; i++) {
3637       maxnz = PetscMax(maxnz,procsnz[i]);
3638     }
3639     ierr = PetscMalloc1(maxnz,&cols);CHKERRQ(ierr);
3640 
3641     /* read in my part of the matrix column indices  */
3642     nz     = procsnz[0];
3643     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3644     mycols = ibuf;
3645     if (size == 1) nz -= extra_rows;
3646     ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);
3647     if (size == 1) {
3648       for (i=0; i< extra_rows; i++) mycols[nz+i] = M+i;
3649     }
3650 
3651     /* read in everyone else's column indices (except the last process's) and ship them off */
3652     for (i=1; i<size-1; i++) {
3653       nz   = procsnz[i];
3654       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3655       ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
3656     }
3657     /* read in the column indices for the last process */
3658     if (size != 1) {
3659       nz   = procsnz[size-1] - extra_rows;  /* the extra rows are not on the disk */
3660       ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
3661       for (i=0; i<extra_rows; i++) cols[nz+i] = M+i;
3662       ierr = MPI_Send(cols,nz+extra_rows,MPIU_INT,size-1,tag,comm);CHKERRQ(ierr);
3663     }
3664     ierr = PetscFree(cols);CHKERRQ(ierr);
3665   } else {
3666     /* determine buffer space needed for message */
3667     nz = 0;
3668     for (i=0; i<m; i++) {
3669       nz += locrowlens[i];
3670     }
3671     ierr   = PetscMalloc1(nz+1,&ibuf);CHKERRQ(ierr);
3672     mycols = ibuf;
3673     /* receive message of column indices*/
3674     ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
3675     ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
3676     if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
3677   }
3678 
3679   /* loop over local rows, determining number of off diagonal entries */
3680   ierr     = PetscMalloc2(rend-rstart,&dlens,rend-rstart,&odlens);CHKERRQ(ierr);
3681   ierr     = PetscCalloc3(Mbs,&mask,Mbs,&masked1,Mbs,&masked2);CHKERRQ(ierr);
3682   rowcount = 0; nzcount = 0;
3683   for (i=0; i<mbs; i++) {
3684     dcount  = 0;
3685     odcount = 0;
3686     for (j=0; j<bs; j++) {
3687       kmax = locrowlens[rowcount];
3688       for (k=0; k<kmax; k++) {
3689         tmp = mycols[nzcount++]/bs;
3690         if (!mask[tmp]) {
3691           mask[tmp] = 1;
3692           if (tmp < rstart || tmp >= rend) masked2[odcount++] = tmp;
3693           else masked1[dcount++] = tmp;
3694         }
3695       }
3696       rowcount++;
3697     }
3698 
3699     dlens[i]  = dcount;
3700     odlens[i] = odcount;
3701 
3702     /* zero out the mask elements we set */
3703     for (j=0; j<dcount; j++) mask[masked1[j]] = 0;
3704     for (j=0; j<odcount; j++) mask[masked2[j]] = 0;
3705   }
3706 
3707   ierr = MatSetSizes(newmat,m,m,M+extra_rows,N+extra_rows);CHKERRQ(ierr);
3708   ierr = MatMPIBAIJSetPreallocation(newmat,bs,0,dlens,0,odlens);CHKERRQ(ierr);
3709 
3710   if (!rank) {
3711     ierr = PetscMalloc1(maxnz+1,&buf);CHKERRQ(ierr);
3712     /* read in my part of the matrix numerical values  */
3713     nz     = procsnz[0];
3714     vals   = buf;
3715     mycols = ibuf;
3716     if (size == 1) nz -= extra_rows;
3717     ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3718     if (size == 1) {
3719       for (i=0; i< extra_rows; i++) vals[nz+i] = 1.0;
3720     }
3721 
3722     /* insert into matrix */
3723     jj = rstart*bs;
3724     for (i=0; i<m; i++) {
3725       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3726       mycols += locrowlens[i];
3727       vals   += locrowlens[i];
3728       jj++;
3729     }
3730     /* read in the other processors' values (except the last one) and ship them out */
3731     for (i=1; i<size-1; i++) {
3732       nz   = procsnz[i];
3733       vals = buf;
3734       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3735       ierr = MPIULong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3736     }
3737     /* the last proc */
3738     if (size != 1) {
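      /* the loop above exits with i == size-1, the rank of the last process */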
3739       nz   = procsnz[i] - extra_rows;
3740       vals = buf;
3741       ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
3742       for (i=0; i<extra_rows; i++) vals[nz+i] = 1.0;
3743       ierr = MPIULong_Send(vals,nz+extra_rows,MPIU_SCALAR,size-1,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3744     }
3745     ierr = PetscFree(procsnz);CHKERRQ(ierr);
3746   } else {
3747     /* receive numeric values */
3748     ierr = PetscMalloc1(nz+1,&buf);CHKERRQ(ierr);
3749 
3750     /* receive message of values*/
3751     vals   = buf;
3752     mycols = ibuf;
3753     ierr   = MPIULong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
3754 
3755     /* insert into matrix */
3756     jj = rstart*bs;
3757     for (i=0; i<m; i++) {
3758       ierr    = MatSetValues_MPIBAIJ(newmat,1,&jj,locrowlens[i],mycols,vals,INSERT_VALUES);CHKERRQ(ierr);
3759       mycols += locrowlens[i];
3760       vals   += locrowlens[i];
3761       jj++;
3762     }
3763   }
3764   ierr = PetscFree(locrowlens);CHKERRQ(ierr);
3765   ierr = PetscFree(buf);CHKERRQ(ierr);
3766   ierr = PetscFree(ibuf);CHKERRQ(ierr);
3767   ierr = PetscFree2(rowners,browners);CHKERRQ(ierr);
3768   ierr = PetscFree2(dlens,odlens);CHKERRQ(ierr);
3769   ierr = PetscFree3(mask,masked1,masked2);CHKERRQ(ierr);
3770   ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3771   ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3772   PetscFunctionReturn(0);
3773 }
3774 
3775 #undef __FUNCT__
3776 #define __FUNCT__ "MatMPIBAIJSetHashTableFactor"
3777 /*@
3778    MatMPIBAIJSetHashTableFactor - Sets the factor required to compute the size of the HashTable.
3779 
3780    Not Collective, each process can use a different factor
3781 
3782    Input Parameters:
3783 +  mat  - the matrix
3784 -  fact - factor
3785 
3786    Level: advanced
3787 
3788   Notes:
3789    This can also be set by the command line option: -mat_use_hash_table <fact>
3790 
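   A minimal usage sketch (the factor 1.6 is an arbitrary illustration):
.vb
   MatSetOption(mat,MAT_USE_HASH_TABLE,PETSC_TRUE);
   MatMPIBAIJSetHashTableFactor(mat,1.6);
.ve
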
3791 .keywords: matrix, hashtable, factor, HT
3792 
3793 .seealso: MatSetOption()
3794 @*/
3795 PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat mat,PetscReal fact)
3796 {
3797   PetscErrorCode ierr;
3798 
3799   PetscFunctionBegin;
3800   ierr = PetscTryMethod(mat,"MatSetHashTableFactor_C",(Mat,PetscReal),(mat,fact));CHKERRQ(ierr);
3801   PetscFunctionReturn(0);
3802 }
3803 
3804 #undef __FUNCT__
3805 #define __FUNCT__ "MatSetHashTableFactor_MPIBAIJ"
3806 PetscErrorCode  MatSetHashTableFactor_MPIBAIJ(Mat mat,PetscReal fact)
3807 {
3808   Mat_MPIBAIJ *baij;
3809 
3810   PetscFunctionBegin;
3811   baij          = (Mat_MPIBAIJ*)mat->data;
3812   baij->ht_fact = fact;
3813   PetscFunctionReturn(0);
3814 }
3815 
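/*
   Exposes the pieces of the MPIBAIJ storage scheme: Ad is the local diagonal
   block a->A, Ao the local off-diagonal block a->B, and colmap the garray that
   maps Ao's local block columns back to global block columns.
*/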
3816 #undef __FUNCT__
3817 #define __FUNCT__ "MatMPIBAIJGetSeqBAIJ"
3818 PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat A,Mat *Ad,Mat *Ao,const PetscInt *colmap[])
3819 {
3820   Mat_MPIBAIJ *a = (Mat_MPIBAIJ*)A->data;
3821 
3822   PetscFunctionBegin;
3823   if (Ad)     *Ad     = a->A;
3824   if (Ao)     *Ao     = a->B;
3825   if (colmap) *colmap = a->garray;
3826   PetscFunctionReturn(0);
3827 }
3828 
3829 /*
3830     Special version for direct calls from Fortran (to eliminate two function call overheads)
3831 */
3832 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3833 #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3834 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3835 #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3836 #endif
3837 
3838 #undef __FUNCT__
3839 #define __FUNCT__ "matmpibaijsetvaluesblocked"
3840 /*@C
3841   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to MatSetValuesBlocked()
3842 
3843   Collective on Mat
3844 
3845   Input Parameters:
3846 + mat - the matrix
3847 . min - number of input rows
3848 . im - input rows
3849 . nin - number of input columns
3850 . in - input columns
3851 . v - numerical values input
3852 - addvin - INSERT_VALUES or ADD_VALUES
3853 
3854   Notes: This contains a complete copy of MatSetValuesBlocked_MPIBAIJ(); the duplication avoids two levels of function-call overhead at the cost of code reuse.
3855 
3856   Level: advanced
3857 
3858 .seealso:   MatSetValuesBlocked()
3859 @*/
3860 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin,PetscInt *min,const PetscInt im[],PetscInt *nin,const PetscInt in[],const MatScalar v[],InsertMode *addvin)
3861 {
3862   /* convert input arguments to C version */
3863   Mat        mat  = *matin;
3864   PetscInt   m    = *min, n = *nin;
3865   InsertMode addv = *addvin;
3866 
3867   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ*)mat->data;
3868   const MatScalar *value;
3869   MatScalar       *barray     = baij->barray;
3870   PetscBool       roworiented = baij->roworiented;
3871   PetscErrorCode  ierr;
3872   PetscInt        i,j,ii,jj,row,col,rstart=baij->rstartbs;
3873   PetscInt        rend=baij->rendbs,cstart=baij->cstartbs,stepval;
3874   PetscInt        cend=baij->cendbs,bs=mat->rmap->bs,bs2=baij->bs2;
3875 
3876   PetscFunctionBegin;
3877   /* tasks normally handled by MatSetValuesBlocked() */
3878   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3879 #if defined(PETSC_USE_DEBUG)
3880   else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
3881   if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3882 #endif
3883   if (mat->assembled) {
3884     mat->was_assembled = PETSC_TRUE;
3885     mat->assembled     = PETSC_FALSE;
3886   }
3887   ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3888 
3889 
3890   if (!barray) {
3891     ierr         = PetscMalloc1(bs2,&barray);CHKERRQ(ierr);
3892     baij->barray = barray;
3893   }
3894 
3895   if (roworiented) stepval = (n-1)*bs;
3896   else stepval = (m-1)*bs;
3897 
3898   for (i=0; i<m; i++) {
3899     if (im[i] < 0) continue;
3900 #if defined(PETSC_USE_DEBUG)
3901     if (im[i] >= baij->Mbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large, row %D max %D",im[i],baij->Mbs-1);
3902 #endif
3903     if (im[i] >= rstart && im[i] < rend) {
3904       row = im[i] - rstart;
3905       for (j=0; j<n; j++) {
3906         /* If NumCol = 1 then a copy is not required */
3907         if ((roworiented) && (n == 1)) {
3908           barray = (MatScalar*)v + i*bs2;
3909         } else if ((!roworiented) && (m == 1)) {
3910           barray = (MatScalar*)v + j*bs2;
3911         } else { /* Here a copy is required */
3912           if (roworiented) {
3913             value = v + i*(stepval+bs)*bs + j*bs;
3914           } else {
3915             value = v + j*(stepval+bs)*bs + i*bs;
3916           }
3917           for (ii=0; ii<bs; ii++,value+=stepval) {
3918             for (jj=0; jj<bs; jj++) {
3919               *barray++ = *value++;
3920             }
3921           }
3922           barray -=bs2;
3923         }
3924 
3925         if (in[j] >= cstart && in[j] < cend) {
3926           col  = in[j] - cstart;
3927           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3928         } else if (in[j] < 0) continue;
3929 #if defined(PETSC_USE_DEBUG)
3930         else if (in[j] >= baij->Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large, col %D max %D",in[j],baij->Nbs-1);
3931 #endif
3932         else {
3933           if (mat->was_assembled) {
3934             if (!baij->colmap) {
3935               ierr = MatCreateColmap_MPIBAIJ_Private(mat);CHKERRQ(ierr);
3936             }
3937 
3938 #if defined(PETSC_USE_DEBUG)
3939 #if defined(PETSC_USE_CTABLE)
3940             { PetscInt data;
3941               ierr = PetscTableFind(baij->colmap,in[j]+1,&data);CHKERRQ(ierr);
3942               if ((data - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3943             }
3944 #else
3945             if ((baij->colmap[in[j]] - 1) % bs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect colmap");
3946 #endif
3947 #endif
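            /* colmap maps a global block column to (local block column)*bs + 1;
               the +1 shift makes a lookup result of 0 mean "not yet present in B" */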
3948 #if defined(PETSC_USE_CTABLE)
3949             ierr = PetscTableFind(baij->colmap,in[j]+1,&col);CHKERRQ(ierr);
3950             col  = (col - 1)/bs;
3951 #else
3952             col = (baij->colmap[in[j]] - 1)/bs;
3953 #endif
3954             if (col < 0 && !((Mat_SeqBAIJ*)(baij->A->data))->nonew) {
3955               ierr = MatDisAssemble_MPIBAIJ(mat);CHKERRQ(ierr);
3956               col  =  in[j];
3957             }
3958           } else col = in[j];
3959           ierr = MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B,row,col,barray,addv,im[i],in[j]);CHKERRQ(ierr);
3960         }
3961       }
3962     } else {
3963       if (!baij->donotstash) {
3964         if (roworiented) {
3965           ierr = MatStashValuesRowBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3966         } else {
3967           ierr = MatStashValuesColBlocked_Private(&mat->bstash,im[i],n,in,v,m,n,i);CHKERRQ(ierr);
3968         }
3969       }
3970     }
3971   }
3972 
3973   /* task normally handled by MatSetValuesBlocked() */
3974   ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
3975   PetscFunctionReturn(0);
3976 }
3977 
3978 #undef __FUNCT__
3979 #define __FUNCT__ "MatCreateMPIBAIJWithArrays"
3980 /*@
3981      MatCreateMPIBAIJWithArrays - creates an MPI BAIJ matrix using arrays that contain the local
3982          rows in standard CSR format.
3983 
3984    Collective on MPI_Comm
3985 
3986    Input Parameters:
3987 +  comm - MPI communicator
3988 .  bs - the block size, only a block size of 1 is supported
3989 .  m - number of local rows (cannot be PETSC_DECIDE)
3990 .  n - number of local columns; this should be the same as the local size used in creating the
3991        x vector for the matrix-vector product y = Ax (or PETSC_DECIDE to have it
3992        calculated if N is given). For square matrices n is almost always m.
3993 .  M - number of global rows (or PETSC_DETERMINE to have it calculated if m is given)
3994 .  N - number of global columns (or PETSC_DETERMINE to have it calculated if n is given)
3995 .  i - row offsets into j and a (standard CSR; i[0] must be 0 and i has length m+1)
3996 .  j - column indices
3997 -  a - matrix values
3998 
3999    Output Parameter:
4000 .   mat - the matrix
4001 
4002    Level: intermediate
4003 
4004    Notes:
4005        The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
4006      thus you CANNOT change the matrix entries by changing the values of a[] after you have
4007      called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.
4008 
4009      The order of the entries in values is the same as in the block compressed sparse row storage format; that is, it is
4010      the same as a three-dimensional array in Fortran, values(bs,bs,nnz), that contains the first column of the first
4011      block, followed by the second column of the first block, and so on.  That is, the blocks are contiguous in memory
4012      with column-major ordering within each block.
4013 
4014        The i and j indices are 0-based; the entries of i are offsets into the local j array.
4015 
4016 .keywords: matrix, baij, compressed row, sparse, parallel
4017 
4018 .seealso: MatCreate(), MatCreateSeqBAIJ(), MatSetValues(), MatMPIBAIJSetPreallocation(), MatMPIBAIJSetPreallocationCSR(),
4019           MATMPIBAIJ, MatCreateBAIJ(), MatCreateMPIAIJWithSplitArrays()
4020 @*/
4021 PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
4022 {
4023   PetscErrorCode ierr;
4024 
4025   PetscFunctionBegin;
4026   if (i[0]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
4027   if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
4028   ierr = MatCreate(comm,mat);CHKERRQ(ierr);
4029   ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
4030   ierr = MatSetType(*mat,MATMPIBAIJ);CHKERRQ(ierr);
4031   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
4032   ierr = MatMPIBAIJSetPreallocationCSR(*mat,bs,i,j,a);CHKERRQ(ierr);
4033   ierr = MatSetOption(*mat,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
4034   PetscFunctionReturn(0);
4035 }
4036 
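/*
   A minimal usage sketch for MatCreateMPIBAIJWithArrays() (hypothetical values,
   not taken from the PETSc sources): assemble, on one process, a 2x2 matrix
   with block size 1 whose nonzeros are (0,0)=1.0, (0,1)=2.0, and (1,1)=3.0.

     PetscInt    i[] = {0,2,3};           row offsets: row 0 has 2 entries, row 1 has 1
     PetscInt    j[] = {0,1,1};           column index of each entry
     PetscScalar a[] = {1.0,2.0,3.0};     the corresponding values
     Mat         A;

     ierr = MatCreateMPIBAIJWithArrays(PETSC_COMM_WORLD,1,2,2,PETSC_DETERMINE,PETSC_DETERMINE,i,j,a,&A);CHKERRQ(ierr);
     ierr = MatDestroy(&A);CHKERRQ(ierr);
*/
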
4037 #undef __FUNCT__
4038 #define __FUNCT__ "MatCreateMPIMatConcatenateSeqMat_MPIBAIJ"
4039 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
4040 {
4041   PetscErrorCode ierr;
4042   PetscInt       m,N,i,rstart,nnz,Ii,bs,cbs;
4043   PetscInt       *indx;
4044   PetscScalar    *values;
4045 
4046   PetscFunctionBegin;
4047   ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
4048   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
4049     Mat_SeqBAIJ    *a = (Mat_SeqBAIJ*)inmat->data;
4050     PetscInt       *dnz,*onz,sum,mbs,Nbs;
4051     PetscInt       *bindx,rmax=a->rmax,j;
4052 
4053     ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4054     mbs = m/bs; Nbs = N/cbs;
4055     if (n == PETSC_DECIDE) {
4056       ierr = PetscSplitOwnership(comm,&n,&Nbs);CHKERRQ(ierr);
4057     }
4058     /* Check sum(n) = Nbs */
4059     ierr = MPIU_Allreduce(&n,&sum,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4060     if (sum != Nbs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Sum of local block columns %D != global block columns %D",sum,Nbs);
4061 
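    /* inclusive prefix sum of the local block-row counts; subtracting mbs then
       yields the global index of this process's first block row */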
4062     ierr    = MPI_Scan(&mbs, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
4063     rstart -= mbs;
4064 
4065     ierr = PetscMalloc1(rmax,&bindx);CHKERRQ(ierr);
4066     ierr = MatPreallocateInitialize(comm,mbs,n,dnz,onz);CHKERRQ(ierr);
4067     for (i=0; i<mbs; i++) {
4068       ierr = MatGetRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr); /* non-blocked nnz and indx */
4069       nnz = nnz/bs;
4070       for (j=0; j<nnz; j++) bindx[j] = indx[j*bs]/bs;
4071       ierr = MatPreallocateSet(i+rstart,nnz,bindx,dnz,onz);CHKERRQ(ierr);
4072       ierr = MatRestoreRow_SeqBAIJ(inmat,i*bs,&nnz,&indx,NULL);CHKERRQ(ierr);
4073     }
4074     ierr = PetscFree(bindx);CHKERRQ(ierr);
4075 
4076     ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
4077     ierr = MatSetSizes(*outmat,m,n*cbs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
4078     ierr = MatSetBlockSizes(*outmat,bs,cbs);CHKERRQ(ierr);
4079     ierr = MatSetType(*outmat,MATMPIBAIJ);CHKERRQ(ierr);
4080     ierr = MatMPIBAIJSetPreallocation(*outmat,bs,0,dnz,0,onz);CHKERRQ(ierr);
4081     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
4082   }
4083 
4084   /* numeric phase */
4085   ierr = MatGetBlockSizes(inmat,&bs,&cbs);CHKERRQ(ierr);
4086   ierr = MatGetOwnershipRange(*outmat,&rstart,NULL);CHKERRQ(ierr);
4087 
4088   for (i=0; i<m; i++) {
4089     ierr = MatGetRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4090     Ii   = i + rstart;
4091     ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
4092     ierr = MatRestoreRow_SeqBAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
4093   }
4094   ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4095   ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
4096   PetscFunctionReturn(0);
4097 }
4098
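/*
   A minimal usage sketch (hypothetical): this routine is normally reached
   through the type-dispatching front end MatCreateMPIMatConcatenateSeqMat(),
   which stacks one sequential matrix per process into a single parallel matrix.

     Mat seqA,mpiA;
     ... create and assemble seqA as a MATSEQBAIJ matrix on PETSC_COMM_SELF ...
     ierr = MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD,seqA,PETSC_DECIDE,MAT_INITIAL_MATRIX,&mpiA);CHKERRQ(ierr);
*/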