#include <../src/mat/impls/baij/mpi/mpibaij.h> /*I  "petscmat.h"  I*/

#include <petsc/private/hashseti.h>
#include <petscblaslapack.h>
#include <petscsf.h>

PetscErrorCode MatDestroy_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ",Cols=%" PetscInt_FMT, mat->rmap->N, mat->cmap->N));
#endif
  PetscCall(MatStashDestroy_Private(&mat->stash));
  PetscCall(MatStashDestroy_Private(&mat->bstash));
  PetscCall(MatDestroy(&baij->A));
  PetscCall(MatDestroy(&baij->B));
#if defined(PETSC_USE_CTABLE)
  PetscCall(PetscHMapIDestroy(&baij->colmap));
#else
  PetscCall(PetscFree(baij->colmap));
#endif
  PetscCall(PetscFree(baij->garray));
  PetscCall(VecDestroy(&baij->lvec));
  PetscCall(VecScatterDestroy(&baij->Mvctx));
  PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));
  PetscCall(PetscFree(baij->barray));
  PetscCall(PetscFree2(baij->hd, baij->ht));
  PetscCall(PetscFree(baij->rangebs));
  PetscCall(PetscFree(mat->data));

  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatStoreValues_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatRetrieveValues_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIBAIJSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPIBAIJSetPreallocationCSR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatDiagonalScaleLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatSetHashTableFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpibaij_mpisbaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpibaij_mpiadj_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpibaij_mpiaij_C", NULL));
#if defined(PETSC_HAVE_HYPRE)
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpibaij_hypre_C", NULL));
#endif
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpibaij_is_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* defines MatSetValues_MPI_Hash(), MatAssemblyBegin_MPI_Hash(), and MatAssemblyEnd_MPI_Hash() */
#define TYPE BAIJ
#include "../src/mat/impls/aij/mpi/mpihashmat.h"
#undef TYPE
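/* Including mpihashmat.h with TYPE defined instantiates the generic hash-based
   assembly support named in the comment above for the BAIJ matrix type */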

#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat, MatType, MatReuse, Mat *);
#endif

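/*
   Computes, for each local row, the entry of maximum absolute value and
   (optionally) its global column: the row maxima of the diagonal block A and
   the off-diagonal block B are formed separately and then merged entry by
   entry, translating B's local block-column indices back to global columns
   through garray. Ties are broken in favor of the smaller global column index.
*/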
PetscErrorCode MatGetRowMaxAbs_MPIBAIJ(Mat A, Vec v, PetscInt idx[])
{
  Mat_MPIBAIJ       *a = (Mat_MPIBAIJ *)A->data;
  PetscInt           i, *idxb = NULL, m = A->rmap->n, bs = A->cmap->bs;
  PetscScalar       *va, *vv;
  Vec                vB, vA;
  const PetscScalar *vb;

  PetscFunctionBegin;
  PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &vA));
  PetscCall(MatGetRowMaxAbs(a->A, vA, idx));

  PetscCall(VecGetArrayWrite(vA, &va));
  if (idx) {
    for (i = 0; i < m; i++) {
      if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart;
    }
  }

  PetscCall(VecCreateSeq(PETSC_COMM_SELF, m, &vB));
  PetscCall(PetscMalloc1(m, &idxb));
  PetscCall(MatGetRowMaxAbs(a->B, vB, idxb));

  PetscCall(VecGetArrayWrite(v, &vv));
  PetscCall(VecGetArrayRead(vB, &vb));
  for (i = 0; i < m; i++) {
    if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) {
      vv[i] = vb[i];
      if (idx) idx[i] = bs * a->garray[idxb[i] / bs] + (idxb[i] % bs);
    } else {
      vv[i] = va[i];
      if (idx && PetscAbsScalar(va[i]) == PetscAbsScalar(vb[i]) && idxb[i] != -1 && idx[i] > bs * a->garray[idxb[i] / bs] + (idxb[i] % bs)) idx[i] = bs * a->garray[idxb[i] / bs] + (idxb[i] % bs);
    }
  }
  PetscCall(VecRestoreArrayWrite(v, &vv));
  PetscCall(VecRestoreArrayWrite(vA, &va));
  PetscCall(VecRestoreArrayRead(vB, &vb));
  PetscCall(PetscFree(idxb));
  PetscCall(VecDestroy(&vA));
  PetscCall(VecDestroy(&vB));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatStoreValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ *aij = (Mat_MPIBAIJ *)mat->data;

  PetscFunctionBegin;
  PetscCall(MatStoreValues(aij->A));
  PetscCall(MatStoreValues(aij->B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatRetrieveValues_MPIBAIJ(Mat mat)
{
  Mat_MPIBAIJ *aij = (Mat_MPIBAIJ *)mat->data;

  PetscFunctionBegin;
  PetscCall(MatRetrieveValues(aij->A));
  PetscCall(MatRetrieveValues(aij->B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
     Local utility routine that creates a mapping from the global column
   number to the local number in the off-diagonal part of the local
   storage of the matrix. This is done in a non-scalable way since the
   length of colmap equals the global number of block columns.
*/
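/*
   Example (hypothetical numbers): with bs = 2 and garray = {3, 7}, the
   off-diagonal block B has two local block columns holding global block
   columns 3 and 7, so colmap[3] = 0*2 + 1 = 1 and colmap[7] = 1*2 + 1 = 3;
   entries are stored shifted by one so that 0 can mean "not present".
*/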
PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat mat)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
  Mat_SeqBAIJ *B    = (Mat_SeqBAIJ *)baij->B->data;
  PetscInt     nbs = B->nbs, i, bs = mat->rmap->bs;

  PetscFunctionBegin;
#if defined(PETSC_USE_CTABLE)
  PetscCall(PetscHMapICreateWithSize(baij->nbs, &baij->colmap));
  for (i = 0; i < nbs; i++) PetscCall(PetscHMapISet(baij->colmap, baij->garray[i] + 1, i * bs + 1));
#else
  PetscCall(PetscCalloc1(baij->Nbs + 1, &baij->colmap));
  for (i = 0; i < nbs; i++) baij->colmap[baij->garray[i]] = i * bs + 1;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

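/*
   The following two macros insert one scalar entry into the diagonal (A) or
   off-diagonal (B) sequential block of the local matrix. Both first narrow
   the search window by bisection on the row's sorted block-column indices,
   then scan linearly; if the block column is absent and new nonzeros are
   allowed, the later entries of the row are shifted up and a zeroed bs x bs
   block is inserted before the value is stored column-major within the block.
*/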
#define MatSetValues_SeqBAIJ_A_Private(row, col, value, addv, orow, ocol) \
  { \
    brow = row / bs; \
    rp   = aj + ai[brow]; \
    ap   = aa + bs2 * ai[brow]; \
    rmax = aimax[brow]; \
    nrow = ailen[brow]; \
    bcol = col / bs; \
    ridx = row % bs; \
    cidx = col % bs; \
    low  = 0; \
    high = nrow; \
    while (high - low > 3) { \
      t = (low + high) / 2; \
      if (rp[t] > bcol) high = t; \
      else low = t; \
    } \
    for (_i = low; _i < high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap + bs2 * _i + bs * cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value; \
        else *bap = value; \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    PetscCheck(a->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, brow, bcol, rmax, aa, ai, aj, rp, ap, aimax, a->nonew, MatScalar); \
    N = nrow++ - 1; \
    /* shift up all the later entries in this row */ \
    PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
    PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
    PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
    rp[_i]                          = bcol; \
    ap[bs2 * _i + bs * cidx + ridx] = value; \
  a_noinsert:; \
    ailen[brow] = nrow; \
  }

#define MatSetValues_SeqBAIJ_B_Private(row, col, value, addv, orow, ocol) \
  { \
    brow = row / bs; \
    rp   = bj + bi[brow]; \
    ap   = ba + bs2 * bi[brow]; \
    rmax = bimax[brow]; \
    nrow = bilen[brow]; \
    bcol = col / bs; \
    ridx = row % bs; \
    cidx = col % bs; \
    low  = 0; \
    high = nrow; \
    while (high - low > 3) { \
      t = (low + high) / 2; \
      if (rp[t] > bcol) high = t; \
      else low = t; \
    } \
    for (_i = low; _i < high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap + bs2 * _i + bs * cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value; \
        else *bap = value; \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    PetscCheck(b->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B, b->mbs, bs2, nrow, brow, bcol, rmax, ba, bi, bj, rp, ap, bimax, b->nonew, MatScalar); \
    N = nrow++ - 1; \
    /* shift up all the later entries in this row */ \
    PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
    PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
    PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
    rp[_i]                          = bcol; \
    ap[bs2 * _i + bs * cidx + ridx] = value; \
  b_noinsert:; \
    bilen[brow] = nrow; \
  }

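/*
   Point-wise MatSetValues() for MPIBAIJ: rows owned by this process are split
   column-wise between the diagonal block A (columns inside the local column
   range) and the off-diagonal block B (all other columns, translated through
   colmap); rows owned by other processes are queued in the matrix stash and
   communicated during assembly. Inserting into a previously assembled B at a
   column not yet in colmap triggers MatDisAssemble_MPIBAIJ() so B can be
   rebuilt with the new nonzero pattern.

   A minimal caller-side sketch (hypothetical indices and values):

     PetscInt    row = 3, col = 7;   global point (not block) indices
     PetscScalar val = 1.0;
     PetscCall(MatSetValues(A, 1, &row, 1, &col, &val, ADD_VALUES));
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/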
PetscErrorCode MatSetValues_MPIBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
  MatScalar    value;
  PetscBool    roworiented = baij->roworiented;
  PetscInt     i, j, row, col;
  PetscInt     rstart_orig = mat->rmap->rstart;
  PetscInt     rend_orig = mat->rmap->rend, cstart_orig = mat->cmap->rstart;
  PetscInt     cend_orig = mat->cmap->rend, bs = mat->rmap->bs;

  /* Some Variables required in the macro */
  Mat          A     = baij->A;
  Mat_SeqBAIJ *a     = (Mat_SeqBAIJ *)(A)->data;
  PetscInt    *aimax = a->imax, *ai = a->i, *ailen = a->ilen, *aj = a->j;
  MatScalar   *aa = a->a;

  Mat          B     = baij->B;
  Mat_SeqBAIJ *b     = (Mat_SeqBAIJ *)(B)->data;
  PetscInt    *bimax = b->imax, *bi = b->i, *bilen = b->ilen, *bj = b->j;
  MatScalar   *ba = b->a;

  PetscInt  *rp, ii, nrow, _i, rmax, N, brow, bcol;
  PetscInt   low, high, t, ridx, cidx, bs2 = a->bs2;
  MatScalar *ap, *bap;

  PetscFunctionBegin;
  for (i = 0; i < m; i++) {
    if (im[i] < 0) continue;
    PetscCheck(im[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], mat->rmap->N - 1);
    if (im[i] >= rstart_orig && im[i] < rend_orig) {
      row = im[i] - rstart_orig;
      for (j = 0; j < n; j++) {
        if (in[j] >= cstart_orig && in[j] < cend_orig) {
          col = in[j] - cstart_orig;
          if (roworiented) value = v[i * n + j];
          else value = v[i + j * m];
          MatSetValues_SeqBAIJ_A_Private(row, col, value, addv, im[i], in[j]);
        } else if (in[j] < 0) {
          continue;
        } else {
          PetscCheck(in[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[j], mat->cmap->N - 1);
          if (mat->was_assembled) {
            if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
#if defined(PETSC_USE_CTABLE)
            PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] / bs + 1, 0, &col));
            col = col - 1;
#else
            col = baij->colmap[in[j] / bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqBAIJ *)(baij->B->data))->nonew) {
              PetscCall(MatDisAssemble_MPIBAIJ(mat));
              col = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B     = baij->B;
              b     = (Mat_SeqBAIJ *)(B)->data;
              bimax = b->imax;
              bi    = b->i;
              bilen = b->ilen;
              bj    = b->j;
              ba    = b->a;
            } else {
              PetscCheck(col >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", im[i], in[j]);
              col += in[j] % bs;
            }
          } else col = in[j];
          if (roworiented) value = v[i * n + j];
          else value = v[i + j * m];
          MatSetValues_SeqBAIJ_B_Private(row, col, value, addv, im[i], in[j]);
          /* PetscCall(MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv)); */
        }
      }
    } else {
      PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        if (roworiented) {
          PetscCall(MatStashValuesRow_Private(&mat->stash, im[i], n, in, v + i * n, PETSC_FALSE));
        } else {
          PetscCall(MatStashValuesCol_Private(&mat->stash, im[i], n, in, v + i, m, PETSC_FALSE));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static inline PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ *)A->data;
  PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
  PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
  PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
  PetscBool          roworiented = a->roworiented;
  const PetscScalar *value       = v;
  MatScalar         *ap, *aa = a->a, *bap;

  PetscFunctionBegin;
  rp    = aj + ai[row];
  ap    = aa + bs2 * ai[row];
  rmax  = imax[row];
  nrow  = ailen[row];
  value = v;
  low   = 0;
  high  = nrow;
  while (high - low > 7) {
    t = (low + high) / 2;
    if (rp[t] > col) high = t;
    else low = t;
  }
  for (i = low; i < high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap + bs2 * i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
          }
        } else {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++, value += bs) {
            for (jj = 0; jj < bs; jj++) bap[jj] += value[jj];
            bap += bs;
          }
        } else {
          for (ii = 0; ii < bs; ii++, value += bs) {
            for (jj = 0; jj < bs; jj++) bap[jj] = value[jj];
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new global block-indexed nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") into the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
  N = nrow++ - 1;
  high++;
  /* shift up all the later entries in this row */
  PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
  PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
  rp[i] = col;
  bap   = ap + bs2 * i;
  if (roworiented) {
    for (ii = 0; ii < bs; ii++) {
      for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
    }
  } else {
    for (ii = 0; ii < bs; ii++) {
      for (jj = 0; jj < bs; jj++) *bap++ = *value++;
    }
  }
noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
    This routine should be optimized so that the block copy at ** Here a copy is required ** below is not needed
    by passing additional stride information into the MatSetValuesBlocked_SeqBAIJ_Inlined() routine
*/
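/*
   When the input holds more than one block per row (row-oriented) or per
   column (column-oriented), each bs x bs block is first gathered into the
   contiguous work array baij->barray (the ** Here a copy is required **
   branch below), since MatSetValuesBlocked_SeqBAIJ_Inlined() expects one
   densely packed block; a single block is passed through unchanged.
*/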
PetscErrorCode MatSetValuesBlocked_MPIBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
{
  Mat_MPIBAIJ       *baij = (Mat_MPIBAIJ *)mat->data;
  const PetscScalar *value;
  MatScalar         *barray      = baij->barray;
  PetscBool          roworiented = baij->roworiented;
  PetscInt           i, j, ii, jj, row, col, rstart = baij->rstartbs;
  PetscInt           rend = baij->rendbs, cstart = baij->cstartbs, stepval;
  PetscInt           cend = baij->cendbs, bs = mat->rmap->bs, bs2 = baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    PetscCall(PetscMalloc1(bs2, &barray));
    baij->barray = barray;
  }

  if (roworiented) stepval = (n - 1) * bs;
  else stepval = (m - 1) * bs;

  for (i = 0; i < m; i++) {
    if (im[i] < 0) continue;
    PetscCheck(im[i] < baij->Mbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed row too large %" PetscInt_FMT " max %" PetscInt_FMT, im[i], baij->Mbs - 1);
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j = 0; j < n; j++) {
        /* If the input holds a single block column (row-oriented) or a single block row (column-oriented), it is already contiguous and no copy is required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar *)v + i * bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar *)v + j * bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + (i * (stepval + bs) + j) * bs;
          } else {
            value = v + (j * (stepval + bs) + i) * bs;
          }
          for (ii = 0; ii < bs; ii++, value += bs + stepval) {
            for (jj = 0; jj < bs; jj++) barray[jj] = value[jj];
            barray += bs;
          }
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col = in[j] - cstart;
          PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A, row, col, barray, addv, im[i], in[j]));
        } else if (in[j] < 0) {
          continue;
        } else {
          PetscCheck(in[j] < baij->Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed column too large %" PetscInt_FMT " max %" PetscInt_FMT, in[j], baij->Nbs - 1);
          if (mat->was_assembled) {
            if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));

#if defined(PETSC_USE_DEBUG)
  #if defined(PETSC_USE_CTABLE)
            {
              PetscInt data;
              PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &data));
              PetscCheck((data - 1) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incorrect colmap");
            }
  #else
            PetscCheck((baij->colmap[in[j]] - 1) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incorrect colmap");
  #endif
#endif
#if defined(PETSC_USE_CTABLE)
            PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col));
            col = (col - 1) / bs;
#else
            col = (baij->colmap[in[j]] - 1) / bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ *)(baij->B->data))->nonew) {
              PetscCall(MatDisAssemble_MPIBAIJ(mat));
              col = in[j];
            } else PetscCheck(col >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new block-indexed nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", im[i], in[j]);
          } else col = in[j];
          PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B, row, col, barray, addv, im[i], in[j]));
        }
      }
    } else {
      PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process block indexed row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          PetscCall(MatStashValuesRowBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        } else {
          PetscCall(MatStashValuesColBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#define HASH_KEY             0.6180339887
#define HASH(size, key, tmp) (tmp = (key)*HASH_KEY, (PetscInt)((size) * (tmp - (PetscInt)tmp)))
/* #define HASH(size,key) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
/* #define HASH(size,key,tmp) ((PetscInt)((size)*fmod(((key)*HASH_KEY),1))) */
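/*
   HASH() is Knuth-style multiplicative hashing: HASH_KEY is the fractional
   part of the golden ratio, so tmp - (PetscInt)tmp is the fractional part of
   key * HASH_KEY and the result is spread roughly uniformly over [0, size).
   For example (hypothetical numbers), size = 11 and key = 25 give
   tmp = 15.4508..., fractional part 0.4508..., hence slot (PetscInt)(11 * 0.4508...) = 4.
   Collisions in the routines below are resolved by linear probing that wraps
   around the end of the table.
*/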
PetscErrorCode MatSetValues_MPIBAIJ_HT(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
{
  Mat_MPIBAIJ *baij        = (Mat_MPIBAIJ *)mat->data;
  PetscBool    roworiented = baij->roworiented;
  PetscInt     i, j, row, col;
  PetscInt     rstart_orig = mat->rmap->rstart;
  PetscInt     rend_orig = mat->rmap->rend, Nbs = baij->Nbs;
  PetscInt     h1, key, size = baij->ht_size, bs = mat->rmap->bs, *HT = baij->ht, idx;
  PetscReal    tmp;
  MatScalar  **HD       = baij->hd, value;
  PetscInt     total_ct = baij->ht_total_ct, insert_ct = baij->ht_insert_ct;

  PetscFunctionBegin;
  for (i = 0; i < m; i++) {
    if (PetscDefined(USE_DEBUG)) {
      PetscCheck(im[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative row");
      PetscCheck(im[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], mat->rmap->N - 1);
    }
    row = im[i];
    if (row >= rstart_orig && row < rend_orig) {
      for (j = 0; j < n; j++) {
        col = in[j];
        if (roworiented) value = v[i * n + j];
        else value = v[i + j * m];
        /* Look up into the Hash Table */
        key = (row / bs) * Nbs + (col / bs) + 1;
        h1  = HASH(size, key, tmp);

        idx = h1;
        if (PetscDefined(USE_DEBUG)) {
          insert_ct++;
          total_ct++;
          if (HT[idx] != key) {
            for (idx = h1; (idx < size) && (HT[idx] != key); idx++, total_ct++)
              ;
            if (idx == size) {
              for (idx = 0; (idx < h1) && (HT[idx] != key); idx++, total_ct++)
                ;
              PetscCheck(idx != h1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "(%" PetscInt_FMT ",%" PetscInt_FMT ") has no entry in the hash table", row, col);
            }
          }
        } else if (HT[idx] != key) {
          for (idx = h1; (idx < size) && (HT[idx] != key); idx++)
            ;
          if (idx == size) {
            for (idx = 0; (idx < h1) && (HT[idx] != key); idx++)
              ;
            PetscCheck(idx != h1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "(%" PetscInt_FMT ",%" PetscInt_FMT ") has no entry in the hash table", row, col);
          }
        }
        /* A HASH table entry is found, so insert the values at the correct address */
        if (addv == ADD_VALUES) *(HD[idx] + (col % bs) * bs + (row % bs)) += value;
        else *(HD[idx] + (col % bs) * bs + (row % bs)) = value;
      }
    } else if (!baij->donotstash) {
      if (roworiented) {
        PetscCall(MatStashValuesRow_Private(&mat->stash, im[i], n, in, v + i * n, PETSC_FALSE));
      } else {
        PetscCall(MatStashValuesCol_Private(&mat->stash, im[i], n, in, v + i, m, PETSC_FALSE));
      }
    }
  }
  if (PetscDefined(USE_DEBUG)) {
    baij->ht_total_ct += total_ct;
    baij->ht_insert_ct += insert_ct;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatSetValuesBlocked_MPIBAIJ_HT(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
{
  Mat_MPIBAIJ       *baij        = (Mat_MPIBAIJ *)mat->data;
  PetscBool          roworiented = baij->roworiented;
  PetscInt           i, j, ii, jj, row, col;
  PetscInt           rstart = baij->rstartbs;
  PetscInt           rend = mat->rmap->rend, stepval, bs = mat->rmap->bs, bs2 = baij->bs2, nbs2 = n * bs2;
  PetscInt           h1, key, size = baij->ht_size, idx, *HT = baij->ht, Nbs = baij->Nbs;
  PetscReal          tmp;
  MatScalar        **HD = baij->hd, *baij_a;
  const PetscScalar *v_t, *value;
  PetscInt           total_ct = baij->ht_total_ct, insert_ct = baij->ht_insert_ct;

  PetscFunctionBegin;
  if (roworiented) stepval = (n - 1) * bs;
  else stepval = (m - 1) * bs;

  for (i = 0; i < m; i++) {
    if (PetscDefined(USE_DEBUG)) {
      PetscCheck(im[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative row: %" PetscInt_FMT, im[i]);
      PetscCheck(im[i] < baij->Mbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], baij->Mbs - 1);
    }
    row = im[i];
    v_t = v + i * nbs2;
    if (row >= rstart && row < rend) {
      for (j = 0; j < n; j++) {
        col = in[j];

        /* Look up into the Hash Table */
        key = row * Nbs + col + 1;
        h1  = HASH(size, key, tmp);

        idx = h1;
        if (PetscDefined(USE_DEBUG)) {
          total_ct++;
          insert_ct++;
          if (HT[idx] != key) {
            for (idx = h1; (idx < size) && (HT[idx] != key); idx++, total_ct++)
              ;
            if (idx == size) {
              for (idx = 0; (idx < h1) && (HT[idx] != key); idx++, total_ct++)
                ;
              PetscCheck(idx != h1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "(%" PetscInt_FMT ",%" PetscInt_FMT ") has no entry in the hash table", row, col);
            }
          }
        } else if (HT[idx] != key) {
          for (idx = h1; (idx < size) && (HT[idx] != key); idx++)
            ;
          if (idx == size) {
            for (idx = 0; (idx < h1) && (HT[idx] != key); idx++)
              ;
            PetscCheck(idx != h1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "(%" PetscInt_FMT ",%" PetscInt_FMT ") has no entry in the hash table", row, col);
          }
        }
        baij_a = HD[idx];
        if (roworiented) {
          /*value = v + i*(stepval+bs)*bs + j*bs;*/
          /* value = v + (i*(stepval+bs)+j)*bs; */
          value = v_t;
          v_t += bs;
          if (addv == ADD_VALUES) {
            for (ii = 0; ii < bs; ii++, value += stepval) {
              for (jj = ii; jj < bs2; jj += bs) baij_a[jj] += *value++;
            }
          } else {
            for (ii = 0; ii < bs; ii++, value += stepval) {
              for (jj = ii; jj < bs2; jj += bs) baij_a[jj] = *value++;
            }
          }
        } else {
          value = v + j * (stepval + bs) * bs + i * bs;
          if (addv == ADD_VALUES) {
            for (ii = 0; ii < bs; ii++, value += stepval, baij_a += bs) {
              for (jj = 0; jj < bs; jj++) baij_a[jj] += *value++;
            }
          } else {
            for (ii = 0; ii < bs; ii++, value += stepval, baij_a += bs) {
              for (jj = 0; jj < bs; jj++) baij_a[jj] = *value++;
            }
          }
        }
      }
    } else {
      if (!baij->donotstash) {
        if (roworiented) {
          PetscCall(MatStashValuesRowBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        } else {
          PetscCall(MatStashValuesColBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        }
      }
    }
  }
  if (PetscDefined(USE_DEBUG)) {
    baij->ht_total_ct += total_ct;
    baij->ht_insert_ct += insert_ct;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

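/*
   MatGetValues() for MPIBAIJ supports only locally owned rows: diagonal
   columns are read from A, other columns are translated through colmap and
   read from B, and a zero is returned for any block that is not present.
*/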
PetscErrorCode MatGetValues_MPIBAIJ(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
  PetscInt     bs = mat->rmap->bs, i, j, bsrstart = mat->rmap->rstart, bsrend = mat->rmap->rend;
  PetscInt     bscstart = mat->cmap->rstart, bscend = mat->cmap->rend, row, col, data;

  PetscFunctionBegin;
  for (i = 0; i < m; i++) {
    if (idxm[i] < 0) continue; /* negative row */
    PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j = 0; j < n; j++) {
        if (idxn[j] < 0) continue; /* negative column */
        PetscCheck(idxn[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, idxn[j], mat->cmap->N - 1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col = idxn[j] - bscstart;
          PetscCall(MatGetValues_SeqBAIJ(baij->A, 1, &row, 1, &col, v + i * n + j));
        } else {
          if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
#if defined(PETSC_USE_CTABLE)
          PetscCall(PetscHMapIGetWithDefault(baij->colmap, idxn[j] / bs + 1, 0, &data));
          data--;
#else
          data = baij->colmap[idxn[j] / bs] - 1;
#endif
          if ((data < 0) || (baij->garray[data / bs] != idxn[j] / bs)) *(v + i * n + j) = 0.0;
          else {
            col = data + idxn[j] % bs;
            PetscCall(MatGetValues_SeqBAIJ(baij->B, 1, &row, 1, &col, v + i * n + j));
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local values currently supported");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

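/*
   Norms are computed from the local blocks and combined with a reduction:
   the Frobenius norm sums |a_ij|^2 over A and B and takes the square root of
   the global sum, the 1-norm accumulates full column sums in an array of
   global length (non-scalable), and the infinity norm accumulates the bs row
   sums of each block row before taking the global maximum.
*/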
PetscErrorCode MatNorm_MPIBAIJ(Mat mat, NormType type, PetscReal *nrm)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
  Mat_SeqBAIJ *amat = (Mat_SeqBAIJ *)baij->A->data, *bmat = (Mat_SeqBAIJ *)baij->B->data;
  PetscInt     i, j, bs2 = baij->bs2, bs = baij->A->rmap->bs, nz, row, col;
  PetscReal    sum = 0.0;
  MatScalar   *v;

  PetscFunctionBegin;
  if (baij->size == 1) {
    PetscCall(MatNorm(baij->A, type, nrm));
  } else {
    if (type == NORM_FROBENIUS) {
      v  = amat->a;
      nz = amat->nz * bs2;
      for (i = 0; i < nz; i++) {
        sum += PetscRealPart(PetscConj(*v) * (*v));
        v++;
      }
      v  = bmat->a;
      nz = bmat->nz * bs2;
      for (i = 0; i < nz; i++) {
        sum += PetscRealPart(PetscConj(*v) * (*v));
        v++;
      }
      PetscCall(MPIU_Allreduce(&sum, nrm, 1, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
      *nrm = PetscSqrtReal(*nrm);
    } else if (type == NORM_1) { /* max column sum */
      PetscReal *tmp, *tmp2;
      PetscInt  *jj, *garray = baij->garray, cstart = baij->rstartbs;
      PetscCall(PetscCalloc1(mat->cmap->N, &tmp));
      PetscCall(PetscMalloc1(mat->cmap->N, &tmp2));
      v  = amat->a;
      jj = amat->j;
      for (i = 0; i < amat->nz; i++) {
        for (j = 0; j < bs; j++) {
          col = bs * (cstart + *jj) + j; /* column index */
          for (row = 0; row < bs; row++) {
            tmp[col] += PetscAbsScalar(*v);
            v++;
          }
        }
        jj++;
      }
      v  = bmat->a;
      jj = bmat->j;
      for (i = 0; i < bmat->nz; i++) {
        for (j = 0; j < bs; j++) {
          col = bs * garray[*jj] + j;
          for (row = 0; row < bs; row++) {
            tmp[col] += PetscAbsScalar(*v);
            v++;
          }
        }
        jj++;
      }
      PetscCall(MPIU_Allreduce(tmp, tmp2, mat->cmap->N, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
      *nrm = 0.0;
      for (j = 0; j < mat->cmap->N; j++) {
        if (tmp2[j] > *nrm) *nrm = tmp2[j];
      }
      PetscCall(PetscFree(tmp));
      PetscCall(PetscFree(tmp2));
    } else if (type == NORM_INFINITY) { /* max row sum */
      PetscReal *sums;
      PetscCall(PetscMalloc1(bs, &sums));
      sum = 0.0;
      for (j = 0; j < amat->mbs; j++) {
        for (row = 0; row < bs; row++) sums[row] = 0.0;
        v  = amat->a + bs2 * amat->i[j];
        nz = amat->i[j + 1] - amat->i[j];
        for (i = 0; i < nz; i++) {
          for (col = 0; col < bs; col++) {
            for (row = 0; row < bs; row++) {
              sums[row] += PetscAbsScalar(*v);
              v++;
            }
          }
        }
        v  = bmat->a + bs2 * bmat->i[j];
        nz = bmat->i[j + 1] - bmat->i[j];
        for (i = 0; i < nz; i++) {
          for (col = 0; col < bs; col++) {
            for (row = 0; row < bs; row++) {
              sums[row] += PetscAbsScalar(*v);
              v++;
            }
          }
        }
        for (row = 0; row < bs; row++) {
          if (sums[row] > sum) sum = sums[row];
        }
      }
      PetscCall(MPIU_Allreduce(&sum, nrm, 1, MPIU_REAL, MPIU_MAX, PetscObjectComm((PetscObject)mat)));
      PetscCall(PetscFree(sums));
    } else SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "No support for this norm yet");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Creates the hash table and fills it in; the table is created only once.
  If new entries need to be added to the matrix then the hash table has to
  be destroyed and recreated.
*/
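/*
   The table uses open addressing: ht_size = factor * nz slots are allocated,
   each nonzero block of A and B is hashed on its global (block row, block
   column) key, and on collision the next free slot (mod ht_size) is taken.
   HD[] stores the address of the block's values so the _HT MatSetValues
   variants can write directly into the matrix without searching the row's
   column indices.
*/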
PetscErrorCode MatCreateHashTable_MPIBAIJ_Private(Mat mat, PetscReal factor)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
  Mat          A = baij->A, B = baij->B;
  Mat_SeqBAIJ *a = (Mat_SeqBAIJ *)A->data, *b = (Mat_SeqBAIJ *)B->data;
  PetscInt     i, j, k, nz = a->nz + b->nz, h1, *ai = a->i, *aj = a->j, *bi = b->i, *bj = b->j;
  PetscInt     ht_size, bs2 = baij->bs2, rstart = baij->rstartbs;
  PetscInt     cstart = baij->cstartbs, *garray = baij->garray, row, col, Nbs = baij->Nbs;
  PetscInt    *HT, key;
  MatScalar  **HD;
  PetscReal    tmp;
#if defined(PETSC_USE_INFO)
  PetscInt ct = 0, max = 0;
#endif

  PetscFunctionBegin;
  if (baij->ht) PetscFunctionReturn(PETSC_SUCCESS);

  baij->ht_size = (PetscInt)(factor * nz);
  ht_size       = baij->ht_size;

  /* Allocate Memory for Hash Table */
  PetscCall(PetscCalloc2(ht_size, &baij->hd, ht_size, &baij->ht));
  HD = baij->hd;
  HT = baij->ht;

  /* Loop Over A */
  for (i = 0; i < a->mbs; i++) {
    for (j = ai[i]; j < ai[i + 1]; j++) {
      row = i + rstart;
      col = aj[j] + cstart;

      key = row * Nbs + col + 1;
      h1  = HASH(ht_size, key, tmp);
      for (k = 0; k < ht_size; k++) {
        if (!HT[(h1 + k) % ht_size]) {
          HT[(h1 + k) % ht_size] = key;
          HD[(h1 + k) % ht_size] = a->a + j * bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }
  /* Loop Over B */
  for (i = 0; i < b->mbs; i++) {
    for (j = bi[i]; j < bi[i + 1]; j++) {
      row = i + rstart;
      col = garray[bj[j]];
      key = row * Nbs + col + 1;
      h1  = HASH(ht_size, key, tmp);
      for (k = 0; k < ht_size; k++) {
        if (!HT[(h1 + k) % ht_size]) {
          HT[(h1 + k) % ht_size] = key;
          HD[(h1 + k) % ht_size] = b->a + j * bs2;
          break;
#if defined(PETSC_USE_INFO)
        } else {
          ct++;
#endif
        }
      }
#if defined(PETSC_USE_INFO)
      if (k > max) max = k;
#endif
    }
  }

  /* Print Summary */
#if defined(PETSC_USE_INFO)
  for (i = 0, j = 0; i < ht_size; i++) {
    if (HT[i]) j++;
  }
  PetscCall(PetscInfo(mat, "Average Search = %5.2g, max search = %" PetscInt_FMT "\n", (!j) ? (double)0.0 : (double)(((PetscReal)(ct + j)) / (double)j), max));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

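/*
   Assembly moves stashed off-process entries to their owners: the begin
   phase starts the scatters of the point stash and the block stash, and the
   end phase receives the messages, applies them with MatSetValues(), then
   assembles the A and B blocks and (re)builds the multiply machinery.
*/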
PetscErrorCode MatAssemblyBegin_MPIBAIJ(Mat mat, MatAssemblyType mode)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
  PetscInt     nstash, reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
  PetscCall(MatStashScatterBegin_Private(mat, &mat->bstash, baij->rangebs));
  PetscCall(MatStashGetInfo_Private(&mat->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(mat, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscCall(MatStashGetInfo_Private(&mat->bstash, &nstash, &reallocs));
  PetscCall(PetscInfo(mat, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatAssemblyEnd_MPIBAIJ(Mat mat, MatAssemblyType mode)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
  Mat_SeqBAIJ *a    = (Mat_SeqBAIJ *)baij->A->data;
  PetscInt     i, j, rstart, ncols, flg, bs2 = baij->bs2;
  PetscInt    *row, *col;
  PetscBool    r1, r2, r3, other_disassembled;
  MatScalar   *val;
  PetscMPIInt  n;

  PetscFunctionBegin;
  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&mat->stash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_MPIBAIJ(mat, 1, row + i, ncols, col + i, val + i, mat->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&mat->stash));
    /* Now process the block-stash. Since the values are stashed column-oriented,
       clear the roworiented flags while calling MatSetValues() and then
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ *)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    (((Mat_SeqBAIJ *)baij->B->data))->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&mat->bstash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        PetscCall(MatSetValuesBlocked_MPIBAIJ(mat, 1, row + i, ncols, col + i, val + i * bs2, mat->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&mat->bstash));

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ *)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  PetscCall(MatAssemblyBegin(baij->A, mode));
  PetscCall(MatAssemblyEnd(baij->A, mode));

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ *)baij->B->data)->nonew) {
    PetscCall(MPIU_Allreduce(&mat->was_assembled, &other_disassembled, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
    if (mat->was_assembled && !other_disassembled) PetscCall(MatDisAssemble_MPIBAIJ(mat));
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) PetscCall(MatSetUpMultiply_MPIBAIJ(mat));
  PetscCall(MatAssemblyBegin(baij->B, mode));
  PetscCall(MatAssemblyEnd(baij->B, mode));

#if defined(PETSC_USE_INFO)
  if (baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    PetscCall(PetscInfo(mat, "Average Hash Table Search in MatSetValues = %5.2f\n", (double)((PetscReal)baij->ht_total_ct) / baij->ht_insert_ct));

    baij->ht_total_ct  = 0;
    baij->ht_insert_ct = 0;
  }
#endif
  if (baij->ht_flag && !baij->ht && mode == MAT_FINAL_ASSEMBLY) {
    PetscCall(MatCreateHashTable_MPIBAIJ_Private(mat, baij->ht_fact));

    mat->ops->setvalues        = MatSetValues_MPIBAIJ_HT;
    mat->ops->setvaluesblocked = MatSetValuesBlocked_MPIBAIJ_HT;
  }

  PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));

  baij->rowvalues = NULL;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ *)(baij->A->data))->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    PetscCall(MPIU_Allreduce(&state, &mat->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)mat)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

extern PetscErrorCode MatView_SeqBAIJ(Mat, PetscViewer);
#include <petscdraw.h>
static PetscErrorCode MatView_MPIBAIJ_ASCIIorDraworSocket(Mat mat, PetscViewer viewer)
{
  Mat_MPIBAIJ      *baij = (Mat_MPIBAIJ *)mat->data;
  PetscMPIInt       rank = baij->rank;
  PetscInt          bs   = mat->rmap->bs;
  PetscBool         iascii, isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
      PetscCall(MatGetInfo(mat, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local rows %" PetscInt_FMT " nz %" PetscInt_FMT " nz alloced %" PetscInt_FMT " bs %" PetscInt_FMT " mem %g\n", rank, mat->rmap->n, (PetscInt)info.nz_used, (PetscInt)info.nz_allocated,
                                                   mat->rmap->bs, (double)info.memory));
      PetscCall(MatGetInfo(baij->A, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] on-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
      PetscCall(MatGetInfo(baij->B, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] off-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "Information on VecScatter used in matrix-vector product: \n"));
      PetscCall(VecScatterView(baij->Mvctx, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  block size is %" PetscInt_FMT "\n", bs));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawIsNull(draw, &isnull));
    if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
  }

  {
    /* assemble the entire matrix onto first processor. */
    Mat          A;
    Mat_SeqBAIJ *Aloc;
    PetscInt     M = mat->rmap->N, N = mat->cmap->N, *ai, *aj, col, i, j, k, *rvals, mbs = baij->mbs;
    MatScalar   *a;
    const char  *matname;

    /* Here we are creating a temporary matrix, so will assume MPIBAIJ is acceptable */
    /* Perhaps this should be the type of mat? */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &A));
    if (rank == 0) {
      PetscCall(MatSetSizes(A, M, N, M, N));
    } else {
      PetscCall(MatSetSizes(A, 0, 0, M, N));
    }
    PetscCall(MatSetType(A, MATMPIBAIJ));
    PetscCall(MatMPIBAIJSetPreallocation(A, mat->rmap->bs, 0, NULL, 0, NULL));
    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));

    /* copy over the A part */
    Aloc = (Mat_SeqBAIJ *)baij->A->data;
    ai   = Aloc->i;
    aj   = Aloc->j;
    a    = Aloc->a;
    PetscCall(PetscMalloc1(bs, &rvals));

    for (i = 0; i < mbs; i++) {
      rvals[0] = bs * (baij->rstartbs + i);
      for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
      for (j = ai[i]; j < ai[i + 1]; j++) {
        col = (baij->cstartbs + aj[j]) * bs;
        for (k = 0; k < bs; k++) {
          PetscCall(MatSetValues_MPIBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
          col++;
          a += bs;
        }
      }
    }
    /* copy over the B part */
    Aloc = (Mat_SeqBAIJ *)baij->B->data;
    ai   = Aloc->i;
    aj   = Aloc->j;
    a    = Aloc->a;
    for (i = 0; i < mbs; i++) {
      rvals[0] = bs * (baij->rstartbs + i);
      for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
      for (j = ai[i]; j < ai[i + 1]; j++) {
        col = baij->garray[aj[j]] * bs;
        for (k = 0; k < bs; k++) {
          PetscCall(MatSetValues_MPIBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
          col++;
          a += bs;
        }
      }
    }
    PetscCall(PetscFree(rvals));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (((PetscObject)mat)->name) PetscCall(PetscObjectGetName((PetscObject)mat, &matname));
    if (rank == 0) {
      if (((PetscObject)mat)->name) PetscCall(PetscObjectSetName((PetscObject)((Mat_MPIBAIJ *)(A->data))->A, matname));
      PetscCall(MatView_SeqBAIJ(((Mat_MPIBAIJ *)(A->data))->A, sviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(MatDestroy(&A));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Used for both MPIBAIJ and MPISBAIJ matrices */
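/*
   Writes the standard PETSc binary format: a four-entry header
   (MAT_FILE_CLASSID, M, N, global nonzero count), then the row lengths, the
   column indices, and the nonzero values, all in scalar (not block) terms.
   For each row, entries of B whose global column lies before the diagonal
   range are emitted first so the column indices of the row stay sorted.
*/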
PetscErrorCode MatView_MPIBAIJ_Binary(Mat mat, PetscViewer viewer)
{
  Mat_MPIBAIJ    *aij    = (Mat_MPIBAIJ *)mat->data;
  Mat_SeqBAIJ    *A      = (Mat_SeqBAIJ *)aij->A->data;
  Mat_SeqBAIJ    *B      = (Mat_SeqBAIJ *)aij->B->data;
  const PetscInt *garray = aij->garray;
  PetscInt        header[4], M, N, m, rs, cs, bs, cnt, i, j, ja, jb, k, l;
  PetscInt64      nz, hnz;
  PetscInt       *rowlens, *colidxs;
  PetscScalar    *matvals;
  PetscMPIInt     rank;

  PetscFunctionBegin;
  PetscCall(PetscViewerSetUp(viewer));

  M  = mat->rmap->N;
  N  = mat->cmap->N;
  m  = mat->rmap->n;
  rs = mat->rmap->rstart;
  cs = mat->cmap->rstart;
  bs = mat->rmap->bs;
  nz = bs * bs * (A->nz + B->nz);

  /* write matrix header */
  header[0] = MAT_FILE_CLASSID;
  header[1] = M;
  header[2] = N;
  PetscCallMPI(MPI_Reduce(&nz, &hnz, 1, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)mat)));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
  if (rank == 0) PetscCall(PetscIntCast(hnz, &header[3]));
  PetscCall(PetscViewerBinaryWrite(viewer, header, 4, PETSC_INT));

  /* fill in and store row lengths */
  PetscCall(PetscMalloc1(m, &rowlens));
  for (cnt = 0, i = 0; i < A->mbs; i++)
    for (j = 0; j < bs; j++) rowlens[cnt++] = bs * (A->i[i + 1] - A->i[i] + B->i[i + 1] - B->i[i]);
  PetscCall(PetscViewerBinaryWriteAll(viewer, rowlens, m, rs, M, PETSC_INT));
  PetscCall(PetscFree(rowlens));

  /* fill in and store column indices */
  PetscCall(PetscMalloc1(nz, &colidxs));
  for (cnt = 0, i = 0; i < A->mbs; i++) {
    for (k = 0; k < bs; k++) {
      for (jb = B->i[i]; jb < B->i[i + 1]; jb++) {
        if (garray[B->j[jb]] > cs / bs) break;
        for (l = 0; l < bs; l++) colidxs[cnt++] = bs * garray[B->j[jb]] + l;
      }
      for (ja = A->i[i]; ja < A->i[i + 1]; ja++)
        for (l = 0; l < bs; l++) colidxs[cnt++] = bs * A->j[ja] + l + cs;
      for (; jb < B->i[i + 1]; jb++)
        for (l = 0; l < bs; l++) colidxs[cnt++] = bs * garray[B->j[jb]] + l;
    }
  }
  PetscCheck(cnt == nz, PETSC_COMM_SELF, PETSC_ERR_LIB, "Internal PETSc error: cnt = %" PetscInt_FMT " nz = %" PetscInt64_FMT, cnt, nz);
  PetscCall(PetscViewerBinaryWriteAll(viewer, colidxs, nz, PETSC_DECIDE, PETSC_DECIDE, PETSC_INT));
  PetscCall(PetscFree(colidxs));

  /* fill in and store nonzero values */
  PetscCall(PetscMalloc1(nz, &matvals));
  for (cnt = 0, i = 0; i < A->mbs; i++) {
    for (k = 0; k < bs; k++) {
      for (jb = B->i[i]; jb < B->i[i + 1]; jb++) {
        if (garray[B->j[jb]] > cs / bs) break;
        for (l = 0; l < bs; l++) matvals[cnt++] = B->a[bs * (bs * jb + l) + k];
      }
      for (ja = A->i[i]; ja < A->i[i + 1]; ja++)
        for (l = 0; l < bs; l++) matvals[cnt++] = A->a[bs * (bs * ja + l) + k];
      for (; jb < B->i[i + 1]; jb++)
        for (l = 0; l < bs; l++) matvals[cnt++] = B->a[bs * (bs * jb + l) + k];
    }
  }
  PetscCall(PetscViewerBinaryWriteAll(viewer, matvals, nz, PETSC_DECIDE, PETSC_DECIDE, PETSC_SCALAR));
  PetscCall(PetscFree(matvals));

  /* write block size option to the viewer's .info file */
  PetscCall(MatView_Binary_BlockSizes(mat, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatView_MPIBAIJ(Mat mat, PetscViewer viewer)
{
  PetscBool iascii, isdraw, issocket, isbinary;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSOCKET, &issocket));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  if (iascii || isdraw || issocket) {
    PetscCall(MatView_MPIBAIJ_ASCIIorDraworSocket(mat, viewer));
  } else if (isbinary) PetscCall(MatView_MPIBAIJ_Binary(mat, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

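/*
   y = A x is computed as y = A_d x_local + B_o x_ghost: the scatter of the
   ghost values into a->lvec is started first, the diagonal block is applied
   while the communication is in flight, and the off-diagonal contribution is
   added once the scatter completes.
*/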
PetscErrorCode MatMult_MPIBAIJ(Mat A, Vec xx, Vec yy)
{
  Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
  PetscInt     nt;

  PetscFunctionBegin;
  PetscCall(VecGetLocalSize(xx, &nt));
  PetscCheck(nt == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible partition of A and xx");
  PetscCall(VecGetLocalSize(yy, &nt));
  PetscCheck(nt == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible partition of A and yy");
  PetscCall(VecScatterBegin(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall((*a->A->ops->mult)(a->A, xx, yy));
  PetscCall(VecScatterEnd(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall((*a->B->ops->multadd)(a->B, a->lvec, yy, yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatMultAdd_MPIBAIJ(Mat A, Vec xx, Vec yy, Vec zz)
{
  Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(VecScatterBegin(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall((*a->A->ops->multadd)(a->A, xx, yy, zz));
  PetscCall(VecScatterEnd(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall((*a->B->ops->multadd)(a->B, a->lvec, zz, zz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatMultTranspose_MPIBAIJ(Mat A, Vec xx, Vec yy)
{
  Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;

  PetscFunctionBegin;
  /* do nondiagonal part */
  PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->lvec));
  /* do local part */
  PetscCall((*a->A->ops->multtranspose)(a->A, xx, yy));
  /* add partial results together */
  PetscCall(VecScatterBegin(a->Mvctx, a->lvec, yy, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(a->Mvctx, a->lvec, yy, ADD_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatMultTransposeAdd_MPIBAIJ(Mat A, Vec xx, Vec yy, Vec zz)
{
  Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;

  PetscFunctionBegin;
  /* do nondiagonal part */
  PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->lvec));
  /* do local part */
  PetscCall((*a->A->ops->multtransposeadd)(a->A, xx, yy, zz));
  /* add partial results together */
  PetscCall(VecScatterBegin(a->Mvctx, a->lvec, zz, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(a->Mvctx, a->lvec, zz, ADD_VALUES, SCATTER_REVERSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  This only works correctly for square matrices where the subblock A->A is the
   diagonal block
*/
PetscErrorCode MatGetDiagonal_MPIBAIJ(Mat A, Vec v)
{
  PetscFunctionBegin;
  PetscCheck(A->rmap->N == A->cmap->N, PETSC_COMM_SELF, PETSC_ERR_SUP, "Supports only square matrix where A->A is diag block");
  PetscCall(MatGetDiagonal(((Mat_MPIBAIJ *)A->data)->A, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatScale_MPIBAIJ(Mat A, PetscScalar aa)
{
  Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(MatScale(a->A, aa));
  PetscCall(MatScale(a->B, aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatGetRow_MPIBAIJ(Mat matin, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPIBAIJ *mat = (Mat_MPIBAIJ *)matin->data;
  PetscScalar *vworkA, *vworkB, **pvA, **pvB, *v_p;
  PetscInt     bs = matin->rmap->bs, bs2 = mat->bs2, i, *cworkA, *cworkB, **pcA, **pcB;
  PetscInt     nztot, nzA, nzB, lrow, brstart = matin->rmap->rstart, brend = matin->rmap->rend;
  PetscInt    *cmap, *idx_p, cstart = mat->cstartbs;

  PetscFunctionBegin;
  PetscCheck(row >= brstart && row < brend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local rows");
  PetscCheck(!mat->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqBAIJ *Aa = (Mat_SeqBAIJ *)mat->A->data, *Ba = (Mat_SeqBAIJ *)mat->B->data;
    PetscInt     max = 1, mbs = mat->mbs, tmp;
    for (i = 0; i < mbs; i++) {
      tmp = Aa->i[i + 1] - Aa->i[i] + Ba->i[i + 1] - Ba->i[i];
      if (max < tmp) max = tmp;
    }
    PetscCall(PetscMalloc2(max * bs2, &mat->rowvalues, max * bs2, &mat->rowindices));
  }
  lrow = row - brstart;

  pvA = &vworkA;
  pcA = &cworkA;
  pvB = &vworkB;
  pcB = &cworkB;
  if (!v) {
    pvA = NULL;
    pvB = NULL;
  }
  if (!idx) {
    pcA = NULL;
    if (!v) pcB = NULL;
  }
  PetscCall((*mat->A->ops->getrow)(mat->A, lrow, &nzA, pcA, pvA));
  PetscCall((*mat->B->ops->getrow)(mat->B, lrow, &nzB, pcB, pvB));
  nztot = nzA + nzB;

  cmap = mat->garray;
  if (v || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      PetscInt imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        for (i = 0; i < nzB; i++) {
          if (cmap[cworkB[i] / bs] < cstart) v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i = 0; i < nzA; i++) v_p[imark + i] = vworkA[i];
        for (i = imark; i < nzB; i++) v_p[nzA + i] = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          for (i = 0; i < imark; i++) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
        } else {
          for (i = 0; i < nzB; i++) {
            if (cmap[cworkB[i] / bs] < cstart) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
            else break;
          }
          imark = i;
        }
        for (i = 0; i < nzA; i++) idx_p[imark + i] = cstart * bs + cworkA[i];
        for (i = imark; i < nzB; i++) idx_p[nzA + i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
      }
    } else {
      if (idx) *idx = NULL;
      if (v) *v = NULL;
    }
  }
  *nz = nztot;
  PetscCall((*mat->A->ops->restorerow)(mat->A, lrow, &nzA, pcA, pvA));
  PetscCall((*mat->B->ops->restorerow)(mat->B, lrow, &nzB, pcB, pvB));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatRestoreRow_MPIBAIJ(Mat mat, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;

  PetscFunctionBegin;
  PetscCheck(baij->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "MatGetRow not called");
  baij->getrowactive = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatZeroEntries_MPIBAIJ(Mat A)
{
  Mat_MPIBAIJ *l = (Mat_MPIBAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(MatZeroEntries(l->A));
1410   PetscCall(MatZeroEntries(l->B));
1411   PetscFunctionReturn(PETSC_SUCCESS);
1412 }
1413 
1414 PetscErrorCode MatGetInfo_MPIBAIJ(Mat matin, MatInfoType flag, MatInfo *info)
1415 {
1416   Mat_MPIBAIJ   *a = (Mat_MPIBAIJ *)matin->data;
1417   Mat            A = a->A, B = a->B;
1418   PetscLogDouble isend[5], irecv[5];
1419 
1420   PetscFunctionBegin;
1421   info->block_size = (PetscReal)matin->rmap->bs;
1422 
1423   PetscCall(MatGetInfo(A, MAT_LOCAL, info));
1424 
1425   isend[0] = info->nz_used;
1426   isend[1] = info->nz_allocated;
1427   isend[2] = info->nz_unneeded;
1428   isend[3] = info->memory;
1429   isend[4] = info->mallocs;
1430 
1431   PetscCall(MatGetInfo(B, MAT_LOCAL, info));
1432 
1433   isend[0] += info->nz_used;
1434   isend[1] += info->nz_allocated;
1435   isend[2] += info->nz_unneeded;
1436   isend[3] += info->memory;
1437   isend[4] += info->mallocs;
1438 
1439   if (flag == MAT_LOCAL) {
1440     info->nz_used      = isend[0];
1441     info->nz_allocated = isend[1];
1442     info->nz_unneeded  = isend[2];
1443     info->memory       = isend[3];
1444     info->mallocs      = isend[4];
1445   } else if (flag == MAT_GLOBAL_MAX) {
1446     PetscCall(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)matin)));
1447 
1448     info->nz_used      = irecv[0];
1449     info->nz_allocated = irecv[1];
1450     info->nz_unneeded  = irecv[2];
1451     info->memory       = irecv[3];
1452     info->mallocs      = irecv[4];
1453   } else if (flag == MAT_GLOBAL_SUM) {
1454     PetscCall(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)matin)));
1455 
1456     info->nz_used      = irecv[0];
1457     info->nz_allocated = irecv[1];
1458     info->nz_unneeded  = irecv[2];
1459     info->memory       = irecv[3];
1460     info->mallocs      = irecv[4];
1461   } else SETERRQ(PetscObjectComm((PetscObject)matin), PETSC_ERR_ARG_WRONG, "Unknown MatInfoType argument %d", (int)flag);
1462   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1463   info->fill_ratio_needed = 0;
1464   info->factor_mallocs    = 0;
1465   PetscFunctionReturn(PETSC_SUCCESS);
1466 }
1467 
1468 PetscErrorCode MatSetOption_MPIBAIJ(Mat A, MatOption op, PetscBool flg)
1469 {
1470   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
1471 
1472   PetscFunctionBegin;
1473   switch (op) {
1474   case MAT_NEW_NONZERO_LOCATIONS:
1475   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1476   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1477   case MAT_KEEP_NONZERO_PATTERN:
1478   case MAT_NEW_NONZERO_LOCATION_ERR:
1479     MatCheckPreallocated(A, 1);
1480     PetscCall(MatSetOption(a->A, op, flg));
1481     PetscCall(MatSetOption(a->B, op, flg));
1482     break;
1483   case MAT_ROW_ORIENTED:
1484     MatCheckPreallocated(A, 1);
1485     a->roworiented = flg;
1486 
1487     PetscCall(MatSetOption(a->A, op, flg));
1488     PetscCall(MatSetOption(a->B, op, flg));
1489     break;
1490   case MAT_FORCE_DIAGONAL_ENTRIES:
1491   case MAT_SORTED_FULL:
1492     PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
1493     break;
1494   case MAT_IGNORE_OFF_PROC_ENTRIES:
1495     a->donotstash = flg;
1496     break;
1497   case MAT_USE_HASH_TABLE:
1498     a->ht_flag = flg;
1499     a->ht_fact = 1.39;
1500     break;
1501   case MAT_SYMMETRIC:
1502   case MAT_STRUCTURALLY_SYMMETRIC:
1503   case MAT_HERMITIAN:
1504   case MAT_SUBMAT_SINGLEIS:
1505   case MAT_SYMMETRY_ETERNAL:
1506   case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
1507   case MAT_SPD_ETERNAL:
1508     /* if the diagonal block is square it inherits some of the properties above */
1509     break;
1510   default:
1511     SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unknown option %d", (int)op);
1512   }
1513   PetscFunctionReturn(PETSC_SUCCESS);
1514 }
1515 
1516 PetscErrorCode MatTranspose_MPIBAIJ(Mat A, MatReuse reuse, Mat *matout)
1517 {
1518   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)A->data;
1519   Mat_SeqBAIJ *Aloc;
1520   Mat          B;
1521   PetscInt     M = A->rmap->N, N = A->cmap->N, *ai, *aj, i, *rvals, j, k, col;
1522   PetscInt     bs = A->rmap->bs, mbs = baij->mbs;
1523   MatScalar   *a;
1524 
1525   PetscFunctionBegin;
1526   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *matout));
1527   if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
1528     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
1529     PetscCall(MatSetSizes(B, A->cmap->n, A->rmap->n, N, M));
1530     PetscCall(MatSetType(B, ((PetscObject)A)->type_name));
1531     /* Do not know preallocation information, but must set block size */
1532     PetscCall(MatMPIBAIJSetPreallocation(B, A->rmap->bs, PETSC_DECIDE, NULL, PETSC_DECIDE, NULL));
1533   } else {
1534     B = *matout;
1535   }
1536 
1537   /* copy over the A part */
1538   Aloc = (Mat_SeqBAIJ *)baij->A->data;
1539   ai   = Aloc->i;
1540   aj   = Aloc->j;
1541   a    = Aloc->a;
1542   PetscCall(PetscMalloc1(bs, &rvals));
1543 
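  /* rvals holds the bs global (point) row indices of block row i; BAIJ blocks are stored
     column-major, so below each block column k is inserted as bs entries of row 'col' of B */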
1544   for (i = 0; i < mbs; i++) {
1545     rvals[0] = bs * (baij->rstartbs + i);
1546     for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
1547     for (j = ai[i]; j < ai[i + 1]; j++) {
1548       col = (baij->cstartbs + aj[j]) * bs;
1549       for (k = 0; k < bs; k++) {
1550         PetscCall(MatSetValues_MPIBAIJ(B, 1, &col, bs, rvals, a, INSERT_VALUES));
1551 
1552         col++;
1553         a += bs;
1554       }
1555     }
1556   }
1557   /* copy over the B part */
1558   Aloc = (Mat_SeqBAIJ *)baij->B->data;
1559   ai   = Aloc->i;
1560   aj   = Aloc->j;
1561   a    = Aloc->a;
1562   for (i = 0; i < mbs; i++) {
1563     rvals[0] = bs * (baij->rstartbs + i);
1564     for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
1565     for (j = ai[i]; j < ai[i + 1]; j++) {
1566       col = baij->garray[aj[j]] * bs;
1567       for (k = 0; k < bs; k++) {
1568         PetscCall(MatSetValues_MPIBAIJ(B, 1, &col, bs, rvals, a, INSERT_VALUES));
1569         col++;
1570         a += bs;
1571       }
1572     }
1573   }
1574   PetscCall(PetscFree(rvals));
1575   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
1576   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
1577 
1578   if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) *matout = B;
1579   else PetscCall(MatHeaderMerge(A, &B));
1580   PetscFunctionReturn(PETSC_SUCCESS);
1581 }
1582 
1583 PetscErrorCode MatDiagonalScale_MPIBAIJ(Mat mat, Vec ll, Vec rr)
1584 {
1585   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
1586   Mat          a = baij->A, b = baij->B;
1587   PetscInt     s1, s2, s3;
1588 
1589   PetscFunctionBegin;
1590   PetscCall(MatGetLocalSize(mat, &s2, &s3));
1591   if (rr) {
1592     PetscCall(VecGetLocalSize(rr, &s1));
1593     PetscCheck(s1 == s3, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "right vector non-conforming local size");
1594     /* Overlap communication with computation. */
1595     PetscCall(VecScatterBegin(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
1596   }
1597   if (ll) {
1598     PetscCall(VecGetLocalSize(ll, &s1));
1599     PetscCheck(s1 == s2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "left vector non-conforming local size");
1600     PetscUseTypeMethod(b, diagonalscale, ll, NULL);
1601   }
1602   /* scale the diagonal block */
1603   PetscUseTypeMethod(a, diagonalscale, ll, rr);
1604 
1605   if (rr) {
1606     /* Do a scatter end and then right scale the off-diagonal block */
1607     PetscCall(VecScatterEnd(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
1608     PetscUseTypeMethod(b, diagonalscale, NULL, baij->lvec);
1609   }
1610   PetscFunctionReturn(PETSC_SUCCESS);
1611 }
1612 
1613 PetscErrorCode MatZeroRows_MPIBAIJ(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1614 {
1615   Mat_MPIBAIJ *l = (Mat_MPIBAIJ *)A->data;
1616   PetscInt    *lrows;
1617   PetscInt     r, len;
1618   PetscBool    cong;
1619 
1620   PetscFunctionBegin;
1621   /* get locally owned rows */
1622   PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows));
1623   /* fix right hand side if needed */
1624   if (x && b) {
1625     const PetscScalar *xx;
1626     PetscScalar       *bb;
1627 
1628     PetscCall(VecGetArrayRead(x, &xx));
1629     PetscCall(VecGetArray(b, &bb));
1630     for (r = 0; r < len; ++r) bb[lrows[r]] = diag * xx[lrows[r]];
1631     PetscCall(VecRestoreArrayRead(x, &xx));
1632     PetscCall(VecRestoreArray(b, &bb));
1633   }
1634 
1635   /* actually zap the local rows */
1636   /*
1637         Zero the required rows. If the "diagonal block" of the matrix
1638      is square and the user wishes to set the diagonal, we use separate
1639      code so that MatSetValues() is not called for each diagonal entry,
1640      which would allocate new memory and slow things down with many mallocs.
1641 
1642   */
1643   /* must zero l->B before l->A because the (diag != 0.0) case below may put values into l->B */
1644   PetscCall(MatZeroRows_SeqBAIJ(l->B, len, lrows, 0.0, NULL, NULL));
1645   PetscCall(MatHasCongruentLayouts(A, &cong));
1646   if ((diag != 0.0) && cong) {
1647     PetscCall(MatZeroRows_SeqBAIJ(l->A, len, lrows, diag, NULL, NULL));
1648   } else if (diag != 0.0) {
1649     PetscCall(MatZeroRows_SeqBAIJ(l->A, len, lrows, 0.0, NULL, NULL));
1650     PetscCheck(!((Mat_SeqBAIJ *)l->A->data)->nonew, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatZeroRows() on rectangular matrices cannot be used with the Mat options \n\
1651        MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
1652     for (r = 0; r < len; ++r) {
1653       const PetscInt row = lrows[r] + A->rmap->rstart;
1654       PetscCall(MatSetValues(A, 1, &row, 1, &row, &diag, INSERT_VALUES));
1655     }
1656     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
1657     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
1658   } else {
1659     PetscCall(MatZeroRows_SeqBAIJ(l->A, len, lrows, 0.0, NULL, NULL));
1660   }
1661   PetscCall(PetscFree(lrows));
1662 
1663   /* only change matrix nonzero state if pattern was allowed to be changed */
1664   if (!((Mat_SeqBAIJ *)(l->A->data))->keepnonzeropattern) {
1665     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1666     PetscCall(MPIU_Allreduce(&state, &A->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)A)));
1667   }
1668   PetscFunctionReturn(PETSC_SUCCESS);
1669 }
1670 
1671 PetscErrorCode MatZeroRowsColumns_MPIBAIJ(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
1672 {
1673   Mat_MPIBAIJ       *l = (Mat_MPIBAIJ *)A->data;
1674   PetscMPIInt        n = A->rmap->n, p = 0;
1675   PetscInt           i, j, k, r, len = 0, row, col, count;
1676   PetscInt          *lrows, *owners = A->rmap->range;
1677   PetscSFNode       *rrows;
1678   PetscSF            sf;
1679   const PetscScalar *xx;
1680   PetscScalar       *bb, *mask;
1681   Vec                xmask, lmask;
1682   Mat_SeqBAIJ       *baij = (Mat_SeqBAIJ *)l->B->data;
1683   PetscInt           bs = A->rmap->bs, bs2 = baij->bs2;
1684   PetscScalar       *aa;
1685 
1686   PetscFunctionBegin;
1687   /* Create SF where leaves are input rows and roots are owned rows */
1688   PetscCall(PetscMalloc1(n, &lrows));
1689   for (r = 0; r < n; ++r) lrows[r] = -1;
1690   PetscCall(PetscMalloc1(N, &rrows));
1691   for (r = 0; r < N; ++r) {
1692     const PetscInt idx = rows[r];
1693     PetscCheck(idx >= 0 && A->rmap->N > idx, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row %" PetscInt_FMT " out of range [0,%" PetscInt_FMT ")", idx, A->rmap->N);
1694     if (idx < owners[p] || owners[p + 1] <= idx) { /* short-circuit the search if the last p owns this row too */
1695       PetscCall(PetscLayoutFindOwner(A->rmap, idx, &p));
1696     }
1697     rrows[r].rank  = p;
1698     rrows[r].index = rows[r] - owners[p];
1699   }
1700   PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)A), &sf));
1701   PetscCall(PetscSFSetGraph(sf, n, N, NULL, PETSC_OWN_POINTER, rrows, PETSC_OWN_POINTER));
1702   /* Collect flags for rows to be zeroed */
1703   PetscCall(PetscSFReduceBegin(sf, MPIU_INT, (PetscInt *)rows, lrows, MPI_LOR));
1704   PetscCall(PetscSFReduceEnd(sf, MPIU_INT, (PetscInt *)rows, lrows, MPI_LOR));
1705   PetscCall(PetscSFDestroy(&sf));
1706   /* Compress and put in row numbers */
1707   for (r = 0; r < n; ++r)
1708     if (lrows[r] >= 0) lrows[len++] = r;
1709   /* zero diagonal part of matrix */
1710   PetscCall(MatZeroRowsColumns(l->A, len, lrows, diag, x, b));
1711   /* handle off diagonal part of matrix */
1712   PetscCall(MatCreateVecs(A, &xmask, NULL));
1713   PetscCall(VecDuplicate(l->lvec, &lmask));
1714   PetscCall(VecGetArray(xmask, &bb));
1715   for (i = 0; i < len; i++) bb[lrows[i]] = 1;
1716   PetscCall(VecRestoreArray(xmask, &bb));
1717   PetscCall(VecScatterBegin(l->Mvctx, xmask, lmask, ADD_VALUES, SCATTER_FORWARD));
1718   PetscCall(VecScatterEnd(l->Mvctx, xmask, lmask, ADD_VALUES, SCATTER_FORWARD));
1719   PetscCall(VecDestroy(&xmask));
1720   if (x) {
1721     PetscCall(VecScatterBegin(l->Mvctx, x, l->lvec, INSERT_VALUES, SCATTER_FORWARD));
1722     PetscCall(VecScatterEnd(l->Mvctx, x, l->lvec, INSERT_VALUES, SCATTER_FORWARD));
1723     PetscCall(VecGetArrayRead(l->lvec, &xx));
1724     PetscCall(VecGetArray(b, &bb));
1725   }
1726   PetscCall(VecGetArray(lmask, &mask));
1727   /* remove zeroed rows of off diagonal matrix */
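  /* BAIJ blocks are stored column-major, so starting at offset row%bs within the first block
     of this block row and striding by bs hits exactly the entries of point row 'row' */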
1728   for (i = 0; i < len; ++i) {
1729     row   = lrows[i];
1730     count = (baij->i[row / bs + 1] - baij->i[row / bs]) * bs;
1731     aa    = ((MatScalar *)(baij->a)) + baij->i[row / bs] * bs2 + (row % bs);
1732     for (k = 0; k < count; ++k) {
1733       aa[0] = 0.0;
1734       aa += bs;
1735     }
1736   }
1737   /* loop over all elements of the off-process part of the matrix, zeroing removed columns */
1738   for (i = 0; i < l->B->rmap->N; ++i) {
1739     row = i / bs;
1740     for (j = baij->i[row]; j < baij->i[row + 1]; ++j) {
1741       for (k = 0; k < bs; ++k) {
1742         col = bs * baij->j[j] + k;
1743         if (PetscAbsScalar(mask[col])) {
1744           aa = ((MatScalar *)(baij->a)) + j * bs2 + (i % bs) + bs * k;
1745           if (x) bb[i] -= aa[0] * xx[col];
1746           aa[0] = 0.0;
1747         }
1748       }
1749     }
1750   }
1751   if (x) {
1752     PetscCall(VecRestoreArray(b, &bb));
1753     PetscCall(VecRestoreArrayRead(l->lvec, &xx));
1754   }
1755   PetscCall(VecRestoreArray(lmask, &mask));
1756   PetscCall(VecDestroy(&lmask));
1757   PetscCall(PetscFree(lrows));
1758 
1759   /* only change matrix nonzero state if pattern was allowed to be changed */
1760   if (!((Mat_SeqBAIJ *)(l->A->data))->keepnonzeropattern) {
1761     PetscObjectState state = l->A->nonzerostate + l->B->nonzerostate;
1762     PetscCall(MPIU_Allreduce(&state, &A->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)A)));
1763   }
1764   PetscFunctionReturn(PETSC_SUCCESS);
1765 }
1766 
1767 PetscErrorCode MatSetUnfactored_MPIBAIJ(Mat A)
1768 {
1769   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
1770 
1771   PetscFunctionBegin;
1772   PetscCall(MatSetUnfactored(a->A));
1773   PetscFunctionReturn(PETSC_SUCCESS);
1774 }
1775 
1776 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat, MatDuplicateOption, Mat *);
1777 
1778 PetscErrorCode MatEqual_MPIBAIJ(Mat A, Mat B, PetscBool *flag)
1779 {
1780   Mat_MPIBAIJ *matB = (Mat_MPIBAIJ *)B->data, *matA = (Mat_MPIBAIJ *)A->data;
1781   Mat          a, b, c, d;
1782   PetscBool    flg;
1783 
1784   PetscFunctionBegin;
1785   a = matA->A;
1786   b = matA->B;
1787   c = matB->A;
1788   d = matB->B;
1789 
1790   PetscCall(MatEqual(a, c, &flg));
1791   if (flg) PetscCall(MatEqual(b, d, &flg));
1792   PetscCall(MPIU_Allreduce(&flg, flag, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
1793   PetscFunctionReturn(PETSC_SUCCESS);
1794 }
1795 
1796 PetscErrorCode MatCopy_MPIBAIJ(Mat A, Mat B, MatStructure str)
1797 {
1798   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
1799   Mat_MPIBAIJ *b = (Mat_MPIBAIJ *)B->data;
1800 
1801   PetscFunctionBegin;
1802   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1803   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1804     PetscCall(MatCopy_Basic(A, B, str));
1805   } else {
1806     PetscCall(MatCopy(a->A, b->A, str));
1807     PetscCall(MatCopy(a->B, b->B, str));
1808   }
1809   PetscCall(PetscObjectStateIncrease((PetscObject)B));
1810   PetscFunctionReturn(PETSC_SUCCESS);
1811 }
1812 
1813 PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat Y, const PetscInt *yltog, Mat X, const PetscInt *xltog, PetscInt *nnz)
1814 {
1815   PetscInt     bs = Y->rmap->bs, m = Y->rmap->N / bs;
1816   Mat_SeqBAIJ *x = (Mat_SeqBAIJ *)X->data;
1817   Mat_SeqBAIJ *y = (Mat_SeqBAIJ *)Y->data;
1818 
1819   PetscFunctionBegin;
1820   PetscCall(MatAXPYGetPreallocation_MPIX_private(m, x->i, x->j, xltog, y->i, y->j, yltog, nnz));
1821   PetscFunctionReturn(PETSC_SUCCESS);
1822 }
1823 
1824 PetscErrorCode MatAXPY_MPIBAIJ(Mat Y, PetscScalar a, Mat X, MatStructure str)
1825 {
1826   Mat_MPIBAIJ *xx = (Mat_MPIBAIJ *)X->data, *yy = (Mat_MPIBAIJ *)Y->data;
1827   PetscBLASInt bnz, one                         = 1;
1828   Mat_SeqBAIJ *x, *y;
1829   PetscInt     bs2 = Y->rmap->bs * Y->rmap->bs;
1830 
1831   PetscFunctionBegin;
1832   if (str == SAME_NONZERO_PATTERN) {
1833     PetscScalar alpha = a;
1834     x                 = (Mat_SeqBAIJ *)xx->A->data;
1835     y                 = (Mat_SeqBAIJ *)yy->A->data;
1836     PetscCall(PetscBLASIntCast(x->nz * bs2, &bnz));
1837     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, x->a, &one, y->a, &one));
1838     x = (Mat_SeqBAIJ *)xx->B->data;
1839     y = (Mat_SeqBAIJ *)yy->B->data;
1840     PetscCall(PetscBLASIntCast(x->nz * bs2, &bnz));
1841     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, x->a, &one, y->a, &one));
1842     PetscCall(PetscObjectStateIncrease((PetscObject)Y));
1843   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X are a subset of Y's */
1844     PetscCall(MatAXPY_Basic(Y, a, X, str));
1845   } else {
1846     Mat       B;
1847     PetscInt *nnz_d, *nnz_o, bs = Y->rmap->bs;
1848     PetscCall(PetscMalloc1(yy->A->rmap->N, &nnz_d));
1849     PetscCall(PetscMalloc1(yy->B->rmap->N, &nnz_o));
1850     PetscCall(MatCreate(PetscObjectComm((PetscObject)Y), &B));
1851     PetscCall(PetscObjectSetName((PetscObject)B, ((PetscObject)Y)->name));
1852     PetscCall(MatSetSizes(B, Y->rmap->n, Y->cmap->n, Y->rmap->N, Y->cmap->N));
1853     PetscCall(MatSetBlockSizesFromMats(B, Y, Y));
1854     PetscCall(MatSetType(B, MATMPIBAIJ));
1855     PetscCall(MatAXPYGetPreallocation_SeqBAIJ(yy->A, xx->A, nnz_d));
1856     PetscCall(MatAXPYGetPreallocation_MPIBAIJ(yy->B, yy->garray, xx->B, xx->garray, nnz_o));
1857     PetscCall(MatMPIBAIJSetPreallocation(B, bs, 0, nnz_d, 0, nnz_o));
1858     /* MatAXPY_BasicWithPreallocation() for a BAIJ matrix is much slower than for AIJ, even for bs=1! */
1859     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
1860     PetscCall(MatHeaderMerge(Y, &B));
1861     PetscCall(PetscFree(nnz_d));
1862     PetscCall(PetscFree(nnz_o));
1863   }
1864   PetscFunctionReturn(PETSC_SUCCESS);
1865 }
1866 
1867 PetscErrorCode MatConjugate_MPIBAIJ(Mat mat)
1868 {
1869   PetscFunctionBegin;
1870   if (PetscDefined(USE_COMPLEX)) {
1871     Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)mat->data;
1872 
1873     PetscCall(MatConjugate_SeqBAIJ(a->A));
1874     PetscCall(MatConjugate_SeqBAIJ(a->B));
1875   }
1876   PetscFunctionReturn(PETSC_SUCCESS);
1877 }
1878 
1879 PetscErrorCode MatRealPart_MPIBAIJ(Mat A)
1880 {
1881   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
1882 
1883   PetscFunctionBegin;
1884   PetscCall(MatRealPart(a->A));
1885   PetscCall(MatRealPart(a->B));
1886   PetscFunctionReturn(PETSC_SUCCESS);
1887 }
1888 
1889 PetscErrorCode MatImaginaryPart_MPIBAIJ(Mat A)
1890 {
1891   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
1892 
1893   PetscFunctionBegin;
1894   PetscCall(MatImaginaryPart(a->A));
1895   PetscCall(MatImaginaryPart(a->B));
1896   PetscFunctionReturn(PETSC_SUCCESS);
1897 }
1898 
1899 PetscErrorCode MatCreateSubMatrix_MPIBAIJ(Mat mat, IS isrow, IS iscol, MatReuse call, Mat *newmat)
1900 {
1901   IS       iscol_local;
1902   PetscInt csize;
1903 
1904   PetscFunctionBegin;
1905   PetscCall(ISGetLocalSize(iscol, &csize));
1906   if (call == MAT_REUSE_MATRIX) {
1907     PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather", (PetscObject *)&iscol_local));
1908     PetscCheck(iscol_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
1909   } else {
1910     PetscCall(ISAllGather(iscol, &iscol_local));
1911   }
1912   PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, isrow, iscol_local, csize, call, newmat));
1913   if (call == MAT_INITIAL_MATRIX) {
1914     PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather", (PetscObject)iscol_local));
1915     PetscCall(ISDestroy(&iscol_local));
1916   }
1917   PetscFunctionReturn(PETSC_SUCCESS);
1918 }
1919 
1920 /*
1921   Not great since it makes two copies of the submatrix: first a sequential SeqBAIJ
1922   on each process, then the end result obtained by concatenating the local matrices.
1923   Writing it directly would be much like MatCreateSubMatrices_MPIBAIJ().
1924   This routine is used for both BAIJ and SBAIJ matrices (an unfortunate dependency).
1925 */
1926 PetscErrorCode MatCreateSubMatrix_MPIBAIJ_Private(Mat mat, IS isrow, IS iscol, PetscInt csize, MatReuse call, Mat *newmat)
1927 {
1928   PetscMPIInt  rank, size;
1929   PetscInt     i, m, n, rstart, row, rend, nz, *cwork, j, bs;
1930   PetscInt    *ii, *jj, nlocal, *dlens, *olens, dlen, olen, jend, mglobal;
1931   Mat          M, Mreuse;
1932   MatScalar   *vwork, *aa;
1933   MPI_Comm     comm;
1934   IS           isrow_new, iscol_new;
1935   Mat_SeqBAIJ *aij;
1936 
1937   PetscFunctionBegin;
1938   PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
1939   PetscCallMPI(MPI_Comm_rank(comm, &rank));
1940   PetscCallMPI(MPI_Comm_size(comm, &size));
1941   /* The compression and expansion should be avoided. It does not point
1942      out errors and might change the indices, hence buggy */
1943   PetscCall(ISCompressIndicesGeneral(mat->rmap->N, mat->rmap->n, mat->rmap->bs, 1, &isrow, &isrow_new));
1944   PetscCall(ISCompressIndicesGeneral(mat->cmap->N, mat->cmap->n, mat->cmap->bs, 1, &iscol, &iscol_new));
1945 
1946   if (call == MAT_REUSE_MATRIX) {
1947     PetscCall(PetscObjectQuery((PetscObject)*newmat, "SubMatrix", (PetscObject *)&Mreuse));
1948     PetscCheck(Mreuse, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
1949     PetscCall(MatCreateSubMatrices_MPIBAIJ_local(mat, 1, &isrow_new, &iscol_new, MAT_REUSE_MATRIX, &Mreuse));
1950   } else {
1951     PetscCall(MatCreateSubMatrices_MPIBAIJ_local(mat, 1, &isrow_new, &iscol_new, MAT_INITIAL_MATRIX, &Mreuse));
1952   }
1953   PetscCall(ISDestroy(&isrow_new));
1954   PetscCall(ISDestroy(&iscol_new));
1955   /*
1956       m - number of local rows
1957       n - number of columns (same on all processors)
1958       rstart - first row in new global matrix generated
1959   */
1960   PetscCall(MatGetBlockSize(mat, &bs));
1961   PetscCall(MatGetSize(Mreuse, &m, &n));
1962   m = m / bs;
1963   n = n / bs;
1964 
1965   if (call == MAT_INITIAL_MATRIX) {
1966     aij = (Mat_SeqBAIJ *)(Mreuse)->data;
1967     ii  = aij->i;
1968     jj  = aij->j;
1969 
1970     /*
1971         Determine the number of non-zeros in the diagonal and off-diagonal
1972         portions of the matrix in order to do correct preallocation
1973     */
1974 
1975     /* first get start and end of "diagonal" columns */
1976     if (csize == PETSC_DECIDE) {
1977       PetscCall(ISGetSize(isrow, &mglobal));
1978       if (mglobal == n * bs) { /* square matrix */
1979         nlocal = m;
1980       } else {
1981         nlocal = n / size + ((n % size) > rank);
1982       }
1983     } else {
1984       nlocal = csize / bs;
1985     }
1986     PetscCallMPI(MPI_Scan(&nlocal, &rend, 1, MPIU_INT, MPI_SUM, comm));
1987     rstart = rend - nlocal;
1988     PetscCheck(rank != size - 1 || rend == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Local column sizes %" PetscInt_FMT " do not add up to total number of columns %" PetscInt_FMT, rend, n);
1989 
1990     /* next, compute all the lengths */
1991     PetscCall(PetscMalloc2(m + 1, &dlens, m + 1, &olens));
1992     for (i = 0; i < m; i++) {
1993       jend = ii[i + 1] - ii[i];
1994       olen = 0;
1995       dlen = 0;
1996       for (j = 0; j < jend; j++) {
1997         if (*jj < rstart || *jj >= rend) olen++;
1998         else dlen++;
1999         jj++;
2000       }
2001       olens[i] = olen;
2002       dlens[i] = dlen;
2003     }
2004     PetscCall(MatCreate(comm, &M));
2005     PetscCall(MatSetSizes(M, bs * m, bs * nlocal, PETSC_DECIDE, bs * n));
2006     PetscCall(MatSetType(M, ((PetscObject)mat)->type_name));
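    /* this routine serves both BAIJ and SBAIJ, so both preallocations are attempted; each
       call dispatches via PetscTryMethod() and is a no-op for the type it does not match */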
2007     PetscCall(MatMPIBAIJSetPreallocation(M, bs, 0, dlens, 0, olens));
2008     PetscCall(MatMPISBAIJSetPreallocation(M, bs, 0, dlens, 0, olens));
2009     PetscCall(PetscFree2(dlens, olens));
2010   } else {
2011     PetscInt ml, nl;
2012 
2013     M = *newmat;
2014     PetscCall(MatGetLocalSize(M, &ml, &nl));
2015     PetscCheck(ml == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Previous matrix must be same size/layout as request");
2016     PetscCall(MatZeroEntries(M));
2017     /*
2018          The next two lines are needed so we may call MatSetValuesBlocked_MPIBAIJ() below directly,
2019        rather than the slower MatSetValues().
2020     */
2021     M->was_assembled = PETSC_TRUE;
2022     M->assembled     = PETSC_FALSE;
2023   }
2024   PetscCall(MatSetOption(M, MAT_ROW_ORIENTED, PETSC_FALSE));
2025   PetscCall(MatGetOwnershipRange(M, &rstart, &rend));
2026   aij = (Mat_SeqBAIJ *)(Mreuse)->data;
2027   ii  = aij->i;
2028   jj  = aij->j;
2029   aa  = aij->a;
2030   for (i = 0; i < m; i++) {
2031     row   = rstart / bs + i;
2032     nz    = ii[i + 1] - ii[i];
2033     cwork = jj;
2034     jj += nz;
2035     vwork = aa;
2036     aa += nz * bs * bs;
2037     PetscCall(MatSetValuesBlocked_MPIBAIJ(M, 1, &row, nz, cwork, vwork, INSERT_VALUES));
2038   }
2039 
2040   PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
2041   PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY));
2042   *newmat = M;
2043 
2044   /* save the submatrix used on this process for the next MAT_REUSE_MATRIX request */
2045   if (call == MAT_INITIAL_MATRIX) {
2046     PetscCall(PetscObjectCompose((PetscObject)M, "SubMatrix", (PetscObject)Mreuse));
2047     PetscCall(PetscObjectDereference((PetscObject)Mreuse));
2048   }
2049   PetscFunctionReturn(PETSC_SUCCESS);
2050 }
2051 
2052 PetscErrorCode MatPermute_MPIBAIJ(Mat A, IS rowp, IS colp, Mat *B)
2053 {
2054   MPI_Comm        comm, pcomm;
2055   PetscInt        clocal_size, nrows;
2056   const PetscInt *rows;
2057   PetscMPIInt     size;
2058   IS              crowp, lcolp;
2059 
2060   PetscFunctionBegin;
2061   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
2062   /* make a collective version of 'rowp' */
2063   PetscCall(PetscObjectGetComm((PetscObject)rowp, &pcomm));
2064   if (pcomm == comm) {
2065     crowp = rowp;
2066   } else {
2067     PetscCall(ISGetSize(rowp, &nrows));
2068     PetscCall(ISGetIndices(rowp, &rows));
2069     PetscCall(ISCreateGeneral(comm, nrows, rows, PETSC_COPY_VALUES, &crowp));
2070     PetscCall(ISRestoreIndices(rowp, &rows));
2071   }
2072   PetscCall(ISSetPermutation(crowp));
2073   /* make a local version of 'colp' */
2074   PetscCall(PetscObjectGetComm((PetscObject)colp, &pcomm));
2075   PetscCallMPI(MPI_Comm_size(pcomm, &size));
2076   if (size == 1) {
2077     lcolp = colp;
2078   } else {
2079     PetscCall(ISAllGather(colp, &lcolp));
2080   }
2081   PetscCall(ISSetPermutation(lcolp));
2082   /* now we just get the submatrix */
2083   PetscCall(MatGetLocalSize(A, NULL, &clocal_size));
2084   PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(A, crowp, lcolp, clocal_size, MAT_INITIAL_MATRIX, B));
2085   /* clean up */
2086   if (pcomm != comm) PetscCall(ISDestroy(&crowp));
2087   if (size > 1) PetscCall(ISDestroy(&lcolp));
2088   PetscFunctionReturn(PETSC_SUCCESS);
2089 }
2090 
2091 PetscErrorCode MatGetGhosts_MPIBAIJ(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
2092 {
2093   Mat_MPIBAIJ *baij = (Mat_MPIBAIJ *)mat->data;
2094   Mat_SeqBAIJ *B    = (Mat_SeqBAIJ *)baij->B->data;
2095 
2096   PetscFunctionBegin;
2097   if (nghosts) *nghosts = B->nbs;
2098   if (ghosts) *ghosts = baij->garray;
2099   PetscFunctionReturn(PETSC_SUCCESS);
2100 }
2101 
2102 PetscErrorCode MatGetSeqNonzeroStructure_MPIBAIJ(Mat A, Mat *newmat)
2103 {
2104   Mat          B;
2105   Mat_MPIBAIJ *a  = (Mat_MPIBAIJ *)A->data;
2106   Mat_SeqBAIJ *ad = (Mat_SeqBAIJ *)a->A->data, *bd = (Mat_SeqBAIJ *)a->B->data;
2107   Mat_SeqAIJ  *b;
2108   PetscMPIInt  size, rank, *recvcounts = NULL, *displs = NULL;
2109   PetscInt     sendcount, i, *rstarts = A->rmap->range, n, cnt, j, bs = A->rmap->bs;
2110   PetscInt     m, *garray = a->garray, *lens, *jsendbuf, *a_jsendbuf, *b_jsendbuf;
2111 
2112   PetscFunctionBegin;
2113   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
2114   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
2115 
2116   /*   Tell every processor the number of nonzeros per row  */
2117   PetscCall(PetscMalloc1(A->rmap->N / bs, &lens));
2118   for (i = A->rmap->rstart / bs; i < A->rmap->rend / bs; i++) lens[i] = ad->i[i - A->rmap->rstart / bs + 1] - ad->i[i - A->rmap->rstart / bs] + bd->i[i - A->rmap->rstart / bs + 1] - bd->i[i - A->rmap->rstart / bs];
2119   PetscCall(PetscMalloc1(2 * size, &recvcounts));
2120   displs = recvcounts + size;
2121   for (i = 0; i < size; i++) {
2122     recvcounts[i] = A->rmap->range[i + 1] / bs - A->rmap->range[i] / bs;
2123     displs[i]     = A->rmap->range[i] / bs;
2124   }
2125   PetscCallMPI(MPI_Allgatherv(MPI_IN_PLACE, 0, MPI_DATATYPE_NULL, lens, recvcounts, displs, MPIU_INT, PetscObjectComm((PetscObject)A)));
2126   /* Create a sequential AIJ matrix (one entry per block) holding the complete nonzero structure */
2127   PetscCall(MatCreate(PETSC_COMM_SELF, &B));
2128   PetscCall(MatSetSizes(B, A->rmap->N / bs, A->cmap->N / bs, PETSC_DETERMINE, PETSC_DETERMINE));
2129   PetscCall(MatSetType(B, MATSEQAIJ));
2130   PetscCall(MatSeqAIJSetPreallocation(B, 0, lens));
2131   b = (Mat_SeqAIJ *)B->data;
2132 
2133   /*     Copy my part of matrix column indices over  */
2134   sendcount  = ad->nz + bd->nz;
2135   jsendbuf   = b->j + b->i[rstarts[rank] / bs];
2136   a_jsendbuf = ad->j;
2137   b_jsendbuf = bd->j;
2138   n          = A->rmap->rend / bs - A->rmap->rstart / bs;
2139   cnt        = 0;
2140   for (i = 0; i < n; i++) {
2141     /* put in lower diagonal portion */
2142     m = bd->i[i + 1] - bd->i[i];
2143     while (m > 0) {
2144       /* is it above diagonal (in bd (compressed) numbering) */
2145       if (garray[*b_jsendbuf] > A->rmap->rstart / bs + i) break;
2146       jsendbuf[cnt++] = garray[*b_jsendbuf++];
2147       m--;
2148     }
2149 
2150     /* put in diagonal portion */
2151     for (j = ad->i[i]; j < ad->i[i + 1]; j++) jsendbuf[cnt++] = A->rmap->rstart / bs + *a_jsendbuf++;
2152 
2153     /* put in upper diagonal portion */
2154     while (m-- > 0) jsendbuf[cnt++] = garray[*b_jsendbuf++];
2155   }
2156   PetscCheck(cnt == sendcount, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Corrupted PETSc matrix: nz given %" PetscInt_FMT " actual nz %" PetscInt_FMT, sendcount, cnt);
2157 
2158   /*  Gather all column indices to all processors  */
2159   for (i = 0; i < size; i++) {
2160     recvcounts[i] = 0;
2161     for (j = A->rmap->range[i] / bs; j < A->rmap->range[i + 1] / bs; j++) recvcounts[i] += lens[j];
2162   }
2163   displs[0] = 0;
2164   for (i = 1; i < size; i++) displs[i] = displs[i - 1] + recvcounts[i - 1];
2165   PetscCallMPI(MPI_Allgatherv(MPI_IN_PLACE, 0, MPI_DATATYPE_NULL, b->j, recvcounts, displs, MPIU_INT, PetscObjectComm((PetscObject)A)));
2166   /*  Assemble the matrix into usable form (note numerical values not yet set)  */
2167   /* set the b->ilen (length of each row) values */
2168   PetscCall(PetscArraycpy(b->ilen, lens, A->rmap->N / bs));
2169   /* set the b->i indices */
2170   b->i[0] = 0;
2171   for (i = 1; i <= A->rmap->N / bs; i++) b->i[i] = b->i[i - 1] + lens[i - 1];
2172   PetscCall(PetscFree(lens));
2173   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2174   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2175   PetscCall(PetscFree(recvcounts));
2176 
2177   PetscCall(MatPropagateSymmetryOptions(A, B));
2178   *newmat = B;
2179   PetscFunctionReturn(PETSC_SUCCESS);
2180 }
2181 
2182 PetscErrorCode MatSOR_MPIBAIJ(Mat matin, Vec bb, PetscReal omega, MatSORType flag, PetscReal fshift, PetscInt its, PetscInt lits, Vec xx)
2183 {
2184   Mat_MPIBAIJ *mat = (Mat_MPIBAIJ *)matin->data;
2185   Vec          bb1 = NULL;
2186 
2187   PetscFunctionBegin;
2188   if (flag == SOR_APPLY_UPPER) {
2189     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2190     PetscFunctionReturn(PETSC_SUCCESS);
2191   }
2192 
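  /* a work vector is needed unless a single zero-initial-guess sweep is requested;
     ~flag & SOR_ZERO_INITIAL_GUESS is nonzero exactly when that bit is NOT set in flag */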
2193   if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS) PetscCall(VecDuplicate(bb, &bb1));
2194 
2195   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2196     if (flag & SOR_ZERO_INITIAL_GUESS) {
2197       PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2198       its--;
2199     }
2200 
2201     while (its--) {
2202       PetscCall(VecScatterBegin(mat->Mvctx, xx, mat->lvec, INSERT_VALUES, SCATTER_FORWARD));
2203       PetscCall(VecScatterEnd(mat->Mvctx, xx, mat->lvec, INSERT_VALUES, SCATTER_FORWARD));
2204 
2205       /* update rhs: bb1 = bb - B*x */
2206       PetscCall(VecScale(mat->lvec, -1.0));
2207       PetscCall((*mat->B->ops->multadd)(mat->B, mat->lvec, bb, bb1));
2208 
2209       /* local sweep */
2210       PetscCall((*mat->A->ops->sor)(mat->A, bb1, omega, SOR_SYMMETRIC_SWEEP, fshift, lits, 1, xx));
2211     }
2212   } else if (flag & SOR_LOCAL_FORWARD_SWEEP) {
2213     if (flag & SOR_ZERO_INITIAL_GUESS) {
2214       PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2215       its--;
2216     }
2217     while (its--) {
2218       PetscCall(VecScatterBegin(mat->Mvctx, xx, mat->lvec, INSERT_VALUES, SCATTER_FORWARD));
2219       PetscCall(VecScatterEnd(mat->Mvctx, xx, mat->lvec, INSERT_VALUES, SCATTER_FORWARD));
2220 
2221       /* update rhs: bb1 = bb - B*x */
2222       PetscCall(VecScale(mat->lvec, -1.0));
2223       PetscCall((*mat->B->ops->multadd)(mat->B, mat->lvec, bb, bb1));
2224 
2225       /* local sweep */
2226       PetscCall((*mat->A->ops->sor)(mat->A, bb1, omega, SOR_FORWARD_SWEEP, fshift, lits, 1, xx));
2227     }
2228   } else if (flag & SOR_LOCAL_BACKWARD_SWEEP) {
2229     if (flag & SOR_ZERO_INITIAL_GUESS) {
2230       PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2231       its--;
2232     }
2233     while (its--) {
2234       PetscCall(VecScatterBegin(mat->Mvctx, xx, mat->lvec, INSERT_VALUES, SCATTER_FORWARD));
2235       PetscCall(VecScatterEnd(mat->Mvctx, xx, mat->lvec, INSERT_VALUES, SCATTER_FORWARD));
2236 
2237       /* update rhs: bb1 = bb - B*x */
2238       PetscCall(VecScale(mat->lvec, -1.0));
2239       PetscCall((*mat->B->ops->multadd)(mat->B, mat->lvec, bb, bb1));
2240 
2241       /* local sweep */
2242       PetscCall((*mat->A->ops->sor)(mat->A, bb1, omega, SOR_BACKWARD_SWEEP, fshift, lits, 1, xx));
2243     }
2244   } else SETERRQ(PetscObjectComm((PetscObject)matin), PETSC_ERR_SUP, "Requested parallel SOR not supported");
2245 
2246   PetscCall(VecDestroy(&bb1));
2247   PetscFunctionReturn(PETSC_SUCCESS);
2248 }
2249 
2250 PetscErrorCode MatGetColumnReductions_MPIBAIJ(Mat A, PetscInt type, PetscReal *reductions)
2251 {
2252   Mat_MPIBAIJ *aij = (Mat_MPIBAIJ *)A->data;
2253   PetscInt     m, N, i, *garray = aij->garray;
2254   PetscInt     ib, jb, bs = A->rmap->bs;
2255   Mat_SeqBAIJ *a_aij = (Mat_SeqBAIJ *)aij->A->data;
2256   MatScalar   *a_val = a_aij->a;
2257   Mat_SeqBAIJ *b_aij = (Mat_SeqBAIJ *)aij->B->data;
2258   MatScalar   *b_val = b_aij->a;
2259   PetscReal   *work;
2260 
2261   PetscFunctionBegin;
2262   PetscCall(MatGetSize(A, &m, &N));
2263   PetscCall(PetscCalloc1(N, &work));
2264   if (type == NORM_2) {
2265     for (i = a_aij->i[0]; i < a_aij->i[aij->A->rmap->n / bs]; i++) {
2266       for (jb = 0; jb < bs; jb++) {
2267         for (ib = 0; ib < bs; ib++) {
2268           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val * *a_val);
2269           a_val++;
2270         }
2271       }
2272     }
2273     for (i = b_aij->i[0]; i < b_aij->i[aij->B->rmap->n / bs]; i++) {
2274       for (jb = 0; jb < bs; jb++) {
2275         for (ib = 0; ib < bs; ib++) {
2276           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val * *b_val);
2277           b_val++;
2278         }
2279       }
2280     }
2281   } else if (type == NORM_1) {
2282     for (i = a_aij->i[0]; i < a_aij->i[aij->A->rmap->n / bs]; i++) {
2283       for (jb = 0; jb < bs; jb++) {
2284         for (ib = 0; ib < bs; ib++) {
2285           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscAbsScalar(*a_val);
2286           a_val++;
2287         }
2288       }
2289     }
2290     for (i = b_aij->i[0]; i < b_aij->i[aij->B->rmap->n / bs]; i++) {
2291       for (jb = 0; jb < bs; jb++) {
2292         for (ib = 0; ib < bs; ib++) {
2293           work[garray[b_aij->j[i]] * bs + jb] += PetscAbsScalar(*b_val);
2294           b_val++;
2295         }
2296       }
2297     }
2298   } else if (type == NORM_INFINITY) {
2299     for (i = a_aij->i[0]; i < a_aij->i[aij->A->rmap->n / bs]; i++) {
2300       for (jb = 0; jb < bs; jb++) {
2301         for (ib = 0; ib < bs; ib++) {
2302           PetscInt col = A->cmap->rstart + a_aij->j[i] * bs + jb;
2303           work[col] = PetscMax(PetscAbsScalar(*a_val), work[col]);
2304           a_val++;
2305         }
2306       }
2307     }
2308     for (i = b_aij->i[0]; i < b_aij->i[aij->B->rmap->n / bs]; i++) {
2309       for (jb = 0; jb < bs; jb++) {
2310         for (ib = 0; ib < bs; ib++) {
2311           PetscInt col = garray[b_aij->j[i]] * bs + jb;
2312           work[col] = PetscMax(PetscAbsScalar(*b_val), work[col]);
2313           b_val++;
2314         }
2315       }
2316     }
2317   } else if (type == REDUCTION_SUM_REALPART || type == REDUCTION_MEAN_REALPART) {
2318     for (i = a_aij->i[0]; i < a_aij->i[aij->A->rmap->n / bs]; i++) {
2319       for (jb = 0; jb < bs; jb++) {
2320         for (ib = 0; ib < bs; ib++) {
2321           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscRealPart(*a_val);
2322           a_val++;
2323         }
2324       }
2325     }
2326     for (i = b_aij->i[0]; i < b_aij->i[aij->B->rmap->n / bs]; i++) {
2327       for (jb = 0; jb < bs; jb++) {
2328         for (ib = 0; ib < bs; ib++) {
2329           work[garray[b_aij->j[i]] * bs + jb] += PetscRealPart(*b_val);
2330           b_val++;
2331         }
2332       }
2333     }
2334   } else if (type == REDUCTION_SUM_IMAGINARYPART || type == REDUCTION_MEAN_IMAGINARYPART) {
2335     for (i = a_aij->i[0]; i < a_aij->i[aij->A->rmap->n / bs]; i++) {
2336       for (jb = 0; jb < bs; jb++) {
2337         for (ib = 0; ib < bs; ib++) {
2338           work[A->cmap->rstart + a_aij->j[i] * bs + jb] += PetscImaginaryPart(*a_val);
2339           a_val++;
2340         }
2341       }
2342     }
2343     for (i = b_aij->i[0]; i < b_aij->i[aij->B->rmap->n / bs]; i++) {
2344       for (jb = 0; jb < bs; jb++) {
2345         for (ib = 0; ib < bs; ib++) {
2346           work[garray[b_aij->j[i]] * bs + jb] += PetscImaginaryPart(*b_val);
2347           b_val++;
2348         }
2349       }
2350     }
2351   } else SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Unknown reduction type");
2352   if (type == NORM_INFINITY) {
2353     PetscCall(MPIU_Allreduce(work, reductions, N, MPIU_REAL, MPIU_MAX, PetscObjectComm((PetscObject)A)));
2354   } else {
2355     PetscCall(MPIU_Allreduce(work, reductions, N, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)A)));
2356   }
2357   PetscCall(PetscFree(work));
2358   if (type == NORM_2) {
2359     for (i = 0; i < N; i++) reductions[i] = PetscSqrtReal(reductions[i]);
2360   } else if (type == REDUCTION_MEAN_REALPART || type == REDUCTION_MEAN_IMAGINARYPART) {
2361     for (i = 0; i < N; i++) reductions[i] /= m;
2362   }
2363   PetscFunctionReturn(PETSC_SUCCESS);
2364 }
2365 
2366 PetscErrorCode MatInvertBlockDiagonal_MPIBAIJ(Mat A, const PetscScalar **values)
2367 {
2368   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
2369 
2370   PetscFunctionBegin;
2371   PetscCall(MatInvertBlockDiagonal(a->A, values));
2372   A->factorerrortype             = a->A->factorerrortype;
2373   A->factorerror_zeropivot_value = a->A->factorerror_zeropivot_value;
2374   A->factorerror_zeropivot_row   = a->A->factorerror_zeropivot_row;
2375   PetscFunctionReturn(PETSC_SUCCESS);
2376 }
2377 
2378 PetscErrorCode MatShift_MPIBAIJ(Mat Y, PetscScalar a)
2379 {
2380   Mat_MPIBAIJ *maij = (Mat_MPIBAIJ *)Y->data;
2381   Mat_SeqBAIJ *aij  = (Mat_SeqBAIJ *)maij->A->data;
2382 
2383   PetscFunctionBegin;
2384   if (!Y->preallocated) {
2385     PetscCall(MatMPIBAIJSetPreallocation(Y, Y->rmap->bs, 1, NULL, 0, NULL));
2386   } else if (!aij->nz) {
2387     PetscInt nonew = aij->nonew;
2388     PetscCall(MatSeqBAIJSetPreallocation(maij->A, Y->rmap->bs, 1, NULL));
2389     aij->nonew = nonew;
2390   }
2391   PetscCall(MatShift_Basic(Y, a));
2392   PetscFunctionReturn(PETSC_SUCCESS);
2393 }
2394 
2395 PetscErrorCode MatMissingDiagonal_MPIBAIJ(Mat A, PetscBool *missing, PetscInt *d)
2396 {
2397   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
2398 
2399   PetscFunctionBegin;
2400   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only works for square matrices");
2401   PetscCall(MatMissingDiagonal(a->A, missing, d));
2402   if (d) {
2403     PetscInt rstart;
2404     PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
2405     *d += rstart / A->rmap->bs;
2406   }
2407   PetscFunctionReturn(PETSC_SUCCESS);
2408 }
2409 
2410 PetscErrorCode MatGetDiagonalBlock_MPIBAIJ(Mat A, Mat *a)
2411 {
2412   PetscFunctionBegin;
2413   *a = ((Mat_MPIBAIJ *)A->data)->A;
2414   PetscFunctionReturn(PETSC_SUCCESS);
2415 }
2416 
2417 static struct _MatOps MatOps_Values = {MatSetValues_MPIBAIJ,
2418                                        MatGetRow_MPIBAIJ,
2419                                        MatRestoreRow_MPIBAIJ,
2420                                        MatMult_MPIBAIJ,
2421                                        /* 4*/ MatMultAdd_MPIBAIJ,
2422                                        MatMultTranspose_MPIBAIJ,
2423                                        MatMultTransposeAdd_MPIBAIJ,
2424                                        NULL,
2425                                        NULL,
2426                                        NULL,
2427                                        /*10*/ NULL,
2428                                        NULL,
2429                                        NULL,
2430                                        MatSOR_MPIBAIJ,
2431                                        MatTranspose_MPIBAIJ,
2432                                        /*15*/ MatGetInfo_MPIBAIJ,
2433                                        MatEqual_MPIBAIJ,
2434                                        MatGetDiagonal_MPIBAIJ,
2435                                        MatDiagonalScale_MPIBAIJ,
2436                                        MatNorm_MPIBAIJ,
2437                                        /*20*/ MatAssemblyBegin_MPIBAIJ,
2438                                        MatAssemblyEnd_MPIBAIJ,
2439                                        MatSetOption_MPIBAIJ,
2440                                        MatZeroEntries_MPIBAIJ,
2441                                        /*24*/ MatZeroRows_MPIBAIJ,
2442                                        NULL,
2443                                        NULL,
2444                                        NULL,
2445                                        NULL,
2446                                        /*29*/ MatSetUp_MPI_Hash,
2447                                        NULL,
2448                                        NULL,
2449                                        MatGetDiagonalBlock_MPIBAIJ,
2450                                        NULL,
2451                                        /*34*/ MatDuplicate_MPIBAIJ,
2452                                        NULL,
2453                                        NULL,
2454                                        NULL,
2455                                        NULL,
2456                                        /*39*/ MatAXPY_MPIBAIJ,
2457                                        MatCreateSubMatrices_MPIBAIJ,
2458                                        MatIncreaseOverlap_MPIBAIJ,
2459                                        MatGetValues_MPIBAIJ,
2460                                        MatCopy_MPIBAIJ,
2461                                        /*44*/ NULL,
2462                                        MatScale_MPIBAIJ,
2463                                        MatShift_MPIBAIJ,
2464                                        NULL,
2465                                        MatZeroRowsColumns_MPIBAIJ,
2466                                        /*49*/ NULL,
2467                                        NULL,
2468                                        NULL,
2469                                        NULL,
2470                                        NULL,
2471                                        /*54*/ MatFDColoringCreate_MPIXAIJ,
2472                                        NULL,
2473                                        MatSetUnfactored_MPIBAIJ,
2474                                        MatPermute_MPIBAIJ,
2475                                        MatSetValuesBlocked_MPIBAIJ,
2476                                        /*59*/ MatCreateSubMatrix_MPIBAIJ,
2477                                        MatDestroy_MPIBAIJ,
2478                                        MatView_MPIBAIJ,
2479                                        NULL,
2480                                        NULL,
2481                                        /*64*/ NULL,
2482                                        NULL,
2483                                        NULL,
2484                                        NULL,
2485                                        NULL,
2486                                        /*69*/ MatGetRowMaxAbs_MPIBAIJ,
2487                                        NULL,
2488                                        NULL,
2489                                        NULL,
2490                                        NULL,
2491                                        /*74*/ NULL,
2492                                        MatFDColoringApply_BAIJ,
2493                                        NULL,
2494                                        NULL,
2495                                        NULL,
2496                                        /*79*/ NULL,
2497                                        NULL,
2498                                        NULL,
2499                                        NULL,
2500                                        MatLoad_MPIBAIJ,
2501                                        /*84*/ NULL,
2502                                        NULL,
2503                                        NULL,
2504                                        NULL,
2505                                        NULL,
2506                                        /*89*/ NULL,
2507                                        NULL,
2508                                        NULL,
2509                                        NULL,
2510                                        NULL,
2511                                        /*94*/ NULL,
2512                                        NULL,
2513                                        NULL,
2514                                        NULL,
2515                                        NULL,
2516                                        /*99*/ NULL,
2517                                        NULL,
2518                                        NULL,
2519                                        MatConjugate_MPIBAIJ,
2520                                        NULL,
2521                                        /*104*/ NULL,
2522                                        MatRealPart_MPIBAIJ,
2523                                        MatImaginaryPart_MPIBAIJ,
2524                                        NULL,
2525                                        NULL,
2526                                        /*109*/ NULL,
2527                                        NULL,
2528                                        NULL,
2529                                        NULL,
2530                                        MatMissingDiagonal_MPIBAIJ,
2531                                        /*114*/ MatGetSeqNonzeroStructure_MPIBAIJ,
2532                                        NULL,
2533                                        MatGetGhosts_MPIBAIJ,
2534                                        NULL,
2535                                        NULL,
2536                                        /*119*/ NULL,
2537                                        NULL,
2538                                        NULL,
2539                                        NULL,
2540                                        MatGetMultiProcBlock_MPIBAIJ,
2541                                        /*124*/ NULL,
2542                                        MatGetColumnReductions_MPIBAIJ,
2543                                        MatInvertBlockDiagonal_MPIBAIJ,
2544                                        NULL,
2545                                        NULL,
2546                                        /*129*/ NULL,
2547                                        NULL,
2548                                        NULL,
2549                                        NULL,
2550                                        NULL,
2551                                        /*134*/ NULL,
2552                                        NULL,
2553                                        NULL,
2554                                        NULL,
2555                                        NULL,
2556                                        /*139*/ MatSetBlockSizes_Default,
2557                                        NULL,
2558                                        NULL,
2559                                        MatFDColoringSetUp_MPIXAIJ,
2560                                        NULL,
2561                                        /*144*/ MatCreateMPIMatConcatenateSeqMat_MPIBAIJ,
2562                                        NULL,
2563                                        NULL,
2564                                        NULL,
2565                                        NULL,
2566                                        NULL,
2567                                        /*150*/ NULL,
2568                                        NULL};
2569 
2570 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPISBAIJ(Mat, MatType, MatReuse, Mat *);
2571 PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat, MatType, MatReuse, Mat *);
2572 
2573 PetscErrorCode MatMPIBAIJSetPreallocationCSR_MPIBAIJ(Mat B, PetscInt bs, const PetscInt ii[], const PetscInt jj[], const PetscScalar V[])
2574 {
2575   PetscInt        m, rstart, cstart, cend;
2576   PetscInt        i, j, dlen, olen, nz, nz_max = 0, *d_nnz = NULL, *o_nnz = NULL;
2577   const PetscInt *JJ          = NULL;
2578   PetscScalar    *values      = NULL;
2579   PetscBool       roworiented = ((Mat_MPIBAIJ *)B->data)->roworiented;
2580   PetscBool       nooffprocentries;
2581 
2582   PetscFunctionBegin;
2583   PetscCall(PetscLayoutSetBlockSize(B->rmap, bs));
2584   PetscCall(PetscLayoutSetBlockSize(B->cmap, bs));
2585   PetscCall(PetscLayoutSetUp(B->rmap));
2586   PetscCall(PetscLayoutSetUp(B->cmap));
2587   PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
2588   m      = B->rmap->n / bs;
2589   rstart = B->rmap->rstart / bs;
2590   cstart = B->cmap->rstart / bs;
2591   cend   = B->cmap->rend / bs;
2592 
2593   PetscCheck(!ii[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ii[0] must be 0 but it is %" PetscInt_FMT, ii[0]);
2594   PetscCall(PetscMalloc2(m, &d_nnz, m, &o_nnz));
2595   for (i = 0; i < m; i++) {
2596     nz = ii[i + 1] - ii[i];
2597     PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz);
2598     nz_max = PetscMax(nz_max, nz);
2599     dlen   = 0;
2600     olen   = 0;
2601     JJ     = jj + ii[i];
2602     for (j = 0; j < nz; j++) {
2603       if (*JJ < cstart || *JJ >= cend) olen++;
2604       else dlen++;
2605       JJ++;
2606     }
2607     d_nnz[i] = dlen;
2608     o_nnz[i] = olen;
2609   }
2610   PetscCall(MatMPIBAIJSetPreallocation(B, bs, 0, d_nnz, 0, o_nnz));
2611   PetscCall(PetscFree2(d_nnz, o_nnz));
2612 
2613   values = (PetscScalar *)V;
2614   if (!values) PetscCall(PetscCalloc1(bs * bs * nz_max, &values));
2615   for (i = 0; i < m; i++) {
2616     PetscInt        row   = i + rstart;
2617     PetscInt        ncols = ii[i + 1] - ii[i];
2618     const PetscInt *icols = jj + ii[i];
2619     if (bs == 1 || !roworiented) { /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2620       const PetscScalar *svals = values + (V ? (bs * bs * ii[i]) : 0);
2621       PetscCall(MatSetValuesBlocked_MPIBAIJ(B, 1, &row, ncols, icols, svals, INSERT_VALUES));
2622     } else { /* block ordering does not match so we can only insert one block at a time. */
2623       PetscInt j;
2624       for (j = 0; j < ncols; j++) {
2625         const PetscScalar *svals = values + (V ? (bs * bs * (ii[i] + j)) : 0);
2626         PetscCall(MatSetValuesBlocked_MPIBAIJ(B, 1, &row, 1, &icols[j], svals, INSERT_VALUES));
2627       }
2628     }
2629   }
2630 
2631   if (!V) PetscCall(PetscFree(values));
2632   nooffprocentries    = B->nooffprocentries;
2633   B->nooffprocentries = PETSC_TRUE;
2634   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2635   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2636   B->nooffprocentries = nooffprocentries;
2637 
2638   PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
2639   PetscFunctionReturn(PETSC_SUCCESS);
2640 }
2641 
2642 /*@C
2643    MatMPIBAIJSetPreallocationCSR - Creates a sparse parallel matrix in `MATBAIJ` format using the given nonzero structure and (optional) numerical values
2644 
2645    Collective
2646 
2647    Input Parameters:
2648 +  B - the matrix
2649 .  bs - the block size
2650 .  i - the indices into `j` for the start of each local row (starts with zero)
2651 .  j - the column indices for each local row (starts with zero); these must be sorted for each row
2652 -  v - optional values in the matrix
2653 
2654    Level: advanced
2655 
2656    Notes:
2657     The order of the entries in `v` is specified by the `MatOption` `MAT_ROW_ORIENTED`.  For example, C programs
2658    may want to use the default `MAT_ROW_ORIENTED` with value `PETSC_TRUE` and an array v[nnz][bs][bs], in which the second index
2659    runs over the rows within a block and the last index runs over the columns within a block.  Fortran programs will likely set
2660    `MAT_ROW_ORIENTED` with value `PETSC_FALSE` and use a Fortran array v(bs,bs,nnz), in which the first index runs over the rows
2661    within a block and the second index runs over the columns within a block.
2662 
2663    Though this routine has Preallocation() in the name, it also sets the exact nonzero locations of the matrix entries and usually the numerical values as well
2664 
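   Example:
   A minimal sketch of the calling sequence (comm, bs, m, n and the local block CSR arrays i, j, v are assumed to have been set up by the caller):
.vb
   Mat B;
   MatCreate(comm, &B);
   MatSetSizes(B, m, n, PETSC_DETERMINE, PETSC_DETERMINE);
   MatSetType(B, MATMPIBAIJ);
   MatMPIBAIJSetPreallocationCSR(B, bs, i, j, v);
.ve
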
2665 .seealso: `Mat`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIBAIJSetPreallocation()`, `MatCreateAIJ()`, `MPIAIJ`, `MatCreateMPIBAIJWithArrays()`, `MPIBAIJ`
2666 @*/
2667 PetscErrorCode MatMPIBAIJSetPreallocationCSR(Mat B, PetscInt bs, const PetscInt i[], const PetscInt j[], const PetscScalar v[])
2668 {
2669   PetscFunctionBegin;
2670   PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
2671   PetscValidType(B, 1);
2672   PetscValidLogicalCollectiveInt(B, bs, 2);
2673   PetscTryMethod(B, "MatMPIBAIJSetPreallocationCSR_C", (Mat, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[]), (B, bs, i, j, v));
2674   PetscFunctionReturn(PETSC_SUCCESS);
2675 }
2676 
2677 PetscErrorCode MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz)
2678 {
2679   Mat_MPIBAIJ *b = (Mat_MPIBAIJ *)B->data;
2680   PetscInt     i;
2681   PetscMPIInt  size;
2682 
2683   PetscFunctionBegin;
2684   if (B->hash_active) {
2685     PetscCall(PetscMemcpy(&B->ops, &b->cops, sizeof(*(B->ops))));
2686     B->hash_active = PETSC_FALSE;
2687   }
2688   if (!B->preallocated) PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), bs, &B->bstash));
2689   PetscCall(MatSetBlockSize(B, PetscAbs(bs)));
2690   PetscCall(PetscLayoutSetUp(B->rmap));
2691   PetscCall(PetscLayoutSetUp(B->cmap));
2692   PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
2693 
2694   if (d_nnz) {
2695     for (i = 0; i < B->rmap->n / bs; i++) PetscCheck(d_nnz[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "d_nnz cannot be less than 0: local row %" PetscInt_FMT " value %" PetscInt_FMT, i, d_nnz[i]);
2696   }
2697   if (o_nnz) {
2698     for (i = 0; i < B->rmap->n / bs; i++) PetscCheck(o_nnz[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "o_nnz cannot be less than 0: local row %" PetscInt_FMT " value %" PetscInt_FMT, i, o_nnz[i]);
2699   }
2700 
2701   b->bs2 = bs * bs;
2702   b->mbs = B->rmap->n / bs;
2703   b->nbs = B->cmap->n / bs;
2704   b->Mbs = B->rmap->N / bs;
2705   b->Nbs = B->cmap->N / bs;
2706 
2707   for (i = 0; i <= b->size; i++) b->rangebs[i] = B->rmap->range[i] / bs;
2708   b->rstartbs = B->rmap->rstart / bs;
2709   b->rendbs   = B->rmap->rend / bs;
2710   b->cstartbs = B->cmap->rstart / bs;
2711   b->cendbs   = B->cmap->rend / bs;
2712 
2713 #if defined(PETSC_USE_CTABLE)
2714   PetscCall(PetscHMapIDestroy(&b->colmap));
2715 #else
2716   PetscCall(PetscFree(b->colmap));
2717 #endif
2718   PetscCall(PetscFree(b->garray));
2719   PetscCall(VecDestroy(&b->lvec));
2720   PetscCall(VecScatterDestroy(&b->Mvctx));
2721 
2722   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &size));
2723   PetscCall(MatDestroy(&b->B));
2724   PetscCall(MatCreate(PETSC_COMM_SELF, &b->B));
2725   PetscCall(MatSetSizes(b->B, B->rmap->n, size > 1 ? B->cmap->N : 0, B->rmap->n, size > 1 ? B->cmap->N : 0));
2726   PetscCall(MatSetType(b->B, MATSEQBAIJ));
2727 
2728   PetscCall(MatDestroy(&b->A));
2729   PetscCall(MatCreate(PETSC_COMM_SELF, &b->A));
2730   PetscCall(MatSetSizes(b->A, B->rmap->n, B->cmap->n, B->rmap->n, B->cmap->n));
2731   PetscCall(MatSetType(b->A, MATSEQBAIJ));
2732 
2733   PetscCall(MatSeqBAIJSetPreallocation(b->A, bs, d_nz, d_nnz));
2734   PetscCall(MatSeqBAIJSetPreallocation(b->B, bs, o_nz, o_nnz));
2735   B->preallocated  = PETSC_TRUE;
2736   B->was_assembled = PETSC_FALSE;
2737   B->assembled     = PETSC_FALSE;
2738   PetscFunctionReturn(PETSC_SUCCESS);
2739 }
2740 
2741 extern PetscErrorCode MatDiagonalScaleLocal_MPIBAIJ(Mat, Vec);
2742 extern PetscErrorCode MatSetHashTableFactor_MPIBAIJ(Mat, PetscReal);
2743 
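/* Builds a MATMPIADJ adjacency graph from the block nonzero structure of a MATMPIBAIJ matrix; a diagonal block within a row is dropped from the graph */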
2744 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAdj(Mat B, MatType newtype, MatReuse reuse, Mat *adj)
2745 {
2746   Mat_MPIBAIJ    *b = (Mat_MPIBAIJ *)B->data;
2747   Mat_SeqBAIJ    *d = (Mat_SeqBAIJ *)b->A->data, *o = (Mat_SeqBAIJ *)b->B->data;
2748   PetscInt        M = B->rmap->n / B->rmap->bs, i, *ii, *jj, cnt, j, k, rstart = B->rmap->rstart / B->rmap->bs;
2749   const PetscInt *id = d->i, *jd = d->j, *io = o->i, *jo = o->j, *garray = b->garray;
2750 
2751   PetscFunctionBegin;
2752   PetscCall(PetscMalloc1(M + 1, &ii));
2753   ii[0] = 0;
2754   for (i = 0; i < M; i++) {
2755     PetscCheck((id[i + 1] - id[i]) >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Indices wrong %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, i, id[i], id[i + 1]);
2756     PetscCheck((io[i + 1] - io[i]) >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Indices wrong %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, i, io[i], io[i + 1]);
2757     ii[i + 1] = ii[i] + id[i + 1] - id[i] + io[i + 1] - io[i];
2758     /* remove one from the count if the matrix row contains a diagonal entry */
2759     for (j = id[i]; j < id[i + 1]; j++) {
2760       if (jd[j] == i) {
2761         ii[i + 1]--;
2762         break;
2763       }
2764     }
2765   }
2766   PetscCall(PetscMalloc1(ii[M], &jj));
2767   cnt = 0;
2768   for (i = 0; i < M; i++) {
2769     for (j = io[i]; j < io[i + 1]; j++) {
2770       if (garray[jo[j]] > rstart) break;
2771       jj[cnt++] = garray[jo[j]];
2772     }
2773     for (k = id[i]; k < id[i + 1]; k++) {
2774       if (jd[k] != i) jj[cnt++] = rstart + jd[k];
2775     }
2776     for (; j < io[i + 1]; j++) jj[cnt++] = garray[jo[j]];
2777   }
2778   PetscCall(MatCreateMPIAdj(PetscObjectComm((PetscObject)B), M, B->cmap->N / B->rmap->bs, ii, jj, NULL, adj));
2779   PetscFunctionReturn(PETSC_SUCCESS);
2780 }
2781 
2782 #include <../src/mat/impls/aij/mpi/mpiaij.h>
2783 
2784 PETSC_INTERN PetscErrorCode MatConvert_SeqBAIJ_SeqAIJ(Mat, MatType, MatReuse, Mat *);
2785 
2786 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIAIJ(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
2787 {
2788   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
2789   Mat_MPIAIJ  *b;
2790   Mat          B;
2791 
2792   PetscFunctionBegin;
2793   PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Matrix must be assembled");
2794 
2795   if (reuse == MAT_REUSE_MATRIX) {
2796     B = *newmat;
2797   } else {
2798     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
2799     PetscCall(MatSetType(B, MATMPIAIJ));
2800     PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
2801     PetscCall(MatSetBlockSizes(B, A->rmap->bs, A->cmap->bs));
2802     PetscCall(MatSeqAIJSetPreallocation(B, 0, NULL));
2803     PetscCall(MatMPIAIJSetPreallocation(B, 0, NULL, 0, NULL));
2804   }
2805   b = (Mat_MPIAIJ *)B->data;
2806 
2807   if (reuse == MAT_REUSE_MATRIX) {
2808     PetscCall(MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_REUSE_MATRIX, &b->A));
2809     PetscCall(MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_REUSE_MATRIX, &b->B));
2810   } else {
2811     PetscBool3 sym = A->symmetric, hermitian = A->hermitian, structurally_symmetric = A->structurally_symmetric, spd = A->spd;
2812     PetscCall(MatDestroy(&b->A));
2813     PetscCall(MatDestroy(&b->B));
2814     PetscCall(MatDisAssemble_MPIBAIJ(A));
2815     PetscCall(MatConvert_SeqBAIJ_SeqAIJ(a->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->A));
2816     PetscCall(MatConvert_SeqBAIJ_SeqAIJ(a->B, MATSEQAIJ, MAT_INITIAL_MATRIX, &b->B));
2817     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
2818     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
2819     A->symmetric              = sym;
2820     A->hermitian              = hermitian;
2821     A->structurally_symmetric = structurally_symmetric;
2822     A->spd                    = spd;
2823   }
2824   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2825   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2826 
2827   if (reuse == MAT_INPLACE_MATRIX) {
2828     PetscCall(MatHeaderReplace(A, &B));
2829   } else {
2830     *newmat = B;
2831   }
2832   PetscFunctionReturn(PETSC_SUCCESS);
2833 }
2834 
2835 /*MC
2836    MATMPIBAIJ - MATMPIBAIJ = "mpibaij" - A matrix type to be used for distributed block sparse matrices.
2837 
2838    Options Database Keys:
2839 + -mat_type mpibaij - sets the matrix type to `MATMPIBAIJ` during a call to `MatSetFromOptions()`
2840 . -mat_block_size <bs> - set the blocksize used to store the matrix
2841 . -mat_baij_mult_version <version> - indicates which version of the matrix-vector product to use (0 often indicates using BLAS)
2842 - -mat_use_hash_table <fact> - set hash table factor
2843 
2844    Level: beginner
2845 
2846    Note:
2847     `MatSetOption`(mat, `MAT_STRUCTURE_ONLY`, `PETSC_TRUE`) may be called for this matrix type. In this case no
2848     space is allocated for the nonzero entries and any entries passed with `MatSetValues()` are ignored
2849 
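   Example:
   A typical creation sequence that picks this type up from the options database (a sketch; m, n, M, N are placeholder sizes and the preallocation counts are illustrative only):
.vb
   MatCreate(comm, &A);
   MatSetSizes(A, m, n, M, N);
   MatSetFromOptions(A);  /* run with -mat_type mpibaij -mat_block_size 3 */
   MatMPIBAIJSetPreallocation(A, 3, 3, NULL, 2, NULL);
.ve
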
2850 .seealso: `Mat`, `MATBAIJ`, `MATSEQBAIJ`, `MatCreateBAIJ()`
2851 M*/
2852 
2853 PETSC_INTERN PetscErrorCode MatConvert_MPIBAIJ_MPIBSTRM(Mat, MatType, MatReuse, Mat *);
2854 
2855 PETSC_EXTERN PetscErrorCode MatCreate_MPIBAIJ(Mat B)
2856 {
2857   Mat_MPIBAIJ *b;
2858   PetscBool    flg = PETSC_FALSE;
2859 
2860   PetscFunctionBegin;
2861   PetscCall(PetscNew(&b));
2862   B->data = (void *)b;
2863 
2864   PetscCall(PetscMemcpy(B->ops, &MatOps_Values, sizeof(struct _MatOps)));
2865   B->assembled = PETSC_FALSE;
2866 
2867   B->insertmode = NOT_SET_VALUES;
2868   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)B), &b->rank));
2869   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &b->size));
2870 
2871   /* build local table of row and column ownerships */
2872   PetscCall(PetscMalloc1(b->size + 1, &b->rangebs));
2873 
2874   /* build cache for off array entries formed */
2875   PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));
2876 
2877   b->donotstash  = PETSC_FALSE;
2878   b->colmap      = NULL;
2879   b->garray      = NULL;
2880   b->roworiented = PETSC_TRUE;
2881 
2882   /* stuff used in block assembly */
2883   b->barray = NULL;
2884 
2885   /* stuff used for matrix vector multiply */
2886   b->lvec  = NULL;
2887   b->Mvctx = NULL;
2888 
2889   /* stuff for MatGetRow() */
2890   b->rowindices   = NULL;
2891   b->rowvalues    = NULL;
2892   b->getrowactive = PETSC_FALSE;
2893 
2894   /* hash table stuff */
2895   b->ht           = NULL;
2896   b->hd           = NULL;
2897   b->ht_size      = 0;
2898   b->ht_flag      = PETSC_FALSE;
2899   b->ht_fact      = 0;
2900   b->ht_total_ct  = 0;
2901   b->ht_insert_ct = 0;
2902 
2903   /* stuff for MatCreateSubMatrices_MPIBAIJ_local() */
2904   b->ijonly = PETSC_FALSE;
2905 
2906   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpibaij_mpiadj_C", MatConvert_MPIBAIJ_MPIAdj));
2907   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpibaij_mpiaij_C", MatConvert_MPIBAIJ_MPIAIJ));
2908   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpibaij_mpisbaij_C", MatConvert_MPIBAIJ_MPISBAIJ));
2909 #if defined(PETSC_HAVE_HYPRE)
2910   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpibaij_hypre_C", MatConvert_AIJ_HYPRE));
2911 #endif
2912   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatStoreValues_C", MatStoreValues_MPIBAIJ));
2913   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatRetrieveValues_C", MatRetrieveValues_MPIBAIJ));
2914   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIBAIJSetPreallocation_C", MatMPIBAIJSetPreallocation_MPIBAIJ));
2915   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPIBAIJSetPreallocationCSR_C", MatMPIBAIJSetPreallocationCSR_MPIBAIJ));
2916   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatDiagonalScaleLocal_C", MatDiagonalScaleLocal_MPIBAIJ));
2917   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetHashTableFactor_C", MatSetHashTableFactor_MPIBAIJ));
2918   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpibaij_is_C", MatConvert_XAIJ_IS));
2919   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPIBAIJ));
2920 
2921   PetscOptionsBegin(PetscObjectComm((PetscObject)B), NULL, "Options for MPIBAIJ matrix", "Mat");
2922   PetscCall(PetscOptionsName("-mat_use_hash_table", "Use hash table to save time in constructing matrix", "MatSetOption", &flg));
2923   if (flg) {
2924     PetscReal fact = 1.39;
2925     PetscCall(MatSetOption(B, MAT_USE_HASH_TABLE, PETSC_TRUE));
2926     PetscCall(PetscOptionsReal("-mat_use_hash_table", "Use hash table factor", "MatMPIBAIJSetHashTableFactor", fact, &fact, NULL));
2927     if (fact <= 1.0) fact = 1.39;
2928     PetscCall(MatMPIBAIJSetHashTableFactor(B, fact));
2929     PetscCall(PetscInfo(B, "Hash table Factor used %5.2g\n", (double)fact));
2930   }
2931   PetscOptionsEnd();
2932   PetscFunctionReturn(PETSC_SUCCESS);
2933 }
2934 
2935 /*MC
2936    MATBAIJ - MATBAIJ = "baij" - A matrix type to be used for block sparse matrices.
2937 
2938    This matrix type is identical to `MATSEQBAIJ` when constructed with a single process communicator,
2939    and `MATMPIBAIJ` otherwise.
2940 
2941    Options Database Keys:
2942 . -mat_type baij - sets the matrix type to `MATBAIJ` during a call to `MatSetFromOptions()`
2943 
2944   Level: beginner
2945 
2946 .seealso: `Mat`, `MatCreateBAIJ()`, `MATSEQBAIJ`, `MATMPIBAIJ`, `MatMPIBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocationCSR()`
2947 M*/
2948 
2949 /*@C
2950    MatMPIBAIJSetPreallocation - Allocates memory for a sparse parallel matrix in `MATMPIBAIJ` format
2951    (block compressed row).
2952 
2953    Collective
2954 
2955    Input Parameters:
2956 +  B - the matrix
2957 .  bs   - size of block; the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row
2958           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()`
2959 .  d_nz  - number of block nonzeros per block row in diagonal portion of local
2960            submatrix  (same for all local rows)
2961 .  d_nnz - array containing the number of block nonzeros in the various block rows
2962            in the diagonal portion of the local submatrix (possibly different for each block
2963            row) or `NULL`.  If you plan to factor the matrix you must leave room for the diagonal entry and
2964            set it even if it is zero.
2965 .  o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2966            submatrix (same for all local rows).
2967 -  o_nnz - array containing the number of nonzeros in the various block rows of the
2968            off-diagonal portion of the local submatrix (possibly different for
2969            each block row) or `NULL`.
2970 
2971    If the *_nnz parameter is given then the *_nz parameter is ignored
2972 
2973    Options Database Keys:
2974 +   -mat_block_size - size of the blocks to use
2975 -   -mat_use_hash_table <fact> - set hash table factor
2976 
2977    Level: intermediate
2978 
2979    Notes:
2980    For good matrix assembly performance
2981    the user should preallocate the matrix storage by setting the parameters
2982    `d_nz` (or `d_nnz`) and `o_nz` (or `o_nnz`).  By setting these parameters accurately,
2983    performance can be increased by more than a factor of 50.
2984 
2985    If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
2986    then it must be used on all processors that share the object for that argument.
2987 
2988    Storage Information:
2989    For a square global matrix we define each processor's diagonal portion
2990    to be its local rows and the corresponding columns (a square submatrix);
2991    each processor's off-diagonal portion encompasses the remainder of the
2992    local matrix (a rectangular submatrix).
2993 
2994    The user can specify preallocated storage for the diagonal part of
2995    the local submatrix with either `d_nz` or `d_nnz` (not both).  Set
2996    `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2997    memory allocation.  Likewise, specify preallocated storage for the
2998    off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2999 
3000    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3001    the figure below we depict these three local rows and all columns (0-11).
3002 
3003 .vb
3004            0 1 2 3 4 5 6 7 8 9 10 11
3005           --------------------------
3006    row 3  |o o o d d d o o o o  o  o
3007    row 4  |o o o d d d o o o o  o  o
3008    row 5  |o o o d d d o o o o  o  o
3009           --------------------------
3010 .ve
3011 
3012    Thus, any entries in the d locations are stored in the d (diagonal)
3013    submatrix, and any entries in the o locations are stored in the
3014    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3015    stored simply in the `MATSEQBAIJ` format for compressed row storage.
3016 
3017    Now `d_nz` should indicate the number of block nonzeros per row in the d matrix,
3018    and `o_nz` should indicate the number of block nonzeros per row in the o matrix.
3019    In general, for PDE problems in which most nonzeros are near the diagonal,
3020    one expects `d_nz` >> `o_nz`.
3021 
3022    You can call `MatGetInfo()` to get information on how effective the preallocation was,
3023    for example the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
3024    You can also run with the option `-info` and look for messages with the string
3025    malloc in them to see if additional memory allocation was needed.
3026 
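   Example:
   A minimal sketch using constant per-block-row counts (the counts 3 and 2 are illustrative only):
.vb
   MatCreate(comm, &B);
   MatSetSizes(B, m, n, PETSC_DETERMINE, PETSC_DETERMINE);
   MatSetType(B, MATMPIBAIJ);
   MatMPIBAIJSetPreallocation(B, bs, 3, NULL, 2, NULL);
.ve
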
3027 .seealso: `Mat`, `MATMPIBAIJ`, `MatCreate()`, `MatCreateSeqBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`, `MatMPIBAIJSetPreallocationCSR()`, `PetscSplitOwnership()`
3028 @*/
3029 PetscErrorCode MatMPIBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
3030 {
3031   PetscFunctionBegin;
3032   PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
3033   PetscValidType(B, 1);
3034   PetscValidLogicalCollectiveInt(B, bs, 2);
3035   PetscTryMethod(B, "MatMPIBAIJSetPreallocation_C", (Mat, PetscInt, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, bs, d_nz, d_nnz, o_nz, o_nnz));
3036   PetscFunctionReturn(PETSC_SUCCESS);
3037 }
3038 
3039 /*@C
3040    MatCreateBAIJ - Creates a sparse parallel matrix in `MATBAIJ` format
3041    (block compressed row).
3042 
3043    Collective
3044 
3045    Input Parameters:
3046 +  comm - MPI communicator
3047 .  bs   - size of block; the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row
3048           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()`
3049 .  m - number of local rows (or `PETSC_DECIDE` to have calculated if M is given)
3050            This value should be the same as the local size used in creating the
3051            y vector for the matrix-vector product y = Ax.
3052 .  n - number of local columns (or `PETSC_DECIDE` to have calculated if N is given)
3053            This value should be the same as the local size used in creating the
3054            x vector for the matrix-vector product y = Ax.
3055 .  M - number of global rows (or `PETSC_DETERMINE` to have calculated if m is given)
3056 .  N - number of global columns (or `PETSC_DETERMINE` to have calculated if n is given)
3057 .  d_nz  - number of nonzero blocks per block row in diagonal portion of local
3058            submatrix  (same for all local rows)
3059 .  d_nnz - array containing the number of nonzero blocks in the various block rows
3060            in the diagonal portion of the local submatrix (possibly different for each block
3061            row) or NULL.  If you plan to factor the matrix you must leave room for the diagonal entry
3062            and set it even if it is zero.
3063 .  o_nz  - number of nonzero blocks per block row in the off-diagonal portion of local
3064            submatrix (same for all local rows).
3065 -  o_nnz - array containing the number of nonzero blocks in the various block rows of the
3066            off-diagonal portion of the local submatrix (possibly different for
3067            each block row) or NULL.
3068 
3069    Output Parameter:
3070 .  A - the matrix
3071 
3072    Options Database Keys:
3073 +   -mat_block_size - size of the blocks to use
3074 -   -mat_use_hash_table <fact> - set hash table factor
3075 
3076    Level: intermediate
3077 
3078    Notes:
3079    For good matrix assembly performance
3080    the user should preallocate the matrix storage by setting the parameters
3081    `d_nz` (or `d_nnz`) and `o_nz` (or `o_nnz`).  By setting these parameters accurately,
3082    performance can be increased by more than a factor of 50.
3083 
3084    It is recommended that one use the `MatCreate()`, `MatSetType()` and/or `MatSetFromOptions()`,
3085    MatXXXXSetPreallocation() paradigm instead of this routine directly.
3086    [MatXXXXSetPreallocation() is, for example, `MatSeqBAIJSetPreallocation()`]
3087 
3088    If the *_nnz parameter is given then the *_nz parameter is ignored
3089 
3090    A nonzero block is any block that has 1 or more nonzeros in it
3091 
3092    The user MUST specify either the local or global matrix dimensions
3093    (possibly both).
3094 
3095    If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
3096    then it must be used on all processors that share the object for that argument.
3097 
3098    Storage Information:
3099    For a square global matrix we define each processor's diagonal portion
3100    to be its local rows and the corresponding columns (a square submatrix);
3101    each processor's off-diagonal portion encompasses the remainder of the
3102    local matrix (a rectangular submatrix).
3103 
3104    The user can specify preallocated storage for the diagonal part of
3105    the local submatrix with either d_nz or d_nnz (not both).  Set
3106    `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
3107    memory allocation.  Likewise, specify preallocated storage for the
3108    off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
3109 
3110    Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
3111    the figure below we depict these three local rows and all columns (0-11).
3112 
3113 .vb
3114            0 1 2 3 4 5 6 7 8 9 10 11
3115           --------------------------
3116    row 3  |o o o d d d o o o o  o  o
3117    row 4  |o o o d d d o o o o  o  o
3118    row 5  |o o o d d d o o o o  o  o
3119           --------------------------
3120 .ve
3121 
3122    Thus, any entries in the d locations are stored in the d (diagonal)
3123    submatrix, and any entries in the o locations are stored in the
3124    o (off-diagonal) submatrix.  Note that the d and the o submatrices are
3125    stored simply in the `MATSEQBAIJ` format for compressed row storage.
3126 
3127    Now `d_nz` should indicate the number of block nonzeros per row in the d matrix,
3128    and `o_nz` should indicate the number of block nonzeros per row in the o matrix.
3129    In general, for PDE problems in which most nonzeros are near the diagonal,
3130    one expects `d_nz` >> `o_nz`.
3131 
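   Example:
   A minimal sketch; the block size 2 and the counts 3 and 2 are illustrative only:
.vb
   Mat A;
   MatCreateBAIJ(comm, 2, PETSC_DECIDE, PETSC_DECIDE, M, N, 3, NULL, 2, NULL, &A);
.ve
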
3132 .seealso: `Mat`, `MatCreate()`, `MatCreateSeqBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`, `MatMPIBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocationCSR()`
3133 @*/
3134 PetscErrorCode MatCreateBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A)
3135 {
3136   PetscMPIInt size;
3137 
3138   PetscFunctionBegin;
3139   PetscCall(MatCreate(comm, A));
3140   PetscCall(MatSetSizes(*A, m, n, M, N));
3141   PetscCallMPI(MPI_Comm_size(comm, &size));
3142   if (size > 1) {
3143     PetscCall(MatSetType(*A, MATMPIBAIJ));
3144     PetscCall(MatMPIBAIJSetPreallocation(*A, bs, d_nz, d_nnz, o_nz, o_nnz));
3145   } else {
3146     PetscCall(MatSetType(*A, MATSEQBAIJ));
3147     PetscCall(MatSeqBAIJSetPreallocation(*A, bs, d_nz, d_nnz));
3148   }
3149   PetscFunctionReturn(PETSC_SUCCESS);
3150 }
3151 
3152 static PetscErrorCode MatDuplicate_MPIBAIJ(Mat matin, MatDuplicateOption cpvalues, Mat *newmat)
3153 {
3154   Mat          mat;
3155   Mat_MPIBAIJ *a, *oldmat = (Mat_MPIBAIJ *)matin->data;
3156   PetscInt     len = 0;
3157 
3158   PetscFunctionBegin;
3159   *newmat = NULL;
3160   PetscCall(MatCreate(PetscObjectComm((PetscObject)matin), &mat));
3161   PetscCall(MatSetSizes(mat, matin->rmap->n, matin->cmap->n, matin->rmap->N, matin->cmap->N));
3162   PetscCall(MatSetType(mat, ((PetscObject)matin)->type_name));
3163 
3164   mat->factortype   = matin->factortype;
3165   mat->preallocated = PETSC_TRUE;
3166   mat->assembled    = PETSC_TRUE;
3167   mat->insertmode   = NOT_SET_VALUES;
3168 
3169   a             = (Mat_MPIBAIJ *)mat->data;
3170   mat->rmap->bs = matin->rmap->bs;
3171   a->bs2        = oldmat->bs2;
3172   a->mbs        = oldmat->mbs;
3173   a->nbs        = oldmat->nbs;
3174   a->Mbs        = oldmat->Mbs;
3175   a->Nbs        = oldmat->Nbs;
3176 
3177   PetscCall(PetscLayoutReference(matin->rmap, &mat->rmap));
3178   PetscCall(PetscLayoutReference(matin->cmap, &mat->cmap));
3179 
3180   a->size         = oldmat->size;
3181   a->rank         = oldmat->rank;
3182   a->donotstash   = oldmat->donotstash;
3183   a->roworiented  = oldmat->roworiented;
3184   a->rowindices   = NULL;
3185   a->rowvalues    = NULL;
3186   a->getrowactive = PETSC_FALSE;
3187   a->barray       = NULL;
3188   a->rstartbs     = oldmat->rstartbs;
3189   a->rendbs       = oldmat->rendbs;
3190   a->cstartbs     = oldmat->cstartbs;
3191   a->cendbs       = oldmat->cendbs;
3192 
3193   /* hash table stuff */
3194   a->ht           = NULL;
3195   a->hd           = NULL;
3196   a->ht_size      = 0;
3197   a->ht_flag      = oldmat->ht_flag;
3198   a->ht_fact      = oldmat->ht_fact;
3199   a->ht_total_ct  = 0;
3200   a->ht_insert_ct = 0;
3201 
3202   PetscCall(PetscArraycpy(a->rangebs, oldmat->rangebs, a->size + 1));
3203   if (oldmat->colmap) {
3204 #if defined(PETSC_USE_CTABLE)
3205     PetscCall(PetscHMapIDuplicate(oldmat->colmap, &a->colmap));
3206 #else
3207     PetscCall(PetscMalloc1(a->Nbs, &a->colmap));
3208     PetscCall(PetscArraycpy(a->colmap, oldmat->colmap, a->Nbs));
3209 #endif
3210   } else a->colmap = NULL;
3211 
3212   if (oldmat->garray && (len = ((Mat_SeqBAIJ *)(oldmat->B->data))->nbs)) {
3213     PetscCall(PetscMalloc1(len, &a->garray));
3214     PetscCall(PetscArraycpy(a->garray, oldmat->garray, len));
3215   } else a->garray = NULL;
3216 
3217   PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)matin), matin->rmap->bs, &mat->bstash));
3218   PetscCall(VecDuplicate(oldmat->lvec, &a->lvec));
3219   PetscCall(VecScatterCopy(oldmat->Mvctx, &a->Mvctx));
3220 
3221   PetscCall(MatDuplicate(oldmat->A, cpvalues, &a->A));
3222   PetscCall(MatDuplicate(oldmat->B, cpvalues, &a->B));
3223   PetscCall(PetscFunctionListDuplicate(((PetscObject)matin)->qlist, &((PetscObject)mat)->qlist));
3224   *newmat = mat;
3225   PetscFunctionReturn(PETSC_SUCCESS);
3226 }
3227 
3228 /* Used for both MPIBAIJ and MPISBAIJ matrices */
3229 PetscErrorCode MatLoad_MPIBAIJ_Binary(Mat mat, PetscViewer viewer)
3230 {
3231   PetscInt     header[4], M, N, nz, bs, m, n, mbs, nbs, rows, cols, sum, i, j, k;
3232   PetscInt    *rowidxs, *colidxs, rs, cs, ce;
3233   PetscScalar *matvals;
3234 
3235   PetscFunctionBegin;
3236   PetscCall(PetscViewerSetUp(viewer));
3237 
3238   /* read in matrix header */
3239   PetscCall(PetscViewerBinaryRead(viewer, header, 4, NULL, PETSC_INT));
3240   PetscCheck(header[0] == MAT_FILE_CLASSID, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Not a matrix object in file");
3241   M  = header[1];
3242   N  = header[2];
3243   nz = header[3];
3244   PetscCheck(M >= 0, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Matrix row size (%" PetscInt_FMT ") in file is negative", M);
3245   PetscCheck(N >= 0, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Matrix column size (%" PetscInt_FMT ") in file is negative", N);
3246   PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Matrix stored in special format on disk, cannot load as MPIBAIJ");
3247 
3248   /* set block sizes from the viewer's .info file */
3249   PetscCall(MatLoad_Binary_BlockSizes(mat, viewer));
3250   /* set local sizes if not set already */
3251   if (mat->rmap->n < 0 && M == N) mat->rmap->n = mat->cmap->n;
3252   if (mat->cmap->n < 0 && M == N) mat->cmap->n = mat->rmap->n;
3253   /* set global sizes if not set already */
3254   if (mat->rmap->N < 0) mat->rmap->N = M;
3255   if (mat->cmap->N < 0) mat->cmap->N = N;
3256   PetscCall(PetscLayoutSetUp(mat->rmap));
3257   PetscCall(PetscLayoutSetUp(mat->cmap));
3258 
3259   /* check if the matrix sizes are correct */
3260   PetscCall(MatGetSize(mat, &rows, &cols));
3261   PetscCheck(M == rows && N == cols, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Matrix in file of different sizes (%" PetscInt_FMT ", %" PetscInt_FMT ") than the input matrix (%" PetscInt_FMT ", %" PetscInt_FMT ")", M, N, rows, cols);
3262   PetscCall(MatGetBlockSize(mat, &bs));
3263   PetscCall(MatGetLocalSize(mat, &m, &n));
3264   PetscCall(PetscLayoutGetRange(mat->rmap, &rs, NULL));
3265   PetscCall(PetscLayoutGetRange(mat->cmap, &cs, &ce));
3266   mbs = m / bs;
3267   nbs = n / bs;
3268 
3269   /* read in row lengths and build row indices */
3270   PetscCall(PetscMalloc1(m + 1, &rowidxs));
3271   PetscCall(PetscViewerBinaryReadAll(viewer, rowidxs + 1, m, PETSC_DECIDE, M, PETSC_INT));
3272   rowidxs[0] = 0;
3273   for (i = 0; i < m; i++) rowidxs[i + 1] += rowidxs[i];
3274   PetscCall(MPIU_Allreduce(&rowidxs[m], &sum, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)viewer)));
3275   PetscCheck(sum == nz, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Inconsistent matrix data in file: nonzeros = %" PetscInt_FMT ", sum-row-lengths = %" PetscInt_FMT, nz, sum);
3276 
3277   /* read in column indices and matrix values */
3278   PetscCall(PetscMalloc2(rowidxs[m], &colidxs, rowidxs[m], &matvals));
3279   PetscCall(PetscViewerBinaryReadAll(viewer, colidxs, rowidxs[m], PETSC_DETERMINE, PETSC_DETERMINE, PETSC_INT));
3280   PetscCall(PetscViewerBinaryReadAll(viewer, matvals, rowidxs[m], PETSC_DETERMINE, PETSC_DETERMINE, PETSC_SCALAR));
3281 
3282   {                /* preallocate matrix storage */
3283     PetscBT    bt; /* helper bit set to count diagonal nonzeros */
3284     PetscHSetI ht; /* helper hash set to count off-diagonal nonzeros */
3285     PetscBool  sbaij, done;
3286     PetscInt  *d_nnz, *o_nnz;
3287 
3288     PetscCall(PetscBTCreate(nbs, &bt));
3289     PetscCall(PetscHSetICreate(&ht));
3290     PetscCall(PetscCalloc2(mbs, &d_nnz, mbs, &o_nnz));
3291     PetscCall(PetscObjectTypeCompare((PetscObject)mat, MATMPISBAIJ, &sbaij));
3292     for (i = 0; i < mbs; i++) {
3293       PetscCall(PetscBTMemzero(nbs, bt));
3294       PetscCall(PetscHSetIClear(ht));
3295       for (k = 0; k < bs; k++) {
3296         PetscInt row = bs * i + k;
3297         for (j = rowidxs[row]; j < rowidxs[row + 1]; j++) {
3298           PetscInt col = colidxs[j];
3299           if (!sbaij || col >= row) {
3300             if (col >= cs && col < ce) {
3301               if (!PetscBTLookupSet(bt, (col - cs) / bs)) d_nnz[i]++;
3302             } else {
3303               PetscCall(PetscHSetIQueryAdd(ht, col / bs, &done));
3304               if (done) o_nnz[i]++;
3305             }
3306           }
3307         }
3308       }
3309     }
3310     PetscCall(PetscBTDestroy(&bt));
3311     PetscCall(PetscHSetIDestroy(&ht));
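    /* this loader serves both MPIBAIJ and MPISBAIJ, so both preallocations are attempted; PetscTryMethod() makes the call for the non-matching type a no-op */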
3312     PetscCall(MatMPIBAIJSetPreallocation(mat, bs, 0, d_nnz, 0, o_nnz));
3313     PetscCall(MatMPISBAIJSetPreallocation(mat, bs, 0, d_nnz, 0, o_nnz));
3314     PetscCall(PetscFree2(d_nnz, o_nnz));
3315   }
3316 
3317   /* store matrix values */
3318   for (i = 0; i < m; i++) {
3319     PetscInt row = rs + i, s = rowidxs[i], e = rowidxs[i + 1];
3320     PetscCall((*mat->ops->setvalues)(mat, 1, &row, e - s, colidxs + s, matvals + s, INSERT_VALUES));
3321   }
3322 
3323   PetscCall(PetscFree(rowidxs));
3324   PetscCall(PetscFree2(colidxs, matvals));
3325   PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
3326   PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY));
3327   PetscFunctionReturn(PETSC_SUCCESS);
3328 }
3329 
3330 PetscErrorCode MatLoad_MPIBAIJ(Mat mat, PetscViewer viewer)
3331 {
3332   PetscBool isbinary;
3333 
3334   PetscFunctionBegin;
3335   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
3336   PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Viewer type %s not yet supported for reading %s matrices", ((PetscObject)viewer)->type_name, ((PetscObject)mat)->type_name);
3337   PetscCall(MatLoad_MPIBAIJ_Binary(mat, viewer));
3338   PetscFunctionReturn(PETSC_SUCCESS);
3339 }
3340 
3341 /*@
3342    MatMPIBAIJSetHashTableFactor - Sets the factor used to compute the size of the matrix's hash table
3343 
3344    Input Parameters:
3345 +  mat  - the matrix
3346 -  fact - the factor used in computing the hash table size (the -mat_use_hash_table option path substitutes 1.39 for values <= 1.0)
3347 
3348    Options Database Key:
3349 .  -mat_use_hash_table <fact> - provide the factor
3350 
3351    Level: advanced
3352 
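   Example:
   A minimal sketch (the factor 1.6 is illustrative only):
.vb
   MatSetOption(mat, MAT_USE_HASH_TABLE, PETSC_TRUE);
   MatMPIBAIJSetHashTableFactor(mat, 1.6);
.ve
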
3353 .seealso: `Mat`, `MATMPIBAIJ`, `MatSetOption()`
3354 @*/
3355 PetscErrorCode MatMPIBAIJSetHashTableFactor(Mat mat, PetscReal fact)
3356 {
3357   PetscFunctionBegin;
3358   PetscTryMethod(mat, "MatSetHashTableFactor_C", (Mat, PetscReal), (mat, fact));
3359   PetscFunctionReturn(PETSC_SUCCESS);
3360 }
3361 
3362 PetscErrorCode MatSetHashTableFactor_MPIBAIJ(Mat mat, PetscReal fact)
3363 {
3364   Mat_MPIBAIJ *baij;
3365 
3366   PetscFunctionBegin;
3367   baij          = (Mat_MPIBAIJ *)mat->data;
3368   baij->ht_fact = fact;
3369   PetscFunctionReturn(PETSC_SUCCESS);
3370 }
3371 
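/* Gives callers access to the diagonal (Ad) and off-diagonal (Ao) SeqBAIJ blocks of a MATMPIBAIJ matrix, plus the map from local columns of Ao to global block columns */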
3372 PetscErrorCode MatMPIBAIJGetSeqBAIJ(Mat A, Mat *Ad, Mat *Ao, const PetscInt *colmap[])
3373 {
3374   Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)A->data;
3375   PetscBool    flg;
3376 
3377   PetscFunctionBegin;
3378   PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIBAIJ, &flg));
3379   PetscCheck(flg, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "This function requires a MATMPIBAIJ matrix as input");
3380   if (Ad) *Ad = a->A;
3381   if (Ao) *Ao = a->B;
3382   if (colmap) *colmap = a->garray;
3383   PetscFunctionReturn(PETSC_SUCCESS);
3384 }
3385 
3386 /*
3387     Special version for direct calls from Fortran (to eliminate two function call overheads)
3388 */
3389 #if defined(PETSC_HAVE_FORTRAN_CAPS)
3390   #define matmpibaijsetvaluesblocked_ MATMPIBAIJSETVALUESBLOCKED
3391 #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
3392   #define matmpibaijsetvaluesblocked_ matmpibaijsetvaluesblocked
3393 #endif
3394 
3395 /*@C
3396   MatMPIBAIJSetValuesBlocked - Direct Fortran call to replace call to `MatSetValuesBlocked()`
3397 
3398   Collective
3399 
3400   Input Parameters:
3401 + mat - the matrix
3402 . min - number of input rows
3403 . im - input rows
3404 . nin - number of input columns
3405 . in - input columns
3406 . v - numerical values input
3407 - addvin - `INSERT_VALUES` or `ADD_VALUES`
3408 
3409   Level: advanced
3410 
3411   Developer Note:
3412     This has a complete copy of `MatSetValuesBlocked_MPIBAIJ()` which is terrible code un-reuse.
3413 
3414 .seealso: `Mat`, `MatSetValuesBlocked()`
3415 @*/
3416 PetscErrorCode matmpibaijsetvaluesblocked_(Mat *matin, PetscInt *min, const PetscInt im[], PetscInt *nin, const PetscInt in[], const MatScalar v[], InsertMode *addvin)
3417 {
3418   /* convert input arguments to C version */
3419   Mat        mat = *matin;
3420   PetscInt   m = *min, n = *nin;
3421   InsertMode addv = *addvin;
3422 
3423   Mat_MPIBAIJ     *baij = (Mat_MPIBAIJ *)mat->data;
3424   const MatScalar *value;
3425   MatScalar       *barray      = baij->barray;
3426   PetscBool        roworiented = baij->roworiented;
3427   PetscInt         i, j, ii, jj, row, col, rstart = baij->rstartbs;
3428   PetscInt         rend = baij->rendbs, cstart = baij->cstartbs, stepval;
3429   PetscInt         cend = baij->cendbs, bs = mat->rmap->bs, bs2 = baij->bs2;
3430 
3431   PetscFunctionBegin;
3432   /* tasks normally handled by MatSetValuesBlocked() */
3433   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
3434   else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
3435   PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3436   if (mat->assembled) {
3437     mat->was_assembled = PETSC_TRUE;
3438     mat->assembled     = PETSC_FALSE;
3439   }
3440   PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
3441 
3442   if (!barray) {
3443     PetscCall(PetscMalloc1(bs2, &barray));
3444     baij->barray = barray;
3445   }
3446 
3447   if (roworiented) stepval = (n - 1) * bs;
3448   else stepval = (m - 1) * bs;
3449 
3450   for (i = 0; i < m; i++) {
3451     if (im[i] < 0) continue;
3452     PetscCheck(im[i] < baij->Mbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large, row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], baij->Mbs - 1);
3453     if (im[i] >= rstart && im[i] < rend) {
3454       row = im[i] - rstart;
3455       for (j = 0; j < n; j++) {
3456         /* if n == 1 (row oriented) or m == 1 (column oriented) the input block is already contiguous and a copy is not required */
3457         if ((roworiented) && (n == 1)) {
3458           barray = (MatScalar *)v + i * bs2;
3459         } else if ((!roworiented) && (m == 1)) {
3460           barray = (MatScalar *)v + j * bs2;
3461         } else { /* Here a copy is required */
3462           if (roworiented) {
3463             value = v + i * (stepval + bs) * bs + j * bs;
3464           } else {
3465             value = v + j * (stepval + bs) * bs + i * bs;
3466           }
3467           for (ii = 0; ii < bs; ii++, value += stepval) {
3468             for (jj = 0; jj < bs; jj++) *barray++ = *value++;
3469           }
3470           barray -= bs2;
3471         }
3472 
3473         if (in[j] >= cstart && in[j] < cend) {
3474           col = in[j] - cstart;
3475           PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->A, row, col, barray, addv, im[i], in[j]));
3476         } else if (in[j] < 0) {
3477           continue;
3478         } else {
3479           PetscCheck(in[j] < baij->Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large, col %" PetscInt_FMT " max %" PetscInt_FMT, in[j], baij->Nbs - 1);
3480           if (mat->was_assembled) {
3481             if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
3482 
3483 #if defined(PETSC_USE_DEBUG)
3484   #if defined(PETSC_USE_CTABLE)
3485             {
3486               PetscInt data;
3487               PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &data));
3488               PetscCheck((data - 1) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incorrect colmap");
3489             }
3490   #else
3491             PetscCheck((baij->colmap[in[j]] - 1) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Incorrect colmap");
3492   #endif
3493 #endif
3494 #if defined(PETSC_USE_CTABLE)
3495             PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col));
3496             col = (col - 1) / bs;
3497 #else
3498             col = (baij->colmap[in[j]] - 1) / bs;
3499 #endif
3500             if (col < 0 && !((Mat_SeqBAIJ *)(baij->A->data))->nonew) {
3501               PetscCall(MatDisAssemble_MPIBAIJ(mat));
3502               col = in[j];
3503             }
3504           } else col = in[j];
3505           PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B, row, col, barray, addv, im[i], in[j]));
3506         }
3507       }
3508     } else {
3509       if (!baij->donotstash) {
3510         if (roworiented) {
3511           PetscCall(MatStashValuesRowBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
3512         } else {
3513           PetscCall(MatStashValuesColBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
3514         }
3515       }
3516     }
3517   }
3518 
3519   /* task normally handled by MatSetValuesBlocked() */
3520   PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
3521   PetscFunctionReturn(PETSC_SUCCESS);
3522 }
3523 
3524 /*@
3525      MatCreateMPIBAIJWithArrays - creates a `MATMPIBAIJ` matrix using arrays that contain the local rows in standard block
3526          CSR format.
3527 
3528    Collective
3529 
3530    Input Parameters:
3531 +  comm - MPI communicator
3532 .  bs - the block size; only a block size of 1 is supported
3533 .  m - number of local rows (Cannot be `PETSC_DECIDE`)
3534 .  n - number of local columns; this value should be the same as the local size used in creating the
3535        x vector for the matrix-vector product y = Ax (or `PETSC_DECIDE` to have it
3536        calculated if N is given). For square matrices n is almost always m.
3537 .  M - number of global rows (or `PETSC_DETERMINE` to have calculated if m is given)
3538 .  N - number of global columns (or `PETSC_DETERMINE` to have calculated if n is given)
3539 .   i - row indices; i[0] = 0 and i[row] = i[row-1] + the number of block entries in block row row-1 of the matrix
3540 .   j - column indices
3541 -   a - matrix values
3542 
3543    Output Parameter:
3544 .   mat - the matrix
3545 
3546    Level: intermediate
3547 
3548    Notes:
3549        The `i`, `j`, and `a` arrays ARE copied by this routine into the internal format used by PETSc;
3550      thus you CANNOT change the matrix entries by changing the values of a[] after you have
3551      called this routine. Use `MatCreateMPIAIJWithSplitArrays()` to avoid needing to copy the arrays.
3552 
3553      The order of the entries in values is the same as the block compressed sparse row storage format; that is, it is
3554      the same as a three dimensional array in Fortran values(bs,bs,nnz) that contains the first column of the first
3555      block, followed by the second column of the first block, and so on.  That is, the blocks are contiguous in memory
3556      with column-major ordering within blocks.
3557 
3558        The `i` and `j` indices are 0 based, and the `i` entries are offsets into the local `j` array.
3559 
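   Example:
   A single-rank sketch with bs = 1, two local rows, and two global columns (the values are illustrative only):
.vb
   PetscInt    i[] = {0, 2, 3};    /* row 0 has 2 blocks, row 1 has 1 block */
   PetscInt    j[] = {0, 1, 1};    /* global block column of each block */
   PetscScalar a[] = {1.0, 2.0, 3.0};
   Mat         A;

   MatCreateMPIBAIJWithArrays(PETSC_COMM_SELF, 1, 2, PETSC_DECIDE, PETSC_DETERMINE, 2, i, j, a, &A);
.ve
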
3560 .seealso: `Mat`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`,
3561           `MPIAIJ`, `MatCreateAIJ()`, `MatCreateMPIAIJWithSplitArrays()`
3562 @*/
3563 PetscErrorCode MatCreateMPIBAIJWithArrays(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, const PetscInt i[], const PetscInt j[], const PetscScalar a[], Mat *mat)
3564 {
3565   PetscFunctionBegin;
3566   PetscCheck(!i[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i (row indices) must start with 0");
3567   PetscCheck(m >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "local number of rows (m) cannot be PETSC_DECIDE, or negative");
3568   PetscCall(MatCreate(comm, mat));
3569   PetscCall(MatSetSizes(*mat, m, n, M, N));
3570   PetscCall(MatSetType(*mat, MATMPIBAIJ));
3571   PetscCall(MatSetBlockSize(*mat, bs));
3572   PetscCall(MatSetUp(*mat));
3573   PetscCall(MatSetOption(*mat, MAT_ROW_ORIENTED, PETSC_FALSE));
3574   PetscCall(MatMPIBAIJSetPreallocationCSR(*mat, bs, i, j, a));
3575   PetscCall(MatSetOption(*mat, MAT_ROW_ORIENTED, PETSC_TRUE));
3576   PetscFunctionReturn(PETSC_SUCCESS);
3577 }
3578 
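/* Concatenates the sequential matrices supplied by the ranks of comm, stacking their rows in rank order, into a single parallel MATBAIJ matrix */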
3579 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIBAIJ(MPI_Comm comm, Mat inmat, PetscInt n, MatReuse scall, Mat *outmat)
3580 {
3581   PetscInt     m, N, i, rstart, nnz, Ii, bs, cbs;
3582   PetscInt    *indx;
3583   PetscScalar *values;
3584 
3585   PetscFunctionBegin;
3586   PetscCall(MatGetSize(inmat, &m, &N));
3587   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
3588     Mat_SeqBAIJ *a = (Mat_SeqBAIJ *)inmat->data;
3589     PetscInt    *dnz, *onz, mbs, Nbs, nbs;
3590     PetscInt    *bindx, rmax = a->rmax, j;
3591     PetscMPIInt  rank, size;
3592 
3593     PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
3594     mbs = m / bs;
3595     Nbs = N / cbs;
3596     if (n == PETSC_DECIDE) PetscCall(PetscSplitOwnershipBlock(comm, cbs, &n, &N));
3597     nbs = n / cbs;
3598 
3599     PetscCall(PetscMalloc1(rmax, &bindx));
3600     MatPreallocateBegin(comm, mbs, nbs, dnz, onz); /* inline function, output __end and __rstart are used below */
3601 
3602     PetscCallMPI(MPI_Comm_rank(comm, &rank));
3603     PetscCallMPI(MPI_Comm_size(comm, &size));
3604     if (rank == size - 1) {
3605       /* Check sum(nbs) = Nbs */
3606       PetscCheck(__end == Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Sum of local block columns %" PetscInt_FMT " != global block columns %" PetscInt_FMT, __end, Nbs);
3607     }
3608 
3609     rstart = __rstart; /* block rstart of *outmat; see inline function MatPreallocateBegin */
3610     for (i = 0; i < mbs; i++) {
3611       PetscCall(MatGetRow_SeqBAIJ(inmat, i * bs, &nnz, &indx, NULL)); /* non-blocked nnz and indx */
3612       nnz = nnz / bs;
3613       for (j = 0; j < nnz; j++) bindx[j] = indx[j * bs] / bs;
3614       PetscCall(MatPreallocateSet(i + rstart, nnz, bindx, dnz, onz));
3615       PetscCall(MatRestoreRow_SeqBAIJ(inmat, i * bs, &nnz, &indx, NULL));
3616     }
3617     PetscCall(PetscFree(bindx));
3618 
3619     PetscCall(MatCreate(comm, outmat));
3620     PetscCall(MatSetSizes(*outmat, m, n, PETSC_DETERMINE, PETSC_DETERMINE));
3621     PetscCall(MatSetBlockSizes(*outmat, bs, cbs));
3622     PetscCall(MatSetType(*outmat, MATBAIJ));
3623     PetscCall(MatSeqBAIJSetPreallocation(*outmat, bs, 0, dnz));
3624     PetscCall(MatMPIBAIJSetPreallocation(*outmat, bs, 0, dnz, 0, onz));
3625     MatPreallocateEnd(dnz, onz);
3626     PetscCall(MatSetOption(*outmat, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
3627   }
3628 
3629   /* numeric phase */
3630   PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
3631   PetscCall(MatGetOwnershipRange(*outmat, &rstart, NULL));
3632 
3633   for (i = 0; i < m; i++) {
3634     PetscCall(MatGetRow_SeqBAIJ(inmat, i, &nnz, &indx, &values));
3635     Ii = i + rstart;
3636     PetscCall(MatSetValues(*outmat, 1, &Ii, nnz, indx, values, INSERT_VALUES));
3637     PetscCall(MatRestoreRow_SeqBAIJ(inmat, i, &nnz, &indx, &values));
3638   }
3639   PetscCall(MatAssemblyBegin(*outmat, MAT_FINAL_ASSEMBLY));
3640   PetscCall(MatAssemblyEnd(*outmat, MAT_FINAL_ASSEMBLY));
3641   PetscFunctionReturn(PETSC_SUCCESS);
3642 }
3643