xref: /petsc/src/mat/impls/sbaij/mpi/mpisbaij.c (revision 418fb43b50673c8dcaa7cccd4409f0faf842ef3b)
1 #include <../src/mat/impls/baij/mpi/mpibaij.h> /*I "petscmat.h" I*/
2 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
3 #include <../src/mat/impls/sbaij/seq/sbaij.h>
4 #include <petscblaslapack.h>
5 
6 static PetscErrorCode MatDestroy_MPISBAIJ(Mat mat)
7 {
8   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
9 
10   PetscFunctionBegin;
11   PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ",Cols=%" PetscInt_FMT, mat->rmap->N, mat->cmap->N));
12   PetscCall(MatStashDestroy_Private(&mat->stash));
13   PetscCall(MatStashDestroy_Private(&mat->bstash));
14   PetscCall(MatDestroy(&baij->A));
15   PetscCall(MatDestroy(&baij->B));
16 #if defined(PETSC_USE_CTABLE)
17   PetscCall(PetscHMapIDestroy(&baij->colmap));
18 #else
19   PetscCall(PetscFree(baij->colmap));
20 #endif
21   PetscCall(PetscFree(baij->garray));
22   PetscCall(VecDestroy(&baij->lvec));
23   PetscCall(VecScatterDestroy(&baij->Mvctx));
24   PetscCall(VecDestroy(&baij->slvec0));
25   PetscCall(VecDestroy(&baij->slvec0b));
26   PetscCall(VecDestroy(&baij->slvec1));
27   PetscCall(VecDestroy(&baij->slvec1a));
28   PetscCall(VecDestroy(&baij->slvec1b));
29   PetscCall(VecScatterDestroy(&baij->sMvctx));
30   PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));
31   PetscCall(PetscFree(baij->barray));
32   PetscCall(PetscFree(baij->hd));
33   PetscCall(VecDestroy(&baij->diag));
34   PetscCall(VecDestroy(&baij->bb1));
35   PetscCall(VecDestroy(&baij->xx1));
36 #if defined(PETSC_USE_REAL_MAT_SINGLE)
37   PetscCall(PetscFree(baij->setvaluescopy));
38 #endif
39   PetscCall(PetscFree(baij->in_loc));
40   PetscCall(PetscFree(baij->v_loc));
41   PetscCall(PetscFree(baij->rangebs));
42   PetscCall(PetscFree(mat->data));
43 
44   PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
45   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatStoreValues_C", NULL));
46   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatRetrieveValues_C", NULL));
47   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocation_C", NULL));
48   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocationCSR_C", NULL));
49 #if defined(PETSC_HAVE_ELEMENTAL)
50   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_elemental_C", NULL));
51 #endif
52 #if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
53   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_scalapack_C", NULL));
54 #endif
55   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpiaij_C", NULL));
56   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpibaij_C", NULL));
57   PetscFunctionReturn(PETSC_SUCCESS);
58 }
59 
60 /* defines MatSetValues_MPI_Hash(), MatAssemblyBegin_MPI_Hash(), MatAssemblyEnd_MPI_Hash(), MatSetUp_MPI_Hash() */
61 #define TYPE SBAIJ
62 #define TYPE_SBAIJ
63 #include "../src/mat/impls/aij/mpi/mpihashmat.h"
64 #undef TYPE
65 #undef TYPE_SBAIJ
66 
67 #if defined(PETSC_HAVE_ELEMENTAL)
68 PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat, MatType, MatReuse, Mat *);
69 #endif
70 #if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
71 PETSC_INTERN PetscErrorCode MatConvert_SBAIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
72 #endif
73 
74 /* This could be moved to matimpl.h */
75 static PetscErrorCode MatPreallocateWithMats_Private(Mat B, PetscInt nm, Mat X[], PetscBool symm[], PetscBool fill)
76 {
77   Mat       preallocator;
78   PetscInt  r, rstart, rend;
79   PetscInt  bs, i, m, n, M, N;
80   PetscBool cong = PETSC_TRUE;
81 
82   PetscFunctionBegin;
83   PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
84   PetscValidLogicalCollectiveInt(B, nm, 2);
85   for (i = 0; i < nm; i++) {
86     PetscValidHeaderSpecific(X[i], MAT_CLASSID, 3);
87     PetscCall(PetscLayoutCompare(B->rmap, X[i]->rmap, &cong));
88     PetscCheck(cong, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for different layouts");
89   }
90   PetscValidLogicalCollectiveBool(B, fill, 5);
91   PetscCall(MatGetBlockSize(B, &bs));
92   PetscCall(MatGetSize(B, &M, &N));
93   PetscCall(MatGetLocalSize(B, &m, &n));
94   PetscCall(MatCreate(PetscObjectComm((PetscObject)B), &preallocator));
95   PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
96   PetscCall(MatSetBlockSize(preallocator, bs));
97   PetscCall(MatSetSizes(preallocator, m, n, M, N));
98   PetscCall(MatSetUp(preallocator));
99   PetscCall(MatGetOwnershipRange(preallocator, &rstart, &rend));
100   for (r = rstart; r < rend; ++r) {
101     PetscInt           ncols;
102     const PetscInt    *row;
103     const PetscScalar *vals;
104 
105     for (i = 0; i < nm; i++) {
106       PetscCall(MatGetRow(X[i], r, &ncols, &row, &vals));
107       PetscCall(MatSetValues(preallocator, 1, &r, ncols, row, vals, INSERT_VALUES));
108       if (symm && symm[i]) PetscCall(MatSetValues(preallocator, ncols, row, 1, &r, vals, INSERT_VALUES));
109       PetscCall(MatRestoreRow(X[i], r, &ncols, &row, &vals));
110     }
111   }
112   PetscCall(MatAssemblyBegin(preallocator, MAT_FINAL_ASSEMBLY));
113   PetscCall(MatAssemblyEnd(preallocator, MAT_FINAL_ASSEMBLY));
114   PetscCall(MatPreallocatorPreallocate(preallocator, fill, B));
115   PetscCall(MatDestroy(&preallocator));
116   PetscFunctionReturn(PETSC_SUCCESS);
117 }
118 
/* Fallback conversion from MPISBAIJ to an arbitrary type: expand the
   symmetric (upper-triangular) storage of A into a fully stored matrix B by
   mirroring every stored entry across the diagonal. */
PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Basic(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat      B;
  PetscInt r;

  PetscFunctionBegin;
  if (reuse != MAT_REUSE_MATRIX) {
    PetscBool symm = PETSC_TRUE, isdense;
    PetscInt  bs;

    /* create B with the same layout and block size as A */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
    PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetType(B, newtype));
    PetscCall(MatGetBlockSize(A, &bs));
    PetscCall(MatSetBlockSize(B, bs));
    PetscCall(PetscLayoutSetUp(B->rmap));
    PetscCall(PetscLayoutSetUp(B->cmap));
    PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, ""));
    if (!isdense) {
      /* sparse target: preallocate from A's pattern plus its transpose (symm=PETSC_TRUE) */
      PetscCall(MatGetRowUpperTriangular(A));
      PetscCall(MatPreallocateWithMats_Private(B, 1, &A, &symm, PETSC_TRUE));
      PetscCall(MatRestoreRowUpperTriangular(A));
    } else {
      PetscCall(MatSetUp(B));
    }
  } else {
    B = *newmat;
    PetscCall(MatZeroEntries(B));
  }

  /* copy values: each stored entry (r,c) is also inserted at (c,r) */
  PetscCall(MatGetRowUpperTriangular(A));
  for (r = A->rmap->rstart; r < A->rmap->rend; r++) {
    PetscInt           ncols;
    const PetscInt    *row;
    const PetscScalar *vals;

    PetscCall(MatGetRow(A, r, &ncols, &row, &vals));
    PetscCall(MatSetValues(B, 1, &r, ncols, row, vals, INSERT_VALUES));
#if defined(PETSC_USE_COMPLEX)
    if (A->hermitian == PETSC_BOOL3_TRUE) {
      /* Hermitian case: mirrored entries must be conjugated */
      PetscInt i;
      for (i = 0; i < ncols; i++) PetscCall(MatSetValue(B, row[i], r, PetscConj(vals[i]), INSERT_VALUES));
    } else {
      PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
    }
#else
    PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
#endif
    PetscCall(MatRestoreRow(A, r, &ncols, &row, &vals));
  }
  PetscCall(MatRestoreRowUpperTriangular(A));
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

  if (reuse == MAT_INPLACE_MATRIX) {
    /* replace A's internals with B's, destroying the old A */
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
180 
181 static PetscErrorCode MatStoreValues_MPISBAIJ(Mat mat)
182 {
183   Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;
184 
185   PetscFunctionBegin;
186   PetscCall(MatStoreValues(aij->A));
187   PetscCall(MatStoreValues(aij->B));
188   PetscFunctionReturn(PETSC_SUCCESS);
189 }
190 
191 static PetscErrorCode MatRetrieveValues_MPISBAIJ(Mat mat)
192 {
193   Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;
194 
195   PetscFunctionBegin;
196   PetscCall(MatRetrieveValues(aij->A));
197   PetscCall(MatRetrieveValues(aij->B));
198   PetscFunctionReturn(PETSC_SUCCESS);
199 }
200 
/* Insert or add a single scalar `value` at local point indices (row,col) of the
   diagonal block A (SeqSBAIJ).  (orow,ocol) are the original global indices,
   used only in error messages.  This macro reads and writes many locals of the
   enclosing function (a, bs, bs2, rp, ap, nrow, rmax, low, high, t, _i, N, ...). */
#define MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, orow, ocol) \
  do { \
    brow = row / bs; /* block row; point rp/ap at its column indices and values */ \
    rp   = aj + ai[brow]; \
    ap   = aa + bs2 * ai[brow]; \
    rmax = aimax[brow]; \
    nrow = ailen[brow]; \
    bcol = col / bs; \
    ridx = row % bs; /* position of the scalar inside the bs x bs block */ \
    cidx = col % bs; \
    low  = 0; \
    high = nrow; \
    /* binary search narrows the range, then a short linear scan finds bcol */ \
    while (high - low > 3) { \
      t = (low + high) / 2; \
      if (rp[t] > bcol) high = t; \
      else low = t; \
    } \
    for (_i = low; _i < high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { /* block already present: update in place */ \
        bap = ap + bs2 * _i + bs * cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value; \
        else *bap = value; \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; /* nonew==1: silently drop new nonzeros */ \
    PetscCheck(a->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, brow, bcol, rmax, aa, ai, aj, rp, ap, aimax, a->nonew, MatScalar); \
    N = nrow++ - 1; \
    /* shift up all the later entries in this row */ \
    PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
    PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
    PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
    rp[_i]                          = bcol; \
    ap[bs2 * _i + bs * cidx + ridx] = value; \
  a_noinsert:; \
    ailen[brow] = nrow; \
  } while (0)
240 
/* Same as MatSetValues_SeqSBAIJ_A_Private but for the off-diagonal block B
   (SeqBAIJ): insert or add `value` at local point indices (row,col); (orow,ocol)
   are the original global indices for error messages.  Uses the b-prefixed
   locals (b, bi, bj, bimax, bilen, ba) of the enclosing function. */
#define MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, orow, ocol) \
  do { \
    brow = row / bs; /* block row; point rp/ap at its column indices and values */ \
    rp   = bj + bi[brow]; \
    ap   = ba + bs2 * bi[brow]; \
    rmax = bimax[brow]; \
    nrow = bilen[brow]; \
    bcol = col / bs; \
    ridx = row % bs; /* position of the scalar inside the bs x bs block */ \
    cidx = col % bs; \
    low  = 0; \
    high = nrow; \
    /* binary search narrows the range, then a short linear scan finds bcol */ \
    while (high - low > 3) { \
      t = (low + high) / 2; \
      if (rp[t] > bcol) high = t; \
      else low = t; \
    } \
    for (_i = low; _i < high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { /* block already present: update in place */ \
        bap = ap + bs2 * _i + bs * cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value; \
        else *bap = value; \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; /* nonew==1: silently drop new nonzeros */ \
    PetscCheck(b->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B, b->mbs, bs2, nrow, brow, bcol, rmax, ba, bi, bj, rp, ap, bimax, b->nonew, MatScalar); \
    N = nrow++ - 1; \
    /* shift up all the later entries in this row */ \
    PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
    PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
    PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
    rp[_i]                          = bcol; \
    ap[bs2 * _i + bs * cidx + ridx] = value; \
  b_noinsert:; \
    bilen[brow] = nrow; \
  } while (0)
280 
281 /* Only add/insert a(i,j) with i<=j (blocks).
282    Any a(i,j) with i>j input by user is ignored or generates an error
283 */
static PetscErrorCode MatSetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  MatScalar     value;
  PetscBool     roworiented = baij->roworiented;
  PetscInt      i, j, row, col;
  PetscInt      rstart_orig = mat->rmap->rstart;
  PetscInt      rend_orig = mat->rmap->rend, cstart_orig = mat->cmap->rstart;
  PetscInt      cend_orig = mat->cmap->rend, bs = mat->rmap->bs;

  /* Some Variables required in the macro */
  Mat           A     = baij->A;
  Mat_SeqSBAIJ *a     = (Mat_SeqSBAIJ *)A->data;
  PetscInt     *aimax = a->imax, *ai = a->i, *ailen = a->ilen, *aj = a->j;
  MatScalar    *aa = a->a;

  Mat          B     = baij->B;
  Mat_SeqBAIJ *b     = (Mat_SeqBAIJ *)B->data;
  PetscInt    *bimax = b->imax, *bi = b->i, *bilen = b->ilen, *bj = b->j;
  MatScalar   *ba = b->a;

  PetscInt  *rp, ii, nrow, _i, rmax, N, brow, bcol;
  PetscInt   low, high, t, ridx, cidx, bs2 = a->bs2;
  MatScalar *ap, *bap;

  /* for stash */
  PetscInt   n_loc, *in_loc = NULL;
  MatScalar *v_loc = NULL;

  PetscFunctionBegin;
  /* grow the per-call scratch buffers used when stashing off-process rows */
  if (!baij->donotstash) {
    if (n > baij->n_loc) {
      PetscCall(PetscFree(baij->in_loc));
      PetscCall(PetscFree(baij->v_loc));
      PetscCall(PetscMalloc1(n, &baij->in_loc));
      PetscCall(PetscMalloc1(n, &baij->v_loc));

      baij->n_loc = n;
    }
    in_loc = baij->in_loc;
    v_loc  = baij->v_loc;
  }

  for (i = 0; i < m; i++) {
    if (im[i] < 0) continue; /* negative rows are silently skipped */
    PetscCheck(im[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], mat->rmap->N - 1);
    if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
      row = im[i] - rstart_orig;                     /* local row index */
      for (j = 0; j < n; j++) {
        /* only the upper block-triangular part may be set in SBAIJ format */
        if (im[i] / bs > in[j] / bs) {
          PetscCheck(a->ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
          continue; /* ignore lower triangular blocks */
        }
        if (in[j] >= cstart_orig && in[j] < cend_orig) { /* diag entry (A) */
          col  = in[j] - cstart_orig;                    /* local col index */
          brow = row / bs;
          bcol = col / bs;
          if (brow > bcol) continue; /* ignore lower triangular blocks of A */
          if (roworiented) value = v[i * n + j];
          else value = v[i + j * m];
          MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, im[i], in[j]);
          /* PetscCall(MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv)); */
        } else if (in[j] < 0) {
          continue; /* negative columns are silently skipped */
        } else {
          PetscCheck(in[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[j], mat->cmap->N - 1);
          /* off-diag entry (B) */
          if (mat->was_assembled) {
            /* map the global block column to B's compressed local numbering */
            if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
#if defined(PETSC_USE_CTABLE)
            PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] / bs + 1, 0, &col));
            col = col - 1; /* colmap stores index+1; 0 means "not present" */
#else
            col = baij->colmap[in[j] / bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqSBAIJ *)baij->A->data)->nonew) {
              /* column not in B's pattern: fall back to global numbering and rebuild later */
              PetscCall(MatDisAssemble_MPISBAIJ(mat));
              col = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B     = baij->B;
              b     = (Mat_SeqBAIJ *)B->data;
              bimax = b->imax;
              bi    = b->i;
              bilen = b->ilen;
              bj    = b->j;
              ba    = b->a;
            } else col += in[j] % bs;
          } else col = in[j];
          if (roworiented) value = v[i * n + j];
          else value = v[i + j * m];
          MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, im[i], in[j]);
          /* PetscCall(MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv)); */
        }
      }
    } else { /* off processor entry */
      PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        /* stash only the upper-triangular part of the row for later communication */
        n_loc          = 0;
        for (j = 0; j < n; j++) {
          if (im[i] / bs > in[j] / bs) continue; /* ignore lower triangular blocks */
          in_loc[n_loc] = in[j];
          if (roworiented) {
            v_loc[n_loc] = v[i * n + j];
          } else {
            v_loc[n_loc] = v[j * m + i];
          }
          n_loc++;
        }
        PetscCall(MatStashValuesRow_Private(&mat->stash, im[i], n_loc, in_loc, v_loc, PETSC_FALSE));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
399 
400 static inline PetscErrorCode MatSetValuesBlocked_SeqSBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
401 {
402   Mat_SeqSBAIJ      *a = (Mat_SeqSBAIJ *)A->data;
403   PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
404   PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
405   PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
406   PetscBool          roworiented = a->roworiented;
407   const PetscScalar *value       = v;
408   MatScalar         *ap, *aa = a->a, *bap;
409 
410   PetscFunctionBegin;
411   if (col < row) {
412     PetscCheck(a->ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
413     PetscFunctionReturn(PETSC_SUCCESS); /* ignore lower triangular block */
414   }
415   rp    = aj + ai[row];
416   ap    = aa + bs2 * ai[row];
417   rmax  = imax[row];
418   nrow  = ailen[row];
419   value = v;
420   low   = 0;
421   high  = nrow;
422 
423   while (high - low > 7) {
424     t = (low + high) / 2;
425     if (rp[t] > col) high = t;
426     else low = t;
427   }
428   for (i = low; i < high; i++) {
429     if (rp[i] > col) break;
430     if (rp[i] == col) {
431       bap = ap + bs2 * i;
432       if (roworiented) {
433         if (is == ADD_VALUES) {
434           for (ii = 0; ii < bs; ii++) {
435             for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
436           }
437         } else {
438           for (ii = 0; ii < bs; ii++) {
439             for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
440           }
441         }
442       } else {
443         if (is == ADD_VALUES) {
444           for (ii = 0; ii < bs; ii++) {
445             for (jj = 0; jj < bs; jj++) *bap++ += *value++;
446           }
447         } else {
448           for (ii = 0; ii < bs; ii++) {
449             for (jj = 0; jj < bs; jj++) *bap++ = *value++;
450           }
451         }
452       }
453       goto noinsert2;
454     }
455   }
456   if (nonew == 1) goto noinsert2;
457   PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new block index nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
458   MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
459   N = nrow++ - 1;
460   high++;
461   /* shift up all the later entries in this row */
462   PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
463   PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
464   rp[i] = col;
465   bap   = ap + bs2 * i;
466   if (roworiented) {
467     for (ii = 0; ii < bs; ii++) {
468       for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
469     }
470   } else {
471     for (ii = 0; ii < bs; ii++) {
472       for (jj = 0; jj < bs; jj++) *bap++ = *value++;
473     }
474   }
475 noinsert2:;
476   ailen[row] = nrow;
477   PetscFunctionReturn(PETSC_SUCCESS);
478 }
479 
480 /*
481    This routine is exactly duplicated in mpibaij.c
482 */
/* Insert or add one bs x bs block of values `v` at block indices (row,col) of a
   SeqBAIJ matrix.  (orow,ocol) are the original global block indices, used only
   in error messages.  `v` is read row-major when a->roworiented, column-major
   otherwise; blocks are stored column-major. */
static inline PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ *)A->data;
  PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
  PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
  PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
  PetscBool          roworiented = a->roworiented;
  const PetscScalar *value       = v;
  MatScalar         *ap, *aa = a->a, *bap;

  PetscFunctionBegin;
  /* locate the stored blocks of block row `row` */
  rp    = aj + ai[row];
  ap    = aa + bs2 * ai[row];
  rmax  = imax[row];
  nrow  = ailen[row];
  low   = 0;
  high  = nrow;
  value = v;
  /* binary search narrows the range, then a short linear scan finds col */
  while (high - low > 7) {
    t = (low + high) / 2;
    if (rp[t] > col) high = t;
    else low = t;
  }
  for (i = low; i < high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      /* block already present: update it in place */
      bap = ap + bs2 * i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
          }
        } else {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++, value += bs) {
            for (jj = 0; jj < bs; jj++) bap[jj] += value[jj];
            bap += bs;
          }
        } else {
          for (ii = 0; ii < bs; ii++, value += bs) {
            for (jj = 0; jj < bs; jj++) bap[jj] = value[jj];
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2; /* nonew==1: silently drop new blocks */
  PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new global block indexed nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
  N = nrow++ - 1;
  high++;
  /* shift up all the later entries in this row */
  PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
  PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
  rp[i] = col;
  bap   = ap + bs2 * i;
  if (roworiented) {
    for (ii = 0; ii < bs; ii++) {
      for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
    }
  } else {
    for (ii = 0; ii < bs; ii++) {
      for (jj = 0; jj < bs; jj++) *bap++ = *value++;
    }
  }
noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(PETSC_SUCCESS);
}
559 
560 /*
561     This routine could be optimized by removing the need for the block copy below and passing stride information
562   to the above inline routines; similarly in MatSetValuesBlocked_MPIBAIJ()
563 */
/* Insert or add an m x n array of bs x bs blocks `v` at global block indices
   (im[], in[]).  Blocks below the diagonal are ignored or rejected; off-process
   rows are stashed for communication at assembly time. */
static PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const MatScalar v[], InsertMode addv)
{
  Mat_MPISBAIJ    *baij = (Mat_MPISBAIJ *)mat->data;
  const MatScalar *value;
  MatScalar       *barray      = baij->barray;
  PetscBool        roworiented = baij->roworiented, ignore_ltriangular = ((Mat_SeqSBAIJ *)baij->A->data)->ignore_ltriangular;
  PetscInt         i, j, ii, jj, row, col, rstart = baij->rstartbs;
  PetscInt         rend = baij->rendbs, cstart = baij->cstartbs, stepval;
  PetscInt         cend = baij->cendbs, bs = mat->rmap->bs, bs2 = baij->bs2;

  PetscFunctionBegin;
  /* lazily allocate the bs2-sized scratch buffer used to repack one block */
  if (!barray) {
    PetscCall(PetscMalloc1(bs2, &barray));
    baij->barray = barray;
  }

  /* stride (in scalars) between consecutive rows (or columns) of one block in v */
  if (roworiented) {
    stepval = (n - 1) * bs;
  } else {
    stepval = (m - 1) * bs;
  }
  for (i = 0; i < m; i++) {
    if (im[i] < 0) continue; /* negative block rows are silently skipped */
    PetscCheck(im[i] < baij->Mbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed row too large %" PetscInt_FMT " max %" PetscInt_FMT, im[i], baij->Mbs - 1);
    if (im[i] >= rstart && im[i] < rend) { /* locally owned block row */
      row = im[i] - rstart;
      for (j = 0; j < n; j++) {
        if (im[i] > in[j]) {
          PetscCheck(ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
          continue; /* ignore lower triangular blocks */
        }
        /* If NumCol = 1 then a copy is not required */
        if (roworiented && n == 1) {
          barray = (MatScalar *)v + i * bs2; /* block is already contiguous in v */
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar *)v + j * bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + i * (stepval + bs) * bs + j * bs;
          } else {
            value = v + j * (stepval + bs) * bs + i * bs;
          }
          /* gather the strided block into the contiguous scratch buffer */
          for (ii = 0; ii < bs; ii++, value += stepval) {
            for (jj = 0; jj < bs; jj++) *barray++ = *value++;
          }
          barray -= bs2; /* rewind to the start of the copied block */
        }

        if (in[j] >= cstart && in[j] < cend) { /* diagonal part A */
          col = in[j] - cstart;
          PetscCall(MatSetValuesBlocked_SeqSBAIJ_Inlined(baij->A, row, col, barray, addv, im[i], in[j]));
        } else if (in[j] < 0) {
          continue; /* negative block columns are silently skipped */
        } else { /* off-diagonal part B */
          PetscCheck(in[j] < baij->Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed column too large %" PetscInt_FMT " max %" PetscInt_FMT, in[j], baij->Nbs - 1);
          if (mat->was_assembled) {
            /* map the global block column to B's compressed local numbering */
            if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));

#if defined(PETSC_USE_CTABLE)
            PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col));
            col = col < 1 ? -1 : (col - 1) / bs; /* colmap stores scaled index+1; 0 means absent */
#else
            col = baij->colmap[in[j]] < 1 ? -1 : (baij->colmap[in[j]] - 1) / bs;
#endif
            /* NOTE(review): baij->A holds SeqSBAIJ data but is cast to Mat_SeqBAIJ
               here; only ->nonew is read — assumes the two structs share that
               initial layout, confirm against the struct headers */
            if (col < 0 && !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
              PetscCall(MatDisAssemble_MPISBAIJ(mat));
              col = in[j];
            }
          } else col = in[j];
          PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B, row, col, barray, addv, im[i], in[j]));
        }
      }
    } else { /* off-process block row: stash for assembly-time communication */
      PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process block indexed row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          PetscCall(MatStashValuesRowBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        } else {
          PetscCall(MatStashValuesColBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
649 
/* Fetch an m x n array of scalar entries at global indices (idxm[], idxn[])
   into v (row-major).  Only locally owned rows are supported; entries whose
   column falls outside both the diagonal and off-diagonal patterns return 0. */
static PetscErrorCode MatGetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  PetscInt      bs = mat->rmap->bs, i, j, bsrstart = mat->rmap->rstart, bsrend = mat->rmap->rend;
  PetscInt      bscstart = mat->cmap->rstart, bscend = mat->cmap->rend, row, col, data;

  PetscFunctionBegin;
  for (i = 0; i < m; i++) {
    if (idxm[i] < 0) continue; /* negative row */
    PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
    PetscCheck(idxm[i] >= bsrstart && idxm[i] < bsrend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local values currently supported");
    row = idxm[i] - bsrstart;
    for (j = 0; j < n; j++) {
      if (idxn[j] < 0) continue; /* negative column */
      PetscCheck(idxn[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, idxn[j], mat->cmap->N - 1);
      if (idxn[j] >= bscstart && idxn[j] < bscend) {
        /* column lies in the diagonal block A */
        col = idxn[j] - bscstart;
        PetscCall(MatGetValues_SeqSBAIJ(baij->A, 1, &row, 1, &col, v + i * n + j));
      } else {
        /* off-diagonal block B: translate the global block column via colmap */
        if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
#if defined(PETSC_USE_CTABLE)
        PetscCall(PetscHMapIGetWithDefault(baij->colmap, idxn[j] / bs + 1, 0, &data));
        data--; /* colmap stores index+1; 0 means "not present" */
#else
        data = baij->colmap[idxn[j] / bs] - 1;
#endif
        if (data < 0 || baij->garray[data / bs] != idxn[j] / bs) *(v + i * n + j) = 0.0; /* not in pattern */
        else {
          col = data + idxn[j] % bs;
          PetscCall(MatGetValues_SeqBAIJ(baij->B, 1, &row, 1, &col, v + i * n + j));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
686 
687 static PetscErrorCode MatNorm_MPISBAIJ(Mat mat, NormType type, PetscReal *norm)
688 {
689   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
690   PetscReal     sum[2], *lnorm2;
691 
692   PetscFunctionBegin;
693   if (baij->size == 1) {
694     PetscCall(MatNorm(baij->A, type, norm));
695   } else {
696     if (type == NORM_FROBENIUS) {
697       PetscCall(PetscMalloc1(2, &lnorm2));
698       PetscCall(MatNorm(baij->A, type, lnorm2));
699       *lnorm2 = (*lnorm2) * (*lnorm2);
700       lnorm2++; /* squar power of norm(A) */
701       PetscCall(MatNorm(baij->B, type, lnorm2));
702       *lnorm2 = (*lnorm2) * (*lnorm2);
703       lnorm2--; /* squar power of norm(B) */
704       PetscCallMPI(MPIU_Allreduce(lnorm2, sum, 2, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
705       *norm = PetscSqrtReal(sum[0] + 2 * sum[1]);
706       PetscCall(PetscFree(lnorm2));
707     } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */
708       Mat_SeqSBAIJ *amat = (Mat_SeqSBAIJ *)baij->A->data;
709       Mat_SeqBAIJ  *bmat = (Mat_SeqBAIJ *)baij->B->data;
710       PetscReal    *rsum, vabs;
711       PetscInt     *jj, *garray = baij->garray, rstart = baij->rstartbs, nz;
712       PetscInt      brow, bcol, col, bs = baij->A->rmap->bs, row, grow, gcol, mbs = amat->mbs;
713       MatScalar    *v;
714 
715       PetscCall(PetscMalloc1(mat->cmap->N, &rsum));
716       PetscCall(PetscArrayzero(rsum, mat->cmap->N));
717       /* Amat */
718       v  = amat->a;
719       jj = amat->j;
720       for (brow = 0; brow < mbs; brow++) {
721         grow = bs * (rstart + brow);
722         nz   = amat->i[brow + 1] - amat->i[brow];
723         for (bcol = 0; bcol < nz; bcol++) {
724           gcol = bs * (rstart + *jj);
725           jj++;
726           for (col = 0; col < bs; col++) {
727             for (row = 0; row < bs; row++) {
728               vabs = PetscAbsScalar(*v);
729               v++;
730               rsum[gcol + col] += vabs;
731               /* non-diagonal block */
732               if (bcol > 0 && vabs > 0.0) rsum[grow + row] += vabs;
733             }
734           }
735         }
736         PetscCall(PetscLogFlops(nz * bs * bs));
737       }
738       /* Bmat */
739       v  = bmat->a;
740       jj = bmat->j;
741       for (brow = 0; brow < mbs; brow++) {
742         grow = bs * (rstart + brow);
743         nz   = bmat->i[brow + 1] - bmat->i[brow];
744         for (bcol = 0; bcol < nz; bcol++) {
745           gcol = bs * garray[*jj];
746           jj++;
747           for (col = 0; col < bs; col++) {
748             for (row = 0; row < bs; row++) {
749               vabs = PetscAbsScalar(*v);
750               v++;
751               rsum[gcol + col] += vabs;
752               rsum[grow + row] += vabs;
753             }
754           }
755         }
756         PetscCall(PetscLogFlops(nz * bs * bs));
757       }
758       PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, rsum, mat->cmap->N, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
759       *norm = 0.0;
760       for (col = 0; col < mat->cmap->N; col++) {
761         if (rsum[col] > *norm) *norm = rsum[col];
762       }
763       PetscCall(PetscFree(rsum));
764     } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for this norm yet");
765   }
766   PetscFunctionReturn(PETSC_SUCCESS);
767 }
768 
769 static PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat, MatAssemblyType mode)
770 {
771   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
772   PetscInt      nstash, reallocs;
773 
774   PetscFunctionBegin;
775   if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
776 
777   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
778   PetscCall(MatStashScatterBegin_Private(mat, &mat->bstash, baij->rangebs));
779   PetscCall(MatStashGetInfo_Private(&mat->stash, &nstash, &reallocs));
780   PetscCall(PetscInfo(mat, "Stash has %" PetscInt_FMT " entries,uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
781   PetscCall(MatStashGetInfo_Private(&mat->stash, &nstash, &reallocs));
782   PetscCall(PetscInfo(mat, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
783   PetscFunctionReturn(PETSC_SUCCESS);
784 }
785 
/*
  Completes parallel assembly of an MPISBAIJ matrix.

  Drains the point stash and block stash of entries received from other
  processes (inserting them via MatSetValues_MPISBAIJ / MatSetValuesBlocked_MPISBAIJ),
  assembles the local diagonal block A and off-diagonal block B, coordinates
  disassembly across ranks when any process needs to reassemble, sets up the
  multiply scatters on first final assembly, and synchronizes the global
  nonzero state.
*/
static PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat, MatAssemblyType mode)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  Mat_SeqSBAIJ *a    = (Mat_SeqSBAIJ *)baij->A->data;
  PetscInt      i, j, rstart, ncols, flg, bs2 = baij->bs2;
  PetscInt     *row, *col;
  PetscBool     all_assembled;
  PetscMPIInt   n;
  PetscBool     r1, r2, r3; /* saved row-oriented flags, restored after block-stash processing */
  MatScalar    *val;

  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!baij->donotstash && !mat->nooffprocentries) {
    /* drain the point stash: each received message holds (row,col,val) triples */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&mat->stash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i, mat->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&mat->stash));
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the row-oriented flag to column-oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ *)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    ((Mat_SeqBAIJ *)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&mat->bstash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* each stashed block is bs2 = bs*bs values, hence the i*bs2 offset */
        PetscCall(MatSetValuesBlocked_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i * bs2, mat->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&mat->bstash));

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ *)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  PetscCall(MatAssemblyBegin(baij->A, mode));
  PetscCall(MatAssemblyEnd(baij->A, mode));

  /* determine if any process has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no process disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ *)baij->B->data)->nonew) {
    /* collective: all_assembled is true only if every rank was already assembled */
    PetscCallMPI(MPIU_Allreduce(&mat->was_assembled, &all_assembled, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
    if (mat->was_assembled && !all_assembled) PetscCall(MatDisAssemble_MPISBAIJ(mat));
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) PetscCall(MatSetUpMultiply_MPISBAIJ(mat)); /* setup Mvctx and sMvctx */
  PetscCall(MatAssemblyBegin(baij->B, mode));
  PetscCall(MatAssemblyEnd(baij->B, mode));

  /* row work arrays from a prior MatGetRow() are invalidated by assembly */
  PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));

  baij->rowvalues = NULL;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    PetscCallMPI(MPIU_Allreduce(&state, &mat->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)mat)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
880 
881 extern PetscErrorCode MatSetValues_MPIBAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
882 #include <petscdraw.h>
/*
  Views an MPISBAIJ matrix on an ASCII, draw, or socket viewer.

  For ASCII info formats, prints per-rank statistics and returns early.
  Otherwise the entire matrix is gathered onto rank 0 (as a new MPISBAIJ
  matrix with all rows on the first process) and viewed there; all ranks
  participate because draw viewers synchronize graphics across the
  communicator.
*/
static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat, PetscViewer viewer)
{
  Mat_MPISBAIJ     *baij = (Mat_MPISBAIJ *)mat->data;
  PetscInt          bs   = mat->rmap->bs;
  PetscMPIInt       rank = baij->rank;
  PetscBool         isascii, isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* per-rank nonzero statistics; synchronized printing keeps rank order */
      MatInfo info;
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
      PetscCall(MatGetInfo(mat, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local rows %" PetscInt_FMT " nz %" PetscInt_FMT " nz alloced %" PetscInt_FMT " bs %" PetscInt_FMT " mem %g\n", rank, mat->rmap->n, (PetscInt)info.nz_used, (PetscInt)info.nz_allocated,
                                                   mat->rmap->bs, info.memory));
      PetscCall(MatGetInfo(baij->A, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] on-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
      PetscCall(MatGetInfo(baij->B, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] off-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "Information on VecScatter used in matrix-vector product: \n"));
      PetscCall(VecScatterView(baij->Mvctx, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  block size is %" PetscInt_FMT "\n", bs));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      /* nothing to print for factor info */
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawIsNull(draw, &isnull));
    if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
  }

  {
    /* assemble the entire matrix onto first processor. */
    Mat           A;
    Mat_SeqSBAIJ *Aloc;
    Mat_SeqBAIJ  *Bloc;
    PetscInt      M = mat->rmap->N, N = mat->cmap->N, *ai, *aj, col, i, j, k, *rvals, mbs = baij->mbs;
    MatScalar    *a;
    const char   *matname;

    /* Should this be the same type as mat? */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &A));
    if (rank == 0) {
      PetscCall(MatSetSizes(A, M, N, M, N));
    } else {
      PetscCall(MatSetSizes(A, 0, 0, M, N));
    }
    PetscCall(MatSetType(A, MATMPISBAIJ));
    PetscCall(MatMPISBAIJSetPreallocation(A, mat->rmap->bs, 0, NULL, 0, NULL));
    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));

    /* copy over the A part */
    Aloc = (Mat_SeqSBAIJ *)baij->A->data;
    ai   = Aloc->i;
    aj   = Aloc->j;
    a    = Aloc->a;
    /* rvals holds the bs consecutive global row indices of block-row i */
    PetscCall(PetscMalloc1(bs, &rvals));

    for (i = 0; i < mbs; i++) {
      rvals[0] = bs * (baij->rstartbs + i);
      for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
      for (j = ai[i]; j < ai[i + 1]; j++) {
        col = (baij->cstartbs + aj[j]) * bs;
        for (k = 0; k < bs; k++) {
          /* insert one column of the bs x bs block at a time (values are column-ordered) */
          PetscCall(MatSetValues_MPISBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
          col++;
          a += bs;
        }
      }
    }
    /* copy over the B part */
    Bloc = (Mat_SeqBAIJ *)baij->B->data;
    ai   = Bloc->i;
    aj   = Bloc->j;
    a    = Bloc->a;
    for (i = 0; i < mbs; i++) {
      rvals[0] = bs * (baij->rstartbs + i);
      for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
      for (j = ai[i]; j < ai[i + 1]; j++) {
        /* garray maps local off-diagonal block columns to global block columns */
        col = baij->garray[aj[j]] * bs;
        for (k = 0; k < bs; k++) {
          PetscCall(MatSetValues_MPIBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
          col++;
          a += bs;
        }
      }
    }
    PetscCall(PetscFree(rvals));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (((PetscObject)mat)->name) PetscCall(PetscObjectGetName((PetscObject)mat, &matname));
    if (rank == 0) {
      /* on rank 0 the gathered matrix lives entirely in its diagonal block */
      if (((PetscObject)mat)->name) PetscCall(PetscObjectSetName((PetscObject)((Mat_MPISBAIJ *)A->data)->A, matname));
      PetscCall(MatView_SeqSBAIJ(((Mat_MPISBAIJ *)A->data)->A, sviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    PetscCall(MatDestroy(&A));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1003 
1004 /* Used for both MPIBAIJ and MPISBAIJ matrices */
1005 #define MatView_MPISBAIJ_Binary MatView_MPIBAIJ_Binary
1006 
1007 static PetscErrorCode MatView_MPISBAIJ(Mat mat, PetscViewer viewer)
1008 {
1009   PetscBool isascii, isdraw, issocket, isbinary;
1010 
1011   PetscFunctionBegin;
1012   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1013   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
1014   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSOCKET, &issocket));
1015   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
1016   if (isascii || isdraw || issocket) PetscCall(MatView_MPISBAIJ_ASCIIorDraworSocket(mat, viewer));
1017   else if (isbinary) PetscCall(MatView_MPISBAIJ_Binary(mat, viewer));
1018   PetscFunctionReturn(PETSC_SUCCESS);
1019 }
1020 
#if defined(PETSC_USE_COMPLEX)
/*
  Computes yy = A*xx for a Hermitian MPISBAIJ matrix.

  Only half of the matrix is stored, so the product is built from three
  contributions: the local diagonal block (a->A), the conjugate transpose of
  the off-diagonal block (B^H * x, scattered to the owning ranks), and the
  off-diagonal block itself applied to the gathered remote values.
*/
static PetscErrorCode MatMult_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy)
{
  Mat_MPISBAIJ      *sbaij = (Mat_MPISBAIJ *)A->data;
  PetscInt           nloc  = sbaij->mbs * A->rmap->bs; /* locally owned length of xx */
  PetscScalar       *sv0;
  const PetscScalar *xarr;

  PetscFunctionBegin;
  /* diagonal-block contribution into slvec1a */
  PetscCall((*sbaij->A->ops->mult)(sbaij->A, xx, sbaij->slvec1a));
  /* slvec1b aliases memory of slvec1 (dangerously); bump its state before zeroing */
  PetscCall(PetscObjectStateIncrease((PetscObject)sbaij->slvec1b));
  PetscCall(VecZeroEntries(sbaij->slvec1b));

  /* subdiagonal contribution: B^H * x into slvec0b */
  PetscCheck(sbaij->B->ops->multhermitiantranspose, PetscObjectComm((PetscObject)sbaij->B), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)sbaij->B)->type_name);
  PetscCall((*sbaij->B->ops->multhermitiantranspose)(sbaij->B, xx, sbaij->slvec0b));

  /* stage the local part of xx at the head of slvec0 */
  PetscCall(VecGetArray(sbaij->slvec0, &sv0));
  PetscCall(VecGetArrayRead(xx, &xarr));

  PetscCall(PetscArraycpy(sv0, xarr, nloc));
  PetscCall(VecRestoreArray(sbaij->slvec0, &sv0));
  PetscCall(VecRestoreArrayRead(xx, &xarr));

  /* combine local x with the transposed contributions on the owning ranks */
  PetscCall(VecScatterBegin(sbaij->sMvctx, sbaij->slvec0, sbaij->slvec1, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(sbaij->sMvctx, sbaij->slvec0, sbaij->slvec1, ADD_VALUES, SCATTER_FORWARD));
  /* superdiagonal contribution: yy = B*slvec1b + slvec1a */
  PetscCall((*sbaij->B->ops->multadd)(sbaij->B, sbaij->slvec1b, sbaij->slvec1a, yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
1055 
1056 static PetscErrorCode MatMult_MPISBAIJ(Mat A, Vec xx, Vec yy)
1057 {
1058   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1059   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1060   PetscScalar       *from;
1061   const PetscScalar *x;
1062 
1063   PetscFunctionBegin;
1064   /* diagonal part */
1065   PetscCall((*a->A->ops->mult)(a->A, xx, a->slvec1a));
1066   /* since a->slvec1b shares memory (dangerously) with a->slec1 changes to a->slec1 will affect it */
1067   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1068   PetscCall(VecZeroEntries(a->slvec1b));
1069 
1070   /* subdiagonal part */
1071   PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->slvec0b));
1072 
1073   /* copy x into the vec slvec0 */
1074   PetscCall(VecGetArray(a->slvec0, &from));
1075   PetscCall(VecGetArrayRead(xx, &x));
1076 
1077   PetscCall(PetscArraycpy(from, x, bs * mbs));
1078   PetscCall(VecRestoreArray(a->slvec0, &from));
1079   PetscCall(VecRestoreArrayRead(xx, &x));
1080 
1081   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1082   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1083   /* supperdiagonal part */
1084   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, yy));
1085   PetscFunctionReturn(PETSC_SUCCESS);
1086 }
1087 
1088 #if PetscDefined(USE_COMPLEX)
1089 static PetscErrorCode MatMultAdd_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy, Vec zz)
1090 {
1091   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1092   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1093   PetscScalar       *from;
1094   const PetscScalar *x;
1095 
1096   PetscFunctionBegin;
1097   /* diagonal part */
1098   PetscCall((*a->A->ops->multadd)(a->A, xx, yy, a->slvec1a));
1099   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1100   PetscCall(VecZeroEntries(a->slvec1b));
1101 
1102   /* subdiagonal part */
1103   PetscCheck(a->B->ops->multhermitiantranspose, PetscObjectComm((PetscObject)a->B), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)a->B)->type_name);
1104   PetscCall((*a->B->ops->multhermitiantranspose)(a->B, xx, a->slvec0b));
1105 
1106   /* copy x into the vec slvec0 */
1107   PetscCall(VecGetArray(a->slvec0, &from));
1108   PetscCall(VecGetArrayRead(xx, &x));
1109   PetscCall(PetscArraycpy(from, x, bs * mbs));
1110   PetscCall(VecRestoreArray(a->slvec0, &from));
1111 
1112   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1113   PetscCall(VecRestoreArrayRead(xx, &x));
1114   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1115 
1116   /* supperdiagonal part */
1117   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, zz));
1118   PetscFunctionReturn(PETSC_SUCCESS);
1119 }
1120 #endif
1121 
1122 static PetscErrorCode MatMultAdd_MPISBAIJ(Mat A, Vec xx, Vec yy, Vec zz)
1123 {
1124   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1125   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1126   PetscScalar       *from;
1127   const PetscScalar *x;
1128 
1129   PetscFunctionBegin;
1130   /* diagonal part */
1131   PetscCall((*a->A->ops->multadd)(a->A, xx, yy, a->slvec1a));
1132   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1133   PetscCall(VecZeroEntries(a->slvec1b));
1134 
1135   /* subdiagonal part */
1136   PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->slvec0b));
1137 
1138   /* copy x into the vec slvec0 */
1139   PetscCall(VecGetArray(a->slvec0, &from));
1140   PetscCall(VecGetArrayRead(xx, &x));
1141   PetscCall(PetscArraycpy(from, x, bs * mbs));
1142   PetscCall(VecRestoreArray(a->slvec0, &from));
1143 
1144   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1145   PetscCall(VecRestoreArrayRead(xx, &x));
1146   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1147 
1148   /* supperdiagonal part */
1149   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, zz));
1150   PetscFunctionReturn(PETSC_SUCCESS);
1151 }
1152 
1153 /*
1154   This only works correctly for square matrices where the subblock A->A is the
1155    diagonal block
1156 */
1157 static PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A, Vec v)
1158 {
1159   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1160 
1161   PetscFunctionBegin;
1162   /* PetscCheck(a->rmap->N == a->cmap->N,PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */
1163   PetscCall(MatGetDiagonal(a->A, v));
1164   PetscFunctionReturn(PETSC_SUCCESS);
1165 }
1166 
1167 static PetscErrorCode MatScale_MPISBAIJ(Mat A, PetscScalar aa)
1168 {
1169   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1170 
1171   PetscFunctionBegin;
1172   PetscCall(MatScale(a->A, aa));
1173   PetscCall(MatScale(a->B, aa));
1174   PetscFunctionReturn(PETSC_SUCCESS);
1175 }
1176 
/*
  Returns one locally-owned row of the parallel matrix, merging the entries
  from the diagonal block (A) and off-diagonal block (B) into a single list
  sorted by global column index. Work arrays are lazily allocated to fit the
  longest local row and are reused until assembly frees them; a matching
  MatRestoreRow_MPISBAIJ() must follow each call.
*/
static PetscErrorCode MatGetRow_MPISBAIJ(Mat matin, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPISBAIJ *mat = (Mat_MPISBAIJ *)matin->data;
  PetscScalar  *vworkA, *vworkB, **pvA, **pvB, *v_p;
  PetscInt      bs = matin->rmap->bs, bs2 = mat->bs2, i, *cworkA, *cworkB, **pcA, **pcB;
  PetscInt      nztot, nzA, nzB, lrow, brstart = matin->rmap->rstart, brend = matin->rmap->rend;
  PetscInt     *cmap, *idx_p, cstart = mat->rstartbs;

  PetscFunctionBegin;
  /* only one row may be "gotten" at a time */
  PetscCheck(!mat->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqSBAIJ *Aa  = (Mat_SeqSBAIJ *)mat->A->data;
    Mat_SeqBAIJ  *Ba  = (Mat_SeqBAIJ *)mat->B->data;
    PetscInt      max = 1, mbs = mat->mbs, tmp;
    for (i = 0; i < mbs; i++) {
      tmp = Aa->i[i + 1] - Aa->i[i] + Ba->i[i + 1] - Ba->i[i]; /* row length */
      if (max < tmp) max = tmp;
    }
    /* max block-entries times bs2 scalar values per block */
    PetscCall(PetscMalloc2(max * bs2, &mat->rowvalues, max * bs2, &mat->rowindices));
  }

  PetscCheck(row >= brstart && row < brend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local rows");
  lrow = row - brstart; /* local row index */

  /* pass NULL for outputs the caller did not request */
  pvA = &vworkA;
  pcA = &cworkA;
  pvB = &vworkB;
  pcB = &cworkB;
  if (!v) {
    pvA = NULL;
    pvB = NULL;
  }
  if (!idx) {
    pcA = NULL;
    if (!v) pcB = NULL;
  }
  PetscCall((*mat->A->ops->getrow)(mat->A, lrow, &nzA, pcA, pvA));
  PetscCall((*mat->B->ops->getrow)(mat->B, lrow, &nzB, pcB, pvB));
  nztot = nzA + nzB;

  cmap = mat->garray;
  if (v || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      /* imark marks how many B entries precede the diagonal block's columns */
      PetscInt imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        for (i = 0; i < nzB; i++) {
          if (cmap[cworkB[i] / bs] < cstart) v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i = 0; i < nzA; i++) v_p[imark + i] = vworkA[i];
        for (i = imark; i < nzB; i++) v_p[nzA + i] = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* imark already computed above while copying values */
          for (i = 0; i < imark; i++) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
        } else {
          for (i = 0; i < nzB; i++) {
            if (cmap[cworkB[i] / bs] < cstart) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
            else break;
          }
          imark = i;
        }
        /* NOTE(review): cstart here is rstartbs (block start), scaled by bs — assumes square partitioning; confirm */
        for (i = 0; i < nzA; i++) idx_p[imark + i] = cstart * bs + cworkA[i];
        for (i = imark; i < nzB; i++) idx_p[nzA + i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
      }
    } else {
      if (idx) *idx = NULL;
      if (v) *v = NULL;
    }
  }
  *nz = nztot;
  PetscCall((*mat->A->ops->restorerow)(mat->A, lrow, &nzA, pcA, pvA));
  PetscCall((*mat->B->ops->restorerow)(mat->B, lrow, &nzB, pcB, pvB));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1261 
1262 static PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
1263 {
1264   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
1265 
1266   PetscFunctionBegin;
1267   PetscCheck(baij->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "MatGetRow() must be called first");
1268   baij->getrowactive = PETSC_FALSE;
1269   PetscFunctionReturn(PETSC_SUCCESS);
1270 }
1271 
1272 static PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A)
1273 {
1274   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1275   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1276 
1277   PetscFunctionBegin;
1278   aA->getrow_utriangular = PETSC_TRUE;
1279   PetscFunctionReturn(PETSC_SUCCESS);
1280 }
1281 static PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A)
1282 {
1283   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1284   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1285 
1286   PetscFunctionBegin;
1287   aA->getrow_utriangular = PETSC_FALSE;
1288   PetscFunctionReturn(PETSC_SUCCESS);
1289 }
1290 
1291 static PetscErrorCode MatConjugate_MPISBAIJ(Mat mat)
1292 {
1293   PetscFunctionBegin;
1294   if (PetscDefined(USE_COMPLEX)) {
1295     Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)mat->data;
1296 
1297     PetscCall(MatConjugate(a->A));
1298     PetscCall(MatConjugate(a->B));
1299   }
1300   PetscFunctionReturn(PETSC_SUCCESS);
1301 }
1302 
1303 static PetscErrorCode MatRealPart_MPISBAIJ(Mat A)
1304 {
1305   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1306 
1307   PetscFunctionBegin;
1308   PetscCall(MatRealPart(a->A));
1309   PetscCall(MatRealPart(a->B));
1310   PetscFunctionReturn(PETSC_SUCCESS);
1311 }
1312 
1313 static PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A)
1314 {
1315   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1316 
1317   PetscFunctionBegin;
1318   PetscCall(MatImaginaryPart(a->A));
1319   PetscCall(MatImaginaryPart(a->B));
1320   PetscFunctionReturn(PETSC_SUCCESS);
1321 }
1322 
1323 /* Check if isrow is a subset of iscol_local, called by MatCreateSubMatrix_MPISBAIJ()
1324    Input: isrow       - distributed(parallel),
1325           iscol_local - locally owned (seq)
1326 */
1327 static PetscErrorCode ISEqual_private(IS isrow, IS iscol_local, PetscBool *flg)
1328 {
1329   PetscInt        sz1, sz2, *a1, *a2, i, j, k, nmatch;
1330   const PetscInt *ptr1, *ptr2;
1331 
1332   PetscFunctionBegin;
1333   *flg = PETSC_FALSE;
1334   PetscCall(ISGetLocalSize(isrow, &sz1));
1335   PetscCall(ISGetLocalSize(iscol_local, &sz2));
1336   if (sz1 > sz2) PetscFunctionReturn(PETSC_SUCCESS);
1337 
1338   PetscCall(ISGetIndices(isrow, &ptr1));
1339   PetscCall(ISGetIndices(iscol_local, &ptr2));
1340 
1341   PetscCall(PetscMalloc1(sz1, &a1));
1342   PetscCall(PetscMalloc1(sz2, &a2));
1343   PetscCall(PetscArraycpy(a1, ptr1, sz1));
1344   PetscCall(PetscArraycpy(a2, ptr2, sz2));
1345   PetscCall(PetscSortInt(sz1, a1));
1346   PetscCall(PetscSortInt(sz2, a2));
1347 
1348   nmatch = 0;
1349   k      = 0;
1350   for (i = 0; i < sz1; i++) {
1351     for (j = k; j < sz2; j++) {
1352       if (a1[i] == a2[j]) {
1353         k = j;
1354         nmatch++;
1355         break;
1356       }
1357     }
1358   }
1359   PetscCall(ISRestoreIndices(isrow, &ptr1));
1360   PetscCall(ISRestoreIndices(iscol_local, &ptr2));
1361   PetscCall(PetscFree(a1));
1362   PetscCall(PetscFree(a2));
1363   if (nmatch < sz1) {
1364     *flg = PETSC_FALSE;
1365   } else {
1366     *flg = PETSC_TRUE;
1367   }
1368   PetscFunctionReturn(PETSC_SUCCESS);
1369 }
1370 
/*
  Extracts a parallel submatrix mat[isrow, iscol] from an MPISBAIJ matrix.

  Because only half the symmetric matrix is stored, three cases arise:
  - isrow is a subset of the gathered iscol (isequal): the BAIJ extraction
    alone is symmetric-safe;
  - iscol spans all columns (isidentity): likewise safe;
  - otherwise an off-diagonal submatrix is requested, which is assembled as
    C[0] + C[1]^T from two BAIJ extractions; isrow and iscol must then be
    disjoint. Gathered index sets are cached on the result for MAT_REUSE_MATRIX.
*/
static PetscErrorCode MatCreateSubMatrix_MPISBAIJ(Mat mat, IS isrow, IS iscol, MatReuse call, Mat *newmat)
{
  Mat       C[2];
  IS        iscol_local, isrow_local;
  PetscInt  csize, csize_local, rsize;
  PetscBool isequal, issorted, isidentity = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(iscol, &csize));
  PetscCall(ISGetLocalSize(isrow, &rsize));
  if (call == MAT_REUSE_MATRIX) {
    /* reuse the gathered column IS cached on the previously returned matrix */
    PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather", (PetscObject *)&iscol_local));
    PetscCheck(iscol_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
  } else {
    PetscCall(ISAllGather(iscol, &iscol_local));
    PetscCall(ISSorted(iscol_local, &issorted));
    PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, iscol must be sorted");
  }
  /* isequal: isrow is contained in the gathered iscol */
  PetscCall(ISEqual_private(isrow, iscol_local, &isequal));
  if (!isequal) {
    PetscCall(ISGetLocalSize(iscol_local, &csize_local));
    /* isidentity: the gathered iscol covers every column of mat */
    isidentity = (PetscBool)(mat->cmap->N == csize_local);
    if (!isidentity) {
      if (call == MAT_REUSE_MATRIX) {
        PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather_other", (PetscObject *)&isrow_local));
        PetscCheck(isrow_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
      } else {
        PetscCall(ISAllGather(isrow, &isrow_local));
        PetscCall(ISSorted(isrow_local, &issorted));
        PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, isrow must be sorted");
      }
    }
  }
  /* now call MatCreateSubMatrix_MPIBAIJ() */
  PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, isrow, iscol_local, csize, isequal || isidentity ? call : MAT_INITIAL_MATRIX, isequal || isidentity ? newmat : C, (PetscBool)(isequal || isidentity)));
  if (!isequal && !isidentity) {
    /* off-diagonal case: the two index sets must not overlap */
    if (call == MAT_INITIAL_MATRIX) {
      IS       intersect;
      PetscInt ni;

      PetscCall(ISIntersect(isrow_local, iscol_local, &intersect));
      PetscCall(ISGetLocalSize(intersect, &ni));
      PetscCall(ISDestroy(&intersect));
      PetscCheck(ni == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot create such a submatrix: for symmetric format, when requesting an off-diagonal submatrix, isrow and iscol should have an empty intersection (number of common indices is %" PetscInt_FMT ")", ni);
    }
    /* build the mirrored block mat[iscol, isrow], transpose it, and add */
    PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, iscol, isrow_local, rsize, MAT_INITIAL_MATRIX, C + 1, PETSC_FALSE));
    PetscCall(MatTranspose(C[1], MAT_INPLACE_MATRIX, C + 1));
    PetscCall(MatAXPY(C[0], 1.0, C[1], DIFFERENT_NONZERO_PATTERN));
    if (call == MAT_REUSE_MATRIX) PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
    else if (mat->rmap->bs == 1) PetscCall(MatConvert(C[0], MATAIJ, MAT_INITIAL_MATRIX, newmat));
    else PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
    PetscCall(MatDestroy(C));
    PetscCall(MatDestroy(C + 1));
  }
  if (call == MAT_INITIAL_MATRIX) {
    /* cache the gathered index sets on the result so MAT_REUSE_MATRIX can find them */
    if (!isequal && !isidentity) {
      PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather_other", (PetscObject)isrow_local));
      PetscCall(ISDestroy(&isrow_local));
    }
    PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather", (PetscObject)iscol_local));
    PetscCall(ISDestroy(&iscol_local));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1435 
1436 static PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A)
1437 {
1438   Mat_MPISBAIJ *l = (Mat_MPISBAIJ *)A->data;
1439 
1440   PetscFunctionBegin;
1441   PetscCall(MatZeroEntries(l->A));
1442   PetscCall(MatZeroEntries(l->B));
1443   PetscFunctionReturn(PETSC_SUCCESS);
1444 }
1445 
1446 static PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin, MatInfoType flag, MatInfo *info)
1447 {
1448   Mat_MPISBAIJ  *a = (Mat_MPISBAIJ *)matin->data;
1449   Mat            A = a->A, B = a->B;
1450   PetscLogDouble isend[5], irecv[5];
1451 
1452   PetscFunctionBegin;
1453   info->block_size = (PetscReal)matin->rmap->bs;
1454 
1455   PetscCall(MatGetInfo(A, MAT_LOCAL, info));
1456 
1457   isend[0] = info->nz_used;
1458   isend[1] = info->nz_allocated;
1459   isend[2] = info->nz_unneeded;
1460   isend[3] = info->memory;
1461   isend[4] = info->mallocs;
1462 
1463   PetscCall(MatGetInfo(B, MAT_LOCAL, info));
1464 
1465   isend[0] += info->nz_used;
1466   isend[1] += info->nz_allocated;
1467   isend[2] += info->nz_unneeded;
1468   isend[3] += info->memory;
1469   isend[4] += info->mallocs;
1470   if (flag == MAT_LOCAL) {
1471     info->nz_used      = isend[0];
1472     info->nz_allocated = isend[1];
1473     info->nz_unneeded  = isend[2];
1474     info->memory       = isend[3];
1475     info->mallocs      = isend[4];
1476   } else if (flag == MAT_GLOBAL_MAX) {
1477     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)matin)));
1478 
1479     info->nz_used      = irecv[0];
1480     info->nz_allocated = irecv[1];
1481     info->nz_unneeded  = irecv[2];
1482     info->memory       = irecv[3];
1483     info->mallocs      = irecv[4];
1484   } else if (flag == MAT_GLOBAL_SUM) {
1485     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)matin)));
1486 
1487     info->nz_used      = irecv[0];
1488     info->nz_allocated = irecv[1];
1489     info->nz_unneeded  = irecv[2];
1490     info->memory       = irecv[3];
1491     info->mallocs      = irecv[4];
1492   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown MatInfoType argument %d", (int)flag);
1493   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1494   info->fill_ratio_needed = 0;
1495   info->factor_mallocs    = 0;
1496   PetscFunctionReturn(PETSC_SUCCESS);
1497 }
1498 
/*
  Sets a matrix option on a MATMPISBAIJ matrix, forwarding most options to the
  local diagonal (a->A, SEQSBAIJ) and off-diagonal (a->B, SEQBAIJ) blocks.

  Notes (from the code below):
  - symmetry-related options are forwarded to the diagonal block only when it
    exists and the local row/column counts agree (square local block);
  - in complex builds MAT_HERMITIAN installs Hermitian mat-vec kernels and
    disables the transpose kernels, while MAT_SYMMETRIC/MAT_SPD restore the
    default kernels;
  - options not listed are silently ignored (default case).
*/
static PetscErrorCode MatSetOption_MPISBAIJ(Mat A, MatOption op, PetscBool flg)
{
  Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
  Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_UNUSED_NONZERO_LOCATION_ERR:
  case MAT_KEEP_NONZERO_PATTERN:
  case MAT_NEW_NONZERO_LOCATION_ERR:
    /* pattern-related options require the blocks to exist already */
    MatCheckPreallocated(A, 1);
    PetscCall(MatSetOption(a->A, op, flg));
    PetscCall(MatSetOption(a->B, op, flg));
    break;
  case MAT_ROW_ORIENTED:
    MatCheckPreallocated(A, 1);
    a->roworiented = flg;

    PetscCall(MatSetOption(a->A, op, flg));
    PetscCall(MatSetOption(a->B, op, flg));
    break;
  case MAT_IGNORE_OFF_PROC_ENTRIES:
    a->donotstash = flg;
    break;
  case MAT_USE_HASH_TABLE:
    a->ht_flag = flg;
    break;
  case MAT_HERMITIAN:
    if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
#if defined(PETSC_USE_COMPLEX)
    if (flg) { /* need different mat-vec ops */
      A->ops->mult             = MatMult_MPISBAIJ_Hermitian;
      A->ops->multadd          = MatMultAdd_MPISBAIJ_Hermitian;
      A->ops->multtranspose    = NULL;
      A->ops->multtransposeadd = NULL;
      A->symmetric             = PETSC_BOOL3_FALSE;
    }
#endif
    break;
  case MAT_SPD:
  case MAT_SYMMETRIC:
    if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
#if defined(PETSC_USE_COMPLEX)
    if (flg) { /* restore to use default mat-vec ops */
      A->ops->mult             = MatMult_MPISBAIJ;
      A->ops->multadd          = MatMultAdd_MPISBAIJ;
      A->ops->multtranspose    = MatMult_MPISBAIJ;
      A->ops->multtransposeadd = MatMultAdd_MPISBAIJ;
    }
#endif
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
    break;
  case MAT_IGNORE_LOWER_TRIANGULAR:
  case MAT_ERROR_LOWER_TRIANGULAR:
    /* both options are recorded in the same flag of the diagonal block */
    aA->ignore_ltriangular = flg;
    break;
  case MAT_GETROW_UPPERTRIANGULAR:
    aA->getrow_utriangular = flg;
    break;
  default:
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1567 
1568 static PetscErrorCode MatTranspose_MPISBAIJ(Mat A, MatReuse reuse, Mat *B)
1569 {
1570   PetscFunctionBegin;
1571   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
1572   if (reuse == MAT_INITIAL_MATRIX) {
1573     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, B));
1574   } else if (reuse == MAT_REUSE_MATRIX) {
1575     PetscCall(MatCopy(A, *B, SAME_NONZERO_PATTERN));
1576   }
1577   PetscFunctionReturn(PETSC_SUCCESS);
1578 }
1579 
/*
  Symmetric diagonal scaling diag(ll) * mat * diag(rr). For the symmetric
  storage format the left and right scaling vectors must be the same vector (or
  have equal entries) and the local block must be square.

  The off-diagonal block also needs the off-process entries of rr; these are
  gathered into baij->lvec through the Mvctx scatter, and the communication is
  overlapped with the scaling of the locally available parts.
*/
static PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat, Vec ll, Vec rr)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  Mat           a = baij->A, b = baij->B;
  PetscInt      nv, m, n;
  PetscBool     flg;

  PetscFunctionBegin;
  if (ll != rr) {
    /* distinct Vec objects are allowed only if their entries are identical */
    PetscCall(VecEqual(ll, rr, &flg));
    PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format, left and right scaling vectors must be same");
  }
  if (!ll) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(MatGetLocalSize(mat, &m, &n));
  PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "For symmetric format, local size %" PetscInt_FMT " %" PetscInt_FMT " must be same", m, n);

  PetscCall(VecGetLocalSize(rr, &nv));
  PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Left and right vector non-conforming local size");

  /* start gathering the off-process entries of rr needed by the off-diagonal block */
  PetscCall(VecScatterBegin(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));

  /* left diagonalscale the off-diagonal part */
  PetscUseTypeMethod(b, diagonalscale, ll, NULL);

  /* scale the diagonal part */
  PetscUseTypeMethod(a, diagonalscale, ll, rr);

  /* right diagonalscale the off-diagonal part */
  PetscCall(VecScatterEnd(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
  PetscUseTypeMethod(b, diagonalscale, NULL, baij->lvec);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1613 
1614 static PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A)
1615 {
1616   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1617 
1618   PetscFunctionBegin;
1619   PetscCall(MatSetUnfactored(a->A));
1620   PetscFunctionReturn(PETSC_SUCCESS);
1621 }
1622 
1623 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat, MatDuplicateOption, Mat *);
1624 
1625 static PetscErrorCode MatEqual_MPISBAIJ(Mat A, Mat B, PetscBool *flag)
1626 {
1627   Mat_MPISBAIJ *matB = (Mat_MPISBAIJ *)B->data, *matA = (Mat_MPISBAIJ *)A->data;
1628   Mat           a, b, c, d;
1629   PetscBool     flg;
1630 
1631   PetscFunctionBegin;
1632   a = matA->A;
1633   b = matA->B;
1634   c = matB->A;
1635   d = matB->B;
1636 
1637   PetscCall(MatEqual(a, c, &flg));
1638   if (flg) PetscCall(MatEqual(b, d, &flg));
1639   PetscCallMPI(MPIU_Allreduce(&flg, flag, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
1640   PetscFunctionReturn(PETSC_SUCCESS);
1641 }
1642 
1643 static PetscErrorCode MatCopy_MPISBAIJ(Mat A, Mat B, MatStructure str)
1644 {
1645   PetscBool isbaij;
1646 
1647   PetscFunctionBegin;
1648   PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isbaij, MATSEQSBAIJ, MATMPISBAIJ, ""));
1649   PetscCheck(isbaij, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)B)->type_name);
1650   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1651   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1652     PetscCall(MatGetRowUpperTriangular(A));
1653     PetscCall(MatCopy_Basic(A, B, str));
1654     PetscCall(MatRestoreRowUpperTriangular(A));
1655   } else {
1656     Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1657     Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
1658 
1659     PetscCall(MatCopy(a->A, b->A, str));
1660     PetscCall(MatCopy(a->B, b->B, str));
1661   }
1662   PetscCall(PetscObjectStateIncrease((PetscObject)B));
1663   PetscFunctionReturn(PETSC_SUCCESS);
1664 }
1665 
/*
  Computes Y = a*X + Y for MATMPISBAIJ matrices.

  Three cases, by pattern relation:
  - SAME_NONZERO_PATTERN: the value arrays of X and Y align one-to-one, so the
    update is two BLAS axpy calls on the raw arrays of the diagonal (SBAIJ) and
    off-diagonal (BAIJ) blocks;
  - SUBSET_NONZERO_PATTERN: defer to MatAXPY_Basic, temporarily enabling
    upper-triangular row access on X;
  - otherwise: build a new matrix B preallocated for the union pattern, perform
    the basic AXPY into it, and replace Y's internals with B's.
*/
static PetscErrorCode MatAXPY_MPISBAIJ(Mat Y, PetscScalar a, Mat X, MatStructure str)
{
  Mat_MPISBAIJ *xx = (Mat_MPISBAIJ *)X->data, *yy = (Mat_MPISBAIJ *)Y->data;
  PetscBLASInt  bnz, one                          = 1;
  Mat_SeqSBAIJ *xa, *ya;
  Mat_SeqBAIJ  *xb, *yb;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    PetscScalar alpha = a;
    xa                = (Mat_SeqSBAIJ *)xx->A->data;
    ya                = (Mat_SeqSBAIJ *)yy->A->data;
    /* diagonal blocks: ya->a += alpha * xa->a over all stored values */
    PetscCall(PetscBLASIntCast(xa->nz, &bnz));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xa->a, &one, ya->a, &one));
    xb = (Mat_SeqBAIJ *)xx->B->data;
    yb = (Mat_SeqBAIJ *)yy->B->data;
    /* off-diagonal blocks: same element-wise update */
    PetscCall(PetscBLASIntCast(xb->nz, &bnz));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xb->a, &one, yb->a, &one));
    PetscCall(PetscObjectStateIncrease((PetscObject)Y));
  } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
    PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
    PetscCall(MatAXPY_Basic(Y, a, X, str));
    PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
  } else {
    Mat       B;
    PetscInt *nnz_d, *nnz_o, bs = Y->rmap->bs;
    PetscCheck(bs == X->rmap->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Matrices must have same block size");
    PetscCall(MatGetRowUpperTriangular(X));
    PetscCall(MatGetRowUpperTriangular(Y));
    /* per-block-row nonzero counts of the union pattern, for preallocating B */
    PetscCall(PetscMalloc1(yy->A->rmap->N, &nnz_d));
    PetscCall(PetscMalloc1(yy->B->rmap->N, &nnz_o));
    PetscCall(MatCreate(PetscObjectComm((PetscObject)Y), &B));
    PetscCall(PetscObjectSetName((PetscObject)B, ((PetscObject)Y)->name));
    PetscCall(MatSetSizes(B, Y->rmap->n, Y->cmap->n, Y->rmap->N, Y->cmap->N));
    PetscCall(MatSetBlockSizesFromMats(B, Y, Y));
    PetscCall(MatSetType(B, MATMPISBAIJ));
    PetscCall(MatAXPYGetPreallocation_SeqSBAIJ(yy->A, xx->A, nnz_d));
    PetscCall(MatAXPYGetPreallocation_MPIBAIJ(yy->B, yy->garray, xx->B, xx->garray, nnz_o));
    PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, nnz_d, 0, nnz_o));
    PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
    /* move B's data into Y (Y keeps its header/identity, B is consumed) */
    PetscCall(MatHeaderMerge(Y, &B));
    PetscCall(PetscFree(nnz_d));
    PetscCall(PetscFree(nnz_o));
    PetscCall(MatRestoreRowUpperTriangular(X));
    PetscCall(MatRestoreRowUpperTriangular(Y));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1714 
1715 static PetscErrorCode MatCreateSubMatrices_MPISBAIJ(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *B[])
1716 {
1717   PetscInt  i;
1718   PetscBool flg;
1719 
1720   PetscFunctionBegin;
1721   PetscCall(MatCreateSubMatrices_MPIBAIJ(A, n, irow, icol, scall, B)); /* B[] are sbaij matrices */
1722   for (i = 0; i < n; i++) {
1723     PetscCall(ISEqual(irow[i], icol[i], &flg));
1724     if (!flg) PetscCall(MatSeqSBAIJZeroOps_Private(*B[i]));
1725   }
1726   PetscFunctionReturn(PETSC_SUCCESS);
1727 }
1728 
1729 static PetscErrorCode MatShift_MPISBAIJ(Mat Y, PetscScalar a)
1730 {
1731   Mat_MPISBAIJ *maij = (Mat_MPISBAIJ *)Y->data;
1732   Mat_SeqSBAIJ *aij  = (Mat_SeqSBAIJ *)maij->A->data;
1733 
1734   PetscFunctionBegin;
1735   if (!Y->preallocated) {
1736     PetscCall(MatMPISBAIJSetPreallocation(Y, Y->rmap->bs, 1, NULL, 0, NULL));
1737   } else if (!aij->nz) {
1738     PetscInt nonew = aij->nonew;
1739     PetscCall(MatSeqSBAIJSetPreallocation(maij->A, Y->rmap->bs, 1, NULL));
1740     aij->nonew = nonew;
1741   }
1742   PetscCall(MatShift_Basic(Y, a));
1743   PetscFunctionReturn(PETSC_SUCCESS);
1744 }
1745 
1746 static PetscErrorCode MatGetDiagonalBlock_MPISBAIJ(Mat A, Mat *a)
1747 {
1748   PetscFunctionBegin;
1749   *a = ((Mat_MPISBAIJ *)A->data)->A;
1750   PetscFunctionReturn(PETSC_SUCCESS);
1751 }
1752 
1753 static PetscErrorCode MatEliminateZeros_MPISBAIJ(Mat A, PetscBool keep)
1754 {
1755   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1756 
1757   PetscFunctionBegin;
1758   PetscCall(MatEliminateZeros_SeqSBAIJ(a->A, keep));       // possibly keep zero diagonal coefficients
1759   PetscCall(MatEliminateZeros_SeqBAIJ(a->B, PETSC_FALSE)); // never keep zero diagonal coefficients
1760   PetscFunctionReturn(PETSC_SUCCESS);
1761 }
1762 
1763 static PetscErrorCode MatLoad_MPISBAIJ(Mat, PetscViewer);
1764 static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat, Vec, PetscInt[]);
1765 static PetscErrorCode MatSOR_MPISBAIJ(Mat, Vec, PetscReal, MatSORType, PetscReal, PetscInt, PetscInt, Vec);
1766 
/* Virtual function table for MATMPISBAIJ. Entries are positional (the inline
   comments give the slot index within struct _MatOps); NULL slots mark
   operations that are unsupported for this type or handled by generic code. */
static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ,
                                       MatGetRow_MPISBAIJ,
                                       MatRestoreRow_MPISBAIJ,
                                       MatMult_MPISBAIJ,
                                       /*  4*/ MatMultAdd_MPISBAIJ,
                                       MatMult_MPISBAIJ, /* transpose versions are same as non-transpose */
                                       MatMultAdd_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 10*/ NULL,
                                       NULL,
                                       NULL,
                                       MatSOR_MPISBAIJ,
                                       MatTranspose_MPISBAIJ,
                                       /* 15*/ MatGetInfo_MPISBAIJ,
                                       MatEqual_MPISBAIJ,
                                       MatGetDiagonal_MPISBAIJ,
                                       MatDiagonalScale_MPISBAIJ,
                                       MatNorm_MPISBAIJ,
                                       /* 20*/ MatAssemblyBegin_MPISBAIJ,
                                       MatAssemblyEnd_MPISBAIJ,
                                       MatSetOption_MPISBAIJ,
                                       MatZeroEntries_MPISBAIJ,
                                       /* 24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 29*/ MatSetUp_MPI_Hash,
                                       NULL,
                                       NULL,
                                       MatGetDiagonalBlock_MPISBAIJ,
                                       NULL,
                                       /* 34*/ MatDuplicate_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 39*/ MatAXPY_MPISBAIJ,
                                       MatCreateSubMatrices_MPISBAIJ,
                                       MatIncreaseOverlap_MPISBAIJ,
                                       MatGetValues_MPISBAIJ,
                                       MatCopy_MPISBAIJ,
                                       /* 44*/ NULL,
                                       MatScale_MPISBAIJ,
                                       MatShift_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       /* 49*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 54*/ NULL,
                                       NULL,
                                       MatSetUnfactored_MPISBAIJ,
                                       NULL,
                                       MatSetValuesBlocked_MPISBAIJ,
                                       /* 59*/ MatCreateSubMatrix_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatGetRowMaxAbs_MPISBAIJ,
                                       /* 69*/ NULL,
                                       MatConvert_MPISBAIJ_Basic,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatLoad_MPISBAIJ,
                                       /* 79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatConjugate_MPISBAIJ,
                                       /* 94*/ NULL,
                                       NULL,
                                       MatRealPart_MPISBAIJ,
                                       MatImaginaryPart_MPISBAIJ,
                                       MatGetRowUpperTriangular_MPISBAIJ,
                                       /* 99*/ MatRestoreRowUpperTriangular_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       MatSetBlockSizes_Default,
                                       NULL,
                                       NULL,
                                       /*129*/ NULL,
                                       MatCreateMPIMatConcatenateSeqMat_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       MatEliminateZeros_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       MatCopyHashToXAIJ_MPI_Hash,
                                       NULL,
                                       NULL};
1911 
1912 static PetscErrorCode MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz)
1913 {
1914   Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
1915   PetscInt      i, mbs, Mbs;
1916   PetscMPIInt   size;
1917 
1918   PetscFunctionBegin;
1919   if (B->hash_active) {
1920     B->ops[0]      = b->cops;
1921     B->hash_active = PETSC_FALSE;
1922   }
1923   if (!B->preallocated) PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), bs, &B->bstash));
1924   PetscCall(MatSetBlockSize(B, bs));
1925   PetscCall(PetscLayoutSetUp(B->rmap));
1926   PetscCall(PetscLayoutSetUp(B->cmap));
1927   PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
1928   PetscCheck(B->rmap->N <= B->cmap->N, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->N, B->cmap->N);
1929   PetscCheck(B->rmap->n <= B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more local rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->n, B->cmap->n);
1930 
1931   mbs = B->rmap->n / bs;
1932   Mbs = B->rmap->N / bs;
1933   PetscCheck(mbs * bs == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "No of local rows %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, B->rmap->N, bs);
1934 
1935   B->rmap->bs = bs;
1936   b->bs2      = bs * bs;
1937   b->mbs      = mbs;
1938   b->Mbs      = Mbs;
1939   b->nbs      = B->cmap->n / bs;
1940   b->Nbs      = B->cmap->N / bs;
1941 
1942   for (i = 0; i <= b->size; i++) b->rangebs[i] = B->rmap->range[i] / bs;
1943   b->rstartbs = B->rmap->rstart / bs;
1944   b->rendbs   = B->rmap->rend / bs;
1945 
1946   b->cstartbs = B->cmap->rstart / bs;
1947   b->cendbs   = B->cmap->rend / bs;
1948 
1949 #if defined(PETSC_USE_CTABLE)
1950   PetscCall(PetscHMapIDestroy(&b->colmap));
1951 #else
1952   PetscCall(PetscFree(b->colmap));
1953 #endif
1954   PetscCall(PetscFree(b->garray));
1955   PetscCall(VecDestroy(&b->lvec));
1956   PetscCall(VecScatterDestroy(&b->Mvctx));
1957   PetscCall(VecDestroy(&b->slvec0));
1958   PetscCall(VecDestroy(&b->slvec0b));
1959   PetscCall(VecDestroy(&b->slvec1));
1960   PetscCall(VecDestroy(&b->slvec1a));
1961   PetscCall(VecDestroy(&b->slvec1b));
1962   PetscCall(VecScatterDestroy(&b->sMvctx));
1963 
1964   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &size));
1965 
1966   MatSeqXAIJGetOptions_Private(b->B);
1967   PetscCall(MatDestroy(&b->B));
1968   PetscCall(MatCreate(PETSC_COMM_SELF, &b->B));
1969   PetscCall(MatSetSizes(b->B, B->rmap->n, size > 1 ? B->cmap->N : 0, B->rmap->n, size > 1 ? B->cmap->N : 0));
1970   PetscCall(MatSetType(b->B, MATSEQBAIJ));
1971   MatSeqXAIJRestoreOptions_Private(b->B);
1972 
1973   MatSeqXAIJGetOptions_Private(b->A);
1974   PetscCall(MatDestroy(&b->A));
1975   PetscCall(MatCreate(PETSC_COMM_SELF, &b->A));
1976   PetscCall(MatSetSizes(b->A, B->rmap->n, B->cmap->n, B->rmap->n, B->cmap->n));
1977   PetscCall(MatSetType(b->A, MATSEQSBAIJ));
1978   MatSeqXAIJRestoreOptions_Private(b->A);
1979 
1980   PetscCall(MatSeqSBAIJSetPreallocation(b->A, bs, d_nz, d_nnz));
1981   PetscCall(MatSeqBAIJSetPreallocation(b->B, bs, o_nz, o_nnz));
1982 
1983   B->preallocated  = PETSC_TRUE;
1984   B->was_assembled = PETSC_FALSE;
1985   B->assembled     = PETSC_FALSE;
1986   PetscFunctionReturn(PETSC_SUCCESS);
1987 }
1988 
/*
  Preallocates B from block-CSR input and inserts the values.

  ii[] holds m+1 block-row offsets (local block rows) into jj[], the GLOBAL
  block-column indices of each row; V, if non-NULL, holds bs*bs scalars per
  block. Lower-triangular blocks are excluded from the preallocation counts and
  are ignored during insertion (MAT_IGNORE_LOWER_TRIANGULAR is set). When V is
  NULL a zero-filled buffer is used so the nonzero pattern is still created.
*/
static PetscErrorCode MatMPISBAIJSetPreallocationCSR_MPISBAIJ(Mat B, PetscInt bs, const PetscInt ii[], const PetscInt jj[], const PetscScalar V[])
{
  PetscInt        m, rstart, cend;
  PetscInt        i, j, d, nz, bd, nz_max = 0, *d_nnz = NULL, *o_nnz = NULL;
  const PetscInt *JJ          = NULL;
  PetscScalar    *values      = NULL;
  PetscBool       roworiented = ((Mat_MPISBAIJ *)B->data)->roworiented;
  PetscBool       nooffprocentries;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_OUTOFRANGE, "Invalid block size specified, must be positive but it is %" PetscInt_FMT, bs);
  PetscCall(PetscLayoutSetBlockSize(B->rmap, bs));
  PetscCall(PetscLayoutSetBlockSize(B->cmap, bs));
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
  /* all sizes below are in units of blocks */
  m      = B->rmap->n / bs;
  rstart = B->rmap->rstart / bs;
  cend   = B->cmap->rend / bs;

  PetscCheck(!ii[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ii[0] must be 0 but it is %" PetscInt_FMT, ii[0]);
  PetscCall(PetscMalloc2(m, &d_nnz, m, &o_nnz));
  for (i = 0; i < m; i++) {
    nz = ii[i + 1] - ii[i];
    PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz);
    /* count the ones on the diagonal and above, split into diagonal and off-diagonal portions. */
    JJ = jj + ii[i];
    bd = 0; /* bd = number of blocks strictly below the diagonal (dropped for SBAIJ) */
    for (j = 0; j < nz; j++) {
      if (*JJ >= i + rstart) break;
      JJ++;
      bd++;
    }
    d = 0; /* d = number of kept blocks that fall in the diagonal portion (columns < cend) */
    for (; j < nz; j++) {
      if (*JJ++ >= cend) break;
      d++;
    }
    d_nnz[i] = d;
    o_nnz[i] = nz - d - bd;
    nz       = nz - bd;       /* kept (upper-triangular) blocks in this row */
    nz_max   = PetscMax(nz_max, nz);
  }
  PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, d_nnz, 0, o_nnz));
  PetscCall(MatSetOption(B, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE));
  PetscCall(PetscFree2(d_nnz, o_nnz));

  values = (PetscScalar *)V;
  if (!values) PetscCall(PetscCalloc1(bs * bs * nz_max, &values));
  for (i = 0; i < m; i++) {
    PetscInt        row   = i + rstart;
    PetscInt        ncols = ii[i + 1] - ii[i];
    const PetscInt *icols = jj + ii[i];
    if (bs == 1 || !roworiented) { /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
      const PetscScalar *svals = values + (V ? (bs * bs * ii[i]) : 0);
      PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, ncols, icols, svals, INSERT_VALUES));
    } else { /* block ordering does not match so we can only insert one block at a time. */
      PetscInt j;
      for (j = 0; j < ncols; j++) {
        const PetscScalar *svals = values + (V ? (bs * bs * (ii[i] + j)) : 0);
        PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, 1, &icols[j], svals, INSERT_VALUES));
      }
    }
  }

  if (!V) PetscCall(PetscFree(values));
  /* all entries are locally owned here, so skip the off-process stash during assembly */
  nooffprocentries    = B->nooffprocentries;
  B->nooffprocentries = PETSC_TRUE;
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  B->nooffprocentries = nooffprocentries;

  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2064 
2065 /*MC
2066    MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices,
2067    based on block compressed sparse row format.  Only the upper triangular portion of the "diagonal" portion of
2068    the matrix is stored.
2069 
2070    For complex numbers by default this matrix is symmetric, NOT Hermitian symmetric. To make it Hermitian symmetric you
2071    can call `MatSetOption`(`Mat`, `MAT_HERMITIAN`);
2072 
2073    Options Database Key:
2074 . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to `MatSetFromOptions()`
2075 
2076    Level: beginner
2077 
2078    Note:
     The number of rows in the matrix must be less than or equal to the number of columns. Similarly the number of rows in the
     diagonal portion of the matrix of each process has to be less than or equal to the number of columns.
2081 
2082 .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MATBAIJ`, `MatCreateBAIJ()`, `MATSEQSBAIJ`, `MatType`
2083 M*/
2084 
/*
  MATMPISBAIJ constructor: allocates the Mat_MPISBAIJ data structure, installs
  the operations table, zeroes/initializes all cached fields (column map,
  scatter contexts, hash-table state, MatGetRow buffers), composes the
  type-specific methods and conversions, and marks the matrix symmetric.
  Preallocation of the local blocks happens later, in
  MatMPISBAIJSetPreallocation().
*/
PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B)
{
  Mat_MPISBAIJ *b;
  PetscBool     flg = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data   = (void *)b;
  B->ops[0] = MatOps_Values;

  B->ops->destroy = MatDestroy_MPISBAIJ;
  B->ops->view    = MatView_MPISBAIJ;
  B->assembled    = PETSC_FALSE;
  B->insertmode   = NOT_SET_VALUES;

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)B), &b->rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &b->size));

  /* build local table of row and column ownerships */
  PetscCall(PetscMalloc1(b->size + 2, &b->rangebs));

  /* build cache for off array entries formed */
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

  b->donotstash  = PETSC_FALSE;
  b->colmap      = NULL;
  b->garray      = NULL;
  b->roworiented = PETSC_TRUE;

  /* stuff used in block assembly */
  b->barray = NULL;

  /* stuff used for matrix vector multiply */
  b->lvec    = NULL;
  b->Mvctx   = NULL;
  b->slvec0  = NULL;
  b->slvec0b = NULL;
  b->slvec1  = NULL;
  b->slvec1a = NULL;
  b->slvec1b = NULL;
  b->sMvctx  = NULL;

  /* stuff for MatGetRow() */
  b->rowindices   = NULL;
  b->rowvalues    = NULL;
  b->getrowactive = PETSC_FALSE;

  /* hash table stuff */
  b->ht           = NULL;
  b->hd           = NULL;
  b->ht_size      = 0;
  b->ht_flag      = PETSC_FALSE;
  b->ht_fact      = 0;
  b->ht_total_ct  = 0;
  b->ht_insert_ct = 0;

  /* stuff for MatCreateSubMatrices_MPIBAIJ_local() */
  b->ijonly = PETSC_FALSE;

  b->in_loc = NULL;
  b->v_loc  = NULL;
  b->n_loc  = 0;

  /* type-specific methods reachable through PetscObjectQueryFunction() */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatStoreValues_C", MatStoreValues_MPISBAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatRetrieveValues_C", MatRetrieveValues_MPISBAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocation_C", MatMPISBAIJSetPreallocation_MPISBAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocationCSR_C", MatMPISBAIJSetPreallocationCSR_MPISBAIJ));
#if defined(PETSC_HAVE_ELEMENTAL)
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_elemental_C", MatConvert_MPISBAIJ_Elemental));
#endif
#if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_scalapack_C", MatConvert_SBAIJ_ScaLAPACK));
#endif
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpiaij_C", MatConvert_MPISBAIJ_Basic));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpibaij_C", MatConvert_MPISBAIJ_Basic));

  /* the SBAIJ format is symmetric by construction; Hermitian only for real scalars */
  B->symmetric                   = PETSC_BOOL3_TRUE;
  B->structurally_symmetric      = PETSC_BOOL3_TRUE;
  B->symmetry_eternal            = PETSC_TRUE;
  B->structural_symmetry_eternal = PETSC_TRUE;
#if defined(PETSC_USE_COMPLEX)
  B->hermitian = PETSC_BOOL3_FALSE;
#else
  B->hermitian = PETSC_BOOL3_TRUE;
#endif

  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPISBAIJ));
  PetscOptionsBegin(PetscObjectComm((PetscObject)B), NULL, "Options for loading MPISBAIJ matrix 1", "Mat");
  PetscCall(PetscOptionsBool("-mat_use_hash_table", "Use hash table to save memory in constructing matrix", "MatSetOption", flg, &flg, NULL));
  if (flg) {
    PetscReal fact = 1.39;
    PetscCall(MatSetOption(B, MAT_USE_HASH_TABLE, PETSC_TRUE));
    PetscCall(PetscOptionsReal("-mat_use_hash_table", "Use hash table factor", "MatMPIBAIJSetHashTableFactor", fact, &fact, NULL));
    if (fact <= 1.0) fact = 1.39;
    PetscCall(MatMPIBAIJSetHashTableFactor(B, fact));
    PetscCall(PetscInfo(B, "Hash table Factor used %5.2g\n", (double)fact));
  }
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2185 
2186 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2187 /*MC
2188    MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices.
2189 
2190    This matrix type is identical to `MATSEQSBAIJ` when constructed with a single process communicator,
2191    and `MATMPISBAIJ` otherwise.
2192 
2193    Options Database Key:
2194 . -mat_type sbaij - sets the matrix type to `MATSBAIJ` during a call to `MatSetFromOptions()`
2195 
2196   Level: beginner
2197 
.seealso: [](ch_matrices), `Mat`, `MATSEQSBAIJ`, `MATMPISBAIJ`, `MatCreateSBAIJ()`
2199 M*/
2200 
2201 /*@
2202   MatMPISBAIJSetPreallocation - For good matrix assembly performance
2203   the user should preallocate the matrix storage by setting the parameters
2204   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2205   performance can be increased by more than a factor of 50.
2206 
2207   Collective
2208 
2209   Input Parameters:
2210 + B     - the matrix
2211 . bs    - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
2212           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
2213 . d_nz  - number of block nonzeros per block row in diagonal portion of local
2214           submatrix  (same for all local rows)
. d_nnz - array containing the number of block nonzeros in the various block rows
          in the upper triangular and diagonal part of the diagonal portion of the local submatrix
          (possibly different for each block row) or `NULL`.  If you plan to factor the matrix you must leave room
          for the diagonal entry and set a value even if it is zero.
2219 . o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2220           submatrix (same for all local rows).
2221 - o_nnz - array containing the number of nonzeros in the various block rows of the
2222           off-diagonal portion of the local submatrix that is right of the diagonal
2223           (possibly different for each block row) or `NULL`.
2224 
2225   Options Database Keys:
2226 + -mat_no_unroll  - uses code that does not unroll the loops in the
2227                     block calculations (much slower)
2228 - -mat_block_size - size of the blocks to use
2229 
2230   Level: intermediate
2231 
2232   Notes:
2233 
2234   If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
  then it must be used on all processors that share the object for that argument.
2236 
2237   If the *_nnz parameter is given then the *_nz parameter is ignored
2238 
2239   Storage Information:
2240   For a square global matrix we define each processor's diagonal portion
2241   to be its local rows and the corresponding columns (a square submatrix);
2242   each processor's off-diagonal portion encompasses the remainder of the
2243   local matrix (a rectangular submatrix).
2244 
2245   The user can specify preallocated storage for the diagonal part of
2246   the local submatrix with either `d_nz` or `d_nnz` (not both).  Set
2247   `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2248   memory allocation.  Likewise, specify preallocated storage for the
2249   off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2250 
2251   You can call `MatGetInfo()` to get information on how effective the preallocation was;
2252   for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
2253   You can also run with the option `-info` and look for messages with the string
2254   malloc in them to see if additional memory allocation was needed.
2255 
2256   Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2257   the figure below we depict these three local rows and all columns (0-11).
2258 
2259 .vb
2260            0 1 2 3 4 5 6 7 8 9 10 11
2261           --------------------------
2262    row 3  |. . . d d d o o o o  o  o
2263    row 4  |. . . d d d o o o o  o  o
2264    row 5  |. . . d d d o o o o  o  o
2265           --------------------------
2266 .ve
2267 
2268   Thus, any entries in the d locations are stored in the d (diagonal)
2269   submatrix, and any entries in the o locations are stored in the
2270   o (off-diagonal) submatrix.  Note that the d matrix is stored in
2271   `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.
2272 
2273   Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2274   plus the diagonal part of the d matrix,
2275   and `o_nz` should indicate the number of block nonzeros per row in the o matrix
2276 
2277   In general, for PDE problems in which most nonzeros are near the diagonal,
2278   one expects `d_nz` >> `o_nz`.
2279 
2280 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`, `PetscSplitOwnership()`
2281 @*/
PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
{
  PetscFunctionBegin;
  /* Validate the matrix object and check that bs is the same on all ranks before dispatching */
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidType(B, 1);
  PetscValidLogicalCollectiveInt(B, bs, 2);
  /* Dispatch to the type-specific implementation registered as "MatMPISBAIJSetPreallocation_C";
     a no-op if B is not of a type that composed this function */
  PetscTryMethod(B, "MatMPISBAIJSetPreallocation_C", (Mat, PetscInt, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, bs, d_nz, d_nnz, o_nz, o_nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2291 
2292 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2293 /*@
2294   MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format, `MATSBAIJ`,
2295   (block compressed row).  For good matrix assembly performance
2296   the user should preallocate the matrix storage by setting the parameters
2297   `d_nz` (or `d_nnz`) and `o_nz` (or `o_nnz`).
2298 
2299   Collective
2300 
2301   Input Parameters:
2302 + comm  - MPI communicator
2303 . bs    - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row
2304           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()`
2305 . m     - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given)
2306           This value should be the same as the local size used in creating the
2307           y vector for the matrix-vector product y = Ax.
2308 . n     - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given)
2309           This value should be the same as the local size used in creating the
2310           x vector for the matrix-vector product y = Ax.
2311 . M     - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
2312 . N     - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
2313 . d_nz  - number of block nonzeros per block row in diagonal portion of local
2314           submatrix (same for all local rows)
. d_nnz - array containing the number of block nonzeros in the various block rows
          in the upper triangular portion of the diagonal portion of the local submatrix
          (possibly different for each block row) or `NULL`.
2318           If you plan to factor the matrix you must leave room for the diagonal entry and
2319           set its value even if it is zero.
2320 . o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2321           submatrix (same for all local rows).
2322 - o_nnz - array containing the number of nonzeros in the various block rows of the
2323           off-diagonal portion of the local submatrix (possibly different for
2324           each block row) or `NULL`.
2325 
2326   Output Parameter:
2327 . A - the matrix
2328 
2329   Options Database Keys:
2330 + -mat_no_unroll  - uses code that does not unroll the loops in the
2331                     block calculations (much slower)
2332 . -mat_block_size - size of the blocks to use
2333 - -mat_mpi        - use the parallel matrix data structures even on one processor
2334                     (defaults to using SeqBAIJ format on one processor)
2335 
2336   Level: intermediate
2337 
2338   Notes:
2339   It is recommended that one use `MatCreateFromOptions()` or the `MatCreate()`, `MatSetType()` and/or `MatSetFromOptions()`,
2340   MatXXXXSetPreallocation() paradigm instead of this routine directly.
2341   [MatXXXXSetPreallocation() is, for example, `MatSeqAIJSetPreallocation()`]
2342 
2343   The number of rows and columns must be divisible by blocksize.
2344   This matrix type does not support complex Hermitian operation.
2345 
2346   The user MUST specify either the local or global matrix dimensions
2347   (possibly both).
2348 
2349   If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
  then it must be used on all processors that share the object for that argument.
2351 
2352   If `m` and `n` are not `PETSC_DECIDE`, then the values determines the `PetscLayout` of the matrix and the ranges returned by
2353   `MatGetOwnershipRange()`,  `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`.
2354 
2355   If the *_nnz parameter is given then the *_nz parameter is ignored
2356 
2357   Storage Information:
2358   For a square global matrix we define each processor's diagonal portion
2359   to be its local rows and the corresponding columns (a square submatrix);
2360   each processor's off-diagonal portion encompasses the remainder of the
2361   local matrix (a rectangular submatrix).
2362 
2363   The user can specify preallocated storage for the diagonal part of
2364   the local submatrix with either `d_nz` or `d_nnz` (not both). Set
2365   `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2366   memory allocation. Likewise, specify preallocated storage for the
2367   off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2368 
2369   Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2370   the figure below we depict these three local rows and all columns (0-11).
2371 
2372 .vb
2373            0 1 2 3 4 5 6 7 8 9 10 11
2374           --------------------------
2375    row 3  |. . . d d d o o o o  o  o
2376    row 4  |. . . d d d o o o o  o  o
2377    row 5  |. . . d d d o o o o  o  o
2378           --------------------------
2379 .ve
2380 
2381   Thus, any entries in the d locations are stored in the d (diagonal)
2382   submatrix, and any entries in the o locations are stored in the
2383   o (off-diagonal) submatrix. Note that the d matrix is stored in
2384   `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.
2385 
2386   Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2387   plus the diagonal part of the d matrix,
2388   and `o_nz` should indicate the number of block nonzeros per row in the o matrix.
2389   In general, for PDE problems in which most nonzeros are near the diagonal,
2390   one expects `d_nz` >> `o_nz`.
2391 
2392 .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`,
2393           `MatGetOwnershipRange()`,  `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
2394 @*/
2395 PetscErrorCode MatCreateSBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A)
2396 {
2397   PetscMPIInt size;
2398 
2399   PetscFunctionBegin;
2400   PetscCall(MatCreate(comm, A));
2401   PetscCall(MatSetSizes(*A, m, n, M, N));
2402   PetscCallMPI(MPI_Comm_size(comm, &size));
2403   if (size > 1) {
2404     PetscCall(MatSetType(*A, MATMPISBAIJ));
2405     PetscCall(MatMPISBAIJSetPreallocation(*A, bs, d_nz, d_nnz, o_nz, o_nnz));
2406   } else {
2407     PetscCall(MatSetType(*A, MATSEQSBAIJ));
2408     PetscCall(MatSeqSBAIJSetPreallocation(*A, bs, d_nz, d_nnz));
2409   }
2410   PetscFunctionReturn(PETSC_SUCCESS);
2411 }
2412 
static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin, MatDuplicateOption cpvalues, Mat *newmat)
{
  Mat           mat;
  Mat_MPISBAIJ *a, *oldmat = (Mat_MPISBAIJ *)matin->data;
  PetscInt      len = 0, nt, bs = matin->rmap->bs, mbs = oldmat->mbs;
  PetscScalar  *array;

  PetscFunctionBegin;
  *newmat = NULL;

  /* Create a new matrix with the same type and parallel layout as matin */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)matin), &mat));
  PetscCall(MatSetSizes(mat, matin->rmap->n, matin->cmap->n, matin->rmap->N, matin->cmap->N));
  PetscCall(MatSetType(mat, ((PetscObject)matin)->type_name));
  PetscCall(PetscLayoutReference(matin->rmap, &mat->rmap));
  PetscCall(PetscLayoutReference(matin->cmap, &mat->cmap));

  if (matin->hash_active) {
    /* matin is still in hash-table assembly mode: nothing to copy, just set up the duplicate */
    PetscCall(MatSetUp(mat));
  } else {
    mat->factortype   = matin->factortype;
    mat->preallocated = PETSC_TRUE;
    mat->assembled    = PETSC_TRUE;
    mat->insertmode   = NOT_SET_VALUES;

    /* Copy scalar bookkeeping fields (block sizes/counts and rank information) */
    a      = (Mat_MPISBAIJ *)mat->data;
    a->bs2 = oldmat->bs2;
    a->mbs = oldmat->mbs;
    a->nbs = oldmat->nbs;
    a->Mbs = oldmat->Mbs;
    a->Nbs = oldmat->Nbs;

    a->size         = oldmat->size;
    a->rank         = oldmat->rank;
    a->donotstash   = oldmat->donotstash;
    a->roworiented  = oldmat->roworiented;
    a->rowindices   = NULL; /* per-matrix MatGetRow() scratch space is not shared */
    a->rowvalues    = NULL;
    a->getrowactive = PETSC_FALSE;
    a->barray       = NULL;
    a->rstartbs     = oldmat->rstartbs;
    a->rendbs       = oldmat->rendbs;
    a->cstartbs     = oldmat->cstartbs;
    a->cendbs       = oldmat->cendbs;

    /* hash table stuff: copy the configuration but not the table itself (rebuilt on demand) */
    a->ht           = NULL;
    a->hd           = NULL;
    a->ht_size      = 0;
    a->ht_flag      = oldmat->ht_flag;
    a->ht_fact      = oldmat->ht_fact;
    a->ht_total_ct  = 0;
    a->ht_insert_ct = 0;

    PetscCall(PetscArraycpy(a->rangebs, oldmat->rangebs, a->size + 2));
    /* Deep-copy the global-to-local column map used during assembly, if present */
    if (oldmat->colmap) {
#if defined(PETSC_USE_CTABLE)
      PetscCall(PetscHMapIDuplicate(oldmat->colmap, &a->colmap));
#else
      PetscCall(PetscMalloc1(a->Nbs, &a->colmap));
      PetscCall(PetscArraycpy(a->colmap, oldmat->colmap, a->Nbs));
#endif
    } else a->colmap = NULL;

    /* Deep-copy garray (global block-column indices of the off-diagonal part B) when non-empty */
    if (oldmat->garray && (len = ((Mat_SeqBAIJ *)oldmat->B->data)->nbs)) {
      PetscCall(PetscMalloc1(len, &a->garray));
      PetscCall(PetscArraycpy(a->garray, oldmat->garray, len));
    } else a->garray = NULL;

    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)matin), matin->rmap->bs, &mat->bstash));
    PetscCall(VecDuplicate(oldmat->lvec, &a->lvec));
    PetscCall(VecScatterCopy(oldmat->Mvctx, &a->Mvctx));

    PetscCall(VecDuplicate(oldmat->slvec0, &a->slvec0));
    PetscCall(VecDuplicate(oldmat->slvec1, &a->slvec1));

    /* slvec1a/slvec1b (and slvec0b) alias the front/back portions of slvec1 (slvec0) storage:
       the first bs*mbs entries are the "a" part, the remainder the "b" part */
    PetscCall(VecGetLocalSize(a->slvec1, &nt));
    PetscCall(VecGetArray(a->slvec1, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, bs * mbs, array, &a->slvec1a));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec1b));
    PetscCall(VecRestoreArray(a->slvec1, &array));
    PetscCall(VecGetArray(a->slvec0, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec0b));
    PetscCall(VecRestoreArray(a->slvec0, &array));

    /* ierr =  VecScatterCopy(oldmat->sMvctx,&a->sMvctx); - not written yet, replaced by the lazy trick: */
    PetscCall(PetscObjectReference((PetscObject)oldmat->sMvctx));
    a->sMvctx = oldmat->sMvctx;

    /* Duplicate the diagonal (A) and off-diagonal (B) sequential blocks last */
    PetscCall(MatDuplicate(oldmat->A, cpvalues, &a->A));
    PetscCall(MatDuplicate(oldmat->B, cpvalues, &a->B));
  }
  PetscCall(PetscFunctionListDuplicate(((PetscObject)matin)->qlist, &((PetscObject)mat)->qlist));
  *newmat = mat;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2508 
2509 /* Used for both MPIBAIJ and MPISBAIJ matrices */
2510 #define MatLoad_MPISBAIJ_Binary MatLoad_MPIBAIJ_Binary
2511 
2512 static PetscErrorCode MatLoad_MPISBAIJ(Mat mat, PetscViewer viewer)
2513 {
2514   PetscBool isbinary;
2515 
2516   PetscFunctionBegin;
2517   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
2518   PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Viewer type %s not yet supported for reading %s matrices", ((PetscObject)viewer)->type_name, ((PetscObject)mat)->type_name);
2519   PetscCall(MatLoad_MPISBAIJ_Binary(mat, viewer));
2520   PetscFunctionReturn(PETSC_SUCCESS);
2521 }
2522 
/* Row-wise maximum absolute value for a symmetric parallel matrix: since only the upper
   triangle is stored, an entry in off-diagonal block column c also contributes to row c
   on the process that owns c; those contributions are accumulated locally and shipped
   to their owning (higher-ranked) processes. */
static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A, Vec v, PetscInt idx[])
{
  Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
  Mat_SeqBAIJ  *b = (Mat_SeqBAIJ *)a->B->data;
  PetscReal     atmp;
  PetscReal    *work, *svalues, *rvalues;
  PetscInt      i, bs, mbs, *bi, *bj, brow, j, ncols, krow, kcol, col, row, Mbs, bcol;
  PetscMPIInt   rank, size;
  PetscInt     *rowners_bs, count, source;
  PetscScalar  *va;
  MatScalar    *ba;
  MPI_Status    stat;

  PetscFunctionBegin;
  /* Returning the column index of the max (idx) is not implemented */
  PetscCheck(!idx, PETSC_COMM_SELF, PETSC_ERR_SUP, "Send email to petsc-maint@mcs.anl.gov");
  /* Start with the row maxima of the diagonal block A, written into v */
  PetscCall(MatGetRowMaxAbs(a->A, v, NULL));
  PetscCall(VecGetArray(v, &va));

  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));

  bs  = A->rmap->bs;
  mbs = a->mbs;
  Mbs = a->Mbs;
  ba  = b->a; /* raw values of the off-diagonal block B, walked sequentially below */
  bi  = b->i;
  bj  = b->j;

  /* find ownerships */
  rowners_bs = A->rmap->range;

  /* each proc creates an array to be distributed; work[col] accumulates the max |value|
     contributed by this process to global row 'col' owned elsewhere */
  PetscCall(PetscCalloc1(bs * Mbs, &work));

  /* row_max for B; the last rank owns no stored off-diagonal part (upper triangle only) */
  if (rank != size - 1) {
    for (i = 0; i < mbs; i++) {
      ncols = bi[1] - bi[0];
      bi++;
      brow = bs * i;
      for (j = 0; j < ncols; j++) {
        bcol = bs * (*bj);
        for (kcol = 0; kcol < bs; kcol++) {
          col = bcol + kcol;           /* local col index */
          col += rowners_bs[rank + 1]; /* global col index */
          /* NOTE(review): this offsets by the end of the local ownership range, i.e. it
             assumes B's compacted column indices map contiguously to columns to the right
             of the diagonal block -- confirm against the garray layout for MPISBAIJ */
          for (krow = 0; krow < bs; krow++) {
            atmp = PetscAbsScalar(*ba);
            ba++;
            row = brow + krow; /* local row index */
            if (PetscRealPart(va[row]) < atmp) va[row] = atmp;
            if (work[col] < atmp) work[col] = atmp;
          }
        }
        bj++;
      }
    }

    /* send values to its owners: each higher-ranked process gets its slice of work[] */
    for (PetscMPIInt dest = rank + 1; dest < size; dest++) {
      svalues = work + rowners_bs[dest];
      count   = rowners_bs[dest + 1] - rowners_bs[dest];
      PetscCallMPI(MPIU_Send(svalues, count, MPIU_REAL, dest, rank, PetscObjectComm((PetscObject)A)));
    }
  }

  /* receive values: rank receives one message from each lower-ranked process (0..rank-1)
     and folds the incoming maxima into its own rows */
  if (rank) {
    rvalues = work;
    count   = rowners_bs[rank + 1] - rowners_bs[rank];
    for (source = 0; source < rank; source++) {
      PetscCallMPI(MPIU_Recv(rvalues, count, MPIU_REAL, MPI_ANY_SOURCE, MPI_ANY_TAG, PetscObjectComm((PetscObject)A), &stat));
      /* process values */
      for (i = 0; i < count; i++) {
        if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i];
      }
    }
  }

  PetscCall(VecRestoreArray(v, &va));
  PetscCall(PetscFree(work));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2605 
/* SOR/SSOR relaxation for MPISBAIJ (bs == 1 only). Because only the upper triangle is
   stored, the "lower triangular" contribution is obtained by scattering -B^T*xx (and xx)
   through the symmetric scatter sMvctx before each local sweep. */
static PetscErrorCode MatSOR_MPISBAIJ(Mat matin, Vec bb, PetscReal omega, MatSORType flag, PetscReal fshift, PetscInt its, PetscInt lits, Vec xx)
{
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ *)matin->data;
  PetscInt           mbs = mat->mbs, bs = matin->rmap->bs;
  PetscScalar       *x, *ptr, *from;
  Vec                bb1;
  const PetscScalar *b;

  PetscFunctionBegin;
  PetscCheck(its > 0 && lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " and local its %" PetscInt_FMT " both positive", its, lits);
  PetscCheck(bs <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "SSOR for block size > 1 is not yet implemented");

  if (flag == SOR_APPLY_UPPER) {
    /* Pure local operation: delegate to the diagonal block's SOR */
    PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      /* First iteration with zero guess needs no off-process contribution */
      PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, lits, xx));
      its--;
    }

    PetscCall(VecDuplicate(bb, &bb1));
    while (its--) {
      /* lower triangular part: slvec0b = - B^T*xx */
      PetscCall((*mat->B->ops->multtranspose)(mat->B, xx, mat->slvec0b));

      /* copy xx into slvec0a */
      PetscCall(VecGetArray(mat->slvec0, &ptr));
      PetscCall(VecGetArray(xx, &x));
      PetscCall(PetscArraycpy(ptr, x, bs * mbs));
      PetscCall(VecRestoreArray(mat->slvec0, &ptr));

      /* negate so the scatter below subtracts both contributions */
      PetscCall(VecScale(mat->slvec0, -1.0));

      /* copy bb into slvec1a */
      PetscCall(VecGetArray(mat->slvec1, &ptr));
      PetscCall(VecGetArrayRead(bb, &b));
      PetscCall(PetscArraycpy(ptr, b, bs * mbs));
      PetscCall(VecRestoreArray(mat->slvec1, &ptr));

      /* set slvec1b = 0 */
      PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
      PetscCall(VecZeroEntries(mat->slvec1b));

      /* accumulate remote contributions: slvec1 += scatter(slvec0) */
      PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecRestoreArray(xx, &x));
      PetscCall(VecRestoreArrayRead(bb, &b));
      PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));

      /* upper triangular part: bb1 = bb1 - B*x */
      PetscCall((*mat->B->ops->multadd)(mat->B, mat->slvec1b, mat->slvec1a, bb1));

      /* local diagonal sweep */
      PetscCall((*mat->A->ops->sor)(mat->A, bb1, omega, SOR_SYMMETRIC_SWEEP, fshift, lits, lits, xx));
    }
    PetscCall(VecDestroy(&bb1));
  } else if ((flag & SOR_LOCAL_FORWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
    PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
  } else if ((flag & SOR_LOCAL_BACKWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
    PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
  } else if (flag & SOR_EISENSTAT) {
    /* Eisenstat's trick: combine forward and backward sweeps with a diagonal scaling
       so each "iteration" costs roughly one multiply with the off-diagonal part */
    Vec                xx1;
    PetscBool          hasop;
    const PetscScalar *diag;
    PetscScalar       *sl, scale = (omega - 2.0) / omega;
    PetscInt           i, n;

    /* Lazily created work vectors, cached on the matrix and freed in MatDestroy */
    if (!mat->xx1) {
      PetscCall(VecDuplicate(bb, &mat->xx1));
      PetscCall(VecDuplicate(bb, &mat->bb1));
    }
    xx1 = mat->xx1;
    bb1 = mat->bb1;

    PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP), fshift, lits, 1, xx));

    if (!mat->diag) {
      /* this is wrong for same matrix with new nonzero values */
      PetscCall(MatCreateVecs(matin, &mat->diag, NULL));
      PetscCall(MatGetDiagonal(matin, mat->diag));
    }
    PetscCall(MatHasOperation(matin, MATOP_MULT_DIAGONAL_BLOCK, &hasop));

    if (hasop) {
      PetscCall(MatMultDiagonalBlock(matin, xx, bb1));
      PetscCall(VecAYPX(mat->slvec1a, scale, bb));
    } else {
      /*
          These two lines are replaced by code that may be a bit faster for a good compiler
      PetscCall(VecPointwiseMult(mat->slvec1a,mat->diag,xx));
      PetscCall(VecAYPX(mat->slvec1a,scale,bb));
      */
      PetscCall(VecGetArray(mat->slvec1a, &sl));
      PetscCall(VecGetArrayRead(mat->diag, &diag));
      PetscCall(VecGetArrayRead(bb, &b));
      PetscCall(VecGetArray(xx, &x));
      PetscCall(VecGetLocalSize(xx, &n));
      if (omega == 1.0) {
        /* scale == -1, so b + scale*diag*x simplifies to b - diag*x */
        for (i = 0; i < n; i++) sl[i] = b[i] - diag[i] * x[i];
        PetscCall(PetscLogFlops(2.0 * n));
      } else {
        for (i = 0; i < n; i++) sl[i] = b[i] + scale * diag[i] * x[i];
        PetscCall(PetscLogFlops(3.0 * n));
      }
      PetscCall(VecRestoreArray(mat->slvec1a, &sl));
      PetscCall(VecRestoreArrayRead(mat->diag, &diag));
      PetscCall(VecRestoreArrayRead(bb, &b));
      PetscCall(VecRestoreArray(xx, &x));
    }

    /* multiply off-diagonal portion of matrix */
    PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
    PetscCall(VecZeroEntries(mat->slvec1b));
    PetscCall((*mat->B->ops->multtranspose)(mat->B, xx, mat->slvec0b));
    PetscCall(VecGetArray(mat->slvec0, &from));
    PetscCall(VecGetArray(xx, &x));
    PetscCall(PetscArraycpy(from, x, bs * mbs));
    PetscCall(VecRestoreArray(mat->slvec0, &from));
    PetscCall(VecRestoreArray(xx, &x));
    PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
    PetscCall((*mat->B->ops->multadd)(mat->B, mat->slvec1b, mat->slvec1a, mat->slvec1a));

    /* local sweep */
    PetscCall((*mat->A->ops->sor)(mat->A, mat->slvec1a, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP), fshift, lits, 1, xx1));
    PetscCall(VecAXPY(xx, 1.0, xx1));
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatSORType is not supported for SBAIJ matrix format");
  PetscFunctionReturn(PETSC_SUCCESS);
}
2737 
2738 /*@
2739   MatCreateMPISBAIJWithArrays - creates a `MATMPISBAIJ` matrix using arrays that contain in standard CSR format for the local rows.
2740 
2741   Collective
2742 
2743   Input Parameters:
2744 + comm - MPI communicator
2745 . bs   - the block size, only a block size of 1 is supported
2746 . m    - number of local rows (Cannot be `PETSC_DECIDE`)
2747 . n    - This value should be the same as the local size used in creating the
2748          x vector for the matrix-vector product $ y = Ax $. (or `PETSC_DECIDE` to have
2749          calculated if `N` is given) For square matrices `n` is almost always `m`.
2750 . M    - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
2751 . N    - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
2752 . i    - row indices; that is i[0] = 0, i[row] = i[row-1] + number of block elements in that row block row of the matrix
2753 . j    - column indices
2754 - a    - matrix values
2755 
2756   Output Parameter:
2757 . mat - the matrix
2758 
2759   Level: intermediate
2760 
2761   Notes:
2762   The `i`, `j`, and `a` arrays ARE copied by this routine into the internal format used by PETSc;
2763   thus you CANNOT change the matrix entries by changing the values of `a` after you have
2764   called this routine. Use `MatCreateMPIAIJWithSplitArrays()` to avoid needing to copy the arrays.
2765 
2766   The `i` and `j` indices are 0 based, and `i` indices are indices corresponding to the local `j` array.
2767 
2768 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`,
2769           `MATMPIAIJ`, `MatCreateAIJ()`, `MatCreateMPIAIJWithSplitArrays()`, `MatMPISBAIJSetPreallocationCSR()`
2770 @*/
2771 PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, const PetscInt i[], const PetscInt j[], const PetscScalar a[], Mat *mat)
2772 {
2773   PetscFunctionBegin;
2774   PetscCheck(!i[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i (row indices) must start with 0");
2775   PetscCheck(m >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "local number of rows (m) cannot be PETSC_DECIDE, or negative");
2776   PetscCall(MatCreate(comm, mat));
2777   PetscCall(MatSetSizes(*mat, m, n, M, N));
2778   PetscCall(MatSetType(*mat, MATMPISBAIJ));
2779   PetscCall(MatMPISBAIJSetPreallocationCSR(*mat, bs, i, j, a));
2780   PetscFunctionReturn(PETSC_SUCCESS);
2781 }
2782 
2783 /*@
2784   MatMPISBAIJSetPreallocationCSR - Creates a sparse parallel matrix in `MATMPISBAIJ` format using the given nonzero structure and (optional) numerical values
2785 
2786   Collective
2787 
2788   Input Parameters:
2789 + B  - the matrix
2790 . bs - the block size
2791 . i  - the indices into `j` for the start of each local row (indices start with zero)
2792 . j  - the column indices for each local row (indices start with zero) these must be sorted for each row
2793 - v  - optional values in the matrix, pass `NULL` if not provided
2794 
2795   Level: advanced
2796 
2797   Notes:
2798   The `i`, `j`, and `v` arrays ARE copied by this routine into the internal format used by PETSc;
2799   thus you CANNOT change the matrix entries by changing the values of `v` after you have
2800   called this routine.
2801 
2802   Though this routine has Preallocation() in the name it also sets the exact nonzero locations of the matrix entries
2803   and usually the numerical values as well
2804 
2805   Any entries passed in that are below the diagonal are ignored
2806 
2807 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIBAIJSetPreallocation()`, `MatCreateAIJ()`, `MATMPIAIJ`,
2808           `MatCreateMPISBAIJWithArrays()`
2809 @*/
PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat B, PetscInt bs, const PetscInt i[], const PetscInt j[], const PetscScalar v[])
{
  PetscFunctionBegin;
  /* Dispatch to the type-specific implementation composed on B (no-op for other types) */
  PetscTryMethod(B, "MatMPISBAIJSetPreallocationCSR_C", (Mat, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[]), (B, bs, i, j, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2816 
2817 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPISBAIJ(MPI_Comm comm, Mat inmat, PetscInt n, MatReuse scall, Mat *outmat)
2818 {
2819   PetscInt     m, N, i, rstart, nnz, Ii, bs, cbs;
2820   PetscInt    *indx;
2821   PetscScalar *values;
2822 
2823   PetscFunctionBegin;
2824   PetscCall(MatGetSize(inmat, &m, &N));
2825   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
2826     Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ *)inmat->data;
2827     PetscInt     *dnz, *onz, mbs, Nbs, nbs;
2828     PetscInt     *bindx, rmax = a->rmax, j;
2829     PetscMPIInt   rank, size;
2830 
2831     PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2832     mbs = m / bs;
2833     Nbs = N / cbs;
2834     if (n == PETSC_DECIDE) PetscCall(PetscSplitOwnershipBlock(comm, cbs, &n, &N));
2835     nbs = n / cbs;
2836 
2837     PetscCall(PetscMalloc1(rmax, &bindx));
2838     MatPreallocateBegin(comm, mbs, nbs, dnz, onz); /* inline function, output __end and __rstart are used below */
2839 
2840     PetscCallMPI(MPI_Comm_rank(comm, &rank));
2841     PetscCallMPI(MPI_Comm_rank(comm, &size));
2842     if (rank == size - 1) {
2843       /* Check sum(nbs) = Nbs */
2844       PetscCheck(__end == Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Sum of local block columns %" PetscInt_FMT " != global block columns %" PetscInt_FMT, __end, Nbs);
2845     }
2846 
2847     rstart = __rstart; /* block rstart of *outmat; see inline function MatPreallocateBegin */
2848     PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2849     for (i = 0; i < mbs; i++) {
2850       PetscCall(MatGetRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL)); /* non-blocked nnz and indx */
2851       nnz = nnz / bs;
2852       for (j = 0; j < nnz; j++) bindx[j] = indx[j * bs] / bs;
2853       PetscCall(MatPreallocateSet(i + rstart, nnz, bindx, dnz, onz));
2854       PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL));
2855     }
2856     PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2857     PetscCall(PetscFree(bindx));
2858 
2859     PetscCall(MatCreate(comm, outmat));
2860     PetscCall(MatSetSizes(*outmat, m, n, PETSC_DETERMINE, PETSC_DETERMINE));
2861     PetscCall(MatSetBlockSizes(*outmat, bs, cbs));
2862     PetscCall(MatSetType(*outmat, MATSBAIJ));
2863     PetscCall(MatSeqSBAIJSetPreallocation(*outmat, bs, 0, dnz));
2864     PetscCall(MatMPISBAIJSetPreallocation(*outmat, bs, 0, dnz, 0, onz));
2865     MatPreallocateEnd(dnz, onz);
2866   }
2867 
2868   /* numeric phase */
2869   PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2870   PetscCall(MatGetOwnershipRange(*outmat, &rstart, NULL));
2871 
2872   PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2873   for (i = 0; i < m; i++) {
2874     PetscCall(MatGetRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2875     Ii = i + rstart;
2876     PetscCall(MatSetValues(*outmat, 1, &Ii, nnz, indx, values, INSERT_VALUES));
2877     PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2878   }
2879   PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2880   PetscCall(MatAssemblyBegin(*outmat, MAT_FINAL_ASSEMBLY));
2881   PetscCall(MatAssemblyEnd(*outmat, MAT_FINAL_ASSEMBLY));
2882   PetscFunctionReturn(PETSC_SUCCESS);
2883 }
2884