xref: /petsc/src/mat/impls/sbaij/mpi/mpisbaij.c (revision b0c98d1d8bc8fbb369fd6b04fbfd2a9276aa7d86)
1 #include <../src/mat/impls/baij/mpi/mpibaij.h> /*I "petscmat.h" I*/
2 #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
3 #include <../src/mat/impls/sbaij/seq/sbaij.h>
4 #include <petscblaslapack.h>
5 
6 static PetscErrorCode MatDestroy_MPISBAIJ(Mat mat)
7 {
8   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
9 
10   PetscFunctionBegin;
11   PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ",Cols=%" PetscInt_FMT, mat->rmap->N, mat->cmap->N));
12   PetscCall(MatStashDestroy_Private(&mat->stash));
13   PetscCall(MatStashDestroy_Private(&mat->bstash));
14   PetscCall(MatDestroy(&baij->A));
15   PetscCall(MatDestroy(&baij->B));
16 #if defined(PETSC_USE_CTABLE)
17   PetscCall(PetscHMapIDestroy(&baij->colmap));
18 #else
19   PetscCall(PetscFree(baij->colmap));
20 #endif
21   PetscCall(PetscFree(baij->garray));
22   PetscCall(VecDestroy(&baij->lvec));
23   PetscCall(VecScatterDestroy(&baij->Mvctx));
24   PetscCall(VecDestroy(&baij->slvec0));
25   PetscCall(VecDestroy(&baij->slvec0b));
26   PetscCall(VecDestroy(&baij->slvec1));
27   PetscCall(VecDestroy(&baij->slvec1a));
28   PetscCall(VecDestroy(&baij->slvec1b));
29   PetscCall(VecScatterDestroy(&baij->sMvctx));
30   PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));
31   PetscCall(PetscFree(baij->barray));
32   PetscCall(PetscFree(baij->hd));
33   PetscCall(VecDestroy(&baij->diag));
34   PetscCall(VecDestroy(&baij->bb1));
35   PetscCall(VecDestroy(&baij->xx1));
36 #if defined(PETSC_USE_REAL_MAT_SINGLE)
37   PetscCall(PetscFree(baij->setvaluescopy));
38 #endif
39   PetscCall(PetscFree(baij->in_loc));
40   PetscCall(PetscFree(baij->v_loc));
41   PetscCall(PetscFree(baij->rangebs));
42   PetscCall(PetscFree(mat->data));
43 
44   PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
45   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatStoreValues_C", NULL));
46   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatRetrieveValues_C", NULL));
47   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocation_C", NULL));
48   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocationCSR_C", NULL));
49 #if defined(PETSC_HAVE_ELEMENTAL)
50   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_elemental_C", NULL));
51 #endif
52 #if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
53   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_scalapack_C", NULL));
54 #endif
55   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpiaij_C", NULL));
56   PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpibaij_C", NULL));
57   PetscFunctionReturn(PETSC_SUCCESS);
58 }
59 
60 /* defines MatSetValues_MPI_Hash(), MatAssemblyBegin_MPI_Hash(), MatAssemblyEnd_MPI_Hash(), MatSetUp_MPI_Hash() */
61 #define TYPE SBAIJ
62 #define TYPE_SBAIJ
63 #include "../src/mat/impls/aij/mpi/mpihashmat.h"
64 #undef TYPE
65 #undef TYPE_SBAIJ
66 
67 #if defined(PETSC_HAVE_ELEMENTAL)
68 PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat, MatType, MatReuse, Mat *);
69 #endif
70 #if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
71 PETSC_INTERN PetscErrorCode MatConvert_SBAIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
72 #endif
73 
74 /* This could be moved to matimpl.h */
75 static PetscErrorCode MatPreallocateWithMats_Private(Mat B, PetscInt nm, Mat X[], PetscBool symm[], PetscBool fill)
76 {
77   Mat       preallocator;
78   PetscInt  r, rstart, rend;
79   PetscInt  bs, i, m, n, M, N;
80   PetscBool cong = PETSC_TRUE;
81 
82   PetscFunctionBegin;
83   PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
84   PetscValidLogicalCollectiveInt(B, nm, 2);
85   for (i = 0; i < nm; i++) {
86     PetscValidHeaderSpecific(X[i], MAT_CLASSID, 3);
87     PetscCall(PetscLayoutCompare(B->rmap, X[i]->rmap, &cong));
88     PetscCheck(cong, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for different layouts");
89   }
90   PetscValidLogicalCollectiveBool(B, fill, 5);
91   PetscCall(MatGetBlockSize(B, &bs));
92   PetscCall(MatGetSize(B, &M, &N));
93   PetscCall(MatGetLocalSize(B, &m, &n));
94   PetscCall(MatCreate(PetscObjectComm((PetscObject)B), &preallocator));
95   PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
96   PetscCall(MatSetBlockSize(preallocator, bs));
97   PetscCall(MatSetSizes(preallocator, m, n, M, N));
98   PetscCall(MatSetUp(preallocator));
99   PetscCall(MatGetOwnershipRange(preallocator, &rstart, &rend));
100   for (r = rstart; r < rend; ++r) {
101     PetscInt           ncols;
102     const PetscInt    *row;
103     const PetscScalar *vals;
104 
105     for (i = 0; i < nm; i++) {
106       PetscCall(MatGetRow(X[i], r, &ncols, &row, &vals));
107       PetscCall(MatSetValues(preallocator, 1, &r, ncols, row, vals, INSERT_VALUES));
108       if (symm && symm[i]) PetscCall(MatSetValues(preallocator, ncols, row, 1, &r, vals, INSERT_VALUES));
109       PetscCall(MatRestoreRow(X[i], r, &ncols, &row, &vals));
110     }
111   }
112   PetscCall(MatAssemblyBegin(preallocator, MAT_FINAL_ASSEMBLY));
113   PetscCall(MatAssemblyEnd(preallocator, MAT_FINAL_ASSEMBLY));
114   PetscCall(MatPreallocatorPreallocate(preallocator, fill, B));
115   PetscCall(MatDestroy(&preallocator));
116   PetscFunctionReturn(PETSC_SUCCESS);
117 }
118 
/* Generic fallback conversion from MPISBAIJ: copies the stored (upper-triangular)
   entries row by row into a matrix of type newtype, explicitly mirroring each
   entry below the diagonal so the target holds the full symmetric matrix. */
PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Basic(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat      B;
  PetscInt r;

  PetscFunctionBegin;
  if (reuse != MAT_REUSE_MATRIX) {
    PetscBool symm = PETSC_TRUE, isdense;
    PetscInt  bs;

    /* Create the target with the same layout and block size as A */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
    PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetType(B, newtype));
    PetscCall(MatGetBlockSize(A, &bs));
    PetscCall(MatSetBlockSize(B, bs));
    PetscCall(PetscLayoutSetUp(B->rmap));
    PetscCall(PetscLayoutSetUp(B->cmap));
    PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, ""));
    if (!isdense) {
      /* Sparse target: preallocate from the symmetrized pattern of A
         (symm = PETSC_TRUE mirrors each entry across the diagonal) */
      PetscCall(MatGetRowUpperTriangular(A));
      PetscCall(MatPreallocateWithMats_Private(B, 1, &A, &symm, PETSC_TRUE));
      PetscCall(MatRestoreRowUpperTriangular(A));
    } else {
      /* Dense target: no preallocation required */
      PetscCall(MatSetUp(B));
    }
  } else {
    B = *newmat;
    PetscCall(MatZeroEntries(B));
  }

  PetscCall(MatGetRowUpperTriangular(A));
  for (r = A->rmap->rstart; r < A->rmap->rend; r++) {
    PetscInt           ncols;
    const PetscInt    *row;
    const PetscScalar *vals;

    PetscCall(MatGetRow(A, r, &ncols, &row, &vals));
    /* Insert the stored row, then its mirror column below the diagonal */
    PetscCall(MatSetValues(B, 1, &r, ncols, row, vals, INSERT_VALUES));
#if defined(PETSC_USE_COMPLEX)
    if (A->hermitian == PETSC_BOOL3_TRUE) {
      PetscInt i;
      /* Hermitian matrix: mirrored entries are complex conjugates */
      for (i = 0; i < ncols; i++) PetscCall(MatSetValue(B, row[i], r, PetscConj(vals[i]), INSERT_VALUES));
    } else {
      PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
    }
#else
    PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
#endif
    PetscCall(MatRestoreRow(A, r, &ncols, &row, &vals));
  }
  PetscCall(MatRestoreRowUpperTriangular(A));
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

  if (reuse == MAT_INPLACE_MATRIX) {
    /* Replace A's internals with B's, destroying the old A */
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
180 
181 static PetscErrorCode MatStoreValues_MPISBAIJ(Mat mat)
182 {
183   Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;
184 
185   PetscFunctionBegin;
186   PetscCall(MatStoreValues(aij->A));
187   PetscCall(MatStoreValues(aij->B));
188   PetscFunctionReturn(PETSC_SUCCESS);
189 }
190 
191 static PetscErrorCode MatRetrieveValues_MPISBAIJ(Mat mat)
192 {
193   Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;
194 
195   PetscFunctionBegin;
196   PetscCall(MatRetrieveValues(aij->A));
197   PetscCall(MatRetrieveValues(aij->B));
198   PetscFunctionReturn(PETSC_SUCCESS);
199 }
200 
/* Insert or add one scalar entry at local (row, col) of the diagonal block A of an
   MPISBAIJ matrix.  Relies on the caller (MatSetValues_MPISBAIJ) to have the working
   variables (brow, bcol, rp, ap, rmax, nrow, ridx, cidx, low, high, t, _i, N, bap)
   and the SeqSBAIJ data (a, ai, aj, aa, aimax, ailen, bs, bs2, A) in scope.
   orow/ocol are the original global indices, used only in the error message.
   The sorted block row is searched by bisection narrowing followed by a linear
   scan; if the block column is absent a new block is inserted (unless a->nonew
   forbids it), shifting later entries up. */
#define MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, orow, ocol) \
  do { \
    brow = row / bs; \
    rp   = aj + ai[brow]; \
    ap   = aa + bs2 * ai[brow]; \
    rmax = aimax[brow]; \
    nrow = ailen[brow]; \
    bcol = col / bs; \
    ridx = row % bs; \
    cidx = col % bs; \
    low  = 0; \
    high = nrow; \
    while (high - low > 3) { \
      t = (low + high) / 2; \
      if (rp[t] > bcol) high = t; \
      else low = t; \
    } \
    for (_i = low; _i < high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap + bs2 * _i + bs * cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value; \
        else *bap = value; \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    PetscCheck(a->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, brow, bcol, rmax, aa, ai, aj, rp, ap, aimax, a->nonew, MatScalar); \
    N = nrow++ - 1; \
    /* shift up all the later entries in this row */ \
    PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
    PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
    PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
    rp[_i]                          = bcol; \
    ap[bs2 * _i + bs * cidx + ridx] = value; \
  a_noinsert:; \
    ailen[brow] = nrow; \
  } while (0)
240 
/* Insert or add one scalar entry at local (row, col) of the off-diagonal block B of
   an MPISBAIJ matrix.  Mirror image of MatSetValues_SeqSBAIJ_A_Private, operating on
   the SeqBAIJ data (b, bi, bj, ba, bimax, bilen) that the caller has in scope; the
   remaining working variables are shared with the A macro.  orow/ocol are the
   original global indices, used only in the error message. */
#define MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, orow, ocol) \
  do { \
    brow = row / bs; \
    rp   = bj + bi[brow]; \
    ap   = ba + bs2 * bi[brow]; \
    rmax = bimax[brow]; \
    nrow = bilen[brow]; \
    bcol = col / bs; \
    ridx = row % bs; \
    cidx = col % bs; \
    low  = 0; \
    high = nrow; \
    while (high - low > 3) { \
      t = (low + high) / 2; \
      if (rp[t] > bcol) high = t; \
      else low = t; \
    } \
    for (_i = low; _i < high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap + bs2 * _i + bs * cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value; \
        else *bap = value; \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    PetscCheck(b->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B, b->mbs, bs2, nrow, brow, bcol, rmax, ba, bi, bj, rp, ap, bimax, b->nonew, MatScalar); \
    N = nrow++ - 1; \
    /* shift up all the later entries in this row */ \
    PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
    PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
    PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
    rp[_i]                          = bcol; \
    ap[bs2 * _i + bs * cidx + ridx] = value; \
  b_noinsert:; \
    bilen[brow] = nrow; \
  } while (0)
280 
281 /* Only add/insert a(i,j) with i<=j (blocks).
282    Any a(i,j) with i>j input by user is ignored or generates an error
283 */
static PetscErrorCode MatSetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  MatScalar     value;
  PetscBool     roworiented = baij->roworiented;
  PetscInt      i, j, row, col;
  PetscInt      rstart_orig = mat->rmap->rstart;
  PetscInt      rend_orig = mat->rmap->rend, cstart_orig = mat->cmap->rstart;
  PetscInt      cend_orig = mat->cmap->rend, bs = mat->rmap->bs;

  /* Some Variables required in the macro */
  Mat           A     = baij->A;
  Mat_SeqSBAIJ *a     = (Mat_SeqSBAIJ *)A->data;
  PetscInt     *aimax = a->imax, *ai = a->i, *ailen = a->ilen, *aj = a->j;
  MatScalar    *aa = a->a;

  Mat          B     = baij->B;
  Mat_SeqBAIJ *b     = (Mat_SeqBAIJ *)B->data;
  PetscInt    *bimax = b->imax, *bi = b->i, *bilen = b->ilen, *bj = b->j;
  MatScalar   *ba = b->a;

  PetscInt  *rp, ii, nrow, _i, rmax, N, brow, bcol;
  PetscInt   low, high, t, ridx, cidx, bs2 = a->bs2;
  MatScalar *ap, *bap;

  /* for stash */
  PetscInt   n_loc, *in_loc = NULL;
  MatScalar *v_loc = NULL;

  PetscFunctionBegin;
  if (!baij->donotstash) {
    /* Grow the scratch arrays used to filter a row before stashing it off-process */
    if (n > baij->n_loc) {
      PetscCall(PetscFree(baij->in_loc));
      PetscCall(PetscFree(baij->v_loc));
      PetscCall(PetscMalloc1(n, &baij->in_loc));
      PetscCall(PetscMalloc1(n, &baij->v_loc));

      baij->n_loc = n;
    }
    in_loc = baij->in_loc;
    v_loc  = baij->v_loc;
  }

  for (i = 0; i < m; i++) {
    if (im[i] < 0) continue; /* negative row indices are silently skipped */
    PetscCheck(im[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], mat->rmap->N - 1);
    if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
      row = im[i] - rstart_orig;                     /* local row index */
      for (j = 0; j < n; j++) {
        /* Entries whose block row is below the block diagonal are never stored */
        if (im[i] / bs > in[j] / bs) {
          PetscCheck(a->ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
          continue; /* ignore lower triangular blocks */
        }
        if (in[j] >= cstart_orig && in[j] < cend_orig) { /* diag entry (A) */
          col  = in[j] - cstart_orig;                    /* local col index */
          brow = row / bs;
          bcol = col / bs;
          if (brow > bcol) continue; /* ignore lower triangular blocks of A */
          if (roworiented) value = v[i * n + j];
          else value = v[i + j * m];
          MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, im[i], in[j]);
          /* PetscCall(MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv)); */
        } else if (in[j] < 0) {
          continue; /* negative column indices are silently skipped */
        } else {
          PetscCheck(in[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[j], mat->cmap->N - 1);
          /* off-diag entry (B) */
          if (mat->was_assembled) {
            /* Translate the global block column into B's compressed local numbering
               via the colmap (entries in the map are stored shifted by +1) */
            if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
#if defined(PETSC_USE_CTABLE)
            PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] / bs + 1, 0, &col));
            col = col - 1;
#else
            col = baij->colmap[in[j] / bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqSBAIJ *)baij->A->data)->nonew) {
              /* Block column not present in B: disassemble, after which B uses
                 global column numbering and the new entry can be inserted */
              PetscCall(MatDisAssemble_MPISBAIJ(mat));
              col = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqBAIJ_B_Private() */
              B     = baij->B;
              b     = (Mat_SeqBAIJ *)B->data;
              bimax = b->imax;
              bi    = b->i;
              bilen = b->ilen;
              bj    = b->j;
              ba    = b->a;
            } else col += in[j] % bs;
          } else col = in[j];
          if (roworiented) value = v[i * n + j];
          else value = v[i + j * m];
          MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, im[i], in[j]);
          /* PetscCall(MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv)); */
        }
      }
    } else { /* off processor entry */
      PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        /* Drop lower-triangular entries, then stash the filtered row for the
           owning process to pick up during assembly */
        n_loc          = 0;
        for (j = 0; j < n; j++) {
          if (im[i] / bs > in[j] / bs) continue; /* ignore lower triangular blocks */
          in_loc[n_loc] = in[j];
          if (roworiented) {
            v_loc[n_loc] = v[i * n + j];
          } else {
            v_loc[n_loc] = v[j * m + i];
          }
          n_loc++;
        }
        PetscCall(MatStashValuesRow_Private(&mat->stash, im[i], n_loc, in_loc, v_loc, PETSC_FALSE));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
399 
/* Insert or add one bs x bs block of values at block position (row, col) of a
   SeqSBAIJ matrix.  orow/ocol are the original (global) block indices, used only
   in error messages.  Blocks strictly below the diagonal (col < row) are either
   ignored or rejected depending on a->ignore_ltriangular. */
static inline PetscErrorCode MatSetValuesBlocked_SeqSBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
{
  Mat_SeqSBAIJ      *a = (Mat_SeqSBAIJ *)A->data;
  PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
  PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
  PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
  PetscBool          roworiented = a->roworiented;
  const PetscScalar *value       = v;
  MatScalar         *ap, *aa = a->a, *bap;

  PetscFunctionBegin;
  if (col < row) {
    PetscCheck(a->ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
    PetscFunctionReturn(PETSC_SUCCESS); /* ignore lower triangular block */
  }
  rp    = aj + ai[row];       /* block column indices of this block row */
  ap    = aa + bs2 * ai[row]; /* values of this block row */
  rmax  = imax[row];          /* allocated capacity of the row, in blocks */
  nrow  = ailen[row];         /* blocks currently stored in the row */
  value = v;
  low   = 0;
  high  = nrow;

  /* Bisection narrowing followed by a linear scan over the sorted block row */
  while (high - low > 7) {
    t = (low + high) / 2;
    if (rp[t] > col) high = t;
    else low = t;
  }
  for (i = low; i < high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      /* Block already present: add to or overwrite it.  Values arrive row-major
         when roworiented; storage within a block is column-major (stride bs). */
      bap = ap + bs2 * i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
          }
        } else {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++) {
            for (jj = 0; jj < bs; jj++) *bap++ += *value++;
          }
        } else {
          for (ii = 0; ii < bs; ii++) {
            for (jj = 0; jj < bs; jj++) *bap++ = *value++;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2; /* silently drop new blocks */
  PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new block index nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
  N = nrow++ - 1;
  high++;
  /* shift up all the later entries in this row */
  PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
  PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
  rp[i] = col;
  bap   = ap + bs2 * i;
  if (roworiented) {
    for (ii = 0; ii < bs; ii++) {
      for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
    }
  } else {
    for (ii = 0; ii < bs; ii++) {
      for (jj = 0; jj < bs; jj++) *bap++ = *value++;
    }
  }
noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(PETSC_SUCCESS);
}
479 
480 /*
481    This routine is exactly duplicated in mpibaij.c
482 */
static inline PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ *)A->data;
  PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
  PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
  PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
  PetscBool          roworiented = a->roworiented;
  const PetscScalar *value       = v;
  MatScalar         *ap, *aa = a->a, *bap;

  PetscFunctionBegin;
  /* Insert or add one bs x bs block at block position (row, col); unlike the
     SBAIJ variant there is no lower-triangular restriction here. */
  rp    = aj + ai[row];       /* block column indices of this block row */
  ap    = aa + bs2 * ai[row]; /* values of this block row */
  rmax  = imax[row];          /* allocated capacity of the row, in blocks */
  nrow  = ailen[row];         /* blocks currently stored in the row */
  low   = 0;
  high  = nrow;
  value = v;
  /* Bisection narrowing followed by a linear scan over the sorted block row */
  while (high - low > 7) {
    t = (low + high) / 2;
    if (rp[t] > col) high = t;
    else low = t;
  }
  for (i = low; i < high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      /* Block already present: add to or overwrite it.  Values arrive row-major
         when roworiented; storage within a block is column-major (stride bs). */
      bap = ap + bs2 * i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
          }
        } else {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++, value += bs) {
            for (jj = 0; jj < bs; jj++) bap[jj] += value[jj];
            bap += bs;
          }
        } else {
          for (ii = 0; ii < bs; ii++, value += bs) {
            for (jj = 0; jj < bs; jj++) bap[jj] = value[jj];
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2; /* silently drop new blocks */
  PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new global block indexed nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
  N = nrow++ - 1;
  high++;
  /* shift up all the later entries in this row */
  PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
  PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
  rp[i] = col;
  bap   = ap + bs2 * i;
  if (roworiented) {
    for (ii = 0; ii < bs; ii++) {
      for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
    }
  } else {
    for (ii = 0; ii < bs; ii++) {
      for (jj = 0; jj < bs; jj++) *bap++ = *value++;
    }
  }
noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(PETSC_SUCCESS);
}
559 
560 /*
561     This routine could be optimized by removing the need for the block copy below and passing stride information
562   to the above inline routines; similarly in MatSetValuesBlocked_MPIBAIJ()
563 */
static PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const MatScalar v[], InsertMode addv)
{
  Mat_MPISBAIJ    *baij = (Mat_MPISBAIJ *)mat->data;
  const MatScalar *value;
  MatScalar       *barray      = baij->barray;
  PetscBool        roworiented = baij->roworiented, ignore_ltriangular = ((Mat_SeqSBAIJ *)baij->A->data)->ignore_ltriangular;
  PetscInt         i, j, ii, jj, row, col, rstart = baij->rstartbs;
  PetscInt         rend = baij->rendbs, cstart = baij->cstartbs, stepval;
  PetscInt         cend = baij->cendbs, bs = mat->rmap->bs, bs2 = baij->bs2;

  PetscFunctionBegin;
  /* barray is a persistent bs x bs scratch block, allocated on first use */
  if (!barray) {
    PetscCall(PetscMalloc1(bs2, &barray));
    baij->barray = barray;
  }

  /* Distance (in scalars) between consecutive block rows/columns inside v */
  if (roworiented) {
    stepval = (n - 1) * bs;
  } else {
    stepval = (m - 1) * bs;
  }
  for (i = 0; i < m; i++) {
    if (im[i] < 0) continue; /* negative block row indices are silently skipped */
    PetscCheck(im[i] < baij->Mbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed row too large %" PetscInt_FMT " max %" PetscInt_FMT, im[i], baij->Mbs - 1);
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j = 0; j < n; j++) {
        if (im[i] > in[j]) {
          PetscCheck(ignore_ltriangular, PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
          continue; /* ignore lower triangular blocks */
        }
        /* If NumCol = 1 then a copy is not required */
        if (roworiented && n == 1) {
          barray = (MatScalar *)v + i * bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar *)v + j * bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + i * (stepval + bs) * bs + j * bs;
          } else {
            value = v + j * (stepval + bs) * bs + i * bs;
          }
          /* Gather the strided bs x bs block from v into contiguous barray */
          for (ii = 0; ii < bs; ii++, value += stepval) {
            for (jj = 0; jj < bs; jj++) *barray++ = *value++;
          }
          barray -= bs2; /* rewind to the start of the copied block */
        }

        if (in[j] >= cstart && in[j] < cend) {
          /* diagonal portion: insert into A with local block column */
          col = in[j] - cstart;
          PetscCall(MatSetValuesBlocked_SeqSBAIJ_Inlined(baij->A, row, col, barray, addv, im[i], in[j]));
        } else if (in[j] < 0) {
          continue; /* negative block column indices are silently skipped */
        } else {
          PetscCheck(in[j] < baij->Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed column too large %" PetscInt_FMT " max %" PetscInt_FMT, in[j], baij->Nbs - 1);
          if (mat->was_assembled) {
            /* Translate the global block column into B's compressed local
               numbering (colmap entries are stored shifted by +1) */
            if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));

#if defined(PETSC_USE_CTABLE)
            PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col));
            col = col < 1 ? -1 : (col - 1) / bs;
#else
            col = baij->colmap[in[j]] < 1 ? -1 : (baij->colmap[in[j]] - 1) / bs;
#endif
            /* NOTE(review): baij->A holds a Mat_SeqSBAIJ; this cast to Mat_SeqBAIJ
               assumes nonew sits at a compatible offset in both structs — confirm */
            if (col < 0 && !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
              /* Block column not present in B: disassemble so B uses global
                 column numbering and the new block can be inserted */
              PetscCall(MatDisAssemble_MPISBAIJ(mat));
              col = in[j];
            }
          } else col = in[j];
          PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B, row, col, barray, addv, im[i], in[j]));
        }
      }
    } else {
      /* Off-process block row: stash it for the owner to pick up at assembly */
      PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process block indexed row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          PetscCall(MatStashValuesRowBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        } else {
          PetscCall(MatStashValuesColBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
649 
/* Retrieve entries (idxm[i], idxn[j]) into v (row-major, v[i*n+j]); only rows owned
   by this process are supported.  Entries that fall in an off-diagonal block not
   stored in B are returned as 0.0. */
static PetscErrorCode MatGetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  PetscInt      bs = mat->rmap->bs, i, j, bsrstart = mat->rmap->rstart, bsrend = mat->rmap->rend;
  PetscInt      bscstart = mat->cmap->rstart, bscend = mat->cmap->rend, row, col, data;

  PetscFunctionBegin;
  for (i = 0; i < m; i++) {
    if (idxm[i] < 0) continue; /* negative row */
    PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
    PetscCheck(idxm[i] >= bsrstart && idxm[i] < bsrend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local values currently supported");
    row = idxm[i] - bsrstart;
    for (j = 0; j < n; j++) {
      if (idxn[j] < 0) continue; /* negative column */
      PetscCheck(idxn[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, idxn[j], mat->cmap->N - 1);
      if (idxn[j] >= bscstart && idxn[j] < bscend) {
        /* diagonal portion: read from A using local column numbering */
        col = idxn[j] - bscstart;
        PetscCall(MatGetValues_SeqSBAIJ(baij->A, 1, &row, 1, &col, v + i * n + j));
      } else {
        /* off-diagonal portion: map the global block column through colmap
           (entries stored shifted by +1; 0 means "not present") */
        if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
#if defined(PETSC_USE_CTABLE)
        PetscCall(PetscHMapIGetWithDefault(baij->colmap, idxn[j] / bs + 1, 0, &data));
        data--;
#else
        data = baij->colmap[idxn[j] / bs] - 1;
#endif
        /* Block absent from B (or stale colmap entry): value is implicitly zero */
        if (data < 0 || baij->garray[data / bs] != idxn[j] / bs) *(v + i * n + j) = 0.0;
        else {
          col = data + idxn[j] % bs;
          PetscCall(MatGetValues_SeqBAIJ(baij->B, 1, &row, 1, &col, v + i * n + j));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
686 
687 static PetscErrorCode MatNorm_MPISBAIJ(Mat mat, NormType type, PetscReal *norm)
688 {
689   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
690   PetscReal     sum[2], *lnorm2;
691 
692   PetscFunctionBegin;
693   if (baij->size == 1) {
694     PetscCall(MatNorm(baij->A, type, norm));
695   } else {
696     if (type == NORM_FROBENIUS) {
697       PetscCall(PetscMalloc1(2, &lnorm2));
698       PetscCall(MatNorm(baij->A, type, lnorm2));
699       *lnorm2 = (*lnorm2) * (*lnorm2);
700       lnorm2++; /* square power of norm(A) */
701       PetscCall(MatNorm(baij->B, type, lnorm2));
702       *lnorm2 = (*lnorm2) * (*lnorm2);
703       lnorm2--; /* square power of norm(B) */
704       PetscCallMPI(MPIU_Allreduce(lnorm2, sum, 2, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
705       *norm = PetscSqrtReal(sum[0] + 2 * sum[1]);
706       PetscCall(PetscFree(lnorm2));
707     } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */
708       Mat_SeqSBAIJ *amat = (Mat_SeqSBAIJ *)baij->A->data;
709       Mat_SeqBAIJ  *bmat = (Mat_SeqBAIJ *)baij->B->data;
710       PetscReal    *rsum, vabs;
711       PetscInt     *jj, *garray = baij->garray, rstart = baij->rstartbs, nz;
712       PetscInt      brow, bcol, col, bs = baij->A->rmap->bs, row, grow, gcol, mbs = amat->mbs;
713       MatScalar    *v;
714 
715       PetscCall(PetscCalloc1(mat->cmap->N, &rsum));
716       /* Amat */
717       v  = amat->a;
718       jj = amat->j;
719       for (brow = 0; brow < mbs; brow++) {
720         grow = bs * (rstart + brow);
721         nz   = amat->i[brow + 1] - amat->i[brow];
722         for (bcol = 0; bcol < nz; bcol++) {
723           gcol = bs * (rstart + *jj);
724           jj++;
725           for (col = 0; col < bs; col++) {
726             for (row = 0; row < bs; row++) {
727               vabs = PetscAbsScalar(*v);
728               v++;
729               rsum[gcol + col] += vabs;
730               /* non-diagonal block */
731               if (bcol > 0 && vabs > 0.0) rsum[grow + row] += vabs;
732             }
733           }
734         }
735         PetscCall(PetscLogFlops(nz * bs * bs));
736       }
737       /* Bmat */
738       v  = bmat->a;
739       jj = bmat->j;
740       for (brow = 0; brow < mbs; brow++) {
741         grow = bs * (rstart + brow);
742         nz   = bmat->i[brow + 1] - bmat->i[brow];
743         for (bcol = 0; bcol < nz; bcol++) {
744           gcol = bs * garray[*jj];
745           jj++;
746           for (col = 0; col < bs; col++) {
747             for (row = 0; row < bs; row++) {
748               vabs = PetscAbsScalar(*v);
749               v++;
750               rsum[gcol + col] += vabs;
751               rsum[grow + row] += vabs;
752             }
753           }
754         }
755         PetscCall(PetscLogFlops(nz * bs * bs));
756       }
757       PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, rsum, mat->cmap->N, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
758       *norm = 0.0;
759       for (col = 0; col < mat->cmap->N; col++) {
760         if (rsum[col] > *norm) *norm = rsum[col];
761       }
762       PetscCall(PetscFree(rsum));
763     } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for this norm yet");
764   }
765   PetscFunctionReturn(PETSC_SUCCESS);
766 }
767 
768 static PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat, MatAssemblyType mode)
769 {
770   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
771   PetscInt      nstash, reallocs;
772 
773   PetscFunctionBegin;
774   if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);
775 
776   PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
777   PetscCall(MatStashScatterBegin_Private(mat, &mat->bstash, baij->rangebs));
778   PetscCall(MatStashGetInfo_Private(&mat->stash, &nstash, &reallocs));
779   PetscCall(PetscInfo(mat, "Stash has %" PetscInt_FMT " entries,uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
780   PetscCall(MatStashGetInfo_Private(&mat->stash, &nstash, &reallocs));
781   PetscCall(PetscInfo(mat, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
782   PetscFunctionReturn(PETSC_SUCCESS);
783 }
784 
/*
  MatAssemblyEnd_MPISBAIJ - Completes assembly: drains both stashes into the
  local blocks, handles collective disassembly, and assembles A (diagonal
  block) and B (off-diagonal block).

  The point stash is drained with MatSetValues_MPISBAIJ(); the block stash is
  drained with MatSetValuesBlocked_MPISBAIJ() after temporarily switching the
  row-oriented flags to column order, because stash blocks are stored
  column-oriented.
*/
static PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat, MatAssemblyType mode)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  Mat_SeqSBAIJ *a    = (Mat_SeqSBAIJ *)baij->A->data;
  PetscInt      i, j, rstart, ncols, flg, bs2 = baij->bs2;
  PetscInt     *row, *col;
  PetscBool     all_assembled;
  PetscMPIInt   n;
  PetscBool     r1, r2, r3;
  MatScalar    *val;

  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!baij->donotstash && !mat->nooffprocentries) {
    /* drain the point stash: each received message is a run of (row,col,val) triples */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&mat->stash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i, mat->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&mat->stash));
    /* Now process the block-stash. Since the values are stashed column-oriented,
       set the row-oriented flag to column-oriented, and after MatSetValues()
       restore the original flags */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ *)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    ((Mat_SeqBAIJ *)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&mat->bstash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* each stashed entry here is a full bs2 = bs*bs block, hence the val stride */
        PetscCall(MatSetValuesBlocked_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i * bs2, mat->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&mat->bstash));

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ *)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  PetscCall(MatAssemblyBegin(baij->A, mode));
  PetscCall(MatAssemblyEnd(baij->A, mode));

  /* determine if any process has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no process disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ *)baij->B->data)->nonew) {
    /* collective: all ranks must agree on whether disassembly happened anywhere */
    PetscCallMPI(MPIU_Allreduce(&mat->was_assembled, &all_assembled, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
    if (mat->was_assembled && !all_assembled) PetscCall(MatDisAssemble_MPISBAIJ(mat));
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) PetscCall(MatSetUpMultiply_MPISBAIJ(mat)); /* setup Mvctx and sMvctx */
  PetscCall(MatAssemblyBegin(baij->B, mode));
  PetscCall(MatAssemblyEnd(baij->B, mode));

  /* discard the MatGetRow() work buffers; they are reallocated on demand */
  PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));

  baij->rowvalues = NULL;

  /* if no new nonzero locations are allowed in matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    PetscCallMPI(MPIU_Allreduce(&state, &mat->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)mat)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
879 
880 extern PetscErrorCode MatSetValues_MPIBAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
881 #include <petscdraw.h>
/*
  MatView_MPISBAIJ_ASCIIorDraworSocket - Views a parallel SBAIJ matrix.

  For ASCII info formats, prints per-rank statistics and returns.  Otherwise
  the whole matrix is gathered onto rank 0 as a temporary MATMPISBAIJ and
  viewed there through a sub-viewer; every rank participates because draw
  viewers synchronize collectively.
*/
static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat, PetscViewer viewer)
{
  Mat_MPISBAIJ     *baij = (Mat_MPISBAIJ *)mat->data;
  PetscInt          bs   = mat->rmap->bs;
  PetscMPIInt       rank = baij->rank;
  PetscBool         isascii, isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* per-rank statistics only; no matrix entries are printed */
      MatInfo info;
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
      PetscCall(MatGetInfo(mat, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local rows %" PetscInt_FMT " nz %" PetscInt_FMT " nz alloced %" PetscInt_FMT " bs %" PetscInt_FMT " mem %g\n", rank, mat->rmap->n, (PetscInt)info.nz_used, (PetscInt)info.nz_allocated,
                                                   mat->rmap->bs, info.memory));
      PetscCall(MatGetInfo(baij->A, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] on-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
      PetscCall(MatGetInfo(baij->B, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] off-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "Information on VecScatter used in matrix-vector product: \n"));
      PetscCall(VecScatterView(baij->Mvctx, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  block size is %" PetscInt_FMT "\n", bs));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawIsNull(draw, &isnull));
    if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
  }

  {
    /* assemble the entire matrix onto first processor. */
    Mat           A;
    Mat_SeqSBAIJ *Aloc;
    Mat_SeqBAIJ  *Bloc;
    PetscInt      M = mat->rmap->N, N = mat->cmap->N, *ai, *aj, col, i, j, k, *rvals, mbs = baij->mbs;
    MatScalar    *a;
    const char   *matname;

    /* Should this be the same type as mat? */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &A));
    if (rank == 0) {
      PetscCall(MatSetSizes(A, M, N, M, N));
    } else {
      PetscCall(MatSetSizes(A, 0, 0, M, N));
    }
    PetscCall(MatSetType(A, MATMPISBAIJ));
    PetscCall(MatMPISBAIJSetPreallocation(A, mat->rmap->bs, 0, NULL, 0, NULL));
    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));

    /* copy over the A part */
    Aloc = (Mat_SeqSBAIJ *)baij->A->data;
    ai   = Aloc->i;
    aj   = Aloc->j;
    a    = Aloc->a;
    PetscCall(PetscMalloc1(bs, &rvals));

    for (i = 0; i < mbs; i++) {
      /* rvals holds the bs consecutive global point-row indices of block row i */
      rvals[0] = bs * (baij->rstartbs + i);
      for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
      for (j = ai[i]; j < ai[i + 1]; j++) {
        col = (baij->cstartbs + aj[j]) * bs;
        for (k = 0; k < bs; k++) {
          /* a points at one column of the current bs x bs block (column-major storage) */
          PetscCall(MatSetValues_MPISBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
          col++;
          a += bs;
        }
      }
    }
    /* copy over the B part */
    Bloc = (Mat_SeqBAIJ *)baij->B->data;
    ai   = Bloc->i;
    aj   = Bloc->j;
    a    = Bloc->a;
    for (i = 0; i < mbs; i++) {
      rvals[0] = bs * (baij->rstartbs + i);
      for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
      for (j = ai[i]; j < ai[i + 1]; j++) {
        /* garray maps the compressed off-diagonal block column to its global block column */
        col = baij->garray[aj[j]] * bs;
        for (k = 0; k < bs; k++) {
          PetscCall(MatSetValues_MPIBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
          col++;
          a += bs;
        }
      }
    }
    PetscCall(PetscFree(rvals));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (((PetscObject)mat)->name) PetscCall(PetscObjectGetName((PetscObject)mat, &matname));
    if (rank == 0) {
      /* rank 0 owns all rows of A, so its diagonal block is the whole matrix */
      if (((PetscObject)mat)->name) PetscCall(PetscObjectSetName((PetscObject)((Mat_MPISBAIJ *)A->data)->A, matname));
      PetscCall(MatView_SeqSBAIJ(((Mat_MPISBAIJ *)A->data)->A, sviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    PetscCall(MatDestroy(&A));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1002 
1003 /* Used for both MPIBAIJ and MPISBAIJ matrices */
1004 #define MatView_MPISBAIJ_Binary MatView_MPIBAIJ_Binary
1005 
1006 static PetscErrorCode MatView_MPISBAIJ(Mat mat, PetscViewer viewer)
1007 {
1008   PetscBool isascii, isdraw, issocket, isbinary;
1009 
1010   PetscFunctionBegin;
1011   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1012   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
1013   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSOCKET, &issocket));
1014   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
1015   if (isascii || isdraw || issocket) PetscCall(MatView_MPISBAIJ_ASCIIorDraworSocket(mat, viewer));
1016   else if (isbinary) PetscCall(MatView_MPISBAIJ_Binary(mat, viewer));
1017   PetscFunctionReturn(PETSC_SUCCESS);
1018 }
1019 
#if defined(PETSC_USE_COMPLEX)
/*
  MatMult_MPISBAIJ_Hermitian - y = A*x when only the upper triangle is stored
  and the matrix is Hermitian: the off-diagonal block contributes through its
  conjugate transpose rather than the plain transpose.
*/
static PetscErrorCode MatMult_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy)
{
  Mat_MPISBAIJ      *sbaij = (Mat_MPISBAIJ *)A->data;
  PetscInt           nloc  = sbaij->mbs * A->rmap->bs;
  PetscScalar       *sarray;
  const PetscScalar *xarray;

  PetscFunctionBegin;
  /* diagonal block contribution */
  PetscCall((*sbaij->A->ops->mult)(sbaij->A, xx, sbaij->slvec1a));
  /* slvec1b aliases memory inside slvec1, so bump its state before zeroing */
  PetscCall(PetscObjectStateIncrease((PetscObject)sbaij->slvec1b));
  PetscCall(VecZeroEntries(sbaij->slvec1b));

  /* subdiagonal contribution: B^H * x */
  PetscCheck(sbaij->B->ops->multhermitiantranspose, PetscObjectComm((PetscObject)sbaij->B), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)sbaij->B)->type_name);
  PetscCall((*sbaij->B->ops->multhermitiantranspose)(sbaij->B, xx, sbaij->slvec0b));

  /* stage the locally owned part of x at the head of slvec0 */
  PetscCall(VecGetArray(sbaij->slvec0, &sarray));
  PetscCall(VecGetArrayRead(xx, &xarray));

  PetscCall(PetscArraycpy(sarray, xarray, nloc));
  PetscCall(VecRestoreArray(sbaij->slvec0, &sarray));
  PetscCall(VecRestoreArrayRead(xx, &xarray));

  PetscCall(VecScatterBegin(sbaij->sMvctx, sbaij->slvec0, sbaij->slvec1, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(sbaij->sMvctx, sbaij->slvec0, sbaij->slvec1, ADD_VALUES, SCATTER_FORWARD));
  /* superdiagonal contribution: y = B*slvec1b + slvec1a */
  PetscCall((*sbaij->B->ops->multadd)(sbaij->B, sbaij->slvec1b, sbaij->slvec1a, yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
1054 
1055 static PetscErrorCode MatMult_MPISBAIJ(Mat A, Vec xx, Vec yy)
1056 {
1057   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1058   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1059   PetscScalar       *from;
1060   const PetscScalar *x;
1061 
1062   PetscFunctionBegin;
1063   /* diagonal part */
1064   PetscCall((*a->A->ops->mult)(a->A, xx, a->slvec1a));
1065   /* since a->slvec1b shares memory (dangerously) with a->slec1 changes to a->slec1 will affect it */
1066   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1067   PetscCall(VecZeroEntries(a->slvec1b));
1068 
1069   /* subdiagonal part */
1070   PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->slvec0b));
1071 
1072   /* copy x into the vec slvec0 */
1073   PetscCall(VecGetArray(a->slvec0, &from));
1074   PetscCall(VecGetArrayRead(xx, &x));
1075 
1076   PetscCall(PetscArraycpy(from, x, bs * mbs));
1077   PetscCall(VecRestoreArray(a->slvec0, &from));
1078   PetscCall(VecRestoreArrayRead(xx, &x));
1079 
1080   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1081   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1082   /* supperdiagonal part */
1083   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, yy));
1084   PetscFunctionReturn(PETSC_SUCCESS);
1085 }
1086 
1087 #if PetscDefined(USE_COMPLEX)
1088 static PetscErrorCode MatMultAdd_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy, Vec zz)
1089 {
1090   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1091   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1092   PetscScalar       *from;
1093   const PetscScalar *x;
1094 
1095   PetscFunctionBegin;
1096   /* diagonal part */
1097   PetscCall((*a->A->ops->multadd)(a->A, xx, yy, a->slvec1a));
1098   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1099   PetscCall(VecZeroEntries(a->slvec1b));
1100 
1101   /* subdiagonal part */
1102   PetscCheck(a->B->ops->multhermitiantranspose, PetscObjectComm((PetscObject)a->B), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)a->B)->type_name);
1103   PetscCall((*a->B->ops->multhermitiantranspose)(a->B, xx, a->slvec0b));
1104 
1105   /* copy x into the vec slvec0 */
1106   PetscCall(VecGetArray(a->slvec0, &from));
1107   PetscCall(VecGetArrayRead(xx, &x));
1108   PetscCall(PetscArraycpy(from, x, bs * mbs));
1109   PetscCall(VecRestoreArray(a->slvec0, &from));
1110 
1111   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1112   PetscCall(VecRestoreArrayRead(xx, &x));
1113   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1114 
1115   /* supperdiagonal part */
1116   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, zz));
1117   PetscFunctionReturn(PETSC_SUCCESS);
1118 }
1119 #endif
1120 
1121 static PetscErrorCode MatMultAdd_MPISBAIJ(Mat A, Vec xx, Vec yy, Vec zz)
1122 {
1123   Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
1124   PetscInt           mbs = a->mbs, bs = A->rmap->bs;
1125   PetscScalar       *from;
1126   const PetscScalar *x;
1127 
1128   PetscFunctionBegin;
1129   /* diagonal part */
1130   PetscCall((*a->A->ops->multadd)(a->A, xx, yy, a->slvec1a));
1131   PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
1132   PetscCall(VecZeroEntries(a->slvec1b));
1133 
1134   /* subdiagonal part */
1135   PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->slvec0b));
1136 
1137   /* copy x into the vec slvec0 */
1138   PetscCall(VecGetArray(a->slvec0, &from));
1139   PetscCall(VecGetArrayRead(xx, &x));
1140   PetscCall(PetscArraycpy(from, x, bs * mbs));
1141   PetscCall(VecRestoreArray(a->slvec0, &from));
1142 
1143   PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1144   PetscCall(VecRestoreArrayRead(xx, &x));
1145   PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
1146 
1147   /* supperdiagonal part */
1148   PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, zz));
1149   PetscFunctionReturn(PETSC_SUCCESS);
1150 }
1151 
1152 /*
1153   This only works correctly for square matrices where the subblock A->A is the
1154    diagonal block
1155 */
1156 static PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A, Vec v)
1157 {
1158   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1159 
1160   PetscFunctionBegin;
1161   /* PetscCheck(a->rmap->N == a->cmap->N,PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */
1162   PetscCall(MatGetDiagonal(a->A, v));
1163   PetscFunctionReturn(PETSC_SUCCESS);
1164 }
1165 
1166 static PetscErrorCode MatScale_MPISBAIJ(Mat A, PetscScalar aa)
1167 {
1168   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1169 
1170   PetscFunctionBegin;
1171   PetscCall(MatScale(a->A, aa));
1172   PetscCall(MatScale(a->B, aa));
1173   PetscFunctionReturn(PETSC_SUCCESS);
1174 }
1175 
/*
  MatGetRow_MPISBAIJ - Returns one locally owned global row (values and/or
  global column indices), merging the pieces held in the diagonal block A and
  the off-diagonal block B into a single column-sorted row.

  Output arrays point into internal work buffers (mat->rowvalues/rowindices);
  the caller must pair this with MatRestoreRow_MPISBAIJ() before the next call.
*/
static PetscErrorCode MatGetRow_MPISBAIJ(Mat matin, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPISBAIJ *mat = (Mat_MPISBAIJ *)matin->data;
  PetscScalar  *vworkA, *vworkB, **pvA, **pvB, *v_p;
  PetscInt      bs = matin->rmap->bs, bs2 = mat->bs2, i, *cworkA, *cworkB, **pcA, **pcB;
  PetscInt      nztot, nzA, nzB, lrow, brstart = matin->rmap->rstart, brend = matin->rmap->rend;
  PetscInt     *cmap, *idx_p, cstart = mat->rstartbs;

  PetscFunctionBegin;
  PetscCheck(!mat->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqSBAIJ *Aa  = (Mat_SeqSBAIJ *)mat->A->data;
    Mat_SeqBAIJ  *Ba  = (Mat_SeqBAIJ *)mat->B->data;
    PetscInt      max = 1, mbs = mat->mbs, tmp;
    for (i = 0; i < mbs; i++) {
      tmp = Aa->i[i + 1] - Aa->i[i] + Ba->i[i + 1] - Ba->i[i]; /* row length */
      if (max < tmp) max = tmp;
    }
    /* max block-row length times bs2 bounds the point-row length */
    PetscCall(PetscMalloc2(max * bs2, &mat->rowvalues, max * bs2, &mat->rowindices));
  }

  PetscCheck(row >= brstart && row < brend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local rows");
  lrow = row - brstart; /* local row index */

  /* request values/indices from the blocks only if the caller wants them */
  pvA = &vworkA;
  pcA = &cworkA;
  pvB = &vworkB;
  pcB = &cworkB;
  if (!v) {
    pvA = NULL;
    pvB = NULL;
  }
  if (!idx) {
    pcA = NULL;
    if (!v) pcB = NULL;
  }
  PetscCall((*mat->A->ops->getrow)(mat->A, lrow, &nzA, pcA, pvA));
  PetscCall((*mat->B->ops->getrow)(mat->B, lrow, &nzB, pcB, pvB));
  nztot = nzA + nzB;

  cmap = mat->garray;
  if (v || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      /* imark = number of B entries whose global column lies left of the diagonal block */
      PetscInt imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        for (i = 0; i < nzB; i++) {
          if (cmap[cworkB[i] / bs] < cstart) v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i = 0; i < nzA; i++) v_p[imark + i] = vworkA[i];
        for (i = imark; i < nzB; i++) v_p[nzA + i] = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* imark already found while copying values; reuse it */
          for (i = 0; i < imark; i++) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
        } else {
          for (i = 0; i < nzB; i++) {
            if (cmap[cworkB[i] / bs] < cstart) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
            else break;
          }
          imark = i;
        }
        /* diagonal-block columns are local; shift them to global numbering */
        for (i = 0; i < nzA; i++) idx_p[imark + i] = cstart * bs + cworkA[i];
        for (i = imark; i < nzB; i++) idx_p[nzA + i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
      }
    } else {
      if (idx) *idx = NULL;
      if (v) *v = NULL;
    }
  }
  *nz = nztot;
  PetscCall((*mat->A->ops->restorerow)(mat->A, lrow, &nzA, pcA, pvA));
  PetscCall((*mat->B->ops->restorerow)(mat->B, lrow, &nzB, pcB, pvB));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1260 
1261 static PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
1262 {
1263   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
1264 
1265   PetscFunctionBegin;
1266   PetscCheck(baij->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "MatGetRow() must be called first");
1267   baij->getrowactive = PETSC_FALSE;
1268   PetscFunctionReturn(PETSC_SUCCESS);
1269 }
1270 
1271 static PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A)
1272 {
1273   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1274   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1275 
1276   PetscFunctionBegin;
1277   aA->getrow_utriangular = PETSC_TRUE;
1278   PetscFunctionReturn(PETSC_SUCCESS);
1279 }
1280 static PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A)
1281 {
1282   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1283   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1284 
1285   PetscFunctionBegin;
1286   aA->getrow_utriangular = PETSC_FALSE;
1287   PetscFunctionReturn(PETSC_SUCCESS);
1288 }
1289 
1290 static PetscErrorCode MatConjugate_MPISBAIJ(Mat mat)
1291 {
1292   PetscFunctionBegin;
1293   if (PetscDefined(USE_COMPLEX)) {
1294     Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)mat->data;
1295 
1296     PetscCall(MatConjugate(a->A));
1297     PetscCall(MatConjugate(a->B));
1298   }
1299   PetscFunctionReturn(PETSC_SUCCESS);
1300 }
1301 
1302 static PetscErrorCode MatRealPart_MPISBAIJ(Mat A)
1303 {
1304   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1305 
1306   PetscFunctionBegin;
1307   PetscCall(MatRealPart(a->A));
1308   PetscCall(MatRealPart(a->B));
1309   PetscFunctionReturn(PETSC_SUCCESS);
1310 }
1311 
1312 static PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A)
1313 {
1314   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1315 
1316   PetscFunctionBegin;
1317   PetscCall(MatImaginaryPart(a->A));
1318   PetscCall(MatImaginaryPart(a->B));
1319   PetscFunctionReturn(PETSC_SUCCESS);
1320 }
1321 
1322 /* Check if isrow is a subset of iscol_local, called by MatCreateSubMatrix_MPISBAIJ()
1323    Input: isrow       - distributed(parallel),
1324           iscol_local - locally owned (seq)
1325 */
1326 static PetscErrorCode ISEqual_private(IS isrow, IS iscol_local, PetscBool *flg)
1327 {
1328   PetscInt        sz1, sz2, *a1, *a2, i, j, k, nmatch;
1329   const PetscInt *ptr1, *ptr2;
1330 
1331   PetscFunctionBegin;
1332   *flg = PETSC_FALSE;
1333   PetscCall(ISGetLocalSize(isrow, &sz1));
1334   PetscCall(ISGetLocalSize(iscol_local, &sz2));
1335   if (sz1 > sz2) PetscFunctionReturn(PETSC_SUCCESS);
1336 
1337   PetscCall(ISGetIndices(isrow, &ptr1));
1338   PetscCall(ISGetIndices(iscol_local, &ptr2));
1339 
1340   PetscCall(PetscMalloc1(sz1, &a1));
1341   PetscCall(PetscMalloc1(sz2, &a2));
1342   PetscCall(PetscArraycpy(a1, ptr1, sz1));
1343   PetscCall(PetscArraycpy(a2, ptr2, sz2));
1344   PetscCall(PetscSortInt(sz1, a1));
1345   PetscCall(PetscSortInt(sz2, a2));
1346 
1347   nmatch = 0;
1348   k      = 0;
1349   for (i = 0; i < sz1; i++) {
1350     for (j = k; j < sz2; j++) {
1351       if (a1[i] == a2[j]) {
1352         k = j;
1353         nmatch++;
1354         break;
1355       }
1356     }
1357   }
1358   PetscCall(ISRestoreIndices(isrow, &ptr1));
1359   PetscCall(ISRestoreIndices(iscol_local, &ptr2));
1360   PetscCall(PetscFree(a1));
1361   PetscCall(PetscFree(a2));
1362   if (nmatch < sz1) {
1363     *flg = PETSC_FALSE;
1364   } else {
1365     *flg = PETSC_TRUE;
1366   }
1367   PetscFunctionReturn(PETSC_SUCCESS);
1368 }
1369 
/*
  MatCreateSubMatrix_MPISBAIJ - Extracts a parallel submatrix.

  Because only the upper triangle is stored, three cases are handled:
  - isrow is a subset of iscol (isequal): the symmetric extraction is safe as-is;
  - iscol covers all columns (isidentity): also safe as-is;
  - otherwise an off-diagonal submatrix is requested; it is built as
    C[0] + C[1]^T from two rectangular extractions, which requires isrow and
    iscol to be disjoint.
  The gathered index sets are cached on the result for MAT_REUSE_MATRIX.
*/
static PetscErrorCode MatCreateSubMatrix_MPISBAIJ(Mat mat, IS isrow, IS iscol, MatReuse call, Mat *newmat)
{
  Mat       C[2];
  IS        iscol_local, isrow_local;
  PetscInt  csize, csize_local, rsize;
  PetscBool isequal, issorted, isidentity = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(ISGetLocalSize(iscol, &csize));
  PetscCall(ISGetLocalSize(isrow, &rsize));
  if (call == MAT_REUSE_MATRIX) {
    /* reuse the gathered column IS cached on the previous result */
    PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather", (PetscObject *)&iscol_local));
    PetscCheck(iscol_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
  } else {
    PetscCall(ISAllGather(iscol, &iscol_local));
    PetscCall(ISSorted(iscol_local, &issorted));
    PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, iscol must be sorted");
  }
  PetscCall(ISEqual_private(isrow, iscol_local, &isequal));
  if (!isequal) {
    PetscCall(ISGetLocalSize(iscol_local, &csize_local));
    /* iscol spanning all columns also keeps the extraction symmetric-safe */
    isidentity = (PetscBool)(mat->cmap->N == csize_local);
    if (!isidentity) {
      if (call == MAT_REUSE_MATRIX) {
        PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather_other", (PetscObject *)&isrow_local));
        PetscCheck(isrow_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
      } else {
        PetscCall(ISAllGather(isrow, &isrow_local));
        PetscCall(ISSorted(isrow_local, &issorted));
        PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, isrow must be sorted");
      }
    }
  }
  /* now call MatCreateSubMatrix_MPIBAIJ() */
  PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, isrow, iscol_local, csize, isequal || isidentity ? call : MAT_INITIAL_MATRIX, isequal || isidentity ? newmat : C, (PetscBool)(isequal || isidentity)));
  if (!isequal && !isidentity) {
    if (call == MAT_INITIAL_MATRIX) {
      IS       intersect;
      PetscInt ni;

      PetscCall(ISIntersect(isrow_local, iscol_local, &intersect));
      PetscCall(ISGetLocalSize(intersect, &ni));
      PetscCall(ISDestroy(&intersect));
      PetscCheck(ni == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot create such a submatrix: for symmetric format, when requesting an off-diagonal submatrix, isrow and iscol should have an empty intersection (number of common indices is %" PetscInt_FMT ")", ni);
    }
    /* second extraction with the roles of rows/columns swapped, then transposed,
       supplies the entries hidden by the upper-triangular storage */
    PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, iscol, isrow_local, rsize, MAT_INITIAL_MATRIX, C + 1, PETSC_FALSE));
    PetscCall(MatTranspose(C[1], MAT_INPLACE_MATRIX, C + 1));
    PetscCall(MatAXPY(C[0], 1.0, C[1], DIFFERENT_NONZERO_PATTERN));
    /* NOTE(review): for call == MAT_INITIAL_MATRIX with bs > 1, MatCopy() is
       invoked on *newmat which does not appear to have been created on this
       path — verify this branch against upstream */
    if (call == MAT_REUSE_MATRIX) PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
    else if (mat->rmap->bs == 1) PetscCall(MatConvert(C[0], MATAIJ, MAT_INITIAL_MATRIX, newmat));
    else PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
    PetscCall(MatDestroy(C));
    PetscCall(MatDestroy(C + 1));
  }
  if (call == MAT_INITIAL_MATRIX) {
    /* cache the gathered index sets on the result so MAT_REUSE_MATRIX works */
    if (!isequal && !isidentity) {
      PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather_other", (PetscObject)isrow_local));
      PetscCall(ISDestroy(&isrow_local));
    }
    PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather", (PetscObject)iscol_local));
    PetscCall(ISDestroy(&iscol_local));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1434 
1435 static PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A)
1436 {
1437   Mat_MPISBAIJ *l = (Mat_MPISBAIJ *)A->data;
1438 
1439   PetscFunctionBegin;
1440   PetscCall(MatZeroEntries(l->A));
1441   PetscCall(MatZeroEntries(l->B));
1442   PetscFunctionReturn(PETSC_SUCCESS);
1443 }
1444 
1445 static PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin, MatInfoType flag, MatInfo *info)
1446 {
1447   Mat_MPISBAIJ  *a = (Mat_MPISBAIJ *)matin->data;
1448   Mat            A = a->A, B = a->B;
1449   PetscLogDouble isend[5], irecv[5];
1450 
1451   PetscFunctionBegin;
1452   info->block_size = (PetscReal)matin->rmap->bs;
1453 
1454   PetscCall(MatGetInfo(A, MAT_LOCAL, info));
1455 
1456   isend[0] = info->nz_used;
1457   isend[1] = info->nz_allocated;
1458   isend[2] = info->nz_unneeded;
1459   isend[3] = info->memory;
1460   isend[4] = info->mallocs;
1461 
1462   PetscCall(MatGetInfo(B, MAT_LOCAL, info));
1463 
1464   isend[0] += info->nz_used;
1465   isend[1] += info->nz_allocated;
1466   isend[2] += info->nz_unneeded;
1467   isend[3] += info->memory;
1468   isend[4] += info->mallocs;
1469   if (flag == MAT_LOCAL) {
1470     info->nz_used      = isend[0];
1471     info->nz_allocated = isend[1];
1472     info->nz_unneeded  = isend[2];
1473     info->memory       = isend[3];
1474     info->mallocs      = isend[4];
1475   } else if (flag == MAT_GLOBAL_MAX) {
1476     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)matin)));
1477 
1478     info->nz_used      = irecv[0];
1479     info->nz_allocated = irecv[1];
1480     info->nz_unneeded  = irecv[2];
1481     info->memory       = irecv[3];
1482     info->mallocs      = irecv[4];
1483   } else if (flag == MAT_GLOBAL_SUM) {
1484     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)matin)));
1485 
1486     info->nz_used      = irecv[0];
1487     info->nz_allocated = irecv[1];
1488     info->nz_unneeded  = irecv[2];
1489     info->memory       = irecv[3];
1490     info->mallocs      = irecv[4];
1491   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown MatInfoType argument %d", (int)flag);
1492   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1493   info->fill_ratio_needed = 0;
1494   info->factor_mallocs    = 0;
1495   PetscFunctionReturn(PETSC_SUCCESS);
1496 }
1497 
/* Dispatches matrix options to the sequential diagonal (a->A) and off-diagonal (a->B) blocks
   or records them in the parallel container, depending on the option. */
static PetscErrorCode MatSetOption_MPISBAIJ(Mat A, MatOption op, PetscBool flg)
{
  Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
  Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;

  PetscFunctionBegin;
  switch (op) {
  /* nonzero-pattern options apply to both sequential blocks */
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_UNUSED_NONZERO_LOCATION_ERR:
  case MAT_KEEP_NONZERO_PATTERN:
  case MAT_NEW_NONZERO_LOCATION_ERR:
    MatCheckPreallocated(A, 1);
    PetscCall(MatSetOption(a->A, op, flg));
    PetscCall(MatSetOption(a->B, op, flg));
    break;
  case MAT_ROW_ORIENTED:
    MatCheckPreallocated(A, 1);
    a->roworiented = flg;

    PetscCall(MatSetOption(a->A, op, flg));
    PetscCall(MatSetOption(a->B, op, flg));
    break;
  case MAT_IGNORE_OFF_PROC_ENTRIES:
    /* recorded locally; consulted during MatSetValues/assembly stashing */
    a->donotstash = flg;
    break;
  case MAT_USE_HASH_TABLE:
    a->ht_flag = flg;
    break;
  case MAT_HERMITIAN:
    if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
#if defined(PETSC_USE_COMPLEX)
    if (flg) { /* need different mat-vec ops */
      A->ops->mult             = MatMult_MPISBAIJ_Hermitian;
      A->ops->multadd          = MatMultAdd_MPISBAIJ_Hermitian;
      A->ops->multtranspose    = NULL;
      A->ops->multtransposeadd = NULL;
    }
#endif
    break;
  case MAT_SPD:
  case MAT_SYMMETRIC:
    if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
#if defined(PETSC_USE_COMPLEX)
    if (flg) { /* restore to use default mat-vec ops */
      A->ops->mult             = MatMult_MPISBAIJ;
      A->ops->multadd          = MatMultAdd_MPISBAIJ;
      A->ops->multtranspose    = MatMult_MPISBAIJ;
      A->ops->multtransposeadd = MatMultAdd_MPISBAIJ;
    }
#endif
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
    break;
  /* lower-triangular handling lives on the sequential diagonal block */
  case MAT_IGNORE_LOWER_TRIANGULAR:
  case MAT_ERROR_LOWER_TRIANGULAR:
    aA->ignore_ltriangular = flg;
    break;
  case MAT_GETROW_UPPERTRIANGULAR:
    aA->getrow_utriangular = flg;
    break;
  default:
    /* options not handled here are silently ignored, as is conventional for MatSetOption */
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1565 
1566 static PetscErrorCode MatTranspose_MPISBAIJ(Mat A, MatReuse reuse, Mat *B)
1567 {
1568   PetscFunctionBegin;
1569   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
1570   if (reuse == MAT_INITIAL_MATRIX) {
1571     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, B));
1572   } else if (reuse == MAT_REUSE_MATRIX) {
1573     PetscCall(MatCopy(A, *B, SAME_NONZERO_PATTERN));
1574   }
1575   PetscFunctionReturn(PETSC_SUCCESS);
1576 }
1577 
/* Symmetric diagonal scaling D*A*D; to preserve symmetry the left and right
   scaling vectors must be the same. The ghost-value scatter for the right
   scaling of the off-diagonal block is overlapped with the local scaling work. */
static PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat, Vec ll, Vec rr)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  Mat           a = baij->A, b = baij->B;
  PetscInt      nv, m, n;
  PetscBool     flg;

  PetscFunctionBegin;
  /* distinct Vec objects are still accepted if their entries are identical */
  if (ll != rr) {
    PetscCall(VecEqual(ll, rr, &flg));
    PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format, left and right scaling vectors must be same");
  }
  if (!ll) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(MatGetLocalSize(mat, &m, &n));
  PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "For symmetric format, local size %" PetscInt_FMT " %" PetscInt_FMT " must be same", m, n);

  PetscCall(VecGetLocalSize(rr, &nv));
  PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Left and right vector non-conforming local size");

  /* start fetching the ghost entries of rr needed to right-scale the off-diagonal block */
  PetscCall(VecScatterBegin(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));

  /* left diagonalscale the off-diagonal part */
  PetscUseTypeMethod(b, diagonalscale, ll, NULL);

  /* scale the diagonal part */
  PetscUseTypeMethod(a, diagonalscale, ll, rr);

  /* right diagonalscale the off-diagonal part */
  PetscCall(VecScatterEnd(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
  PetscUseTypeMethod(b, diagonalscale, NULL, baij->lvec);
  PetscFunctionReturn(PETSC_SUCCESS);
}
1611 
1612 static PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A)
1613 {
1614   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1615 
1616   PetscFunctionBegin;
1617   PetscCall(MatSetUnfactored(a->A));
1618   PetscFunctionReturn(PETSC_SUCCESS);
1619 }
1620 
1621 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat, MatDuplicateOption, Mat *);
1622 
1623 static PetscErrorCode MatEqual_MPISBAIJ(Mat A, Mat B, PetscBool *flag)
1624 {
1625   Mat_MPISBAIJ *matB = (Mat_MPISBAIJ *)B->data, *matA = (Mat_MPISBAIJ *)A->data;
1626   Mat           a, b, c, d;
1627   PetscBool     flg;
1628 
1629   PetscFunctionBegin;
1630   a = matA->A;
1631   b = matA->B;
1632   c = matB->A;
1633   d = matB->B;
1634 
1635   PetscCall(MatEqual(a, c, &flg));
1636   if (flg) PetscCall(MatEqual(b, d, &flg));
1637   PetscCallMPI(MPIU_Allreduce(&flg, flag, 1, MPI_C_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
1638   PetscFunctionReturn(PETSC_SUCCESS);
1639 }
1640 
1641 static PetscErrorCode MatCopy_MPISBAIJ(Mat A, Mat B, MatStructure str)
1642 {
1643   PetscBool isbaij;
1644 
1645   PetscFunctionBegin;
1646   PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isbaij, MATSEQSBAIJ, MATMPISBAIJ, ""));
1647   PetscCheck(isbaij, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)B)->type_name);
1648   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1649   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1650     PetscCall(MatGetRowUpperTriangular(A));
1651     PetscCall(MatCopy_Basic(A, B, str));
1652     PetscCall(MatRestoreRowUpperTriangular(A));
1653   } else {
1654     Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1655     Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
1656 
1657     PetscCall(MatCopy(a->A, b->A, str));
1658     PetscCall(MatCopy(a->B, b->B, str));
1659   }
1660   PetscCall(PetscObjectStateIncrease((PetscObject)B));
1661   PetscFunctionReturn(PETSC_SUCCESS);
1662 }
1663 
/* Y += a*X for MPISBAIJ matrices. Three paths:
   - SAME_NONZERO_PATTERN:   a raw BLAS axpy on the value arrays of both blocks;
   - SUBSET_NONZERO_PATTERN: generic row-based update (X's pattern fits inside Y's);
   - otherwise:              build a freshly preallocated matrix for the union pattern
                             and merge it back into Y. */
static PetscErrorCode MatAXPY_MPISBAIJ(Mat Y, PetscScalar a, Mat X, MatStructure str)
{
  Mat_MPISBAIJ *xx = (Mat_MPISBAIJ *)X->data, *yy = (Mat_MPISBAIJ *)Y->data;
  PetscBLASInt  bnz, one                          = 1;
  Mat_SeqSBAIJ *xa, *ya;
  Mat_SeqBAIJ  *xb, *yb;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    PetscScalar alpha = a;
    /* identical patterns: the value arrays line up entry-for-entry, so use BLAS directly */
    xa                = (Mat_SeqSBAIJ *)xx->A->data;
    ya                = (Mat_SeqSBAIJ *)yy->A->data;
    PetscCall(PetscBLASIntCast(xa->nz, &bnz));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xa->a, &one, ya->a, &one));
    xb = (Mat_SeqBAIJ *)xx->B->data;
    yb = (Mat_SeqBAIJ *)yy->B->data;
    PetscCall(PetscBLASIntCast(xb->nz, &bnz));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xb->a, &one, yb->a, &one));
    PetscCall(PetscObjectStateIncrease((PetscObject)Y));
  } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
    /* MatGetRow on X must return full rows while the basic AXPY traverses it */
    PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
    PetscCall(MatAXPY_Basic(Y, a, X, str));
    PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
  } else {
    Mat       B;
    PetscInt *nnz_d, *nnz_o, bs = Y->rmap->bs;
    PetscCheck(bs == X->rmap->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Matrices must have same block size");
    PetscCall(MatGetRowUpperTriangular(X));
    PetscCall(MatGetRowUpperTriangular(Y));
    /* compute per-row preallocation for the union of the two nonzero patterns */
    PetscCall(PetscMalloc1(yy->A->rmap->N, &nnz_d));
    PetscCall(PetscMalloc1(yy->B->rmap->N, &nnz_o));
    PetscCall(MatCreate(PetscObjectComm((PetscObject)Y), &B));
    PetscCall(PetscObjectSetName((PetscObject)B, ((PetscObject)Y)->name));
    PetscCall(MatSetSizes(B, Y->rmap->n, Y->cmap->n, Y->rmap->N, Y->cmap->N));
    PetscCall(MatSetBlockSizesFromMats(B, Y, Y));
    PetscCall(MatSetType(B, MATMPISBAIJ));
    PetscCall(MatAXPYGetPreallocation_SeqSBAIJ(yy->A, xx->A, nnz_d));
    PetscCall(MatAXPYGetPreallocation_MPIBAIJ(yy->B, yy->garray, xx->B, xx->garray, nnz_o));
    PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, nnz_d, 0, nnz_o));
    PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
    /* replace Y's internals with B's; B itself is consumed by the merge */
    PetscCall(MatHeaderMerge(Y, &B));
    PetscCall(PetscFree(nnz_d));
    PetscCall(PetscFree(nnz_o));
    PetscCall(MatRestoreRowUpperTriangular(X));
    PetscCall(MatRestoreRowUpperTriangular(Y));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1712 
1713 static PetscErrorCode MatCreateSubMatrices_MPISBAIJ(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *B[])
1714 {
1715   PetscInt  i;
1716   PetscBool flg;
1717 
1718   PetscFunctionBegin;
1719   PetscCall(MatCreateSubMatrices_MPIBAIJ(A, n, irow, icol, scall, B)); /* B[] are sbaij matrices */
1720   for (i = 0; i < n; i++) {
1721     PetscCall(ISEqual(irow[i], icol[i], &flg));
1722     if (!flg) PetscCall(MatSeqSBAIJZeroOps_Private(*B[i]));
1723   }
1724   PetscFunctionReturn(PETSC_SUCCESS);
1725 }
1726 
/* Y += a*I. Guarantees the diagonal block has room for at least one block per
   row before delegating to the generic shift. */
static PetscErrorCode MatShift_MPISBAIJ(Mat Y, PetscScalar a)
{
  Mat_MPISBAIJ *maij = (Mat_MPISBAIJ *)Y->data;
  Mat_SeqSBAIJ *aij  = (Mat_SeqSBAIJ *)maij->A->data;

  PetscFunctionBegin;
  if (!Y->preallocated) {
    /* never preallocated: reserve one diagonal block per row, nothing off-process */
    PetscCall(MatMPISBAIJSetPreallocation(Y, Y->rmap->bs, 1, NULL, 0, NULL));
  } else if (!aij->nz) {
    /* preallocated but empty diagonal block: re-preallocate it, preserving the
       new-nonzero policy that MatSeqSBAIJSetPreallocation would otherwise reset */
    PetscInt nonew = aij->nonew;
    PetscCall(MatSeqSBAIJSetPreallocation(maij->A, Y->rmap->bs, 1, NULL));
    aij->nonew = nonew;
  }
  PetscCall(MatShift_Basic(Y, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1743 
1744 static PetscErrorCode MatGetDiagonalBlock_MPISBAIJ(Mat A, Mat *a)
1745 {
1746   PetscFunctionBegin;
1747   *a = ((Mat_MPISBAIJ *)A->data)->A;
1748   PetscFunctionReturn(PETSC_SUCCESS);
1749 }
1750 
/* Remove explicitly stored zero entries from both sequential blocks. */
static PetscErrorCode MatEliminateZeros_MPISBAIJ(Mat A, PetscBool keep)
{
  Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(MatEliminateZeros_SeqSBAIJ(a->A, keep));       // possibly keep zero diagonal coefficients
  PetscCall(MatEliminateZeros_SeqBAIJ(a->B, PETSC_FALSE)); // never keep zero diagonal coefficients
  PetscFunctionReturn(PETSC_SUCCESS);
}
1760 
1761 static PetscErrorCode MatLoad_MPISBAIJ(Mat, PetscViewer);
1762 static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat, Vec, PetscInt[]);
1763 static PetscErrorCode MatSOR_MPISBAIJ(Mat, Vec, PetscReal, MatSORType, PetscReal, PetscInt, PetscInt, Vec);
1764 
/* Virtual function table for MATMPISBAIJ. Entries are positional: the index in the
   comments corresponds to the MatOperation slot in struct _MatOps. NULL slots fall
   back to generic Mat implementations or are unsupported for this type. Note the
   transpose variants reuse the non-transpose routines since the matrix is symmetric. */
static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ,
                                       MatGetRow_MPISBAIJ,
                                       MatRestoreRow_MPISBAIJ,
                                       MatMult_MPISBAIJ,
                                       /*  4*/ MatMultAdd_MPISBAIJ,
                                       MatMult_MPISBAIJ, /* transpose versions are same as non-transpose */
                                       MatMultAdd_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 10*/ NULL,
                                       NULL,
                                       NULL,
                                       MatSOR_MPISBAIJ,
                                       MatTranspose_MPISBAIJ,
                                       /* 15*/ MatGetInfo_MPISBAIJ,
                                       MatEqual_MPISBAIJ,
                                       MatGetDiagonal_MPISBAIJ,
                                       MatDiagonalScale_MPISBAIJ,
                                       MatNorm_MPISBAIJ,
                                       /* 20*/ MatAssemblyBegin_MPISBAIJ,
                                       MatAssemblyEnd_MPISBAIJ,
                                       MatSetOption_MPISBAIJ,
                                       MatZeroEntries_MPISBAIJ,
                                       /* 24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 29*/ MatSetUp_MPI_Hash,
                                       NULL,
                                       NULL,
                                       MatGetDiagonalBlock_MPISBAIJ,
                                       NULL,
                                       /* 34*/ MatDuplicate_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 39*/ MatAXPY_MPISBAIJ,
                                       MatCreateSubMatrices_MPISBAIJ,
                                       MatIncreaseOverlap_MPISBAIJ,
                                       MatGetValues_MPISBAIJ,
                                       MatCopy_MPISBAIJ,
                                       /* 44*/ NULL,
                                       MatScale_MPISBAIJ,
                                       MatShift_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       /* 49*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 54*/ NULL,
                                       NULL,
                                       MatSetUnfactored_MPISBAIJ,
                                       NULL,
                                       MatSetValuesBlocked_MPISBAIJ,
                                       /* 59*/ MatCreateSubMatrix_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatGetRowMaxAbs_MPISBAIJ,
                                       /* 69*/ NULL,
                                       MatConvert_MPISBAIJ_Basic,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatLoad_MPISBAIJ,
                                       /* 79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatConjugate_MPISBAIJ,
                                       /* 94*/ NULL,
                                       NULL,
                                       MatRealPart_MPISBAIJ,
                                       MatImaginaryPart_MPISBAIJ,
                                       MatGetRowUpperTriangular_MPISBAIJ,
                                       /* 99*/ MatRestoreRowUpperTriangular_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*124*/ NULL,
                                       NULL,
                                       MatSetBlockSizes_Default,
                                       NULL,
                                       NULL,
                                       /*129*/ NULL,
                                       MatCreateMPIMatConcatenateSeqMat_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*134*/ NULL,
                                       NULL,
                                       MatEliminateZeros_MPISBAIJ,
                                       NULL,
                                       NULL,
                                       /*139*/ NULL,
                                       NULL,
                                       MatCopyHashToXAIJ_MPI_Hash,
                                       NULL,
                                       NULL};
1909 
1910 static PetscErrorCode MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz)
1911 {
1912   Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
1913   PetscInt      i, mbs, Mbs;
1914   PetscMPIInt   size;
1915 
1916   PetscFunctionBegin;
1917   if (B->hash_active) {
1918     B->ops[0]      = b->cops;
1919     B->hash_active = PETSC_FALSE;
1920   }
1921   if (!B->preallocated) PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), bs, &B->bstash));
1922   PetscCall(MatSetBlockSize(B, bs));
1923   PetscCall(PetscLayoutSetUp(B->rmap));
1924   PetscCall(PetscLayoutSetUp(B->cmap));
1925   PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
1926   PetscCheck(B->rmap->N <= B->cmap->N, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->N, B->cmap->N);
1927   PetscCheck(B->rmap->n <= B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more local rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->n, B->cmap->n);
1928 
1929   mbs = B->rmap->n / bs;
1930   Mbs = B->rmap->N / bs;
1931   PetscCheck(mbs * bs == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "No of local rows %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, B->rmap->N, bs);
1932 
1933   B->rmap->bs = bs;
1934   b->bs2      = bs * bs;
1935   b->mbs      = mbs;
1936   b->Mbs      = Mbs;
1937   b->nbs      = B->cmap->n / bs;
1938   b->Nbs      = B->cmap->N / bs;
1939 
1940   for (i = 0; i <= b->size; i++) b->rangebs[i] = B->rmap->range[i] / bs;
1941   b->rstartbs = B->rmap->rstart / bs;
1942   b->rendbs   = B->rmap->rend / bs;
1943 
1944   b->cstartbs = B->cmap->rstart / bs;
1945   b->cendbs   = B->cmap->rend / bs;
1946 
1947 #if defined(PETSC_USE_CTABLE)
1948   PetscCall(PetscHMapIDestroy(&b->colmap));
1949 #else
1950   PetscCall(PetscFree(b->colmap));
1951 #endif
1952   PetscCall(PetscFree(b->garray));
1953   PetscCall(VecDestroy(&b->lvec));
1954   PetscCall(VecScatterDestroy(&b->Mvctx));
1955   PetscCall(VecDestroy(&b->slvec0));
1956   PetscCall(VecDestroy(&b->slvec0b));
1957   PetscCall(VecDestroy(&b->slvec1));
1958   PetscCall(VecDestroy(&b->slvec1a));
1959   PetscCall(VecDestroy(&b->slvec1b));
1960   PetscCall(VecScatterDestroy(&b->sMvctx));
1961 
1962   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &size));
1963 
1964   MatSeqXAIJGetOptions_Private(b->B);
1965   PetscCall(MatDestroy(&b->B));
1966   PetscCall(MatCreate(PETSC_COMM_SELF, &b->B));
1967   PetscCall(MatSetSizes(b->B, B->rmap->n, size > 1 ? B->cmap->N : 0, B->rmap->n, size > 1 ? B->cmap->N : 0));
1968   PetscCall(MatSetType(b->B, MATSEQBAIJ));
1969   MatSeqXAIJRestoreOptions_Private(b->B);
1970 
1971   MatSeqXAIJGetOptions_Private(b->A);
1972   PetscCall(MatDestroy(&b->A));
1973   PetscCall(MatCreate(PETSC_COMM_SELF, &b->A));
1974   PetscCall(MatSetSizes(b->A, B->rmap->n, B->cmap->n, B->rmap->n, B->cmap->n));
1975   PetscCall(MatSetType(b->A, MATSEQSBAIJ));
1976   MatSeqXAIJRestoreOptions_Private(b->A);
1977 
1978   PetscCall(MatSeqSBAIJSetPreallocation(b->A, bs, d_nz, d_nnz));
1979   PetscCall(MatSeqBAIJSetPreallocation(b->B, bs, o_nz, o_nnz));
1980 
1981   B->preallocated  = PETSC_TRUE;
1982   B->was_assembled = PETSC_FALSE;
1983   B->assembled     = PETSC_FALSE;
1984   PetscFunctionReturn(PETSC_SUCCESS);
1985 }
1986 
/* Preallocates and fills the matrix from local block-CSR arrays (ii, jj, V).
   ii/jj are in block units; only entries on or above the block diagonal are
   counted (lower-triangular entries are skipped, and later ignored during
   insertion via MAT_IGNORE_LOWER_TRIANGULAR). If V is NULL the matrix is
   filled with zeros. */
static PetscErrorCode MatMPISBAIJSetPreallocationCSR_MPISBAIJ(Mat B, PetscInt bs, const PetscInt ii[], const PetscInt jj[], const PetscScalar V[])
{
  PetscInt        m, rstart, cend;
  PetscInt        i, j, d, nz, bd, nz_max = 0, *d_nnz = NULL, *o_nnz = NULL;
  const PetscInt *JJ          = NULL;
  PetscScalar    *values      = NULL;
  PetscBool       roworiented = ((Mat_MPISBAIJ *)B->data)->roworiented;
  PetscBool       nooffprocentries;

  PetscFunctionBegin;
  PetscCheck(bs >= 1, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_OUTOFRANGE, "Invalid block size specified, must be positive but it is %" PetscInt_FMT, bs);
  PetscCall(PetscLayoutSetBlockSize(B->rmap, bs));
  PetscCall(PetscLayoutSetBlockSize(B->cmap, bs));
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
  /* local sizes and ownership bounds in block units */
  m      = B->rmap->n / bs;
  rstart = B->rmap->rstart / bs;
  cend   = B->cmap->rend / bs;

  PetscCheck(!ii[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ii[0] must be 0 but it is %" PetscInt_FMT, ii[0]);
  PetscCall(PetscMalloc2(m, &d_nnz, m, &o_nnz));
  for (i = 0; i < m; i++) {
    nz = ii[i + 1] - ii[i];
    PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz);
    /* count the ones on the diagonal and above, split into diagonal and off-diagonal portions. */
    JJ = jj + ii[i];
    bd = 0;
    /* bd = number of entries strictly below the block diagonal (assumes sorted columns; these are dropped) */
    for (j = 0; j < nz; j++) {
      if (*JJ >= i + rstart) break;
      JJ++;
      bd++;
    }
    d = 0;
    /* d = number of remaining entries inside this process's diagonal column range */
    for (; j < nz; j++) {
      if (*JJ++ >= cend) break;
      d++;
    }
    d_nnz[i] = d;
    o_nnz[i] = nz - d - bd;
    nz       = nz - bd;
    nz_max   = PetscMax(nz_max, nz); /* widest retained row, used to size the zero buffer below */
  }
  PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, d_nnz, 0, o_nnz));
  PetscCall(MatSetOption(B, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE));
  PetscCall(PetscFree2(d_nnz, o_nnz));

  values = (PetscScalar *)V;
  if (!values) PetscCall(PetscCalloc1(bs * bs * nz_max, &values));
  for (i = 0; i < m; i++) {
    PetscInt        row   = i + rstart;
    PetscInt        ncols = ii[i + 1] - ii[i];
    const PetscInt *icols = jj + ii[i];
    if (bs == 1 || !roworiented) { /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
      const PetscScalar *svals = values + (V ? (bs * bs * ii[i]) : 0);
      PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, ncols, icols, svals, INSERT_VALUES));
    } else { /* block ordering does not match so we can only insert one block at a time. */
      PetscInt j;
      for (j = 0; j < ncols; j++) {
        const PetscScalar *svals = values + (V ? (bs * bs * (ii[i] + j)) : 0);
        PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, 1, &icols[j], svals, INSERT_VALUES));
      }
    }
  }

  if (!V) PetscCall(PetscFree(values));
  /* all entries are locally owned here, so skip the off-process stash during assembly */
  nooffprocentries    = B->nooffprocentries;
  B->nooffprocentries = PETSC_TRUE;
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
  B->nooffprocentries = nooffprocentries;

  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2062 
2063 /*MC
2064    MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices,
2065    based on block compressed sparse row format.  Only the upper triangular portion of the "diagonal" portion of
2066    the matrix is stored.
2067 
2068    For complex numbers by default this matrix is symmetric, NOT Hermitian symmetric. To make it Hermitian symmetric you
2069    can call `MatSetOption`(`Mat`, `MAT_HERMITIAN`);
2070 
2071    Options Database Key:
2072 . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to `MatSetFromOptions()`
2073 
2074    Level: beginner
2075 
2076    Note:
     The number of rows in the matrix must be less than or equal to the number of columns. Similarly, the number of rows in the
     diagonal portion of the matrix of each process has to be less than or equal to the number of columns.
2079 
2080 .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MATBAIJ`, `MatCreateBAIJ()`, `MATSEQSBAIJ`, `MatType`
2081 M*/
2082 
/* Constructor for MATMPISBAIJ: installs the operations table, initializes all
   fields of the Mat_MPISBAIJ context, registers the composed methods and
   conversions, and marks the matrix symmetric. Storage is allocated later by
   MatMPISBAIJSetPreallocation(). */
PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B)
{
  Mat_MPISBAIJ *b;
  PetscBool     flg = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscNew(&b));
  B->data   = (void *)b;
  B->ops[0] = MatOps_Values;

  B->ops->destroy = MatDestroy_MPISBAIJ;
  B->ops->view    = MatView_MPISBAIJ;
  B->assembled    = PETSC_FALSE;
  B->insertmode   = NOT_SET_VALUES;

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)B), &b->rank));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &b->size));

  /* build local table of row and column ownerships */
  PetscCall(PetscMalloc1(b->size + 2, &b->rangebs));

  /* build cache for off array entries formed */
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

  b->donotstash  = PETSC_FALSE;
  b->colmap      = NULL;
  b->garray      = NULL;
  b->roworiented = PETSC_TRUE;

  /* stuff used in block assembly */
  b->barray = NULL;

  /* stuff used for matrix vector multiply */
  b->lvec    = NULL;
  b->Mvctx   = NULL;
  b->slvec0  = NULL;
  b->slvec0b = NULL;
  b->slvec1  = NULL;
  b->slvec1a = NULL;
  b->slvec1b = NULL;
  b->sMvctx  = NULL;

  /* stuff for MatGetRow() */
  b->rowindices   = NULL;
  b->rowvalues    = NULL;
  b->getrowactive = PETSC_FALSE;

  /* hash table stuff */
  b->ht           = NULL;
  b->hd           = NULL;
  b->ht_size      = 0;
  b->ht_flag      = PETSC_FALSE;
  b->ht_fact      = 0;
  b->ht_total_ct  = 0;
  b->ht_insert_ct = 0;

  /* stuff for MatCreateSubMatrices_MPIBAIJ_local() */
  b->ijonly = PETSC_FALSE;

  b->in_loc = NULL;
  b->v_loc  = NULL;
  b->n_loc  = 0;

  /* composed methods reachable by name via PetscObjectQueryFunction() */
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatStoreValues_C", MatStoreValues_MPISBAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatRetrieveValues_C", MatRetrieveValues_MPISBAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocation_C", MatMPISBAIJSetPreallocation_MPISBAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocationCSR_C", MatMPISBAIJSetPreallocationCSR_MPISBAIJ));
#if defined(PETSC_HAVE_ELEMENTAL)
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_elemental_C", MatConvert_MPISBAIJ_Elemental));
#endif
#if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_scalapack_C", MatConvert_SBAIJ_ScaLAPACK));
#endif
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpiaij_C", MatConvert_MPISBAIJ_Basic));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpibaij_C", MatConvert_MPISBAIJ_Basic));

  /* the SBAIJ format is symmetric by construction */
  B->symmetric                   = PETSC_BOOL3_TRUE;
  B->structurally_symmetric      = PETSC_BOOL3_TRUE;
  B->symmetry_eternal            = PETSC_TRUE;
  B->structural_symmetry_eternal = PETSC_TRUE;
#if !defined(PETSC_USE_COMPLEX)
  /* for real scalars symmetric implies Hermitian */
  B->hermitian = PETSC_BOOL3_TRUE;
#endif

  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPISBAIJ));
  PetscOptionsBegin(PetscObjectComm((PetscObject)B), NULL, "Options for loading MPISBAIJ matrix 1", "Mat");
  PetscCall(PetscOptionsBool("-mat_use_hash_table", "Use hash table to save memory in constructing matrix", "MatSetOption", flg, &flg, NULL));
  if (flg) {
    PetscReal fact = 1.39;
    PetscCall(MatSetOption(B, MAT_USE_HASH_TABLE, PETSC_TRUE));
    PetscCall(PetscOptionsReal("-mat_use_hash_table", "Use hash table factor", "MatMPIBAIJSetHashTableFactor", fact, &fact, NULL));
    if (fact <= 1.0) fact = 1.39; /* fall back to the default factor for out-of-range input */
    PetscCall(MatMPIBAIJSetHashTableFactor(B, fact));
    PetscCall(PetscInfo(B, "Hash table Factor used %5.2g\n", (double)fact));
  }
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
2181 
2182 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2183 /*MC
2184    MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices.
2185 
2186    This matrix type is identical to `MATSEQSBAIJ` when constructed with a single process communicator,
2187    and `MATMPISBAIJ` otherwise.
2188 
2189    Options Database Key:
2190 . -mat_type sbaij - sets the matrix type to `MATSBAIJ` during a call to `MatSetFromOptions()`
2191 
2192   Level: beginner
2193 
2194 .seealso: [](ch_matrices), `Mat`, `MATSEQSBAIJ`, `MATMPISBAIJ`, `MatCreateSBAIJ()`, `MATSEQSBAIJ`, `MATMPISBAIJ`
2195 M*/
2196 
2197 /*@
2198   MatMPISBAIJSetPreallocation - For good matrix assembly performance
2199   the user should preallocate the matrix storage by setting the parameters
2200   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2201   performance can be increased by more than a factor of 50.
2202 
2203   Collective
2204 
2205   Input Parameters:
2206 + B     - the matrix
2207 . bs    - size of block, the blocks are ALWAYS square. One can use MatSetBlockSizes() to set a different row and column blocksize but the row
2208           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with MatCreateVecs()
2209 . d_nz  - number of block nonzeros per block row in diagonal portion of local
2210           submatrix  (same for all local rows)
. d_nnz - array containing the number of block nonzeros in the various block rows
          in the upper triangular and diagonal part of the diagonal portion of the local submatrix
          (possibly different for each block row) or `NULL`.  If you plan to factor the matrix you must leave room
          for the diagonal entry and set a value even if it is zero.
2215 . o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2216           submatrix (same for all local rows).
2217 - o_nnz - array containing the number of nonzeros in the various block rows of the
2218           off-diagonal portion of the local submatrix that is right of the diagonal
2219           (possibly different for each block row) or `NULL`.
2220 
2221   Options Database Keys:
2222 + -mat_no_unroll  - uses code that does not unroll the loops in the
2223                     block calculations (much slower)
2224 - -mat_block_size - size of the blocks to use
2225 
2226   Level: intermediate
2227 
2228   Notes:
2229 
  If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
  then it must be used on all processors that share the object for that argument.
2232 
2233   If the *_nnz parameter is given then the *_nz parameter is ignored
2234 
2235   Storage Information:
2236   For a square global matrix we define each processor's diagonal portion
2237   to be its local rows and the corresponding columns (a square submatrix);
2238   each processor's off-diagonal portion encompasses the remainder of the
2239   local matrix (a rectangular submatrix).
2240 
2241   The user can specify preallocated storage for the diagonal part of
2242   the local submatrix with either `d_nz` or `d_nnz` (not both).  Set
2243   `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2244   memory allocation.  Likewise, specify preallocated storage for the
2245   off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2246 
2247   You can call `MatGetInfo()` to get information on how effective the preallocation was;
2248   for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
2249   You can also run with the option `-info` and look for messages with the string
2250   malloc in them to see if additional memory allocation was needed.
2251 
2252   Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2253   the figure below we depict these three local rows and all columns (0-11).
2254 
2255 .vb
2256            0 1 2 3 4 5 6 7 8 9 10 11
2257           --------------------------
2258    row 3  |. . . d d d o o o o  o  o
2259    row 4  |. . . d d d o o o o  o  o
2260    row 5  |. . . d d d o o o o  o  o
2261           --------------------------
2262 .ve
2263 
2264   Thus, any entries in the d locations are stored in the d (diagonal)
2265   submatrix, and any entries in the o locations are stored in the
2266   o (off-diagonal) submatrix.  Note that the d matrix is stored in
2267   `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.
2268 
2269   Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2270   plus the diagonal part of the d matrix,
2271   and `o_nz` should indicate the number of block nonzeros per row in the o matrix
2272 
2273   In general, for PDE problems in which most nonzeros are near the diagonal,
2274   one expects `d_nz` >> `o_nz`.
2275 
2276 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`, `PetscSplitOwnership()`
2277 @*/
PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
{
  PetscFunctionBegin;
  /* Validate the matrix header/type and check that bs is the same on all ranks
     before dispatching, so a collective mismatch is reported cleanly. */
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidType(B, 1);
  PetscValidLogicalCollectiveInt(B, bs, 2);
  /* Dispatch to the type-specific implementation; PetscTryMethod() is a no-op
     when B is not a type that composes "MatMPISBAIJSetPreallocation_C". */
  PetscTryMethod(B, "MatMPISBAIJSetPreallocation_C", (Mat, PetscInt, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, bs, d_nz, d_nnz, o_nz, o_nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2287 
2288 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2289 /*@
2290   MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format, `MATSBAIJ`,
2291   (block compressed row).  For good matrix assembly performance
2292   the user should preallocate the matrix storage by setting the parameters
2293   `d_nz` (or `d_nnz`) and `o_nz` (or `o_nnz`).
2294 
2295   Collective
2296 
2297   Input Parameters:
2298 + comm  - MPI communicator
2299 . bs    - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row
2300           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()`
2301 . m     - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given)
2302           This value should be the same as the local size used in creating the
2303           y vector for the matrix-vector product y = Ax.
2304 . n     - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given)
2305           This value should be the same as the local size used in creating the
2306           x vector for the matrix-vector product y = Ax.
2307 . M     - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
2308 . N     - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
2309 . d_nz  - number of block nonzeros per block row in diagonal portion of local
2310           submatrix (same for all local rows)
. d_nnz - array containing the number of block nonzeros in the various block rows
          in the upper triangular portion of the diagonal portion of the local submatrix
          (possibly different for each block row) or `NULL`.
          If you plan to factor the matrix you must leave room for the diagonal entry and
          set its value even if it is zero.
2316 . o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2317           submatrix (same for all local rows).
2318 - o_nnz - array containing the number of nonzeros in the various block rows of the
2319           off-diagonal portion of the local submatrix (possibly different for
2320           each block row) or `NULL`.
2321 
2322   Output Parameter:
2323 . A - the matrix
2324 
2325   Options Database Keys:
2326 + -mat_no_unroll  - uses code that does not unroll the loops in the
2327                     block calculations (much slower)
2328 . -mat_block_size - size of the blocks to use
2329 - -mat_mpi        - use the parallel matrix data structures even on one processor
2330                     (defaults to using SeqBAIJ format on one processor)
2331 
2332   Level: intermediate
2333 
2334   Notes:
2335   It is recommended that one use `MatCreateFromOptions()` or the `MatCreate()`, `MatSetType()` and/or `MatSetFromOptions()`,
2336   MatXXXXSetPreallocation() paradigm instead of this routine directly.
2337   [MatXXXXSetPreallocation() is, for example, `MatSeqAIJSetPreallocation()`]
2338 
2339   The number of rows and columns must be divisible by blocksize.
2340   This matrix type does not support complex Hermitian operation.
2341 
2342   The user MUST specify either the local or global matrix dimensions
2343   (possibly both).
2344 
  If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
  then it must be used on all processors that share the object for that argument.
2347 
2348   If `m` and `n` are not `PETSC_DECIDE`, then the values determines the `PetscLayout` of the matrix and the ranges returned by
2349   `MatGetOwnershipRange()`,  `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`.
2350 
2351   If the *_nnz parameter is given then the *_nz parameter is ignored
2352 
2353   Storage Information:
2354   For a square global matrix we define each processor's diagonal portion
2355   to be its local rows and the corresponding columns (a square submatrix);
2356   each processor's off-diagonal portion encompasses the remainder of the
2357   local matrix (a rectangular submatrix).
2358 
2359   The user can specify preallocated storage for the diagonal part of
2360   the local submatrix with either `d_nz` or `d_nnz` (not both). Set
2361   `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2362   memory allocation. Likewise, specify preallocated storage for the
2363   off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2364 
2365   Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2366   the figure below we depict these three local rows and all columns (0-11).
2367 
2368 .vb
2369            0 1 2 3 4 5 6 7 8 9 10 11
2370           --------------------------
2371    row 3  |. . . d d d o o o o  o  o
2372    row 4  |. . . d d d o o o o  o  o
2373    row 5  |. . . d d d o o o o  o  o
2374           --------------------------
2375 .ve
2376 
2377   Thus, any entries in the d locations are stored in the d (diagonal)
2378   submatrix, and any entries in the o locations are stored in the
2379   o (off-diagonal) submatrix. Note that the d matrix is stored in
2380   `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.
2381 
2382   Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2383   plus the diagonal part of the d matrix,
2384   and `o_nz` should indicate the number of block nonzeros per row in the o matrix.
2385   In general, for PDE problems in which most nonzeros are near the diagonal,
2386   one expects `d_nz` >> `o_nz`.
2387 
2388 .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`,
2389           `MatGetOwnershipRange()`,  `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
2390 @*/
2391 PetscErrorCode MatCreateSBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A)
2392 {
2393   PetscMPIInt size;
2394 
2395   PetscFunctionBegin;
2396   PetscCall(MatCreate(comm, A));
2397   PetscCall(MatSetSizes(*A, m, n, M, N));
2398   PetscCallMPI(MPI_Comm_size(comm, &size));
2399   if (size > 1) {
2400     PetscCall(MatSetType(*A, MATMPISBAIJ));
2401     PetscCall(MatMPISBAIJSetPreallocation(*A, bs, d_nz, d_nnz, o_nz, o_nnz));
2402   } else {
2403     PetscCall(MatSetType(*A, MATSEQSBAIJ));
2404     PetscCall(MatSeqSBAIJSetPreallocation(*A, bs, d_nz, d_nnz));
2405   }
2406   PetscFunctionReturn(PETSC_SUCCESS);
2407 }
2408 
/* Duplicate an MPISBAIJ matrix: copies the layout, block-size bookkeeping,
   colmap/garray off-process column data, and the slvec0/slvec1 work vectors
   (whose a/b halves are sub-views into the same array), then duplicates the
   diagonal (A) and off-diagonal (B) sequential blocks with cpvalues. */
static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin, MatDuplicateOption cpvalues, Mat *newmat)
{
  Mat           mat;
  Mat_MPISBAIJ *a, *oldmat = (Mat_MPISBAIJ *)matin->data;
  PetscInt      len = 0, nt, bs = matin->rmap->bs, mbs = oldmat->mbs;
  PetscScalar  *array;

  PetscFunctionBegin;
  *newmat = NULL;

  PetscCall(MatCreate(PetscObjectComm((PetscObject)matin), &mat));
  PetscCall(MatSetSizes(mat, matin->rmap->n, matin->cmap->n, matin->rmap->N, matin->cmap->N));
  PetscCall(MatSetType(mat, ((PetscObject)matin)->type_name));
  /* Share the layouts rather than rebuilding them */
  PetscCall(PetscLayoutReference(matin->rmap, &mat->rmap));
  PetscCall(PetscLayoutReference(matin->cmap, &mat->cmap));

  if (matin->hash_active) {
    /* Matrix still in hash (unassembled) mode: only set up the duplicate */
    PetscCall(MatSetUp(mat));
  } else {
    mat->factortype   = matin->factortype;
    mat->preallocated = PETSC_TRUE;
    mat->assembled    = PETSC_TRUE;
    mat->insertmode   = NOT_SET_VALUES;

    /* Copy the block-size bookkeeping (counts in units of blocks) */
    a      = (Mat_MPISBAIJ *)mat->data;
    a->bs2 = oldmat->bs2;
    a->mbs = oldmat->mbs;
    a->nbs = oldmat->nbs;
    a->Mbs = oldmat->Mbs;
    a->Nbs = oldmat->Nbs;

    a->size         = oldmat->size;
    a->rank         = oldmat->rank;
    a->donotstash   = oldmat->donotstash;
    a->roworiented  = oldmat->roworiented;
    /* Per-instance MatGetRow() scratch is not copied; it is rebuilt lazily */
    a->rowindices   = NULL;
    a->rowvalues    = NULL;
    a->getrowactive = PETSC_FALSE;
    a->barray       = NULL;
    a->rstartbs     = oldmat->rstartbs;
    a->rendbs       = oldmat->rendbs;
    a->cstartbs     = oldmat->cstartbs;
    a->cendbs       = oldmat->cendbs;

    /* hash table stuff */
    a->ht           = NULL;
    a->hd           = NULL;
    a->ht_size      = 0;
    a->ht_flag      = oldmat->ht_flag;
    a->ht_fact      = oldmat->ht_fact;
    a->ht_total_ct  = 0;
    a->ht_insert_ct = 0;

    PetscCall(PetscArraycpy(a->rangebs, oldmat->rangebs, a->size + 2));
    /* colmap: global block column -> local column in B (hash map or dense array) */
    if (oldmat->colmap) {
#if defined(PETSC_USE_CTABLE)
      PetscCall(PetscHMapIDuplicate(oldmat->colmap, &a->colmap));
#else
      PetscCall(PetscMalloc1(a->Nbs, &a->colmap));
      PetscCall(PetscArraycpy(a->colmap, oldmat->colmap, a->Nbs));
#endif
    } else a->colmap = NULL;

    /* garray: global block-column indices of B's compressed columns */
    if (oldmat->garray && (len = ((Mat_SeqBAIJ *)oldmat->B->data)->nbs)) {
      PetscCall(PetscMalloc1(len, &a->garray));
      PetscCall(PetscArraycpy(a->garray, oldmat->garray, len));
    } else a->garray = NULL;

    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)matin), matin->rmap->bs, &mat->bstash));
    PetscCall(VecDuplicate(oldmat->lvec, &a->lvec));
    PetscCall(VecScatterCopy(oldmat->Mvctx, &a->Mvctx));

    PetscCall(VecDuplicate(oldmat->slvec0, &a->slvec0));
    PetscCall(VecDuplicate(oldmat->slvec1, &a->slvec1));

    /* slvec1a/slvec1b (and slvec0b) are views into the first bs*mbs entries and
       the remaining tail of slvec1 (resp. slvec0); they own no storage themselves */
    PetscCall(VecGetLocalSize(a->slvec1, &nt));
    PetscCall(VecGetArray(a->slvec1, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, bs * mbs, array, &a->slvec1a));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec1b));
    PetscCall(VecRestoreArray(a->slvec1, &array));
    PetscCall(VecGetArray(a->slvec0, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec0b));
    PetscCall(VecRestoreArray(a->slvec0, &array));

    /* ierr =  VecScatterCopy(oldmat->sMvctx,&a->sMvctx); - not written yet, replaced by the lazy trick: */
    PetscCall(PetscObjectReference((PetscObject)oldmat->sMvctx));
    a->sMvctx = oldmat->sMvctx;

    PetscCall(MatDuplicate(oldmat->A, cpvalues, &a->A));
    PetscCall(MatDuplicate(oldmat->B, cpvalues, &a->B));
  }
  /* Carry over composed functions (e.g. MatConvert_* registrations) */
  PetscCall(PetscFunctionListDuplicate(((PetscObject)matin)->qlist, &((PetscObject)mat)->qlist));
  *newmat = mat;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2504 
/* Used for both MPIBAIJ and MPISBAIJ matrices */
#define MatLoad_MPISBAIJ_Binary MatLoad_MPIBAIJ_Binary

/* Load an MPISBAIJ matrix from a viewer; only binary viewers are supported,
   and the actual work is delegated to the shared MPIBAIJ binary loader. */
static PetscErrorCode MatLoad_MPISBAIJ(Mat mat, PetscViewer viewer)
{
  PetscBool isbinary;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Viewer type %s not yet supported for reading %s matrices", ((PetscObject)viewer)->type_name, ((PetscObject)mat)->type_name);
  PetscCall(MatLoad_MPISBAIJ_Binary(mat, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2518 
/* Row-wise max |entry| for an MPISBAIJ matrix. Because only the upper triangle
   is stored, an entry of B at (row, col) also contributes to global row 'col',
   which lives on a later rank: each rank accumulates those column maxima into
   'work' and sends the relevant slices downstream; each rank then merges what
   it receives from earlier ranks into its local result.
   The idx[] output (column locations of the maxima) is not implemented. */
static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A, Vec v, PetscInt idx[])
{
  Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
  Mat_SeqBAIJ  *b = (Mat_SeqBAIJ *)a->B->data;
  PetscReal     atmp;
  PetscReal    *work, *svalues, *rvalues;
  PetscInt      i, bs, mbs, *bi, *bj, brow, j, ncols, krow, kcol, col, row, Mbs, bcol;
  PetscMPIInt   rank, size;
  PetscInt     *rowners_bs, count, source;
  PetscScalar  *va;
  MatScalar    *ba;
  MPI_Status    stat;

  PetscFunctionBegin;
  PetscCheck(!idx, PETSC_COMM_SELF, PETSC_ERR_SUP, "Send email to petsc-maint@mcs.anl.gov");
  /* Start from the diagonal block's row maxima */
  PetscCall(MatGetRowMaxAbs(a->A, v, NULL));
  PetscCall(VecGetArray(v, &va));

  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));

  bs  = A->rmap->bs;
  mbs = a->mbs;
  Mbs = a->Mbs;
  ba  = b->a; /* walked linearly below: values of B stored block by block */
  bi  = b->i;
  bj  = b->j;

  /* find ownerships */
  rowners_bs = A->rmap->range;

  /* each proc creates an array to be distributed */
  PetscCall(PetscCalloc1(bs * Mbs, &work));

  /* row_max for B */
  if (rank != size - 1) { /* the last rank has no off-diagonal block above the diagonal */
    for (i = 0; i < mbs; i++) {
      ncols = bi[1] - bi[0];
      bi++;
      brow = bs * i;
      for (j = 0; j < ncols; j++) {
        bcol = bs * (*bj);
        for (kcol = 0; kcol < bs; kcol++) {
          col = bcol + kcol;           /* local col index */
          col += rowners_bs[rank + 1]; /* global col index */
          /* NOTE(review): this assumes B's column indices are relative to the start
             of the next rank's range rather than compressed via a->garray — confirm */
          for (krow = 0; krow < bs; krow++) {
            atmp = PetscAbsScalar(*ba);
            ba++;
            row = brow + krow; /* local row index */
            if (PetscRealPart(va[row]) < atmp) va[row] = atmp; /* update this rank's row max */
            if (work[col] < atmp) work[col] = atmp;            /* contribution to row 'col' owned downstream */
          }
        }
        bj++;
      }
    }

    /* send values to its owners */
    for (PetscMPIInt dest = rank + 1; dest < size; dest++) {
      svalues = work + rowners_bs[dest];
      count   = rowners_bs[dest + 1] - rowners_bs[dest];
      PetscCallMPI(MPIU_Send(svalues, count, MPIU_REAL, dest, rank, PetscObjectComm((PetscObject)A)));
    }
  }

  /* receive values */
  if (rank) {
    rvalues = work; /* reuse work as the receive buffer; one message from each earlier rank */
    count   = rowners_bs[rank + 1] - rowners_bs[rank];
    for (source = 0; source < rank; source++) {
      PetscCallMPI(MPIU_Recv(rvalues, count, MPIU_REAL, MPI_ANY_SOURCE, MPI_ANY_TAG, PetscObjectComm((PetscObject)A), &stat));
      /* process values */
      for (i = 0; i < count; i++) {
        if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i];
      }
    }
  }

  PetscCall(VecRestoreArray(v, &va));
  PetscCall(PetscFree(work));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2601 
/* SOR/SSOR relaxation for MPISBAIJ (bs == 1 only). Because only the upper
   triangle of the off-process coupling is stored, each sweep reconstructs the
   full off-diagonal contribution with one scatter: slvec0 carries -x (local
   part) and -B^T x (ghost part) forward, and slvec1 accumulates b plus those
   lower-triangular contributions; B then adds the upper-triangular part. */
static PetscErrorCode MatSOR_MPISBAIJ(Mat matin, Vec bb, PetscReal omega, MatSORType flag, PetscReal fshift, PetscInt its, PetscInt lits, Vec xx)
{
  Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ *)matin->data;
  PetscInt           mbs = mat->mbs, bs = matin->rmap->bs;
  PetscScalar       *x, *ptr, *from;
  Vec                bb1;
  const PetscScalar *b;

  PetscFunctionBegin;
  PetscCheck(its > 0 && lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " and local its %" PetscInt_FMT " both positive", its, lits);
  PetscCheck(bs <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "SSOR for block size > 1 is not yet implemented");

  if (flag == SOR_APPLY_UPPER) {
    /* purely local operation; delegate to the diagonal block */
    PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
    if (flag & SOR_ZERO_INITIAL_GUESS) {
      /* first sweep with x = 0 needs no off-process correction */
      PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, lits, xx));
      its--;
    }

    PetscCall(VecDuplicate(bb, &bb1));
    while (its--) {
      /* lower triangular part: slvec0b = - B^T*xx */
      PetscCall((*mat->B->ops->multtranspose)(mat->B, xx, mat->slvec0b));

      /* copy xx into slvec0a */
      PetscCall(VecGetArray(mat->slvec0, &ptr));
      PetscCall(VecGetArray(xx, &x));
      PetscCall(PetscArraycpy(ptr, x, bs * mbs));
      PetscCall(VecRestoreArray(mat->slvec0, &ptr));

      /* negate so the scatter ADD below subtracts the coupling terms */
      PetscCall(VecScale(mat->slvec0, -1.0));

      /* copy bb into slvec1a */
      PetscCall(VecGetArray(mat->slvec1, &ptr));
      PetscCall(VecGetArrayRead(bb, &b));
      PetscCall(PetscArraycpy(ptr, b, bs * mbs));
      PetscCall(VecRestoreArray(mat->slvec1, &ptr));

      /* set slvec1b = 0 */
      PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
      PetscCall(VecZeroEntries(mat->slvec1b));

      /* scatter slvec0 into slvec1 with ADD: slvec1a = bb - (lower)x, slvec1b = -xx ghosts */
      PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecRestoreArray(xx, &x));
      PetscCall(VecRestoreArrayRead(bb, &b));
      PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));

      /* upper triangular part: bb1 = bb1 - B*x */
      PetscCall((*mat->B->ops->multadd)(mat->B, mat->slvec1b, mat->slvec1a, bb1));

      /* local diagonal sweep */
      PetscCall((*mat->A->ops->sor)(mat->A, bb1, omega, SOR_SYMMETRIC_SWEEP, fshift, lits, lits, xx));
    }
    PetscCall(VecDestroy(&bb1));
  } else if ((flag & SOR_LOCAL_FORWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
    PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
  } else if ((flag & SOR_LOCAL_BACKWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
    PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
  } else if (flag & SOR_EISENSTAT) {
    Vec                xx1;
    PetscBool          hasop;
    const PetscScalar *diag;
    PetscScalar       *sl, scale = (omega - 2.0) / omega;
    PetscInt           i, n;

    /* lazily created work vectors, cached on the matrix and freed in MatDestroy */
    if (!mat->xx1) {
      PetscCall(VecDuplicate(bb, &mat->xx1));
      PetscCall(VecDuplicate(bb, &mat->bb1));
    }
    xx1 = mat->xx1;
    bb1 = mat->bb1;

    PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP), fshift, lits, 1, xx));

    if (!mat->diag) {
      /* this is wrong for same matrix with new nonzero values */
      PetscCall(MatCreateVecs(matin, &mat->diag, NULL));
      PetscCall(MatGetDiagonal(matin, mat->diag));
    }
    PetscCall(MatHasOperation(matin, MATOP_MULT_DIAGONAL_BLOCK, &hasop));

    if (hasop) {
      PetscCall(MatMultDiagonalBlock(matin, xx, bb1));
      PetscCall(VecAYPX(mat->slvec1a, scale, bb));
    } else {
      /*
          These two lines are replaced by code that may be a bit faster for a good compiler
      PetscCall(VecPointwiseMult(mat->slvec1a,mat->diag,xx));
      PetscCall(VecAYPX(mat->slvec1a,scale,bb));
      */
      PetscCall(VecGetArray(mat->slvec1a, &sl));
      PetscCall(VecGetArrayRead(mat->diag, &diag));
      PetscCall(VecGetArrayRead(bb, &b));
      PetscCall(VecGetArray(xx, &x));
      PetscCall(VecGetLocalSize(xx, &n));
      if (omega == 1.0) {
        /* scale == -1 in this case, so b + scale*d*x == b - d*x */
        for (i = 0; i < n; i++) sl[i] = b[i] - diag[i] * x[i];
        PetscCall(PetscLogFlops(2.0 * n));
      } else {
        for (i = 0; i < n; i++) sl[i] = b[i] + scale * diag[i] * x[i];
        PetscCall(PetscLogFlops(3.0 * n));
      }
      PetscCall(VecRestoreArray(mat->slvec1a, &sl));
      PetscCall(VecRestoreArrayRead(mat->diag, &diag));
      PetscCall(VecRestoreArrayRead(bb, &b));
      PetscCall(VecRestoreArray(xx, &x));
    }

    /* multiply off-diagonal portion of matrix */
    PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
    PetscCall(VecZeroEntries(mat->slvec1b));
    PetscCall((*mat->B->ops->multtranspose)(mat->B, xx, mat->slvec0b));
    PetscCall(VecGetArray(mat->slvec0, &from));
    PetscCall(VecGetArray(xx, &x));
    PetscCall(PetscArraycpy(from, x, bs * mbs));
    PetscCall(VecRestoreArray(mat->slvec0, &from));
    PetscCall(VecRestoreArray(xx, &x));
    PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
    PetscCall((*mat->B->ops->multadd)(mat->B, mat->slvec1b, mat->slvec1a, mat->slvec1a));

    /* local sweep */
    PetscCall((*mat->A->ops->sor)(mat->A, mat->slvec1a, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP), fshift, lits, 1, xx1));
    PetscCall(VecAXPY(xx, 1.0, xx1));
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatSORType is not supported for SBAIJ matrix format");
  PetscFunctionReturn(PETSC_SUCCESS);
}
2733 
2734 /*@
2735   MatCreateMPISBAIJWithArrays - creates a `MATMPISBAIJ` matrix using arrays that contain in standard CSR format for the local rows.
2736 
2737   Collective
2738 
2739   Input Parameters:
2740 + comm - MPI communicator
2741 . bs   - the block size, only a block size of 1 is supported
2742 . m    - number of local rows (Cannot be `PETSC_DECIDE`)
2743 . n    - This value should be the same as the local size used in creating the
2744          x vector for the matrix-vector product $ y = Ax $. (or `PETSC_DECIDE` to have
2745          calculated if `N` is given) For square matrices `n` is almost always `m`.
2746 . M    - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
2747 . N    - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
2748 . i    - row indices; that is i[0] = 0, i[row] = i[row-1] + number of block elements in that row block row of the matrix
2749 . j    - column indices
2750 - a    - matrix values
2751 
2752   Output Parameter:
2753 . mat - the matrix
2754 
2755   Level: intermediate
2756 
2757   Notes:
2758   The `i`, `j`, and `a` arrays ARE copied by this routine into the internal format used by PETSc;
2759   thus you CANNOT change the matrix entries by changing the values of `a` after you have
2760   called this routine. Use `MatCreateMPIAIJWithSplitArrays()` to avoid needing to copy the arrays.
2761 
2762   The `i` and `j` indices are 0 based, and `i` indices are indices corresponding to the local `j` array.
2763 
2764 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`,
2765           `MATMPIAIJ`, `MatCreateAIJ()`, `MatCreateMPIAIJWithSplitArrays()`, `MatMPISBAIJSetPreallocationCSR()`
2766 @*/
PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, const PetscInt i[], const PetscInt j[], const PetscScalar a[], Mat *mat)
{
  PetscFunctionBegin;
  /* CSR row pointers must be 0-based, and the local row count must be explicit */
  PetscCheck(!i[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i (row indices) must start with 0");
  PetscCheck(m >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "local number of rows (m) cannot be PETSC_DECIDE, or negative");
  PetscCall(MatCreate(comm, mat));
  PetscCall(MatSetSizes(*mat, m, n, M, N));
  PetscCall(MatSetType(*mat, MATMPISBAIJ));
  /* Copies i/j/a into the internal SBAIJ storage; caller keeps ownership of the arrays */
  PetscCall(MatMPISBAIJSetPreallocationCSR(*mat, bs, i, j, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2778 
2779 /*@
2780   MatMPISBAIJSetPreallocationCSR - Creates a sparse parallel matrix in `MATMPISBAIJ` format using the given nonzero structure and (optional) numerical values
2781 
2782   Collective
2783 
2784   Input Parameters:
2785 + B  - the matrix
2786 . bs - the block size
2787 . i  - the indices into `j` for the start of each local row (indices start with zero)
2788 . j  - the column indices for each local row (indices start with zero) these must be sorted for each row
2789 - v  - optional values in the matrix, pass `NULL` if not provided
2790 
2791   Level: advanced
2792 
2793   Notes:
2794   The `i`, `j`, and `v` arrays ARE copied by this routine into the internal format used by PETSc;
2795   thus you CANNOT change the matrix entries by changing the values of `v` after you have
2796   called this routine.
2797 
2798   Though this routine has Preallocation() in the name it also sets the exact nonzero locations of the matrix entries
2799   and usually the numerical values as well
2800 
2801   Any entries passed in that are below the diagonal are ignored
2802 
2803 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIBAIJSetPreallocation()`, `MatCreateAIJ()`, `MATMPIAIJ`,
2804           `MatCreateMPISBAIJWithArrays()`
2805 @*/
PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat B, PetscInt bs, const PetscInt i[], const PetscInt j[], const PetscScalar v[])
{
  PetscFunctionBegin;
  /* Dispatch to the type-specific implementation; a no-op if B's type does not
     compose "MatMPISBAIJSetPreallocationCSR_C". */
  PetscTryMethod(B, "MatMPISBAIJSetPreallocationCSR_C", (Mat, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[]), (B, bs, i, j, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2812 
2813 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPISBAIJ(MPI_Comm comm, Mat inmat, PetscInt n, MatReuse scall, Mat *outmat)
2814 {
2815   PetscInt     m, N, i, rstart, nnz, Ii, bs, cbs;
2816   PetscInt    *indx;
2817   PetscScalar *values;
2818 
2819   PetscFunctionBegin;
2820   PetscCall(MatGetSize(inmat, &m, &N));
2821   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
2822     Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ *)inmat->data;
2823     PetscInt     *dnz, *onz, mbs, Nbs, nbs;
2824     PetscInt     *bindx, rmax = a->rmax, j;
2825     PetscMPIInt   rank, size;
2826 
2827     PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2828     mbs = m / bs;
2829     Nbs = N / cbs;
2830     if (n == PETSC_DECIDE) PetscCall(PetscSplitOwnershipBlock(comm, cbs, &n, &N));
2831     nbs = n / cbs;
2832 
2833     PetscCall(PetscMalloc1(rmax, &bindx));
2834     MatPreallocateBegin(comm, mbs, nbs, dnz, onz); /* inline function, output __end and __rstart are used below */
2835 
2836     PetscCallMPI(MPI_Comm_rank(comm, &rank));
2837     PetscCallMPI(MPI_Comm_rank(comm, &size));
2838     if (rank == size - 1) {
2839       /* Check sum(nbs) = Nbs */
2840       PetscCheck(__end == Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Sum of local block columns %" PetscInt_FMT " != global block columns %" PetscInt_FMT, __end, Nbs);
2841     }
2842 
2843     rstart = __rstart; /* block rstart of *outmat; see inline function MatPreallocateBegin */
2844     PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2845     for (i = 0; i < mbs; i++) {
2846       PetscCall(MatGetRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL)); /* non-blocked nnz and indx */
2847       nnz = nnz / bs;
2848       for (j = 0; j < nnz; j++) bindx[j] = indx[j * bs] / bs;
2849       PetscCall(MatPreallocateSet(i + rstart, nnz, bindx, dnz, onz));
2850       PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL));
2851     }
2852     PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2853     PetscCall(PetscFree(bindx));
2854 
2855     PetscCall(MatCreate(comm, outmat));
2856     PetscCall(MatSetSizes(*outmat, m, n, PETSC_DETERMINE, PETSC_DETERMINE));
2857     PetscCall(MatSetBlockSizes(*outmat, bs, cbs));
2858     PetscCall(MatSetType(*outmat, MATSBAIJ));
2859     PetscCall(MatSeqSBAIJSetPreallocation(*outmat, bs, 0, dnz));
2860     PetscCall(MatMPISBAIJSetPreallocation(*outmat, bs, 0, dnz, 0, onz));
2861     MatPreallocateEnd(dnz, onz);
2862   }
2863 
2864   /* numeric phase */
2865   PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2866   PetscCall(MatGetOwnershipRange(*outmat, &rstart, NULL));
2867 
2868   PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2869   for (i = 0; i < m; i++) {
2870     PetscCall(MatGetRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2871     Ii = i + rstart;
2872     PetscCall(MatSetValues(*outmat, 1, &Ii, nnz, indx, values, INSERT_VALUES));
2873     PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2874   }
2875   PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2876   PetscCall(MatAssemblyBegin(*outmat, MAT_FINAL_ASSEMBLY));
2877   PetscCall(MatAssemblyEnd(*outmat, MAT_FINAL_ASSEMBLY));
2878   PetscFunctionReturn(PETSC_SUCCESS);
2879 }
2880