/* xref: /petsc/src/mat/impls/sbaij/mpi/mpisbaij.c (revision 3f7878b3ed00daeee6ae8d2839a0d41a4d3e79ca) */
#include <../src/mat/impls/baij/mpi/mpibaij.h> /*I "petscmat.h" I*/
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <petscblaslapack.h>

static PetscErrorCode MatDestroy_MPISBAIJ(Mat mat)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;

  PetscFunctionBegin;
  PetscCall(PetscLogObjectState((PetscObject)mat, "Rows=%" PetscInt_FMT ",Cols=%" PetscInt_FMT, mat->rmap->N, mat->cmap->N));
  PetscCall(MatStashDestroy_Private(&mat->stash));
  PetscCall(MatStashDestroy_Private(&mat->bstash));
  PetscCall(MatDestroy(&baij->A));
  PetscCall(MatDestroy(&baij->B));
#if defined(PETSC_USE_CTABLE)
  PetscCall(PetscHMapIDestroy(&baij->colmap));
#else
  PetscCall(PetscFree(baij->colmap));
#endif
  PetscCall(PetscFree(baij->garray));
  PetscCall(VecDestroy(&baij->lvec));
  PetscCall(VecScatterDestroy(&baij->Mvctx));
  PetscCall(VecDestroy(&baij->slvec0));
  PetscCall(VecDestroy(&baij->slvec0b));
  PetscCall(VecDestroy(&baij->slvec1));
  PetscCall(VecDestroy(&baij->slvec1a));
  PetscCall(VecDestroy(&baij->slvec1b));
  PetscCall(VecScatterDestroy(&baij->sMvctx));
  PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));
  PetscCall(PetscFree(baij->barray));
  PetscCall(PetscFree(baij->hd));
  PetscCall(VecDestroy(&baij->diag));
  PetscCall(VecDestroy(&baij->bb1));
  PetscCall(VecDestroy(&baij->xx1));
#if defined(PETSC_USE_REAL_MAT_SINGLE)
  PetscCall(PetscFree(baij->setvaluescopy));
#endif
  PetscCall(PetscFree(baij->in_loc));
  PetscCall(PetscFree(baij->v_loc));
  PetscCall(PetscFree(baij->rangebs));
  PetscCall(PetscFree(mat->data));

  PetscCall(PetscObjectChangeTypeName((PetscObject)mat, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatStoreValues_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatRetrieveValues_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatMPISBAIJSetPreallocationCSR_C", NULL));
#if defined(PETSC_HAVE_ELEMENTAL)
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_elemental_C", NULL));
#endif
#if defined(PETSC_HAVE_SCALAPACK)
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_scalapack_C", NULL));
#endif
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpiaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat, "MatConvert_mpisbaij_mpibaij_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* defines MatSetValues_MPI_Hash(), MatAssemblyBegin_MPI_Hash(), MatAssemblyEnd_MPI_Hash(), MatSetUp_MPI_Hash() */
#define TYPE SBAIJ
#define TYPE_SBAIJ
#include "../src/mat/impls/aij/mpi/mpihashmat.h"
#undef TYPE
#undef TYPE_SBAIJ

#if defined(PETSC_HAVE_ELEMENTAL)
PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat, MatType, MatReuse, Mat *);
#endif
#if defined(PETSC_HAVE_SCALAPACK)
PETSC_INTERN PetscErrorCode MatConvert_SBAIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
#endif

/* This could be moved to matimpl.h */
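/* Preallocate B from the union of the nonzero patterns of the nm matrices X[]:
   a MATPREALLOCATOR matrix records the pattern of the MatSetValues() calls in
   the loop below (mirroring the rows of any X[i] flagged symmetric), and
   MatPreallocatorPreallocate() then sizes B accordingly; when fill is
   PETSC_TRUE the pattern is also filled with explicit zeros. */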
static PetscErrorCode MatPreallocateWithMats_Private(Mat B, PetscInt nm, Mat X[], PetscBool symm[], PetscBool fill)
{
  Mat       preallocator;
  PetscInt  r, rstart, rend;
  PetscInt  bs, i, m, n, M, N;
  PetscBool cong = PETSC_TRUE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveInt(B, nm, 2);
  for (i = 0; i < nm; i++) {
    PetscValidHeaderSpecific(X[i], MAT_CLASSID, 3);
    PetscCall(PetscLayoutCompare(B->rmap, X[i]->rmap, &cong));
    PetscCheck(cong, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for different layouts");
  }
  PetscValidLogicalCollectiveBool(B, fill, 5);
  PetscCall(MatGetBlockSize(B, &bs));
  PetscCall(MatGetSize(B, &M, &N));
  PetscCall(MatGetLocalSize(B, &m, &n));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)B), &preallocator));
  PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
  PetscCall(MatSetBlockSize(preallocator, bs));
  PetscCall(MatSetSizes(preallocator, m, n, M, N));
  PetscCall(MatSetUp(preallocator));
  PetscCall(MatGetOwnershipRange(preallocator, &rstart, &rend));
  for (r = rstart; r < rend; ++r) {
    PetscInt           ncols;
    const PetscInt    *row;
    const PetscScalar *vals;

    for (i = 0; i < nm; i++) {
      PetscCall(MatGetRow(X[i], r, &ncols, &row, &vals));
      PetscCall(MatSetValues(preallocator, 1, &r, ncols, row, vals, INSERT_VALUES));
      if (symm && symm[i]) PetscCall(MatSetValues(preallocator, ncols, row, 1, &r, vals, INSERT_VALUES));
      PetscCall(MatRestoreRow(X[i], r, &ncols, &row, &vals));
    }
  }
  PetscCall(MatAssemblyBegin(preallocator, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(preallocator, MAT_FINAL_ASSEMBLY));
  PetscCall(MatPreallocatorPreallocate(preallocator, fill, B));
  PetscCall(MatDestroy(&preallocator));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Basic(Mat A, MatType newtype, MatReuse reuse, Mat *newmat)
{
  Mat      B;
  PetscInt r;

  PetscFunctionBegin;
  if (reuse != MAT_REUSE_MATRIX) {
    PetscBool symm = PETSC_TRUE, isdense;
    PetscInt  bs;

    PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
    PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetType(B, newtype));
    PetscCall(MatGetBlockSize(A, &bs));
    PetscCall(MatSetBlockSize(B, bs));
    PetscCall(PetscLayoutSetUp(B->rmap));
    PetscCall(PetscLayoutSetUp(B->cmap));
    PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isdense, MATSEQDENSE, MATMPIDENSE, MATSEQDENSECUDA, ""));
    if (!isdense) {
      PetscCall(MatGetRowUpperTriangular(A));
      PetscCall(MatPreallocateWithMats_Private(B, 1, &A, &symm, PETSC_TRUE));
      PetscCall(MatRestoreRowUpperTriangular(A));
    } else {
      PetscCall(MatSetUp(B));
    }
  } else {
    B = *newmat;
    PetscCall(MatZeroEntries(B));
  }

  PetscCall(MatGetRowUpperTriangular(A));
  for (r = A->rmap->rstart; r < A->rmap->rend; r++) {
    PetscInt           ncols;
    const PetscInt    *row;
    const PetscScalar *vals;

    PetscCall(MatGetRow(A, r, &ncols, &row, &vals));
    PetscCall(MatSetValues(B, 1, &r, ncols, row, vals, INSERT_VALUES));
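    /* mirror the row into the (block) lower triangle; for complex Hermitian
       matrices the mirrored values must be conjugated */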
#if defined(PETSC_USE_COMPLEX)
    if (A->hermitian == PETSC_BOOL3_TRUE) {
      PetscInt i;
      for (i = 0; i < ncols; i++) PetscCall(MatSetValue(B, row[i], r, PetscConj(vals[i]), INSERT_VALUES));
    } else {
      PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
    }
#else
    PetscCall(MatSetValues(B, ncols, row, 1, &r, vals, INSERT_VALUES));
#endif
    PetscCall(MatRestoreRow(A, r, &ncols, &row, &vals));
  }
  PetscCall(MatRestoreRowUpperTriangular(A));
  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A, &B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatStoreValues_MPISBAIJ(Mat mat)
{
  Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;

  PetscFunctionBegin;
  PetscCall(MatStoreValues(aij->A));
  PetscCall(MatStoreValues(aij->B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRetrieveValues_MPISBAIJ(Mat mat)
{
  Mat_MPISBAIJ *aij = (Mat_MPISBAIJ *)mat->data;

  PetscFunctionBegin;
  PetscCall(MatRetrieveValues(aij->A));
  PetscCall(MatRetrieveValues(aij->B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

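/* The two macros below insert the scalar "value" at point position (row,col)
   of the local diagonal block A or the off-diagonal block B.  Each locates the
   target block column within the block row (a bisection narrowed to a short
   linear scan), adds into or overwrites an existing block, and otherwise
   creates a new nonzero block, reallocating and shifting the later entries of
   the row up as needed. */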
#define MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, orow, ocol) \
  do { \
    brow = row / bs; \
    rp   = aj + ai[brow]; \
    ap   = aa + bs2 * ai[brow]; \
    rmax = aimax[brow]; \
    nrow = ailen[brow]; \
    bcol = col / bs; \
    ridx = row % bs; \
    cidx = col % bs; \
    low  = 0; \
    high = nrow; \
    while (high - low > 3) { \
      t = (low + high) / 2; \
      if (rp[t] > bcol) high = t; \
      else low = t; \
    } \
    for (_i = low; _i < high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap + bs2 * _i + bs * cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value; \
        else *bap = value; \
        goto a_noinsert; \
      } \
    } \
    if (a->nonew == 1) goto a_noinsert; \
    PetscCheck(a->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, brow, bcol, rmax, aa, ai, aj, rp, ap, aimax, a->nonew, MatScalar); \
    N = nrow++ - 1; \
    /* shift up all the later entries in this row */ \
    PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
    PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
    PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
    rp[_i]                          = bcol; \
    ap[bs2 * _i + bs * cidx + ridx] = value; \
  a_noinsert:; \
    ailen[brow] = nrow; \
  } while (0)

#define MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, orow, ocol) \
  do { \
    brow = row / bs; \
    rp   = bj + bi[brow]; \
    ap   = ba + bs2 * bi[brow]; \
    rmax = bimax[brow]; \
    nrow = bilen[brow]; \
    bcol = col / bs; \
    ridx = row % bs; \
    cidx = col % bs; \
    low  = 0; \
    high = nrow; \
    while (high - low > 3) { \
      t = (low + high) / 2; \
      if (rp[t] > bcol) high = t; \
      else low = t; \
    } \
    for (_i = low; _i < high; _i++) { \
      if (rp[_i] > bcol) break; \
      if (rp[_i] == bcol) { \
        bap = ap + bs2 * _i + bs * cidx + ridx; \
        if (addv == ADD_VALUES) *bap += value; \
        else *bap = value; \
        goto b_noinsert; \
      } \
    } \
    if (b->nonew == 1) goto b_noinsert; \
    PetscCheck(b->nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at global row/column (%" PetscInt_FMT ", %" PetscInt_FMT ") into matrix", orow, ocol); \
    MatSeqXAIJReallocateAIJ(B, b->mbs, bs2, nrow, brow, bcol, rmax, ba, bi, bj, rp, ap, bimax, b->nonew, MatScalar); \
    N = nrow++ - 1; \
    /* shift up all the later entries in this row */ \
    PetscCall(PetscArraymove(rp + _i + 1, rp + _i, N - _i + 1)); \
    PetscCall(PetscArraymove(ap + bs2 * (_i + 1), ap + bs2 * _i, bs2 * (N - _i + 1))); \
    PetscCall(PetscArrayzero(ap + bs2 * _i, bs2)); \
    rp[_i]                          = bcol; \
    ap[bs2 * _i + bs * cidx + ridx] = value; \
  b_noinsert:; \
    bilen[brow] = nrow; \
  } while (0)

/* Only add/insert a(i,j) with i<=j (block indices).
   Any a(i,j) with i>j supplied by the user is either ignored or generates an
   error, depending on the MAT_IGNORE_LOWER_TRIANGULAR option.
*/
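/* A hedged usage sketch (not part of this file): a caller would normally
   supply only entries whose block row does not exceed the block column, e.g.

     for (i = rstart; i < rend; i++)
       for (j = i; j < N; j++) PetscCall(MatSetValue(mat, i, j, aij(i, j), ADD_VALUES));

   where aij() is a hypothetical entry generator.  Lower triangular input is
   tolerated (and dropped) only after
   MatSetOption(mat, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE). */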
static PetscErrorCode MatSetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode addv)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  MatScalar     value;
  PetscBool     roworiented = baij->roworiented;
  PetscInt      i, j, row, col;
  PetscInt      rstart_orig = mat->rmap->rstart;
  PetscInt      rend_orig = mat->rmap->rend, cstart_orig = mat->cmap->rstart;
  PetscInt      cend_orig = mat->cmap->rend, bs = mat->rmap->bs;

  /* Some variables required in the macros */
  Mat           A     = baij->A;
  Mat_SeqSBAIJ *a     = (Mat_SeqSBAIJ *)A->data;
  PetscInt     *aimax = a->imax, *ai = a->i, *ailen = a->ilen, *aj = a->j;
  MatScalar    *aa = a->a;

  Mat          B     = baij->B;
  Mat_SeqBAIJ *b     = (Mat_SeqBAIJ *)B->data;
  PetscInt    *bimax = b->imax, *bi = b->i, *bilen = b->ilen, *bj = b->j;
  MatScalar   *ba = b->a;

  PetscInt  *rp, ii, nrow, _i, rmax, N, brow, bcol;
  PetscInt   low, high, t, ridx, cidx, bs2 = a->bs2;
  MatScalar *ap, *bap;

  /* for stash */
  PetscInt   n_loc, *in_loc = NULL;
  MatScalar *v_loc = NULL;

  PetscFunctionBegin;
  if (!baij->donotstash) {
    if (n > baij->n_loc) {
      PetscCall(PetscFree(baij->in_loc));
      PetscCall(PetscFree(baij->v_loc));
      PetscCall(PetscMalloc1(n, &baij->in_loc));
      PetscCall(PetscMalloc1(n, &baij->v_loc));

      baij->n_loc = n;
    }
    in_loc = baij->in_loc;
    v_loc  = baij->v_loc;
  }

  for (i = 0; i < m; i++) {
    if (im[i] < 0) continue;
    PetscCheck(im[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, im[i], mat->rmap->N - 1);
    if (im[i] >= rstart_orig && im[i] < rend_orig) { /* this processor entry */
      row = im[i] - rstart_orig;                     /* local row index */
      for (j = 0; j < n; j++) {
        if (im[i] / bs > in[j] / bs) {
          if (a->ignore_ltriangular) {
            continue; /* ignore lower triangular blocks */
          } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
        }
        if (in[j] >= cstart_orig && in[j] < cend_orig) { /* diag entry (A) */
          col  = in[j] - cstart_orig;                    /* local col index */
          brow = row / bs;
          bcol = col / bs;
          if (brow > bcol) continue; /* ignore lower triangular blocks of A */
          if (roworiented) value = v[i * n + j];
          else value = v[i + j * m];
          MatSetValues_SeqSBAIJ_A_Private(row, col, value, addv, im[i], in[j]);
          /* PetscCall(MatSetValues_SeqBAIJ(baij->A,1,&row,1,&col,&value,addv)); */
        } else if (in[j] < 0) {
          continue;
        } else {
          PetscCheck(in[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[j], mat->cmap->N - 1);
          /* off-diag entry (B) */
          if (mat->was_assembled) {
            if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
#if defined(PETSC_USE_CTABLE)
            PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] / bs + 1, 0, &col));
            col = col - 1;
#else
            col = baij->colmap[in[j] / bs] - 1;
#endif
            if (col < 0 && !((Mat_SeqSBAIJ *)baij->A->data)->nonew) {
              PetscCall(MatDisAssemble_MPISBAIJ(mat));
              col = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqSBAIJ_B_Private() */
              B     = baij->B;
              b     = (Mat_SeqBAIJ *)B->data;
              bimax = b->imax;
              bi    = b->i;
              bilen = b->ilen;
              bj    = b->j;
              ba    = b->a;
            } else col += in[j] % bs;
          } else col = in[j];
          if (roworiented) value = v[i * n + j];
          else value = v[i + j * m];
          MatSetValues_SeqSBAIJ_B_Private(row, col, value, addv, im[i], in[j]);
          /* PetscCall(MatSetValues_SeqBAIJ(baij->B,1,&row,1,&col,&value,addv)); */
        }
      }
    } else { /* off processor entry */
      PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
      if (!baij->donotstash) {
        mat->assembled = PETSC_FALSE;
        n_loc          = 0;
        for (j = 0; j < n; j++) {
          if (im[i] / bs > in[j] / bs) continue; /* ignore lower triangular blocks */
          in_loc[n_loc] = in[j];
          if (roworiented) {
            v_loc[n_loc] = v[i * n + j];
          } else {
            v_loc[n_loc] = v[j * m + i];
          }
          n_loc++;
        }
        PetscCall(MatStashValuesRow_Private(&mat->stash, im[i], n_loc, in_loc, v_loc, PETSC_FALSE));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

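/* Insert one bs x bs block at block position (row,col) of a sequential SBAIJ
   matrix.  Blocks are stored column-major, so row-oriented input is written
   with stride bs (i.e. transposed), while column-oriented input is copied
   straight through. */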
static inline PetscErrorCode MatSetValuesBlocked_SeqSBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
{
  Mat_SeqSBAIJ      *a = (Mat_SeqSBAIJ *)A->data;
  PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
  PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
  PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
  PetscBool          roworiented = a->roworiented;
  const PetscScalar *value       = v;
  MatScalar         *ap, *aa = a->a, *bap;

  PetscFunctionBegin;
  if (col < row) {
    if (a->ignore_ltriangular) PetscFunctionReturn(PETSC_SUCCESS); /* ignore lower triangular block */
    else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
  }
  rp    = aj + ai[row];
  ap    = aa + bs2 * ai[row];
  rmax  = imax[row];
  nrow  = ailen[row];
  value = v;
  low   = 0;
  high  = nrow;

  while (high - low > 7) {
    t = (low + high) / 2;
    if (rp[t] > col) high = t;
    else low = t;
  }
  for (i = low; i < high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap + bs2 * i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
          }
        } else {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++) {
            for (jj = 0; jj < bs; jj++) *bap++ += *value++;
          }
        } else {
          for (ii = 0; ii < bs; ii++) {
            for (jj = 0; jj < bs; jj++) *bap++ = *value++;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new block index nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
  N = nrow++ - 1;
  high++;
  /* shift up all the later entries in this row */
  PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
  PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
  rp[i] = col;
  bap   = ap + bs2 * i;
  if (roworiented) {
    for (ii = 0; ii < bs; ii++) {
      for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
    }
  } else {
    for (ii = 0; ii < bs; ii++) {
      for (jj = 0; jj < bs; jj++) *bap++ = *value++;
    }
  }
noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   This routine is exactly duplicated in mpibaij.c
*/
static inline PetscErrorCode MatSetValuesBlocked_SeqBAIJ_Inlined(Mat A, PetscInt row, PetscInt col, const PetscScalar v[], InsertMode is, PetscInt orow, PetscInt ocol)
{
  Mat_SeqBAIJ       *a = (Mat_SeqBAIJ *)A->data;
  PetscInt          *rp, low, high, t, ii, jj, nrow, i, rmax, N;
  PetscInt          *imax = a->imax, *ai = a->i, *ailen = a->ilen;
  PetscInt          *aj = a->j, nonew = a->nonew, bs2 = a->bs2, bs = A->rmap->bs;
  PetscBool          roworiented = a->roworiented;
  const PetscScalar *value       = v;
  MatScalar         *ap, *aa = a->a, *bap;

  PetscFunctionBegin;
  rp    = aj + ai[row];
  ap    = aa + bs2 * ai[row];
  rmax  = imax[row];
  nrow  = ailen[row];
  low   = 0;
  high  = nrow;
  value = v;
  while (high - low > 7) {
    t = (low + high) / 2;
    if (rp[t] > col) high = t;
    else low = t;
  }
  for (i = low; i < high; i++) {
    if (rp[i] > col) break;
    if (rp[i] == col) {
      bap = ap + bs2 * i;
      if (roworiented) {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] += *value++;
          }
        } else {
          for (ii = 0; ii < bs; ii++) {
            for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
          }
        }
      } else {
        if (is == ADD_VALUES) {
          for (ii = 0; ii < bs; ii++, value += bs) {
            for (jj = 0; jj < bs; jj++) bap[jj] += value[jj];
            bap += bs;
          }
        } else {
          for (ii = 0; ii < bs; ii++, value += bs) {
            for (jj = 0; jj < bs; jj++) bap[jj] = value[jj];
            bap += bs;
          }
        }
      }
      goto noinsert2;
    }
  }
  if (nonew == 1) goto noinsert2;
  PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new global block indexed nonzero block (%" PetscInt_FMT ", %" PetscInt_FMT ") in the matrix", orow, ocol);
  MatSeqXAIJReallocateAIJ(A, a->mbs, bs2, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
  N = nrow++ - 1;
  high++;
  /* shift up all the later entries in this row */
  PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
  PetscCall(PetscArraymove(ap + bs2 * (i + 1), ap + bs2 * i, bs2 * (N - i + 1)));
  rp[i] = col;
  bap   = ap + bs2 * i;
  if (roworiented) {
    for (ii = 0; ii < bs; ii++) {
      for (jj = ii; jj < bs2; jj += bs) bap[jj] = *value++;
    }
  } else {
    for (ii = 0; ii < bs; ii++) {
      for (jj = 0; jj < bs; jj++) *bap++ = *value++;
    }
  }
noinsert2:;
  ailen[row] = nrow;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
    This routine could be optimized by removing the need for the block copy below and passing stride information
  to the above inline routines; similarly in MatSetValuesBlocked_MPIBAIJ()
*/
static PetscErrorCode MatSetValuesBlocked_MPISBAIJ(Mat mat, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const MatScalar v[], InsertMode addv)
{
  Mat_MPISBAIJ    *baij = (Mat_MPISBAIJ *)mat->data;
  const MatScalar *value;
  MatScalar       *barray      = baij->barray;
  PetscBool        roworiented = baij->roworiented, ignore_ltriangular = ((Mat_SeqSBAIJ *)baij->A->data)->ignore_ltriangular;
  PetscInt         i, j, ii, jj, row, col, rstart = baij->rstartbs;
  PetscInt         rend = baij->rendbs, cstart = baij->cstartbs, stepval;
  PetscInt         cend = baij->cendbs, bs = mat->rmap->bs, bs2 = baij->bs2;

  PetscFunctionBegin;
  if (!barray) {
    PetscCall(PetscMalloc1(bs2, &barray));
    baij->barray = barray;
  }

  if (roworiented) {
    stepval = (n - 1) * bs;
  } else {
    stepval = (m - 1) * bs;
  }
  for (i = 0; i < m; i++) {
    if (im[i] < 0) continue;
    PetscCheck(im[i] < baij->Mbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed row too large %" PetscInt_FMT " max %" PetscInt_FMT, im[i], baij->Mbs - 1);
    if (im[i] >= rstart && im[i] < rend) {
      row = im[i] - rstart;
      for (j = 0; j < n; j++) {
        if (im[i] > in[j]) {
          if (ignore_ltriangular) continue; /* ignore lower triangular blocks */
          else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_USER, "Lower triangular value cannot be set for sbaij format. Ignoring these values, run with -mat_ignore_lower_triangular or call MatSetOption(mat,MAT_IGNORE_LOWER_TRIANGULAR,PETSC_TRUE)");
        }
        /* If n == 1 (row-oriented) or m == 1 (column-oriented) the input block is already contiguous, so no copy is required */
        if ((roworiented) && (n == 1)) {
          barray = (MatScalar *)v + i * bs2;
        } else if ((!roworiented) && (m == 1)) {
          barray = (MatScalar *)v + j * bs2;
        } else { /* Here a copy is required */
          if (roworiented) {
            value = v + i * (stepval + bs) * bs + j * bs;
          } else {
            value = v + j * (stepval + bs) * bs + i * bs;
          }
          for (ii = 0; ii < bs; ii++, value += stepval) {
            for (jj = 0; jj < bs; jj++) *barray++ = *value++;
          }
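          /* rewind barray to the start of the block just copied */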
          barray -= bs2;
        }

        if (in[j] >= cstart && in[j] < cend) {
          col = in[j] - cstart;
          PetscCall(MatSetValuesBlocked_SeqSBAIJ_Inlined(baij->A, row, col, barray, addv, im[i], in[j]));
        } else if (in[j] < 0) {
          continue;
        } else {
          PetscCheck(in[j] < baij->Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Block indexed column too large %" PetscInt_FMT " max %" PetscInt_FMT, in[j], baij->Nbs - 1);
          if (mat->was_assembled) {
            if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));

#if defined(PETSC_USE_CTABLE)
            PetscCall(PetscHMapIGetWithDefault(baij->colmap, in[j] + 1, 0, &col));
            col = col < 1 ? -1 : (col - 1) / bs;
#else
            col = baij->colmap[in[j]] < 1 ? -1 : (baij->colmap[in[j]] - 1) / bs;
#endif
            if (col < 0 && !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
              PetscCall(MatDisAssemble_MPISBAIJ(mat));
              col = in[j];
            }
          } else col = in[j];
          PetscCall(MatSetValuesBlocked_SeqBAIJ_Inlined(baij->B, row, col, barray, addv, im[i], in[j]));
        }
      }
    } else {
      PetscCheck(!mat->nooffprocentries, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Setting off process block indexed row %" PetscInt_FMT " even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set", im[i]);
      if (!baij->donotstash) {
        if (roworiented) {
          PetscCall(MatStashValuesRowBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        } else {
          PetscCall(MatStashValuesColBlocked_Private(&mat->bstash, im[i], n, in, v, m, n, i));
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetValues_MPISBAIJ(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  PetscInt      bs = mat->rmap->bs, i, j, bsrstart = mat->rmap->rstart, bsrend = mat->rmap->rend;
  PetscInt      bscstart = mat->cmap->rstart, bscend = mat->cmap->rend, row, col, data;

  PetscFunctionBegin;
  for (i = 0; i < m; i++) {
    if (idxm[i] < 0) continue; /* negative row */
    PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
    if (idxm[i] >= bsrstart && idxm[i] < bsrend) {
      row = idxm[i] - bsrstart;
      for (j = 0; j < n; j++) {
        if (idxn[j] < 0) continue; /* negative column */
        PetscCheck(idxn[j] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, idxn[j], mat->cmap->N - 1);
        if (idxn[j] >= bscstart && idxn[j] < bscend) {
          col = idxn[j] - bscstart;
          PetscCall(MatGetValues_SeqSBAIJ(baij->A, 1, &row, 1, &col, v + i * n + j));
        } else {
          if (!baij->colmap) PetscCall(MatCreateColmap_MPIBAIJ_Private(mat));
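          /* colmap maps a global block column to 1 + the first point column of
             that block within B, so an absent block becomes -1 after the
             decrement below */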
#if defined(PETSC_USE_CTABLE)
          PetscCall(PetscHMapIGetWithDefault(baij->colmap, idxn[j] / bs + 1, 0, &data));
          data--;
#else
          data = baij->colmap[idxn[j] / bs] - 1;
#endif
          if ((data < 0) || (baij->garray[data / bs] != idxn[j] / bs)) *(v + i * n + j) = 0.0;
          else {
            col = data + idxn[j] % bs;
            PetscCall(MatGetValues_SeqBAIJ(baij->B, 1, &row, 1, &col, v + i * n + j));
          }
        }
      }
    } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local values currently supported");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatNorm_MPISBAIJ(Mat mat, NormType type, PetscReal *norm)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  PetscReal     sum[2], *lnorm2;

  PetscFunctionBegin;
  if (baij->size == 1) {
    PetscCall(MatNorm(baij->A, type, norm));
  } else {
    if (type == NORM_FROBENIUS) {
      PetscCall(PetscMalloc1(2, &lnorm2));
      PetscCall(MatNorm(baij->A, type, lnorm2));
      *lnorm2 = (*lnorm2) * (*lnorm2);
      lnorm2++; /* lnorm2[0] now holds the square of norm(A) */
      PetscCall(MatNorm(baij->B, type, lnorm2));
      *lnorm2 = (*lnorm2) * (*lnorm2);
      lnorm2--; /* lnorm2[1] now holds the square of norm(B) */
      PetscCallMPI(MPIU_Allreduce(lnorm2, sum, 2, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
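      /* the off-diagonal block B is stored only once but represents both
         triangles of the symmetric matrix, hence the factor of 2 */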
      *norm = PetscSqrtReal(sum[0] + 2 * sum[1]);
      PetscCall(PetscFree(lnorm2));
    } else if (type == NORM_INFINITY || type == NORM_1) { /* max row/column sum */
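      /* symmetric storage: every stored block except the diagonal blocks of A
         contributes to both a row sum and a column sum */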
      Mat_SeqSBAIJ *amat = (Mat_SeqSBAIJ *)baij->A->data;
      Mat_SeqBAIJ  *bmat = (Mat_SeqBAIJ *)baij->B->data;
      PetscReal    *rsum, vabs;
      PetscInt     *jj, *garray = baij->garray, rstart = baij->rstartbs, nz;
      PetscInt      brow, bcol, col, bs = baij->A->rmap->bs, row, grow, gcol, mbs = amat->mbs;
      MatScalar    *v;

      PetscCall(PetscMalloc1(mat->cmap->N, &rsum));
      PetscCall(PetscArrayzero(rsum, mat->cmap->N));
      /* Amat */
      v  = amat->a;
      jj = amat->j;
      for (brow = 0; brow < mbs; brow++) {
        grow = bs * (rstart + brow);
        nz   = amat->i[brow + 1] - amat->i[brow];
        for (bcol = 0; bcol < nz; bcol++) {
          gcol = bs * (rstart + *jj);
          jj++;
          for (col = 0; col < bs; col++) {
            for (row = 0; row < bs; row++) {
              vabs = PetscAbsScalar(*v);
              v++;
              rsum[gcol + col] += vabs;
              /* non-diagonal block */
              if (bcol > 0 && vabs > 0.0) rsum[grow + row] += vabs;
            }
          }
        }
        PetscCall(PetscLogFlops(nz * bs * bs));
      }
      /* Bmat */
      v  = bmat->a;
      jj = bmat->j;
      for (brow = 0; brow < mbs; brow++) {
        grow = bs * (rstart + brow);
        nz   = bmat->i[brow + 1] - bmat->i[brow];
        for (bcol = 0; bcol < nz; bcol++) {
          gcol = bs * garray[*jj];
          jj++;
          for (col = 0; col < bs; col++) {
            for (row = 0; row < bs; row++) {
              vabs = PetscAbsScalar(*v);
              v++;
              rsum[gcol + col] += vabs;
              rsum[grow + row] += vabs;
            }
          }
        }
        PetscCall(PetscLogFlops(nz * bs * bs));
      }
      PetscCallMPI(MPIU_Allreduce(MPI_IN_PLACE, rsum, mat->cmap->N, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)mat)));
      *norm = 0.0;
      for (col = 0; col < mat->cmap->N; col++) {
        if (rsum[col] > *norm) *norm = rsum[col];
      }
      PetscCall(PetscFree(rsum));
    } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for this norm yet");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAssemblyBegin_MPISBAIJ(Mat mat, MatAssemblyType mode)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  PetscInt      nstash, reallocs;

  PetscFunctionBegin;
  if (baij->donotstash || mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range));
  PetscCall(MatStashScatterBegin_Private(mat, &mat->bstash, baij->rangebs));
  PetscCall(MatStashGetInfo_Private(&mat->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(mat, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscCall(MatStashGetInfo_Private(&mat->bstash, &nstash, &reallocs));
  PetscCall(PetscInfo(mat, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAssemblyEnd_MPISBAIJ(Mat mat, MatAssemblyType mode)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
  Mat_SeqSBAIJ *a    = (Mat_SeqSBAIJ *)baij->A->data;
  PetscInt      i, j, rstart, ncols, flg, bs2 = baij->bs2;
  PetscInt     *row, *col;
  PetscBool     all_assembled;
  PetscMPIInt   n;
  PetscBool     r1, r2, r3;
  MatScalar    *val;

  /* do not use 'b=(Mat_SeqBAIJ*)baij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!baij->donotstash && !mat->nooffprocentries) {
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&mat->stash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i, mat->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&mat->stash));
    /* Now process the block-stash. Since the values are stashed column-oriented,
       clear the row-oriented flags, and restore the original flags after the
       MatSetValuesBlocked() calls */
    r1 = baij->roworiented;
    r2 = a->roworiented;
    r3 = ((Mat_SeqBAIJ *)baij->B->data)->roworiented;

    baij->roworiented = PETSC_FALSE;
    a->roworiented    = PETSC_FALSE;

    ((Mat_SeqBAIJ *)baij->B->data)->roworiented = PETSC_FALSE; /* b->roworiented */
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&mat->bstash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        PetscCall(MatSetValuesBlocked_MPISBAIJ(mat, 1, row + i, ncols, col + i, val + i * bs2, mat->insertmode));
        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&mat->bstash));

    baij->roworiented = r1;
    a->roworiented    = r2;

    ((Mat_SeqBAIJ *)baij->B->data)->roworiented = r3; /* b->roworiented */
  }

  PetscCall(MatAssemblyBegin(baij->A, mode));
  PetscCall(MatAssemblyEnd(baij->A, mode));

  /* determine if any process has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if the nonzero structure of submatrix B cannot change then we know that
     no process disassembled, thus we can skip this stuff
  */
  if (!((Mat_SeqBAIJ *)baij->B->data)->nonew) {
    PetscCallMPI(MPIU_Allreduce(&mat->was_assembled, &all_assembled, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
    if (mat->was_assembled && !all_assembled) PetscCall(MatDisAssemble_MPISBAIJ(mat));
  }

  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) { PetscCall(MatSetUpMultiply_MPISBAIJ(mat)); /* set up Mvctx and sMvctx */ }
  PetscCall(MatAssemblyBegin(baij->B, mode));
  PetscCall(MatAssemblyEnd(baij->B, mode));

  PetscCall(PetscFree2(baij->rowvalues, baij->rowindices));

  baij->rowvalues = NULL;

  /* if no new nonzero locations are allowed in the matrix then only set the matrix state the first time through */
  if ((!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) || !((Mat_SeqBAIJ *)baij->A->data)->nonew) {
    PetscObjectState state = baij->A->nonzerostate + baij->B->nonzerostate;
    PetscCallMPI(MPIU_Allreduce(&state, &mat->nonzerostate, 1, MPIU_INT64, MPI_SUM, PetscObjectComm((PetscObject)mat)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

extern PetscErrorCode MatSetValues_MPIBAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
#include <petscdraw.h>
static PetscErrorCode MatView_MPISBAIJ_ASCIIorDraworSocket(Mat mat, PetscViewer viewer)
{
  Mat_MPISBAIJ     *baij = (Mat_MPISBAIJ *)mat->data;
  PetscInt          bs   = mat->rmap->bs;
  PetscMPIInt       rank = baij->rank;
  PetscBool         iascii, isdraw;
  PetscViewer       sviewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      MatInfo info;
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat), &rank));
      PetscCall(MatGetInfo(mat, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIIPushSynchronized(viewer));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] Local rows %" PetscInt_FMT " nz %" PetscInt_FMT " nz alloced %" PetscInt_FMT " bs %" PetscInt_FMT " mem %g\n", rank, mat->rmap->n, (PetscInt)info.nz_used, (PetscInt)info.nz_allocated,
                                                   mat->rmap->bs, info.memory));
      PetscCall(MatGetInfo(baij->A, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] on-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
      PetscCall(MatGetInfo(baij->B, MAT_LOCAL, &info));
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] off-diagonal part: nz %" PetscInt_FMT " \n", rank, (PetscInt)info.nz_used));
      PetscCall(PetscViewerFlush(viewer));
      PetscCall(PetscViewerASCIIPopSynchronized(viewer));
      PetscCall(PetscViewerASCIIPrintf(viewer, "Information on VecScatter used in matrix-vector product: \n"));
      PetscCall(VecScatterView(baij->Mvctx, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  block size is %" PetscInt_FMT "\n", bs));
      PetscFunctionReturn(PETSC_SUCCESS);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }

  if (isdraw) {
    PetscDraw draw;
    PetscBool isnull;
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawIsNull(draw, &isnull));
    if (isnull) PetscFunctionReturn(PETSC_SUCCESS);
  }

  {
    /* assemble the entire matrix onto the first processor */
    Mat           A;
    Mat_SeqSBAIJ *Aloc;
    Mat_SeqBAIJ  *Bloc;
    PetscInt      M = mat->rmap->N, N = mat->cmap->N, *ai, *aj, col, i, j, k, *rvals, mbs = baij->mbs;
    MatScalar    *a;
    const char   *matname;

    /* Should this be the same type as mat? */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &A));
    if (rank == 0) {
      PetscCall(MatSetSizes(A, M, N, M, N));
    } else {
      PetscCall(MatSetSizes(A, 0, 0, M, N));
    }
    PetscCall(MatSetType(A, MATMPISBAIJ));
    PetscCall(MatMPISBAIJSetPreallocation(A, mat->rmap->bs, 0, NULL, 0, NULL));
    PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_FALSE));

    /* copy over the A part */
    Aloc = (Mat_SeqSBAIJ *)baij->A->data;
    ai   = Aloc->i;
    aj   = Aloc->j;
    a    = Aloc->a;
    PetscCall(PetscMalloc1(bs, &rvals));

    for (i = 0; i < mbs; i++) {
      rvals[0] = bs * (baij->rstartbs + i);
      for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
      for (j = ai[i]; j < ai[i + 1]; j++) {
        col = (baij->cstartbs + aj[j]) * bs;
        for (k = 0; k < bs; k++) {
          PetscCall(MatSetValues_MPISBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
          col++;
          a += bs;
        }
      }
    }
    /* copy over the B part */
    Bloc = (Mat_SeqBAIJ *)baij->B->data;
    ai   = Bloc->i;
    aj   = Bloc->j;
    a    = Bloc->a;
    for (i = 0; i < mbs; i++) {
      rvals[0] = bs * (baij->rstartbs + i);
      for (j = 1; j < bs; j++) rvals[j] = rvals[j - 1] + 1;
      for (j = ai[i]; j < ai[i + 1]; j++) {
        col = baij->garray[aj[j]] * bs;
        for (k = 0; k < bs; k++) {
          PetscCall(MatSetValues_MPIBAIJ(A, bs, rvals, 1, &col, a, INSERT_VALUES));
          col++;
          a += bs;
        }
      }
    }
    PetscCall(PetscFree(rvals));
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    if (((PetscObject)mat)->name) PetscCall(PetscObjectGetName((PetscObject)mat, &matname));
    if (rank == 0) {
      if (((PetscObject)mat)->name) PetscCall(PetscObjectSetName((PetscObject)((Mat_MPISBAIJ *)A->data)->A, matname));
      PetscCall(MatView_SeqSBAIJ(((Mat_MPISBAIJ *)A->data)->A, sviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
    PetscCall(MatDestroy(&A));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Used for both MPIBAIJ and MPISBAIJ matrices */
#define MatView_MPISBAIJ_Binary MatView_MPIBAIJ_Binary

static PetscErrorCode MatView_MPISBAIJ(Mat mat, PetscViewer viewer)
{
  PetscBool iascii, isdraw, issocket, isbinary;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSOCKET, &issocket));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  if (iascii || isdraw || issocket) {
    PetscCall(MatView_MPISBAIJ_ASCIIorDraworSocket(mat, viewer));
  } else if (isbinary) PetscCall(MatView_MPISBAIJ_Binary(mat, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#if defined(PETSC_USE_COMPLEX)
static PetscErrorCode MatMult_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy)
{
  Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
  PetscInt           mbs = a->mbs, bs = A->rmap->bs;
  PetscScalar       *from;
  const PetscScalar *x;

  PetscFunctionBegin;
  /* diagonal part */
  PetscCall((*a->A->ops->mult)(a->A, xx, a->slvec1a));
  /* since a->slvec1b shares memory (dangerously) with a->slvec1, changes to a->slvec1 will affect it */
  PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
  PetscCall(VecZeroEntries(a->slvec1b));

  /* subdiagonal part */
  PetscCheck(a->B->ops->multhermitiantranspose, PetscObjectComm((PetscObject)a->B), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)a->B)->type_name);
  PetscCall((*a->B->ops->multhermitiantranspose)(a->B, xx, a->slvec0b));

  /* copy x into the vec slvec0 */
  PetscCall(VecGetArray(a->slvec0, &from));
  PetscCall(VecGetArrayRead(xx, &x));

  PetscCall(PetscArraycpy(from, x, bs * mbs));
  PetscCall(VecRestoreArray(a->slvec0, &from));
  PetscCall(VecRestoreArrayRead(xx, &x));

  PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
  /* superdiagonal part */
  PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

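/* Symmetric multiply y = A x: the stored upper triangle must act both as B
   (superdiagonal blocks, applied to ghost values of x) and as B^T
   (subdiagonal blocks, applied to local values of x).  slvec0 packs
   [x_local ; B^T x_local] and slvec1 aliases [slvec1a ; slvec1b]; a single
   scatter-add then accumulates the remote B^T x contributions into slvec1a
   (which already holds the diagonal-block product) while gathering the ghost
   values of x into slvec1b for the final superdiagonal multiply-add. */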
static PetscErrorCode MatMult_MPISBAIJ(Mat A, Vec xx, Vec yy)
{
  Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
  PetscInt           mbs = a->mbs, bs = A->rmap->bs;
  PetscScalar       *from;
  const PetscScalar *x;

  PetscFunctionBegin;
  /* diagonal part */
  PetscCall((*a->A->ops->mult)(a->A, xx, a->slvec1a));
  /* since a->slvec1b shares memory (dangerously) with a->slvec1, changes to a->slvec1 will affect it */
  PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
  PetscCall(VecZeroEntries(a->slvec1b));

  /* subdiagonal part */
  PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->slvec0b));

  /* copy x into the vec slvec0 */
  PetscCall(VecGetArray(a->slvec0, &from));
  PetscCall(VecGetArrayRead(xx, &x));

  PetscCall(PetscArraycpy(from, x, bs * mbs));
  PetscCall(VecRestoreArray(a->slvec0, &from));
  PetscCall(VecRestoreArrayRead(xx, &x));

  PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
  /* superdiagonal part */
  PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#if PetscDefined(USE_COMPLEX)
static PetscErrorCode MatMultAdd_MPISBAIJ_Hermitian(Mat A, Vec xx, Vec yy, Vec zz)
{
  Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
  PetscInt           mbs = a->mbs, bs = A->rmap->bs;
  PetscScalar       *from;
  const PetscScalar *x;

  PetscFunctionBegin;
  /* diagonal part */
  PetscCall((*a->A->ops->multadd)(a->A, xx, yy, a->slvec1a));
  PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
  PetscCall(VecZeroEntries(a->slvec1b));

  /* subdiagonal part */
  PetscCheck(a->B->ops->multhermitiantranspose, PetscObjectComm((PetscObject)a->B), PETSC_ERR_SUP, "Not for type %s", ((PetscObject)a->B)->type_name);
  PetscCall((*a->B->ops->multhermitiantranspose)(a->B, xx, a->slvec0b));

  /* copy x into the vec slvec0 */
  PetscCall(VecGetArray(a->slvec0, &from));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(PetscArraycpy(from, x, bs * mbs));
  PetscCall(VecRestoreArray(a->slvec0, &from));

  PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));

  /* superdiagonal part */
  PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, zz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

static PetscErrorCode MatMultAdd_MPISBAIJ(Mat A, Vec xx, Vec yy, Vec zz)
{
  Mat_MPISBAIJ      *a   = (Mat_MPISBAIJ *)A->data;
  PetscInt           mbs = a->mbs, bs = A->rmap->bs;
  PetscScalar       *from;
  const PetscScalar *x;

  PetscFunctionBegin;
  /* diagonal part */
  PetscCall((*a->A->ops->multadd)(a->A, xx, yy, a->slvec1a));
  PetscCall(PetscObjectStateIncrease((PetscObject)a->slvec1b));
  PetscCall(VecZeroEntries(a->slvec1b));

  /* subdiagonal part */
  PetscCall((*a->B->ops->multtranspose)(a->B, xx, a->slvec0b));

  /* copy x into the vec slvec0 */
  PetscCall(VecGetArray(a->slvec0, &from));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(PetscArraycpy(from, x, bs * mbs));
  PetscCall(VecRestoreArray(a->slvec0, &from));

  PetscCall(VecScatterBegin(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecScatterEnd(a->sMvctx, a->slvec0, a->slvec1, ADD_VALUES, SCATTER_FORWARD));

  /* superdiagonal part */
  PetscCall((*a->B->ops->multadd)(a->B, a->slvec1b, a->slvec1a, zz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  This only works correctly for square matrices where the subblock A->A is the
   diagonal block
*/
static PetscErrorCode MatGetDiagonal_MPISBAIJ(Mat A, Vec v)
{
  Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

  PetscFunctionBegin;
  /* PetscCheck(a->rmap->N == a->cmap->N,PETSC_COMM_SELF,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); */
  PetscCall(MatGetDiagonal(a->A, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatScale_MPISBAIJ(Mat A, PetscScalar aa)
{
  Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(MatScale(a->A, aa));
  PetscCall(MatScale(a->B, aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetRow_MPISBAIJ(Mat matin, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  Mat_MPISBAIJ *mat = (Mat_MPISBAIJ *)matin->data;
  PetscScalar  *vworkA, *vworkB, **pvA, **pvB, *v_p;
  PetscInt      bs = matin->rmap->bs, bs2 = mat->bs2, i, *cworkA, *cworkB, **pcA, **pcB;
  PetscInt      nztot, nzA, nzB, lrow, brstart = matin->rmap->rstart, brend = matin->rmap->rend;
  PetscInt     *cmap, *idx_p, cstart = mat->rstartbs;

  PetscFunctionBegin;
  PetscCheck(!mat->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqSBAIJ *Aa  = (Mat_SeqSBAIJ *)mat->A->data;
    Mat_SeqBAIJ  *Ba  = (Mat_SeqBAIJ *)mat->B->data;
    PetscInt      max = 1, mbs = mat->mbs, tmp;
    for (i = 0; i < mbs; i++) {
      tmp = Aa->i[i + 1] - Aa->i[i] + Ba->i[i + 1] - Ba->i[i]; /* row length */
      if (max < tmp) max = tmp;
    }
    PetscCall(PetscMalloc2(max * bs2, &mat->rowvalues, max * bs2, &mat->rowindices));
  }

  PetscCheck(row >= brstart && row < brend, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only local rows");
  lrow = row - brstart; /* local row index */

  pvA = &vworkA;
  pcA = &cworkA;
  pvB = &vworkB;
  pcB = &cworkB;
  if (!v) {
    pvA = NULL;
    pvB = NULL;
  }
  if (!idx) {
    pcA = NULL;
    if (!v) pcB = NULL;
  }
  PetscCall((*mat->A->ops->getrow)(mat->A, lrow, &nzA, pcA, pvA));
  PetscCall((*mat->B->ops->getrow)(mat->B, lrow, &nzB, pcB, pvB));
  nztot = nzA + nzB;

  cmap = mat->garray;
  if (v || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
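      /* imark counts the B columns that fall to the left of the diagonal block;
         the merged order is those B columns, then all A columns, then the
         remaining B columns */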
1229       PetscInt imark = -1;
1230       if (v) {
1231         *v = v_p = mat->rowvalues;
1232         for (i = 0; i < nzB; i++) {
1233           if (cmap[cworkB[i] / bs] < cstart) v_p[i] = vworkB[i];
1234           else break;
1235         }
1236         imark = i;
1237         for (i = 0; i < nzA; i++) v_p[imark + i] = vworkA[i];
1238         for (i = imark; i < nzB; i++) v_p[nzA + i] = vworkB[i];
1239       }
1240       if (idx) {
1241         *idx = idx_p = mat->rowindices;
1242         if (imark > -1) {
1243           for (i = 0; i < imark; i++) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1244         } else {
1245           for (i = 0; i < nzB; i++) {
1246             if (cmap[cworkB[i] / bs] < cstart) idx_p[i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1247             else break;
1248           }
1249           imark = i;
1250         }
1251         for (i = 0; i < nzA; i++) idx_p[imark + i] = cstart * bs + cworkA[i];
1252         for (i = imark; i < nzB; i++) idx_p[nzA + i] = cmap[cworkB[i] / bs] * bs + cworkB[i] % bs;
1253       }
1254     } else {
1255       if (idx) *idx = NULL;
1256       if (v) *v = NULL;
1257     }
1258   }
1259   *nz = nztot;
1260   PetscCall((*mat->A->ops->restorerow)(mat->A, lrow, &nzA, pcA, pvA));
1261   PetscCall((*mat->B->ops->restorerow)(mat->B, lrow, &nzB, pcB, pvB));
1262   PetscFunctionReturn(PETSC_SUCCESS);
1263 }
1264 
1265 static PetscErrorCode MatRestoreRow_MPISBAIJ(Mat mat, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
1266 {
1267   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
1268 
1269   PetscFunctionBegin;
1270   PetscCheck(baij->getrowactive, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "MatGetRow() must be called first");
1271   baij->getrowactive = PETSC_FALSE;
1272   PetscFunctionReturn(PETSC_SUCCESS);
1273 }
1274 
1275 static PetscErrorCode MatGetRowUpperTriangular_MPISBAIJ(Mat A)
1276 {
1277   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1278   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1279 
1280   PetscFunctionBegin;
1281   aA->getrow_utriangular = PETSC_TRUE;
1282   PetscFunctionReturn(PETSC_SUCCESS);
1283 }
1284 static PetscErrorCode MatRestoreRowUpperTriangular_MPISBAIJ(Mat A)
1285 {
1286   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1287   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1288 
1289   PetscFunctionBegin;
1290   aA->getrow_utriangular = PETSC_FALSE;
1291   PetscFunctionReturn(PETSC_SUCCESS);
1292 }
1293 
1294 static PetscErrorCode MatConjugate_MPISBAIJ(Mat mat)
1295 {
1296   PetscFunctionBegin;
1297   if (PetscDefined(USE_COMPLEX)) {
1298     Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)mat->data;
1299 
1300     PetscCall(MatConjugate(a->A));
1301     PetscCall(MatConjugate(a->B));
1302   }
1303   PetscFunctionReturn(PETSC_SUCCESS);
1304 }
1305 
1306 static PetscErrorCode MatRealPart_MPISBAIJ(Mat A)
1307 {
1308   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1309 
1310   PetscFunctionBegin;
1311   PetscCall(MatRealPart(a->A));
1312   PetscCall(MatRealPart(a->B));
1313   PetscFunctionReturn(PETSC_SUCCESS);
1314 }
1315 
1316 static PetscErrorCode MatImaginaryPart_MPISBAIJ(Mat A)
1317 {
1318   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1319 
1320   PetscFunctionBegin;
1321   PetscCall(MatImaginaryPart(a->A));
1322   PetscCall(MatImaginaryPart(a->B));
1323   PetscFunctionReturn(PETSC_SUCCESS);
1324 }
1325 
1326 /* Check if isrow is a subset of iscol_local; called by MatCreateSubMatrix_MPISBAIJ()
1327    Input: isrow       - distributed (parallel)
1328           iscol_local - locally owned (sequential)
1329 */
1330 static PetscErrorCode ISEqual_private(IS isrow, IS iscol_local, PetscBool *flg)
1331 {
1332   PetscInt        sz1, sz2, *a1, *a2, i, j, k, nmatch;
1333   const PetscInt *ptr1, *ptr2;
1334 
1335   PetscFunctionBegin;
1336   *flg = PETSC_FALSE;
1337   PetscCall(ISGetLocalSize(isrow, &sz1));
1338   PetscCall(ISGetLocalSize(iscol_local, &sz2));
1339   if (sz1 > sz2) PetscFunctionReturn(PETSC_SUCCESS);
1340 
1341   PetscCall(ISGetIndices(isrow, &ptr1));
1342   PetscCall(ISGetIndices(iscol_local, &ptr2));
1343 
1344   PetscCall(PetscMalloc1(sz1, &a1));
1345   PetscCall(PetscMalloc1(sz2, &a2));
1346   PetscCall(PetscArraycpy(a1, ptr1, sz1));
1347   PetscCall(PetscArraycpy(a2, ptr2, sz2));
1348   PetscCall(PetscSortInt(sz1, a1));
1349   PetscCall(PetscSortInt(sz2, a2));
1350 
1351   nmatch = 0;
1352   k      = 0;
1353   for (i = 0; i < sz1; i++) {
1354     for (j = k; j < sz2; j++) {
1355       if (a1[i] == a2[j]) {
1356         k = j;
1357         nmatch++;
1358         break;
1359       }
1360     }
1361   }
1362   PetscCall(ISRestoreIndices(isrow, &ptr1));
1363   PetscCall(ISRestoreIndices(iscol_local, &ptr2));
1364   PetscCall(PetscFree(a1));
1365   PetscCall(PetscFree(a2));
1366   if (nmatch < sz1) {
1367     *flg = PETSC_FALSE;
1368   } else {
1369     *flg = PETSC_TRUE;
1370   }
1371   PetscFunctionReturn(PETSC_SUCCESS);
1372 }
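
/* A minimal sketch of the subset semantics implemented above (ISEqual_private() is
   file-static, so the call is illustrative; the index values are made up):

     IS        isrow, iscol;
     PetscInt  r[] = {2, 5}, c[] = {1, 2, 5, 7};
     PetscBool flg;
     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, r, PETSC_COPY_VALUES, &isrow));
     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 4, c, PETSC_COPY_VALUES, &iscol));
     PetscCall(ISEqual_private(isrow, iscol, &flg)); // PETSC_TRUE: every index of isrow occurs in iscol
     PetscCall(ISDestroy(&isrow));
     PetscCall(ISDestroy(&iscol));
*/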
1373 
1374 static PetscErrorCode MatCreateSubMatrix_MPISBAIJ(Mat mat, IS isrow, IS iscol, MatReuse call, Mat *newmat)
1375 {
1376   Mat       C[2];
1377   IS        iscol_local, isrow_local;
1378   PetscInt  csize, csize_local, rsize;
1379   PetscBool isequal, issorted, isidentity = PETSC_FALSE;
1380 
1381   PetscFunctionBegin;
1382   PetscCall(ISGetLocalSize(iscol, &csize));
1383   PetscCall(ISGetLocalSize(isrow, &rsize));
1384   if (call == MAT_REUSE_MATRIX) {
1385     PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather", (PetscObject *)&iscol_local));
1386     PetscCheck(iscol_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
1387   } else {
1388     PetscCall(ISAllGather(iscol, &iscol_local));
1389     PetscCall(ISSorted(iscol_local, &issorted));
1390     PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, iscol must be sorted");
1391   }
1392   PetscCall(ISEqual_private(isrow, iscol_local, &isequal));
1393   if (!isequal) {
1394     PetscCall(ISGetLocalSize(iscol_local, &csize_local));
1395     isidentity = (PetscBool)(mat->cmap->N == csize_local);
1396     if (!isidentity) {
1397       if (call == MAT_REUSE_MATRIX) {
1398         PetscCall(PetscObjectQuery((PetscObject)*newmat, "ISAllGather_other", (PetscObject *)&isrow_local));
1399         PetscCheck(isrow_local, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Submatrix passed in was not used before, cannot reuse");
1400       } else {
1401         PetscCall(ISAllGather(isrow, &isrow_local));
1402         PetscCall(ISSorted(isrow_local, &issorted));
1403         PetscCheck(issorted, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "For symmetric format, isrow must be sorted");
1404       }
1405     }
1406   }
1407   /* now call MatCreateSubMatrix_MPIBAIJ() */
1408   PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, isrow, iscol_local, csize, isequal || isidentity ? call : MAT_INITIAL_MATRIX, isequal || isidentity ? newmat : C, (PetscBool)(isequal || isidentity)));
1409   if (!isequal && !isidentity) {
1410     if (call == MAT_INITIAL_MATRIX) {
1411       IS       intersect;
1412       PetscInt ni;
1413 
1414       PetscCall(ISIntersect(isrow_local, iscol_local, &intersect));
1415       PetscCall(ISGetLocalSize(intersect, &ni));
1416       PetscCall(ISDestroy(&intersect));
1417       PetscCheck(ni == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot create such a submatrix: for symmetric format, when requesting an off-diagonal submatrix, isrow and iscol should have an empty intersection (number of common indices is %" PetscInt_FMT ")", ni);
1418     }
1419     PetscCall(MatCreateSubMatrix_MPIBAIJ_Private(mat, iscol, isrow_local, rsize, MAT_INITIAL_MATRIX, C + 1, PETSC_FALSE));
1420     PetscCall(MatTranspose(C[1], MAT_INPLACE_MATRIX, C + 1));
1421     PetscCall(MatAXPY(C[0], 1.0, C[1], DIFFERENT_NONZERO_PATTERN));
1422     if (call == MAT_REUSE_MATRIX) PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
1423     else if (mat->rmap->bs == 1) PetscCall(MatConvert(C[0], MATAIJ, MAT_INITIAL_MATRIX, newmat));
1424     else PetscCall(MatCopy(C[0], *newmat, SAME_NONZERO_PATTERN));
1425     PetscCall(MatDestroy(C));
1426     PetscCall(MatDestroy(C + 1));
1427   }
1428   if (call == MAT_INITIAL_MATRIX) {
1429     if (!isequal && !isidentity) {
1430       PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather_other", (PetscObject)isrow_local));
1431       PetscCall(ISDestroy(&isrow_local));
1432     }
1433     PetscCall(PetscObjectCompose((PetscObject)*newmat, "ISAllGather", (PetscObject)iscol_local));
1434     PetscCall(ISDestroy(&iscol_local));
1435   }
1436   PetscFunctionReturn(PETSC_SUCCESS);
1437 }
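
/* Usage sketch for the symmetric-format restrictions enforced above: MatCreateSubMatrix()
   succeeds when iscol is sorted and either isrow is a subset of iscol, iscol spans all
   columns, or the two sets are disjoint. The common case is a principal submatrix (is is
   assumed created, sorted, and identical for rows and columns):

     Mat sub;
     PetscCall(MatCreateSubMatrix(A, is, is, MAT_INITIAL_MATRIX, &sub));
*/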
1438 
1439 static PetscErrorCode MatZeroEntries_MPISBAIJ(Mat A)
1440 {
1441   Mat_MPISBAIJ *l = (Mat_MPISBAIJ *)A->data;
1442 
1443   PetscFunctionBegin;
1444   PetscCall(MatZeroEntries(l->A));
1445   PetscCall(MatZeroEntries(l->B));
1446   PetscFunctionReturn(PETSC_SUCCESS);
1447 }
1448 
1449 static PetscErrorCode MatGetInfo_MPISBAIJ(Mat matin, MatInfoType flag, MatInfo *info)
1450 {
1451   Mat_MPISBAIJ  *a = (Mat_MPISBAIJ *)matin->data;
1452   Mat            A = a->A, B = a->B;
1453   PetscLogDouble isend[5], irecv[5];
1454 
1455   PetscFunctionBegin;
1456   info->block_size = (PetscReal)matin->rmap->bs;
1457 
1458   PetscCall(MatGetInfo(A, MAT_LOCAL, info));
1459 
1460   isend[0] = info->nz_used;
1461   isend[1] = info->nz_allocated;
1462   isend[2] = info->nz_unneeded;
1463   isend[3] = info->memory;
1464   isend[4] = info->mallocs;
1465 
1466   PetscCall(MatGetInfo(B, MAT_LOCAL, info));
1467 
1468   isend[0] += info->nz_used;
1469   isend[1] += info->nz_allocated;
1470   isend[2] += info->nz_unneeded;
1471   isend[3] += info->memory;
1472   isend[4] += info->mallocs;
1473   if (flag == MAT_LOCAL) {
1474     info->nz_used      = isend[0];
1475     info->nz_allocated = isend[1];
1476     info->nz_unneeded  = isend[2];
1477     info->memory       = isend[3];
1478     info->mallocs      = isend[4];
1479   } else if (flag == MAT_GLOBAL_MAX) {
1480     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)matin)));
1481 
1482     info->nz_used      = irecv[0];
1483     info->nz_allocated = irecv[1];
1484     info->nz_unneeded  = irecv[2];
1485     info->memory       = irecv[3];
1486     info->mallocs      = irecv[4];
1487   } else if (flag == MAT_GLOBAL_SUM) {
1488     PetscCallMPI(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)matin)));
1489 
1490     info->nz_used      = irecv[0];
1491     info->nz_allocated = irecv[1];
1492     info->nz_unneeded  = irecv[2];
1493     info->memory       = irecv[3];
1494     info->mallocs      = irecv[4];
1495   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown MatInfoType argument %d", (int)flag);
1496   info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
1497   info->fill_ratio_needed = 0;
1498   info->factor_mallocs    = 0;
1499   PetscFunctionReturn(PETSC_SUCCESS);
1500 }
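
/* Usage sketch: querying the statistics assembled above, e.g. to judge how effective the
   preallocation was (A is assumed assembled; the MatInfo fields are PetscLogDouble):

     MatInfo info;
     PetscCall(MatGetInfo(A, MAT_GLOBAL_SUM, &info));
     PetscCall(PetscPrintf(PETSC_COMM_WORLD, "nz used %g allocated %g unneeded %g mallocs %g\n",
                           info.nz_used, info.nz_allocated, info.nz_unneeded, info.mallocs));
*/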
1501 
1502 static PetscErrorCode MatSetOption_MPISBAIJ(Mat A, MatOption op, PetscBool flg)
1503 {
1504   Mat_MPISBAIJ *a  = (Mat_MPISBAIJ *)A->data;
1505   Mat_SeqSBAIJ *aA = (Mat_SeqSBAIJ *)a->A->data;
1506 
1507   PetscFunctionBegin;
1508   switch (op) {
1509   case MAT_NEW_NONZERO_LOCATIONS:
1510   case MAT_NEW_NONZERO_ALLOCATION_ERR:
1511   case MAT_UNUSED_NONZERO_LOCATION_ERR:
1512   case MAT_KEEP_NONZERO_PATTERN:
1513   case MAT_NEW_NONZERO_LOCATION_ERR:
1514     MatCheckPreallocated(A, 1);
1515     PetscCall(MatSetOption(a->A, op, flg));
1516     PetscCall(MatSetOption(a->B, op, flg));
1517     break;
1518   case MAT_ROW_ORIENTED:
1519     MatCheckPreallocated(A, 1);
1520     a->roworiented = flg;
1521 
1522     PetscCall(MatSetOption(a->A, op, flg));
1523     PetscCall(MatSetOption(a->B, op, flg));
1524     break;
1525   case MAT_IGNORE_OFF_PROC_ENTRIES:
1526     a->donotstash = flg;
1527     break;
1528   case MAT_USE_HASH_TABLE:
1529     a->ht_flag = flg;
1530     break;
1531   case MAT_HERMITIAN:
1532     if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
1533 #if defined(PETSC_USE_COMPLEX)
1534     if (flg) { /* need different mat-vec ops */
1535       A->ops->mult             = MatMult_MPISBAIJ_Hermitian;
1536       A->ops->multadd          = MatMultAdd_MPISBAIJ_Hermitian;
1537       A->ops->multtranspose    = NULL;
1538       A->ops->multtransposeadd = NULL;
1539       A->symmetric             = PETSC_BOOL3_FALSE;
1540     }
1541 #endif
1542     break;
1543   case MAT_SPD:
1544   case MAT_SYMMETRIC:
1545     if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
1546 #if defined(PETSC_USE_COMPLEX)
1547     if (flg) { /* restore to use default mat-vec ops */
1548       A->ops->mult             = MatMult_MPISBAIJ;
1549       A->ops->multadd          = MatMultAdd_MPISBAIJ;
1550       A->ops->multtranspose    = MatMult_MPISBAIJ;
1551       A->ops->multtransposeadd = MatMultAdd_MPISBAIJ;
1552     }
1553 #endif
1554     break;
1555   case MAT_STRUCTURALLY_SYMMETRIC:
1556     if (a->A && A->rmap->n == A->cmap->n) PetscCall(MatSetOption(a->A, op, flg));
1557     break;
1558   case MAT_IGNORE_LOWER_TRIANGULAR:
1559   case MAT_ERROR_LOWER_TRIANGULAR:
1560     aA->ignore_ltriangular = flg;
1561     break;
1562   case MAT_GETROW_UPPERTRIANGULAR:
1563     aA->getrow_utriangular = flg;
1564     break;
1565   default:
1566     break;
1567   }
1568   PetscFunctionReturn(PETSC_SUCCESS);
1569 }
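
/* Usage sketch: the MAT_HERMITIAN and MAT_SYMMETRIC cases above swap the mat-vec kernels
   when PETSc is configured with complex scalars (A is assumed to be of type MATMPISBAIJ):

     PetscCall(MatSetOption(A, MAT_HERMITIAN, PETSC_TRUE)); // installs MatMult_MPISBAIJ_Hermitian
     PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE)); // restores the symmetric kernels
*/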
1570 
1571 static PetscErrorCode MatTranspose_MPISBAIJ(Mat A, MatReuse reuse, Mat *B)
1572 {
1573   PetscFunctionBegin;
1574   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
1575   if (reuse == MAT_INITIAL_MATRIX) {
1576     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, B));
1577   } else if (reuse == MAT_REUSE_MATRIX) {
1578     PetscCall(MatCopy(A, *B, SAME_NONZERO_PATTERN));
1579   }
1580   PetscFunctionReturn(PETSC_SUCCESS);
1581 }
1582 
1583 static PetscErrorCode MatDiagonalScale_MPISBAIJ(Mat mat, Vec ll, Vec rr)
1584 {
1585   Mat_MPISBAIJ *baij = (Mat_MPISBAIJ *)mat->data;
1586   Mat           a = baij->A, b = baij->B;
1587   PetscInt      nv, m, n;
1588   PetscBool     flg;
1589 
1590   PetscFunctionBegin;
1591   if (ll != rr) {
1592     PetscCall(VecEqual(ll, rr, &flg));
1593     PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "For symmetric format, left and right scaling vectors must be the same");
1594   }
1595   if (!ll) PetscFunctionReturn(PETSC_SUCCESS);
1596 
1597   PetscCall(MatGetLocalSize(mat, &m, &n));
1598   PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "For symmetric format, local sizes %" PetscInt_FMT " and %" PetscInt_FMT " must be the same", m, n);
1599 
1600   PetscCall(VecGetLocalSize(rr, &nv));
1601   PetscCheck(nv == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Left and right vectors have non-conforming local size");
1602 
1603   PetscCall(VecScatterBegin(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
1604 
1605   /* left diagonalscale the off-diagonal part */
1606   PetscUseTypeMethod(b, diagonalscale, ll, NULL);
1607 
1608   /* scale the diagonal part */
1609   PetscUseTypeMethod(a, diagonalscale, ll, rr);
1610 
1611   /* right diagonalscale the off-diagonal part */
1612   PetscCall(VecScatterEnd(baij->Mvctx, rr, baij->lvec, INSERT_VALUES, SCATTER_FORWARD));
1613   PetscUseTypeMethod(b, diagonalscale, NULL, baij->lvec);
1614   PetscFunctionReturn(PETSC_SUCCESS);
1615 }
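
/* Usage sketch: symmetric diagonal scaling A <- D*A*D; the same vector must be passed on
   both sides, as checked above (the scale value 0.5 is illustrative):

     Vec d;
     PetscCall(MatCreateVecs(A, &d, NULL));
     PetscCall(VecSet(d, 0.5));
     PetscCall(MatDiagonalScale(A, d, d)); // passing ll != rr errors for this format
     PetscCall(VecDestroy(&d));
*/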
1616 
1617 static PetscErrorCode MatSetUnfactored_MPISBAIJ(Mat A)
1618 {
1619   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1620 
1621   PetscFunctionBegin;
1622   PetscCall(MatSetUnfactored(a->A));
1623   PetscFunctionReturn(PETSC_SUCCESS);
1624 }
1625 
1626 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat, MatDuplicateOption, Mat *);
1627 
1628 static PetscErrorCode MatEqual_MPISBAIJ(Mat A, Mat B, PetscBool *flag)
1629 {
1630   Mat_MPISBAIJ *matB = (Mat_MPISBAIJ *)B->data, *matA = (Mat_MPISBAIJ *)A->data;
1631   Mat           a, b, c, d;
1632   PetscBool     flg;
1633 
1634   PetscFunctionBegin;
1635   a = matA->A;
1636   b = matA->B;
1637   c = matB->A;
1638   d = matB->B;
1639 
1640   PetscCall(MatEqual(a, c, &flg));
1641   if (flg) PetscCall(MatEqual(b, d, &flg));
1642   PetscCallMPI(MPIU_Allreduce(&flg, flag, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
1643   PetscFunctionReturn(PETSC_SUCCESS);
1644 }
1645 
1646 static PetscErrorCode MatCopy_MPISBAIJ(Mat A, Mat B, MatStructure str)
1647 {
1648   PetscBool isbaij;
1649 
1650   PetscFunctionBegin;
1651   PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &isbaij, MATSEQSBAIJ, MATMPISBAIJ, ""));
1652   PetscCheck(isbaij, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)B)->type_name);
1653   /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
1654   if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
1655     PetscCall(MatGetRowUpperTriangular(A));
1656     PetscCall(MatCopy_Basic(A, B, str));
1657     PetscCall(MatRestoreRowUpperTriangular(A));
1658   } else {
1659     Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1660     Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
1661 
1662     PetscCall(MatCopy(a->A, b->A, str));
1663     PetscCall(MatCopy(a->B, b->B, str));
1664   }
1665   PetscCall(PetscObjectStateIncrease((PetscObject)B));
1666   PetscFunctionReturn(PETSC_SUCCESS);
1667 }
1668 
1669 static PetscErrorCode MatAXPY_MPISBAIJ(Mat Y, PetscScalar a, Mat X, MatStructure str)
1670 {
1671   Mat_MPISBAIJ *xx = (Mat_MPISBAIJ *)X->data, *yy = (Mat_MPISBAIJ *)Y->data;
1672   PetscBLASInt  bnz, one                          = 1;
1673   Mat_SeqSBAIJ *xa, *ya;
1674   Mat_SeqBAIJ  *xb, *yb;
1675 
1676   PetscFunctionBegin;
1677   if (str == SAME_NONZERO_PATTERN) {
1678     PetscScalar alpha = a;
1679     xa                = (Mat_SeqSBAIJ *)xx->A->data;
1680     ya                = (Mat_SeqSBAIJ *)yy->A->data;
1681     PetscCall(PetscBLASIntCast(xa->nz, &bnz));
1682     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xa->a, &one, ya->a, &one));
1683     xb = (Mat_SeqBAIJ *)xx->B->data;
1684     yb = (Mat_SeqBAIJ *)yy->B->data;
1685     PetscCall(PetscBLASIntCast(xb->nz, &bnz));
1686     PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xb->a, &one, yb->a, &one));
1687     PetscCall(PetscObjectStateIncrease((PetscObject)Y));
1688   } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X are a subset of Y's */
1689     PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
1690     PetscCall(MatAXPY_Basic(Y, a, X, str));
1691     PetscCall(MatSetOption(X, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
1692   } else {
1693     Mat       B;
1694     PetscInt *nnz_d, *nnz_o, bs = Y->rmap->bs;
1695     PetscCheck(bs == X->rmap->bs, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Matrices must have same block size");
1696     PetscCall(MatGetRowUpperTriangular(X));
1697     PetscCall(MatGetRowUpperTriangular(Y));
1698     PetscCall(PetscMalloc1(yy->A->rmap->N, &nnz_d));
1699     PetscCall(PetscMalloc1(yy->B->rmap->N, &nnz_o));
1700     PetscCall(MatCreate(PetscObjectComm((PetscObject)Y), &B));
1701     PetscCall(PetscObjectSetName((PetscObject)B, ((PetscObject)Y)->name));
1702     PetscCall(MatSetSizes(B, Y->rmap->n, Y->cmap->n, Y->rmap->N, Y->cmap->N));
1703     PetscCall(MatSetBlockSizesFromMats(B, Y, Y));
1704     PetscCall(MatSetType(B, MATMPISBAIJ));
1705     PetscCall(MatAXPYGetPreallocation_SeqSBAIJ(yy->A, xx->A, nnz_d));
1706     PetscCall(MatAXPYGetPreallocation_MPIBAIJ(yy->B, yy->garray, xx->B, xx->garray, nnz_o));
1707     PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, nnz_d, 0, nnz_o));
1708     PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
1709     PetscCall(MatHeaderMerge(Y, &B));
1710     PetscCall(PetscFree(nnz_d));
1711     PetscCall(PetscFree(nnz_o));
1712     PetscCall(MatRestoreRowUpperTriangular(X));
1713     PetscCall(MatRestoreRowUpperTriangular(Y));
1714   }
1715   PetscFunctionReturn(PETSC_SUCCESS);
1716 }
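
/* Usage sketch: the three pattern cases handled above, selected by the caller's knowledge
   of the nonzero structures of X and Y (the scalar 2.0 is illustrative):

     PetscCall(MatAXPY(Y, 2.0, X, SAME_NONZERO_PATTERN));      // two BLAS axpy calls
     PetscCall(MatAXPY(Y, 2.0, X, SUBSET_NONZERO_PATTERN));    // row-wise MatAXPY_Basic() path
     PetscCall(MatAXPY(Y, 2.0, X, DIFFERENT_NONZERO_PATTERN)); // repreallocates and rebuilds Y
*/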
1717 
1718 static PetscErrorCode MatCreateSubMatrices_MPISBAIJ(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *B[])
1719 {
1720   PetscInt  i;
1721   PetscBool flg;
1722 
1723   PetscFunctionBegin;
1724   PetscCall(MatCreateSubMatrices_MPIBAIJ(A, n, irow, icol, scall, B)); /* B[] are sbaij matrices */
1725   for (i = 0; i < n; i++) {
1726     PetscCall(ISEqual(irow[i], icol[i], &flg));
1727     if (!flg) PetscCall(MatSeqSBAIJZeroOps_Private(*B[i]));
1728   }
1729   PetscFunctionReturn(PETSC_SUCCESS);
1730 }
1731 
1732 static PetscErrorCode MatShift_MPISBAIJ(Mat Y, PetscScalar a)
1733 {
1734   Mat_MPISBAIJ *maij = (Mat_MPISBAIJ *)Y->data;
1735   Mat_SeqSBAIJ *aij  = (Mat_SeqSBAIJ *)maij->A->data;
1736 
1737   PetscFunctionBegin;
1738   if (!Y->preallocated) {
1739     PetscCall(MatMPISBAIJSetPreallocation(Y, Y->rmap->bs, 1, NULL, 0, NULL));
1740   } else if (!aij->nz) {
1741     PetscInt nonew = aij->nonew;
1742     PetscCall(MatSeqSBAIJSetPreallocation(maij->A, Y->rmap->bs, 1, NULL));
1743     aij->nonew = nonew;
1744   }
1745   PetscCall(MatShift_Basic(Y, a));
1746   PetscFunctionReturn(PETSC_SUCCESS);
1747 }
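
/* Usage sketch: Y <- Y + a*I; the guards above lazily preallocate one diagonal block per
   block row when Y was never preallocated or holds no nonzeros yet:

     PetscCall(MatShift(Y, 1.0));
*/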
1748 
1749 static PetscErrorCode MatMissingDiagonal_MPISBAIJ(Mat A, PetscBool *missing, PetscInt *d)
1750 {
1751   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1752 
1753   PetscFunctionBegin;
1754   PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only works for square matrices");
1755   PetscCall(MatMissingDiagonal(a->A, missing, d));
1756   if (d) {
1757     PetscInt rstart;
1758     PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
1759     *d += rstart / A->rmap->bs;
1760   }
1761   PetscFunctionReturn(PETSC_SUCCESS);
1762 }
1763 
1764 static PetscErrorCode MatGetDiagonalBlock_MPISBAIJ(Mat A, Mat *a)
1765 {
1766   PetscFunctionBegin;
1767   *a = ((Mat_MPISBAIJ *)A->data)->A;
1768   PetscFunctionReturn(PETSC_SUCCESS);
1769 }
1770 
1771 static PetscErrorCode MatEliminateZeros_MPISBAIJ(Mat A, PetscBool keep)
1772 {
1773   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
1774 
1775   PetscFunctionBegin;
1776   PetscCall(MatEliminateZeros_SeqSBAIJ(a->A, keep));       // possibly keep zero diagonal coefficients
1777   PetscCall(MatEliminateZeros_SeqBAIJ(a->B, PETSC_FALSE)); // never keep zero diagonal coefficients
1778   PetscFunctionReturn(PETSC_SUCCESS);
1779 }
1780 
1781 static PetscErrorCode MatLoad_MPISBAIJ(Mat, PetscViewer);
1782 static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat, Vec, PetscInt[]);
1783 static PetscErrorCode MatSOR_MPISBAIJ(Mat, Vec, PetscReal, MatSORType, PetscReal, PetscInt, PetscInt, Vec);
1784 
1785 static struct _MatOps MatOps_Values = {MatSetValues_MPISBAIJ,
1786                                        MatGetRow_MPISBAIJ,
1787                                        MatRestoreRow_MPISBAIJ,
1788                                        MatMult_MPISBAIJ,
1789                                        /*  4*/ MatMultAdd_MPISBAIJ,
1790                                        MatMult_MPISBAIJ, /* transpose versions are the same as non-transpose */
1791                                        MatMultAdd_MPISBAIJ,
1792                                        NULL,
1793                                        NULL,
1794                                        NULL,
1795                                        /* 10*/ NULL,
1796                                        NULL,
1797                                        NULL,
1798                                        MatSOR_MPISBAIJ,
1799                                        MatTranspose_MPISBAIJ,
1800                                        /* 15*/ MatGetInfo_MPISBAIJ,
1801                                        MatEqual_MPISBAIJ,
1802                                        MatGetDiagonal_MPISBAIJ,
1803                                        MatDiagonalScale_MPISBAIJ,
1804                                        MatNorm_MPISBAIJ,
1805                                        /* 20*/ MatAssemblyBegin_MPISBAIJ,
1806                                        MatAssemblyEnd_MPISBAIJ,
1807                                        MatSetOption_MPISBAIJ,
1808                                        MatZeroEntries_MPISBAIJ,
1809                                        /* 24*/ NULL,
1810                                        NULL,
1811                                        NULL,
1812                                        NULL,
1813                                        NULL,
1814                                        /* 29*/ MatSetUp_MPI_Hash,
1815                                        NULL,
1816                                        NULL,
1817                                        MatGetDiagonalBlock_MPISBAIJ,
1818                                        NULL,
1819                                        /* 34*/ MatDuplicate_MPISBAIJ,
1820                                        NULL,
1821                                        NULL,
1822                                        NULL,
1823                                        NULL,
1824                                        /* 39*/ MatAXPY_MPISBAIJ,
1825                                        MatCreateSubMatrices_MPISBAIJ,
1826                                        MatIncreaseOverlap_MPISBAIJ,
1827                                        MatGetValues_MPISBAIJ,
1828                                        MatCopy_MPISBAIJ,
1829                                        /* 44*/ NULL,
1830                                        MatScale_MPISBAIJ,
1831                                        MatShift_MPISBAIJ,
1832                                        NULL,
1833                                        NULL,
1834                                        /* 49*/ NULL,
1835                                        NULL,
1836                                        NULL,
1837                                        NULL,
1838                                        NULL,
1839                                        /* 54*/ NULL,
1840                                        NULL,
1841                                        MatSetUnfactored_MPISBAIJ,
1842                                        NULL,
1843                                        MatSetValuesBlocked_MPISBAIJ,
1844                                        /* 59*/ MatCreateSubMatrix_MPISBAIJ,
1845                                        NULL,
1846                                        NULL,
1847                                        NULL,
1848                                        NULL,
1849                                        /* 64*/ NULL,
1850                                        NULL,
1851                                        NULL,
1852                                        NULL,
1853                                        MatGetRowMaxAbs_MPISBAIJ,
1854                                        /* 69*/ NULL,
1855                                        MatConvert_MPISBAIJ_Basic,
1856                                        NULL,
1857                                        NULL,
1858                                        NULL,
1859                                        NULL,
1860                                        NULL,
1861                                        NULL,
1862                                        NULL,
1863                                        MatLoad_MPISBAIJ,
1864                                        /* 79*/ NULL,
1865                                        NULL,
1866                                        NULL,
1867                                        NULL,
1868                                        NULL,
1869                                        /* 84*/ NULL,
1870                                        NULL,
1871                                        NULL,
1872                                        NULL,
1873                                        NULL,
1874                                        /* 89*/ NULL,
1875                                        NULL,
1876                                        NULL,
1877                                        NULL,
1878                                        MatConjugate_MPISBAIJ,
1879                                        /* 94*/ NULL,
1880                                        NULL,
1881                                        MatRealPart_MPISBAIJ,
1882                                        MatImaginaryPart_MPISBAIJ,
1883                                        MatGetRowUpperTriangular_MPISBAIJ,
1884                                        /* 99*/ MatRestoreRowUpperTriangular_MPISBAIJ,
1885                                        NULL,
1886                                        NULL,
1887                                        NULL,
1888                                        NULL,
1889                                        /*104*/ MatMissingDiagonal_MPISBAIJ,
1890                                        NULL,
1891                                        NULL,
1892                                        NULL,
1893                                        NULL,
1894                                        /*109*/ NULL,
1895                                        NULL,
1896                                        NULL,
1897                                        NULL,
1898                                        NULL,
1899                                        /*114*/ NULL,
1900                                        NULL,
1901                                        NULL,
1902                                        NULL,
1903                                        NULL,
1904                                        /*119*/ NULL,
1905                                        NULL,
1906                                        NULL,
1907                                        NULL,
1908                                        NULL,
1909                                        /*124*/ NULL,
1910                                        NULL,
1911                                        NULL,
1912                                        MatSetBlockSizes_Default,
1913                                        NULL,
1914                                        /*129*/ NULL,
1915                                        NULL,
1916                                        MatCreateMPIMatConcatenateSeqMat_MPISBAIJ,
1917                                        NULL,
1918                                        NULL,
1919                                        /*134*/ NULL,
1920                                        NULL,
1921                                        NULL,
1922                                        MatEliminateZeros_MPISBAIJ,
1923                                        NULL,
1924                                        /*139*/ NULL,
1925                                        NULL,
1926                                        NULL,
1927                                        MatCopyHashToXAIJ_MPI_Hash,
1928                                        NULL};
1929 
1930 static PetscErrorCode MatMPISBAIJSetPreallocation_MPISBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz)
1931 {
1932   Mat_MPISBAIJ *b = (Mat_MPISBAIJ *)B->data;
1933   PetscInt      i, mbs, Mbs;
1934   PetscMPIInt   size;
1935 
1936   PetscFunctionBegin;
1937   if (B->hash_active) {
1938     B->ops[0]      = b->cops;
1939     B->hash_active = PETSC_FALSE;
1940   }
1941   if (!B->preallocated) PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), bs, &B->bstash));
1942   PetscCall(MatSetBlockSize(B, bs));
1943   PetscCall(PetscLayoutSetUp(B->rmap));
1944   PetscCall(PetscLayoutSetUp(B->cmap));
1945   PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
1946   PetscCheck(B->rmap->N <= B->cmap->N, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->N, B->cmap->N);
1947   PetscCheck(B->rmap->n <= B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "MPISBAIJ matrix cannot have more local rows %" PetscInt_FMT " than columns %" PetscInt_FMT, B->rmap->n, B->cmap->n);
1948 
1949   mbs = B->rmap->n / bs;
1950   Mbs = B->rmap->N / bs;
1951   PetscCheck(mbs * bs == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local rows %" PetscInt_FMT " must be divisible by blocksize %" PetscInt_FMT, B->rmap->n, bs);
1952 
1953   B->rmap->bs = bs;
1954   b->bs2      = bs * bs;
1955   b->mbs      = mbs;
1956   b->Mbs      = Mbs;
1957   b->nbs      = B->cmap->n / bs;
1958   b->Nbs      = B->cmap->N / bs;
1959 
1960   for (i = 0; i <= b->size; i++) b->rangebs[i] = B->rmap->range[i] / bs;
1961   b->rstartbs = B->rmap->rstart / bs;
1962   b->rendbs   = B->rmap->rend / bs;
1963 
1964   b->cstartbs = B->cmap->rstart / bs;
1965   b->cendbs   = B->cmap->rend / bs;
1966 
1967 #if defined(PETSC_USE_CTABLE)
1968   PetscCall(PetscHMapIDestroy(&b->colmap));
1969 #else
1970   PetscCall(PetscFree(b->colmap));
1971 #endif
1972   PetscCall(PetscFree(b->garray));
1973   PetscCall(VecDestroy(&b->lvec));
1974   PetscCall(VecScatterDestroy(&b->Mvctx));
1975   PetscCall(VecDestroy(&b->slvec0));
1976   PetscCall(VecDestroy(&b->slvec0b));
1977   PetscCall(VecDestroy(&b->slvec1));
1978   PetscCall(VecDestroy(&b->slvec1a));
1979   PetscCall(VecDestroy(&b->slvec1b));
1980   PetscCall(VecScatterDestroy(&b->sMvctx));
1981 
1982   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &size));
1983 
1984   MatSeqXAIJGetOptions_Private(b->B);
1985   PetscCall(MatDestroy(&b->B));
1986   PetscCall(MatCreate(PETSC_COMM_SELF, &b->B));
1987   PetscCall(MatSetSizes(b->B, B->rmap->n, size > 1 ? B->cmap->N : 0, B->rmap->n, size > 1 ? B->cmap->N : 0));
1988   PetscCall(MatSetType(b->B, MATSEQBAIJ));
1989   MatSeqXAIJRestoreOptions_Private(b->B);
1990 
1991   MatSeqXAIJGetOptions_Private(b->A);
1992   PetscCall(MatDestroy(&b->A));
1993   PetscCall(MatCreate(PETSC_COMM_SELF, &b->A));
1994   PetscCall(MatSetSizes(b->A, B->rmap->n, B->cmap->n, B->rmap->n, B->cmap->n));
1995   PetscCall(MatSetType(b->A, MATSEQSBAIJ));
1996   MatSeqXAIJRestoreOptions_Private(b->A);
1997 
1998   PetscCall(MatSeqSBAIJSetPreallocation(b->A, bs, d_nz, d_nnz));
1999   PetscCall(MatSeqBAIJSetPreallocation(b->B, bs, o_nz, o_nnz));
2000 
2001   B->preallocated  = PETSC_TRUE;
2002   B->was_assembled = PETSC_FALSE;
2003   B->assembled     = PETSC_FALSE;
2004   PetscFunctionReturn(PETSC_SUCCESS);
2005 }
2006 
2007 static PetscErrorCode MatMPISBAIJSetPreallocationCSR_MPISBAIJ(Mat B, PetscInt bs, const PetscInt ii[], const PetscInt jj[], const PetscScalar V[])
2008 {
2009   PetscInt        m, rstart, cend;
2010   PetscInt        i, j, d, nz, bd, nz_max = 0, *d_nnz = NULL, *o_nnz = NULL;
2011   const PetscInt *JJ          = NULL;
2012   PetscScalar    *values      = NULL;
2013   PetscBool       roworiented = ((Mat_MPISBAIJ *)B->data)->roworiented;
2014   PetscBool       nooffprocentries;
2015 
2016   PetscFunctionBegin;
2017   PetscCheck(bs >= 1, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_OUTOFRANGE, "Invalid block size specified, must be positive but it is %" PetscInt_FMT, bs);
2018   PetscCall(PetscLayoutSetBlockSize(B->rmap, bs));
2019   PetscCall(PetscLayoutSetBlockSize(B->cmap, bs));
2020   PetscCall(PetscLayoutSetUp(B->rmap));
2021   PetscCall(PetscLayoutSetUp(B->cmap));
2022   PetscCall(PetscLayoutGetBlockSize(B->rmap, &bs));
2023   m      = B->rmap->n / bs;
2024   rstart = B->rmap->rstart / bs;
2025   cend   = B->cmap->rend / bs;
2026 
2027   PetscCheck(!ii[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ii[0] must be 0 but it is %" PetscInt_FMT, ii[0]);
2028   PetscCall(PetscMalloc2(m, &d_nnz, m, &o_nnz));
2029   for (i = 0; i < m; i++) {
2030     nz = ii[i + 1] - ii[i];
2031     PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz);
2032     /* count the ones on the diagonal and above, split into diagonal and off-diagonal portions. */
2033     JJ = jj + ii[i];
2034     bd = 0;
2035     for (j = 0; j < nz; j++) {
2036       if (*JJ >= i + rstart) break;
2037       JJ++;
2038       bd++;
2039     }
2040     d = 0;
2041     for (; j < nz; j++) {
2042       if (*JJ++ >= cend) break;
2043       d++;
2044     }
2045     d_nnz[i] = d;
2046     o_nnz[i] = nz - d - bd;
2047     nz       = nz - bd;
2048     nz_max   = PetscMax(nz_max, nz);
2049   }
2050   PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, d_nnz, 0, o_nnz));
2051   PetscCall(MatSetOption(B, MAT_IGNORE_LOWER_TRIANGULAR, PETSC_TRUE));
2052   PetscCall(PetscFree2(d_nnz, o_nnz));
2053 
2054   values = (PetscScalar *)V;
2055   if (!values) PetscCall(PetscCalloc1(bs * bs * nz_max, &values));
2056   for (i = 0; i < m; i++) {
2057     PetscInt        row   = i + rstart;
2058     PetscInt        ncols = ii[i + 1] - ii[i];
2059     const PetscInt *icols = jj + ii[i];
2060     if (bs == 1 || !roworiented) { /* block ordering matches the non-nested layout of MatSetValues so we can insert entire rows */
2061       const PetscScalar *svals = values + (V ? (bs * bs * ii[i]) : 0);
2062       PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, ncols, icols, svals, INSERT_VALUES));
2063     } else { /* block ordering does not match so we can only insert one block at a time. */
2064       PetscInt j;
2065       for (j = 0; j < ncols; j++) {
2066         const PetscScalar *svals = values + (V ? (bs * bs * (ii[i] + j)) : 0);
2067         PetscCall(MatSetValuesBlocked_MPISBAIJ(B, 1, &row, 1, &icols[j], svals, INSERT_VALUES));
2068       }
2069     }
2070   }
2071 
2072   if (!V) PetscCall(PetscFree(values));
2073   nooffprocentries    = B->nooffprocentries;
2074   B->nooffprocentries = PETSC_TRUE;
2075   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2076   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2077   B->nooffprocentries = nooffprocentries;
2078 
2079   PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
2080   PetscFunctionReturn(PETSC_SUCCESS);
2081 }
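
/* A minimal sketch of driving the CSR path above through the public
   MatMPISBAIJSetPreallocationCSR() interface. The arrays are illustrative and written from
   the viewpoint of the rank owning global block rows 0 and 1; each rank passes the rows it
   owns, and only the diagonal and upper-triangular block columns need appear:

     Mat            B;
     const PetscInt bs   = 1;
     const PetscInt ii[] = {0, 2, 3}; // block row 0 has 2 block nonzeros, block row 1 has 1
     const PetscInt jj[] = {0, 1, 1}; // sorted global block-column indices, row by row
     PetscCall(MatCreate(PETSC_COMM_WORLD, &B));
     PetscCall(MatSetSizes(B, 2, 2, PETSC_DETERMINE, PETSC_DETERMINE));
     PetscCall(MatSetType(B, MATMPISBAIJ));
     PetscCall(MatMPISBAIJSetPreallocationCSR(B, bs, ii, jj, NULL)); // NULL values: zeros are inserted
*/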
2082 
2083 /*MC
2084    MATMPISBAIJ - MATMPISBAIJ = "mpisbaij" - A matrix type to be used for distributed symmetric sparse block matrices,
2085    based on block compressed sparse row format.  Only the upper triangular portion of the "diagonal" portion of
2086    the matrix is stored.
2087 
2088    For complex numbers, by default this matrix is symmetric, NOT Hermitian symmetric. To make it Hermitian symmetric you
2089    can call `MatSetOption`(`Mat`, `MAT_HERMITIAN`, `PETSC_TRUE`);
2090 
2091    Options Database Key:
2092 . -mat_type mpisbaij - sets the matrix type to "mpisbaij" during a call to `MatSetFromOptions()`
2093 
2094    Level: beginner
2095 
2096    Note:
2097      The number of rows in the matrix must be less than or equal to the number of columns. Similarly, the number of rows in the
2098      diagonal portion of the matrix on each process has to be less than or equal to the number of columns.
2099 
2100 .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MATBAIJ`, `MatCreateBAIJ()`, `MATSEQSBAIJ`, `MatType`
2101 M*/
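
/* A minimal creation sketch for this type (the global size 100, block size 2, and
   preallocation counts are illustrative only):

     Mat A;
     PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
     PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
     PetscCall(MatSetType(A, MATMPISBAIJ));
     PetscCall(MatMPISBAIJSetPreallocation(A, 2, 3, NULL, 1, NULL));
     // ... MatSetValuesBlocked() on the upper triangle and diagonal ...
     PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
     PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
*/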
2102 
2103 PETSC_EXTERN PetscErrorCode MatCreate_MPISBAIJ(Mat B)
2104 {
2105   Mat_MPISBAIJ *b;
2106   PetscBool     flg = PETSC_FALSE;
2107 
2108   PetscFunctionBegin;
2109   PetscCall(PetscNew(&b));
2110   B->data   = (void *)b;
2111   B->ops[0] = MatOps_Values;
2112 
2113   B->ops->destroy = MatDestroy_MPISBAIJ;
2114   B->ops->view    = MatView_MPISBAIJ;
2115   B->assembled    = PETSC_FALSE;
2116   B->insertmode   = NOT_SET_VALUES;
2117 
2118   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)B), &b->rank));
2119   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &b->size));
2120 
2121   /* build local table of row and column ownerships */
2122   PetscCall(PetscMalloc1(b->size + 2, &b->rangebs));
2123 
2124   /* build cache for off array entries formed */
2125   PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));
2126 
2127   b->donotstash  = PETSC_FALSE;
2128   b->colmap      = NULL;
2129   b->garray      = NULL;
2130   b->roworiented = PETSC_TRUE;
2131 
2132   /* stuff used in block assembly */
2133   b->barray = NULL;
2134 
2135   /* stuff used for matrix vector multiply */
2136   b->lvec    = NULL;
2137   b->Mvctx   = NULL;
2138   b->slvec0  = NULL;
2139   b->slvec0b = NULL;
2140   b->slvec1  = NULL;
2141   b->slvec1a = NULL;
2142   b->slvec1b = NULL;
2143   b->sMvctx  = NULL;
2144 
2145   /* stuff for MatGetRow() */
2146   b->rowindices   = NULL;
2147   b->rowvalues    = NULL;
2148   b->getrowactive = PETSC_FALSE;
2149 
2150   /* hash table stuff */
2151   b->ht           = NULL;
2152   b->hd           = NULL;
2153   b->ht_size      = 0;
2154   b->ht_flag      = PETSC_FALSE;
2155   b->ht_fact      = 0;
2156   b->ht_total_ct  = 0;
2157   b->ht_insert_ct = 0;
2158 
2159   /* stuff for MatCreateSubMatrices_MPIBAIJ_local() */
2160   b->ijonly = PETSC_FALSE;
2161 
2162   b->in_loc = NULL;
2163   b->v_loc  = NULL;
2164   b->n_loc  = 0;
2165 
2166   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatStoreValues_C", MatStoreValues_MPISBAIJ));
2167   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatRetrieveValues_C", MatRetrieveValues_MPISBAIJ));
2168   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocation_C", MatMPISBAIJSetPreallocation_MPISBAIJ));
2169   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatMPISBAIJSetPreallocationCSR_C", MatMPISBAIJSetPreallocationCSR_MPISBAIJ));
2170 #if defined(PETSC_HAVE_ELEMENTAL)
2171   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_elemental_C", MatConvert_MPISBAIJ_Elemental));
2172 #endif
2173 #if defined(PETSC_HAVE_SCALAPACK)
2174   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_scalapack_C", MatConvert_SBAIJ_ScaLAPACK));
2175 #endif
2176   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpiaij_C", MatConvert_MPISBAIJ_Basic));
2177   PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_mpisbaij_mpibaij_C", MatConvert_MPISBAIJ_Basic));
2178 
2179   B->symmetric                   = PETSC_BOOL3_TRUE;
2180   B->structurally_symmetric      = PETSC_BOOL3_TRUE;
2181   B->symmetry_eternal            = PETSC_TRUE;
2182   B->structural_symmetry_eternal = PETSC_TRUE;
2183 #if defined(PETSC_USE_COMPLEX)
2184   B->hermitian = PETSC_BOOL3_FALSE;
2185 #else
2186   B->hermitian = PETSC_BOOL3_TRUE;
2187 #endif
2188 
2189   PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATMPISBAIJ));
2190   PetscOptionsBegin(PetscObjectComm((PetscObject)B), NULL, "Options for loading MPISBAIJ matrix", "Mat");
2191   PetscCall(PetscOptionsBool("-mat_use_hash_table", "Use hash table to save memory in constructing matrix", "MatSetOption", flg, &flg, NULL));
2192   if (flg) {
2193     PetscReal fact = 1.39;
2194     PetscCall(MatSetOption(B, MAT_USE_HASH_TABLE, PETSC_TRUE));
2195     PetscCall(PetscOptionsReal("-mat_use_hash_table", "Use hash table factor", "MatMPIBAIJSetHashTableFactor", fact, &fact, NULL));
2196     if (fact <= 1.0) fact = 1.39;
2197     PetscCall(MatMPIBAIJSetHashTableFactor(B, fact));
2198     PetscCall(PetscInfo(B, "Hash table Factor used %5.2g\n", (double)fact));
2199   }
2200   PetscOptionsEnd();
2201   PetscFunctionReturn(PETSC_SUCCESS);
2202 }
2203 
2204 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2205 /*MC
2206    MATSBAIJ - MATSBAIJ = "sbaij" - A matrix type to be used for symmetric block sparse matrices.
2207 
2208    This matrix type is identical to `MATSEQSBAIJ` when constructed with a single process communicator,
2209    and `MATMPISBAIJ` otherwise.
2210 
2211    Options Database Key:
2212 . -mat_type sbaij - sets the matrix type to `MATSBAIJ` during a call to `MatSetFromOptions()`
2213 
2214   Level: beginner
2215 
2216 .seealso: [](ch_matrices), `Mat`, `MATSEQSBAIJ`, `MATMPISBAIJ`, `MatCreateSBAIJ()`
2217 M*/
2218 
2219 /*@
2220   MatMPISBAIJSetPreallocation - For good matrix assembly performance
2221   the user should preallocate the matrix storage by setting the parameters
2222   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
2223   performance can be increased by more than a factor of 50.
2224 
2225   Collective
2226 
2227   Input Parameters:
2228 + B     - the matrix
2229 . bs    - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row
2230           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()`
2231 . d_nz  - number of block nonzeros per block row in diagonal portion of local
2232           submatrix  (same for all local rows)
2233 . d_nnz - array containing the number of block nonzeros in the various block rows
2234           in the upper triangular and diagonal part of the diagonal portion of the local
2235           submatrix (possibly different for each block row) or `NULL`.  If you plan to factor the matrix you must leave room
2236           for the diagonal entry and set a value even if it is zero.
2237 . o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2238           submatrix (same for all local rows).
2239 - o_nnz - array containing the number of nonzeros in the various block rows of the
2240           off-diagonal portion of the local submatrix that is right of the diagonal
2241           (possibly different for each block row) or `NULL`.
2242 
2243   Options Database Keys:
2244 + -mat_no_unroll  - uses code that does not unroll the loops in the
2245                     block calculations (much slower)
2246 - -mat_block_size - size of the blocks to use
2247 
2248   Level: intermediate
2249 
2250   Notes:
2251 
2252   If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
2253   then it must be used on all processors that share the object for that argument.
2254 
2255   If the *_nnz parameter is given then the *_nz parameter is ignored
2256   If the *_nnz parameter is given then the *_nz parameter is ignored.
2257   Storage Information:
2258   For a square global matrix we define each processor's diagonal portion
2259   to be its local rows and the corresponding columns (a square submatrix);
2260   each processor's off-diagonal portion encompasses the remainder of the
2261   local matrix (a rectangular submatrix).
2262 
2263   The user can specify preallocated storage for the diagonal part of
2264   the local submatrix with either `d_nz` or `d_nnz` (not both).  Set
2265   `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2266   memory allocation.  Likewise, specify preallocated storage for the
2267   off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2268 
2269   You can call `MatGetInfo()` to get information on how effective the preallocation was;
2270   for example, the fields mallocs, nz_allocated, nz_used, and nz_unneeded.
2271   You can also run with the option `-info` and look for messages with the string
2272   malloc in them to see if additional memory allocation was needed.
2273 
2274   Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2275   the figure below we depict these three local rows and all columns (0-11).
2276 
2277 .vb
2278            0 1 2 3 4 5 6 7 8 9 10 11
2279           --------------------------
2280    row 3  |. . . d d d o o o o  o  o
2281    row 4  |. . . d d d o o o o  o  o
2282    row 5  |. . . d d d o o o o  o  o
2283           --------------------------
2284 .ve
2285 
2286   Thus, any entries in the d locations are stored in the d (diagonal)
2287   submatrix, and any entries in the o locations are stored in the
2288   o (off-diagonal) submatrix.  Note that the d matrix is stored in
2289   `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.
2290 
2291   Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2292   plus the diagonal part of the d matrix,
2293   and `o_nz` should indicate the number of block nonzeros per row in the o matrix.
2294 
2295   In general, for PDE problems in which most nonzeros are near the diagonal,
2296   one expects `d_nz` >> `o_nz`.
2297 
2298 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`, `PetscSplitOwnership()`
2299 @*/
2300 PetscErrorCode MatMPISBAIJSetPreallocation(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
2301 {
2302   PetscFunctionBegin;
2303   PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
2304   PetscValidType(B, 1);
2305   PetscValidLogicalCollectiveInt(B, bs, 2);
2306   PetscTryMethod(B, "MatMPISBAIJSetPreallocation_C", (Mat, PetscInt, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, bs, d_nz, d_nnz, o_nz, o_nnz));
2307   PetscFunctionReturn(PETSC_SUCCESS);
2308 }
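
/* A per-block-row preallocation sketch for the three-row layout pictured in the manual page
   above (bs = 1): only the diagonal and upper-triangular entries of the d region are stored,
   while every depicted o entry lies right of the diagonal and is stored:

     const PetscInt d_nnz[] = {3, 2, 1}; // row 3: cols 3,4,5; row 4: cols 4,5; row 5: col 5
     const PetscInt o_nnz[] = {6, 6, 6}; // cols 6..11 in each of the three rows
     PetscCall(MatMPISBAIJSetPreallocation(B, 1, 0, d_nnz, 0, o_nnz));
*/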
2309 
2310 // PetscClangLinter pragma disable: -fdoc-section-header-unknown
2311 /*@
2312   MatCreateSBAIJ - Creates a sparse parallel matrix in symmetric block AIJ format, `MATSBAIJ`,
2313   (block compressed row).  For good matrix assembly performance
2314   the user should preallocate the matrix storage by setting the parameters
2315   `d_nz` (or `d_nnz`) and `o_nz` (or `o_nnz`).
2316 
2317   Collective
2318 
2319   Input Parameters:
2320 + comm  - MPI communicator
2321 . bs    - size of block, the blocks are ALWAYS square. One can use `MatSetBlockSizes()` to set a different row and column blocksize but the row
2322           blocksize always defines the size of the blocks. The column blocksize sets the blocksize of the vectors obtained with `MatCreateVecs()`
2323 . m     - number of local rows (or `PETSC_DECIDE` to have calculated if `M` is given)
2324           This value should be the same as the local size used in creating the
2325           y vector for the matrix-vector product y = Ax.
2326 . n     - number of local columns (or `PETSC_DECIDE` to have calculated if `N` is given)
2327           This value should be the same as the local size used in creating the
2328           x vector for the matrix-vector product y = Ax.
2329 . M     - number of global rows (or `PETSC_DETERMINE` to have calculated if `m` is given)
2330 . N     - number of global columns (or `PETSC_DETERMINE` to have calculated if `n` is given)
2331 . d_nz  - number of block nonzeros per block row in diagonal portion of local
2332           submatrix (same for all local rows)
2333 . d_nnz - array containing the number of block nonzeros in the various block rows
2334           in the upper triangular portion of the diagonal portion of the local
2335           submatrix (possibly different for each block row) or `NULL`.
2336           If you plan to factor the matrix you must leave room for the diagonal entry and
2337           set its value even if it is zero.
2338 . o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
2339           submatrix (same for all local rows).
2340 - o_nnz - array containing the number of nonzeros in the various block rows of the
2341           off-diagonal portion of the local submatrix (possibly different for
2342           each block row) or `NULL`.
2343 
2344   Output Parameter:
2345 . A - the matrix
2346 
2347   Options Database Keys:
2348 + -mat_no_unroll  - uses code that does not unroll the loops in the
2349                     block calculations (much slower)
2350 . -mat_block_size - size of the blocks to use
2351 - -mat_mpi        - use the parallel matrix data structures even on one processor
2352                     (defaults to using SeqSBAIJ format on one processor)
2353 
2354   Level: intermediate
2355 
2356   Notes:
2357   It is recommended that one use `MatCreateFromOptions()` or the `MatCreate()`, `MatSetType()` and/or `MatSetFromOptions()`,
2358   MatXXXXSetPreallocation() paradigm instead of this routine directly.
2359   [MatXXXXSetPreallocation() is, for example, `MatSeqAIJSetPreallocation()`]
2360 
2361   The number of rows and columns must be divisible by blocksize.
2362   This matrix type does not support complex Hermitian operations.
2363 
2364   The user MUST specify either the local or global matrix dimensions
2365   (possibly both).
2366 
2367   If `PETSC_DECIDE` or `PETSC_DETERMINE` is used for a particular argument on one processor
2368   then it must be used on all processors that share the object for that argument.
2369 
2370   If `m` and `n` are not `PETSC_DECIDE`, then the values determine the `PetscLayout` of the matrix and the ranges returned by
2371   `MatGetOwnershipRange()`,  `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, and `MatGetOwnershipRangesColumn()`.
2372 
2373   If the *_nnz parameter is given then the *_nz parameter is ignored.
2374 
2375   Storage Information:
2376   For a square global matrix we define each processor's diagonal portion
2377   to be its local rows and the corresponding columns (a square submatrix);
2378   each processor's off-diagonal portion encompasses the remainder of the
2379   local matrix (a rectangular submatrix).
2380 
2381   The user can specify preallocated storage for the diagonal part of
2382   the local submatrix with either `d_nz` or `d_nnz` (not both). Set
2383   `d_nz` = `PETSC_DEFAULT` and `d_nnz` = `NULL` for PETSc to control dynamic
2384   memory allocation. Likewise, specify preallocated storage for the
2385   off-diagonal part of the local submatrix with `o_nz` or `o_nnz` (not both).
2386 
2387   Consider a processor that owns rows 3, 4 and 5 of a parallel matrix. In
2388   the figure below we depict these three local rows and all columns (0-11).
2389 
2390 .vb
2391            0 1 2 3 4 5 6 7 8 9 10 11
2392           --------------------------
2393    row 3  |. . . d d d o o o o  o  o
2394    row 4  |. . . d d d o o o o  o  o
2395    row 5  |. . . d d d o o o o  o  o
2396           --------------------------
2397 .ve
2398 
2399   Thus, any entries in the d locations are stored in the d (diagonal)
2400   submatrix, and any entries in the o locations are stored in the
2401   o (off-diagonal) submatrix. Note that the d matrix is stored in
2402   `MATSEQSBAIJ` format and the o submatrix in `MATSEQBAIJ` format.
2403 
2404   Now `d_nz` should indicate the number of block nonzeros per row in the upper triangular
2405   plus the diagonal part of the d matrix,
2406   and `o_nz` should indicate the number of block nonzeros per row in the o matrix.
2407   In general, for PDE problems in which most nonzeros are near the diagonal,
2408   one expects `d_nz` >> `o_nz`.
2409 
2410 .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatCreate()`, `MatCreateSeqSBAIJ()`, `MatSetValues()`, `MatCreateBAIJ()`,
2411           `MatGetOwnershipRange()`,  `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
2412 @*/
2413 PetscErrorCode MatCreateSBAIJ(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[], Mat *A)
2414 {
2415   PetscMPIInt size;
2416 
2417   PetscFunctionBegin;
2418   PetscCall(MatCreate(comm, A));
2419   PetscCall(MatSetSizes(*A, m, n, M, N));
2420   PetscCallMPI(MPI_Comm_size(comm, &size));
2421   if (size > 1) {
2422     PetscCall(MatSetType(*A, MATMPISBAIJ));
2423     PetscCall(MatMPISBAIJSetPreallocation(*A, bs, d_nz, d_nnz, o_nz, o_nnz));
2424   } else {
2425     PetscCall(MatSetType(*A, MATSEQSBAIJ));
2426     PetscCall(MatSeqSBAIJSetPreallocation(*A, bs, d_nz, d_nnz));
2427   }
2428   PetscFunctionReturn(PETSC_SUCCESS);
2429 }
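
/* The same creation in a single call (the global size 100 and preallocation counts are
   illustrative; 100 is divisible by the block size 2, as required):

     Mat A;
     PetscCall(MatCreateSBAIJ(PETSC_COMM_WORLD, 2, PETSC_DECIDE, PETSC_DECIDE, 100, 100, 5, NULL, 2, NULL, &A));
*/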
2430 
2431 static PetscErrorCode MatDuplicate_MPISBAIJ(Mat matin, MatDuplicateOption cpvalues, Mat *newmat)
2432 {
2433   Mat           mat;
2434   Mat_MPISBAIJ *a, *oldmat = (Mat_MPISBAIJ *)matin->data;
2435   PetscInt      len = 0, nt, bs = matin->rmap->bs, mbs = oldmat->mbs;
2436   PetscScalar  *array;
2437 
2438   PetscFunctionBegin;
2439   *newmat = NULL;
2440 
2441   PetscCall(MatCreate(PetscObjectComm((PetscObject)matin), &mat));
2442   PetscCall(MatSetSizes(mat, matin->rmap->n, matin->cmap->n, matin->rmap->N, matin->cmap->N));
2443   PetscCall(MatSetType(mat, ((PetscObject)matin)->type_name));
2444   PetscCall(PetscLayoutReference(matin->rmap, &mat->rmap));
2445   PetscCall(PetscLayoutReference(matin->cmap, &mat->cmap));
2446 
2447   if (matin->hash_active) {
2448     PetscCall(MatSetUp(mat));
2449   } else {
2450     mat->factortype   = matin->factortype;
2451     mat->preallocated = PETSC_TRUE;
2452     mat->assembled    = PETSC_TRUE;
2453     mat->insertmode   = NOT_SET_VALUES;
2454 
2455     a      = (Mat_MPISBAIJ *)mat->data;
2456     a->bs2 = oldmat->bs2;
2457     a->mbs = oldmat->mbs;
2458     a->nbs = oldmat->nbs;
2459     a->Mbs = oldmat->Mbs;
2460     a->Nbs = oldmat->Nbs;
2461 
2462     a->size         = oldmat->size;
2463     a->rank         = oldmat->rank;
2464     a->donotstash   = oldmat->donotstash;
2465     a->roworiented  = oldmat->roworiented;
2466     a->rowindices   = NULL;
2467     a->rowvalues    = NULL;
2468     a->getrowactive = PETSC_FALSE;
2469     a->barray       = NULL;
2470     a->rstartbs     = oldmat->rstartbs;
2471     a->rendbs       = oldmat->rendbs;
2472     a->cstartbs     = oldmat->cstartbs;
2473     a->cendbs       = oldmat->cendbs;
2474 
2475     /* hash table stuff */
2476     a->ht           = NULL;
2477     a->hd           = NULL;
2478     a->ht_size      = 0;
2479     a->ht_flag      = oldmat->ht_flag;
2480     a->ht_fact      = oldmat->ht_fact;
2481     a->ht_total_ct  = 0;
2482     a->ht_insert_ct = 0;
2483 
2484     PetscCall(PetscArraycpy(a->rangebs, oldmat->rangebs, a->size + 2));
2485     if (oldmat->colmap) {
2486 #if defined(PETSC_USE_CTABLE)
2487       PetscCall(PetscHMapIDuplicate(oldmat->colmap, &a->colmap));
2488 #else
2489       PetscCall(PetscMalloc1(a->Nbs, &a->colmap));
2490       PetscCall(PetscArraycpy(a->colmap, oldmat->colmap, a->Nbs));
2491 #endif
2492     } else a->colmap = NULL;
2493 
2494     if (oldmat->garray && (len = ((Mat_SeqBAIJ *)oldmat->B->data)->nbs)) {
2495       PetscCall(PetscMalloc1(len, &a->garray));
2496       PetscCall(PetscArraycpy(a->garray, oldmat->garray, len));
2497     } else a->garray = NULL;
2498 
2499     PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)matin), matin->rmap->bs, &mat->bstash));
2500     PetscCall(VecDuplicate(oldmat->lvec, &a->lvec));
2501     PetscCall(VecScatterCopy(oldmat->Mvctx, &a->Mvctx));
2502 
2503     PetscCall(VecDuplicate(oldmat->slvec0, &a->slvec0));
2504     PetscCall(VecDuplicate(oldmat->slvec1, &a->slvec1));
2505 
2506     PetscCall(VecGetLocalSize(a->slvec1, &nt));
2507     PetscCall(VecGetArray(a->slvec1, &array));
2508     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, bs * mbs, array, &a->slvec1a));
2509     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec1b));
2510     PetscCall(VecRestoreArray(a->slvec1, &array));
2511     PetscCall(VecGetArray(a->slvec0, &array));
2512     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nt - bs * mbs, array + bs * mbs, &a->slvec0b));
2513     PetscCall(VecRestoreArray(a->slvec0, &array));
2514 
2515     /* VecScatterCopy(oldmat->sMvctx, &a->sMvctx) is not written yet; instead take a reference (the lazy trick): */
2516     PetscCall(PetscObjectReference((PetscObject)oldmat->sMvctx));
2517     a->sMvctx = oldmat->sMvctx;
2518 
2519     PetscCall(MatDuplicate(oldmat->A, cpvalues, &a->A));
2520     PetscCall(MatDuplicate(oldmat->B, cpvalues, &a->B));
2521   }
2522   PetscCall(PetscFunctionListDuplicate(((PetscObject)matin)->qlist, &((PetscObject)mat)->qlist));
2523   *newmat = mat;
2524   PetscFunctionReturn(PETSC_SUCCESS);
2525 }
2526 
2527 /* Used for both MPIBAIJ and MPISBAIJ matrices */
2528 #define MatLoad_MPISBAIJ_Binary MatLoad_MPIBAIJ_Binary
2529 
2530 static PetscErrorCode MatLoad_MPISBAIJ(Mat mat, PetscViewer viewer)
2531 {
2532   PetscBool isbinary;
2533 
2534   PetscFunctionBegin;
2535   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
2536   PetscCheck(isbinary, PetscObjectComm((PetscObject)viewer), PETSC_ERR_SUP, "Viewer type %s not yet supported for reading %s matrices", ((PetscObject)viewer)->type_name, ((PetscObject)mat)->type_name);
2537   PetscCall(MatLoad_MPISBAIJ_Binary(mat, viewer));
2538   PetscFunctionReturn(PETSC_SUCCESS);
2539 }
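
/* Usage sketch: loading from a PETSc binary viewer, the only viewer type accepted above
   ("matrix.dat" is an illustrative file name):

     Mat         A;
     PetscViewer viewer;
     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &viewer));
     PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
     PetscCall(MatSetType(A, MATMPISBAIJ));
     PetscCall(MatLoad(A, viewer));
     PetscCall(PetscViewerDestroy(&viewer));
*/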
2540 
2541 static PetscErrorCode MatGetRowMaxAbs_MPISBAIJ(Mat A, Vec v, PetscInt idx[])
2542 {
2543   Mat_MPISBAIJ *a = (Mat_MPISBAIJ *)A->data;
2544   Mat_SeqBAIJ  *b = (Mat_SeqBAIJ *)a->B->data;
2545   PetscReal     atmp;
2546   PetscReal    *work, *svalues, *rvalues;
2547   PetscInt      i, bs, mbs, *bi, *bj, brow, j, ncols, krow, kcol, col, row, Mbs, bcol;
2548   PetscMPIInt   rank, size;
2549   PetscInt     *rowners_bs, count, source;
2550   PetscScalar  *va;
2551   MatScalar    *ba;
2552   MPI_Status    stat;
2553 
2554   PetscFunctionBegin;
2555   PetscCheck(!idx, PETSC_COMM_SELF, PETSC_ERR_SUP, "Returning the index array idx is not supported for MATMPISBAIJ; send email to petsc-maint@mcs.anl.gov");
2556   PetscCall(MatGetRowMaxAbs(a->A, v, NULL));
2557   PetscCall(VecGetArray(v, &va));
2558 
2559   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
2560   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)A), &rank));
2561 
2562   bs  = A->rmap->bs;
2563   mbs = a->mbs;
2564   Mbs = a->Mbs;
2565   ba  = b->a;
2566   bi  = b->i;
2567   bj  = b->j;
2568 
2569   /* find ownerships */
2570   rowners_bs = A->rmap->range;
2571 
2572   /* each proc creates an array to be distributed */
2573   PetscCall(PetscCalloc1(bs * Mbs, &work));
2574 
2575   /* row_max for B */
2576   if (rank != size - 1) {
2577     for (i = 0; i < mbs; i++) {
2578       ncols = bi[1] - bi[0];
2579       bi++;
2580       brow = bs * i;
2581       for (j = 0; j < ncols; j++) {
2582         bcol = bs * (*bj);
2583         for (kcol = 0; kcol < bs; kcol++) {
2584           col = bcol + kcol;           /* local col index */
2585           col += rowners_bs[rank + 1]; /* global col index */
2586           for (krow = 0; krow < bs; krow++) {
2587             atmp = PetscAbsScalar(*ba);
2588             ba++;
2589             row = brow + krow; /* local row index */
2590             if (PetscRealPart(va[row]) < atmp) va[row] = atmp;
2591             if (work[col] < atmp) work[col] = atmp;
2592           }
2593         }
2594         bj++;
2595       }
2596     }
2597 
2598     /* send values to its owners */
2599     for (PetscMPIInt dest = rank + 1; dest < size; dest++) {
2600       svalues = work + rowners_bs[dest];
2601       count   = rowners_bs[dest + 1] - rowners_bs[dest];
2602       PetscCallMPI(MPIU_Send(svalues, count, MPIU_REAL, dest, rank, PetscObjectComm((PetscObject)A)));
2603     }
2604   }
2605 
2606   /* receive values */
2607   if (rank) {
2608     rvalues = work;
2609     count   = rowners_bs[rank + 1] - rowners_bs[rank];
2610     for (source = 0; source < rank; source++) {
2611       PetscCallMPI(MPIU_Recv(rvalues, count, MPIU_REAL, MPI_ANY_SOURCE, MPI_ANY_TAG, PetscObjectComm((PetscObject)A), &stat));
2612       /* process values */
2613       for (i = 0; i < count; i++) {
2614         if (PetscRealPart(va[i]) < rvalues[i]) va[i] = rvalues[i];
2615       }
2616     }
2617   }
2618 
2619   PetscCall(VecRestoreArray(v, &va));
2620   PetscCall(PetscFree(work));
2621   PetscFunctionReturn(PETSC_SUCCESS);
2622 }
2623 
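/*
   MatSOR_MPISBAIJ - the parallel symmetric sweep packs slvec0 = [x | B^T*x], negates it, and
   scatter-adds it into slvec1 = [b | 0]; afterwards slvec1a holds b minus the transposed (lower)
   contributions from other ranks and slvec1b holds the negated ghost values of x needed by the
   local off-diagonal block B. Only bs = 1 is supported.
*/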
2624 static PetscErrorCode MatSOR_MPISBAIJ(Mat matin, Vec bb, PetscReal omega, MatSORType flag, PetscReal fshift, PetscInt its, PetscInt lits, Vec xx)
2625 {
2626   Mat_MPISBAIJ      *mat = (Mat_MPISBAIJ *)matin->data;
2627   PetscInt           mbs = mat->mbs, bs = matin->rmap->bs;
2628   PetscScalar       *x, *ptr, *from;
2629   Vec                bb1;
2630   const PetscScalar *b;
2631 
2632   PetscFunctionBegin;
2633   PetscCheck(its > 0 && lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " and local its %" PetscInt_FMT " both positive", its, lits);
2634   PetscCheck(bs <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "SSOR for block size > 1 is not yet implemented");
2635 
2636   if (flag == SOR_APPLY_UPPER) {
2637     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2638     PetscFunctionReturn(PETSC_SUCCESS);
2639   }
2640 
2641   if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP) {
2642     if (flag & SOR_ZERO_INITIAL_GUESS) {
2643       PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, lits, xx));
2644       its--;
2645     }
2646 
2647     PetscCall(VecDuplicate(bb, &bb1));
2648     while (its--) {
2649       /* lower triangular part: slvec0b = B^T*xx (negated below together with the rest of slvec0) */
2650       PetscCall((*mat->B->ops->multtranspose)(mat->B, xx, mat->slvec0b));
2651 
2652       /* copy xx into slvec0a */
2653       PetscCall(VecGetArray(mat->slvec0, &ptr));
2654       PetscCall(VecGetArray(xx, &x));
2655       PetscCall(PetscArraycpy(ptr, x, bs * mbs));
2656       PetscCall(VecRestoreArray(mat->slvec0, &ptr));
2657 
2658       PetscCall(VecScale(mat->slvec0, -1.0));
2659 
2660       /* copy bb into slvec1a */
2661       PetscCall(VecGetArray(mat->slvec1, &ptr));
2662       PetscCall(VecGetArrayRead(bb, &b));
2663       PetscCall(PetscArraycpy(ptr, b, bs * mbs));
2664       PetscCall(VecRestoreArray(mat->slvec1, &ptr));
2665 
2666       /* set slvec1b = 0 */
2667       PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
2668       PetscCall(VecZeroEntries(mat->slvec1b));
2669 
2670       PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2671       PetscCall(VecRestoreArray(xx, &x));
2672       PetscCall(VecRestoreArrayRead(bb, &b));
2673       PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2674 
2675       /* upper triangular part: bb1 = slvec1a + B*slvec1b, i.e. b minus both off-block contributions */
2676       PetscCall((*mat->B->ops->multadd)(mat->B, mat->slvec1b, mat->slvec1a, bb1));
2677 
2678       /* local diagonal sweep */
2679       PetscCall((*mat->A->ops->sor)(mat->A, bb1, omega, SOR_SYMMETRIC_SWEEP, fshift, lits, lits, xx));
2680     }
2681     PetscCall(VecDestroy(&bb1));
2682   } else if ((flag & SOR_LOCAL_FORWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2683     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2684   } else if ((flag & SOR_LOCAL_BACKWARD_SWEEP) && (its == 1) && (flag & SOR_ZERO_INITIAL_GUESS)) {
2685     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, flag, fshift, lits, 1, xx));
2686   } else if (flag & SOR_EISENSTAT) {
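    /* Eisenstat's trick: apply the SSOR preconditioner at roughly the cost of one multiply by the
       off-diagonal part, combining a backward local sweep, a diagonal scaling with
       scale = (omega-2)/omega, the off-process contributions, and a forward local sweep */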
2687     Vec                xx1;
2688     PetscBool          hasop;
2689     const PetscScalar *diag;
2690     PetscScalar       *sl, scale = (omega - 2.0) / omega;
2691     PetscInt           i, n;
2692 
2693     if (!mat->xx1) {
2694       PetscCall(VecDuplicate(bb, &mat->xx1));
2695       PetscCall(VecDuplicate(bb, &mat->bb1));
2696     }
2697     xx1 = mat->xx1;
2698     bb1 = mat->bb1;
2699 
2700     PetscCall((*mat->A->ops->sor)(mat->A, bb, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP), fshift, lits, 1, xx));
2701 
2702     if (!mat->diag) {
2703       /* note: the cached diagonal is not refreshed if the same matrix is assembled with new numerical values */
2704       PetscCall(MatCreateVecs(matin, &mat->diag, NULL));
2705       PetscCall(MatGetDiagonal(matin, mat->diag));
2706     }
2707     PetscCall(MatHasOperation(matin, MATOP_MULT_DIAGONAL_BLOCK, &hasop));
2708 
2709     if (hasop) {
2710       PetscCall(MatMultDiagonalBlock(matin, xx, bb1));
2711       PetscCall(VecAYPX(mat->slvec1a, scale, bb));
2712     } else {
2713       /*
2714           These two lines are replaced by code that may be a bit faster for a good compiler
2715       PetscCall(VecPointwiseMult(mat->slvec1a,mat->diag,xx));
2716       PetscCall(VecAYPX(mat->slvec1a,scale,bb));
2717       */
2718       PetscCall(VecGetArray(mat->slvec1a, &sl));
2719       PetscCall(VecGetArrayRead(mat->diag, &diag));
2720       PetscCall(VecGetArrayRead(bb, &b));
2721       PetscCall(VecGetArray(xx, &x));
2722       PetscCall(VecGetLocalSize(xx, &n));
2723       if (omega == 1.0) {
2724         for (i = 0; i < n; i++) sl[i] = b[i] - diag[i] * x[i];
2725         PetscCall(PetscLogFlops(2.0 * n));
2726       } else {
2727         for (i = 0; i < n; i++) sl[i] = b[i] + scale * diag[i] * x[i];
2728         PetscCall(PetscLogFlops(3.0 * n));
2729       }
2730       PetscCall(VecRestoreArray(mat->slvec1a, &sl));
2731       PetscCall(VecRestoreArrayRead(mat->diag, &diag));
2732       PetscCall(VecRestoreArrayRead(bb, &b));
2733       PetscCall(VecRestoreArray(xx, &x));
2734     }
2735 
2736     /* multiply off-diagonal portion of matrix */
2737     PetscCall(PetscObjectStateIncrease((PetscObject)mat->slvec1b));
2738     PetscCall(VecZeroEntries(mat->slvec1b));
2739     PetscCall((*mat->B->ops->multtranspose)(mat->B, xx, mat->slvec0b));
2740     PetscCall(VecGetArray(mat->slvec0, &from));
2741     PetscCall(VecGetArray(xx, &x));
2742     PetscCall(PetscArraycpy(from, x, bs * mbs));
2743     PetscCall(VecRestoreArray(mat->slvec0, &from));
2744     PetscCall(VecRestoreArray(xx, &x));
2745     PetscCall(VecScatterBegin(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2746     PetscCall(VecScatterEnd(mat->sMvctx, mat->slvec0, mat->slvec1, ADD_VALUES, SCATTER_FORWARD));
2747     PetscCall((*mat->B->ops->multadd)(mat->B, mat->slvec1b, mat->slvec1a, mat->slvec1a));
2748 
2749     /* local sweep */
2750     PetscCall((*mat->A->ops->sor)(mat->A, mat->slvec1a, omega, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP), fshift, lits, 1, xx1));
2751     PetscCall(VecAXPY(xx, 1.0, xx1));
2752   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "MatSORType is not supported for SBAIJ matrix format");
2753   PetscFunctionReturn(PETSC_SUCCESS);
2754 }
2755 
2756 /*@
2757   MatCreateMPISBAIJWithArrays - creates a `MATMPISBAIJ` matrix using arrays that contain the local rows in standard CSR format.
2758 
2759   Collective
2760 
2761   Input Parameters:
2762 + comm - MPI communicator
2763 . bs   - the block size; only a block size of 1 is supported
2764 . m    - number of local rows (cannot be `PETSC_DECIDE`)
2765 . n    - number of local columns; this should be the same as the local size used in creating the
2766          x vector for the matrix-vector product $ y = Ax $ (or `PETSC_DECIDE` to have it
2767          calculated if `N` is given). For square matrices `n` is almost always `m`.
2768 . M    - number of global rows (or `PETSC_DETERMINE` to have it calculated if `m` is given)
2769 . N    - number of global columns (or `PETSC_DETERMINE` to have it calculated if `n` is given)
2770 . i    - row indices; i[0] = 0 and i[row] = i[row-1] + the number of block elements in that block row of the matrix
2771 . j    - column indices
2772 - a    - matrix values
2773 
2774   Output Parameter:
2775 . mat - the matrix
2776 
2777   Level: intermediate
2778 
2779   Notes:
2780   The `i`, `j`, and `a` arrays ARE copied by this routine into the internal format used by PETSc;
2781   thus you CANNOT change the matrix entries by changing the values of `a` after you have
2782   called this routine. Use `MatCreateMPIAIJWithSplitArrays()` to avoid needing to copy the arrays.
2783 
2784   The `i` and `j` indices are 0-based, and the entries of `i` are offsets into the local `j` array.
2785 
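  Example usage:
  A minimal sketch for a 3x3 symmetric tridiagonal matrix with a block size of 1 on one MPI rank
  (the values are illustrative; only the upper triangle is passed):
.vb
  PetscInt    i[] = {0, 2, 4, 5};
  PetscInt    j[] = {0, 1, 1, 2, 2};
  PetscScalar a[] = {4.0, 1.0, 4.0, 1.0, 4.0};
  Mat         A;

  PetscCall(MatCreateMPISBAIJWithArrays(PETSC_COMM_WORLD, 1, 3, 3, PETSC_DETERMINE, PETSC_DETERMINE, i, j, a, &A));
.ve
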
2786 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIAIJSetPreallocation()`, `MatMPIAIJSetPreallocationCSR()`,
2787           `MATMPIAIJ`, `MatCreateAIJ()`, `MatCreateMPIAIJWithSplitArrays()`, `MatMPISBAIJSetPreallocationCSR()`
2788 @*/
2789 PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, const PetscInt i[], const PetscInt j[], const PetscScalar a[], Mat *mat)
2790 {
2791   PetscFunctionBegin;
2792   PetscCheck(!i[0], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i (row indices) must start with 0");
2793   PetscCheck(m >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "local number of rows (m) cannot be PETSC_DECIDE, or negative");
2794   PetscCall(MatCreate(comm, mat));
2795   PetscCall(MatSetSizes(*mat, m, n, M, N));
2796   PetscCall(MatSetType(*mat, MATMPISBAIJ));
2797   PetscCall(MatMPISBAIJSetPreallocationCSR(*mat, bs, i, j, a));
2798   PetscFunctionReturn(PETSC_SUCCESS);
2799 }
2800 
2801 /*@
2802   MatMPISBAIJSetPreallocationCSR - Creates a sparse parallel matrix in `MATMPISBAIJ` format using the given nonzero structure and (optional) numerical values
2803 
2804   Collective
2805 
2806   Input Parameters:
2807 + B  - the matrix
2808 . bs - the block size
2809 . i  - the indices into `j` for the start of each local row (indices start with zero)
2810 . j  - the column indices for each local row (indices start with zero); these must be sorted within each row
2811 - v  - optional values in the matrix, pass `NULL` if not provided
2812 
2813   Level: advanced
2814 
2815   Notes:
2816   The `i`, `j`, and `v` arrays ARE copied by this routine into the internal format used by PETSc;
2817   thus you CANNOT change the matrix entries by changing the values of `v` after you have
2818   called this routine.
2819 
2820   Though this routine has Preallocation() in its name, it also sets the exact nonzero locations of
2821   the matrix entries and usually the numerical values as well.
2822 
2823   Any entries passed in that are below the diagonal are ignored
2824 
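  Example usage:
  A minimal sketch of the typical calling sequence (here `comm`, `m`, `bs`, and the CSR arrays `i`,
  `j`, and `v` stand in for the caller's data):
.vb
  Mat B;

  PetscCall(MatCreate(comm, &B));
  PetscCall(MatSetSizes(B, m, m, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetType(B, MATMPISBAIJ));
  PetscCall(MatMPISBAIJSetPreallocationCSR(B, bs, i, j, v));
.ve
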
2825 .seealso: [](ch_matrices), `Mat`, `MATMPISBAIJ`, `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatMPIBAIJSetPreallocation()`, `MatCreateAIJ()`, `MATMPIAIJ`,
2826           `MatCreateMPISBAIJWithArrays()`
2827 @*/
2828 PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat B, PetscInt bs, const PetscInt i[], const PetscInt j[], const PetscScalar v[])
2829 {
2830   PetscFunctionBegin;
2831   PetscTryMethod(B, "MatMPISBAIJSetPreallocationCSR_C", (Mat, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[]), (B, bs, i, j, v));
2832   PetscFunctionReturn(PETSC_SUCCESS);
2833 }
2834 
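/*
   MatCreateMPIMatConcatenateSeqMat_MPISBAIJ - stacks the sequential SBAIJ matrix of each rank into
   one parallel SBAIJ matrix: a symbolic pass counts block nonzeros per block row for preallocation,
   then a numeric pass inserts each local row at its global offset with MatSetValues()
*/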
2835 PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPISBAIJ(MPI_Comm comm, Mat inmat, PetscInt n, MatReuse scall, Mat *outmat)
2836 {
2837   PetscInt     m, N, i, rstart, nnz, Ii, bs, cbs;
2838   PetscInt    *indx;
2839   PetscScalar *values;
2840 
2841   PetscFunctionBegin;
2842   PetscCall(MatGetSize(inmat, &m, &N));
2843   if (scall == MAT_INITIAL_MATRIX) { /* symbolic phase */
2844     Mat_SeqSBAIJ *a = (Mat_SeqSBAIJ *)inmat->data;
2845     PetscInt     *dnz, *onz, mbs, Nbs, nbs;
2846     PetscInt     *bindx, rmax = a->rmax, j;
2847     PetscMPIInt   rank, size;
2848 
2849     PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2850     mbs = m / bs;
2851     Nbs = N / cbs;
2852     if (n == PETSC_DECIDE) PetscCall(PetscSplitOwnershipBlock(comm, cbs, &n, &N));
2853     nbs = n / cbs;
2854 
2855     PetscCall(PetscMalloc1(rmax, &bindx));
2856     MatPreallocateBegin(comm, mbs, nbs, dnz, onz); /* inline function, output __end and __rstart are used below */
2857 
2858     PetscCallMPI(MPI_Comm_rank(comm, &rank));
2859     PetscCallMPI(MPI_Comm_size(comm, &size));
2860     if (rank == size - 1) {
2861       /* Check sum(nbs) = Nbs */
2862       PetscCheck(__end == Nbs, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Sum of local block columns %" PetscInt_FMT " != global block columns %" PetscInt_FMT, __end, Nbs);
2863     }
2864 
2865     rstart = __rstart; /* block rstart of *outmat; see inline function MatPreallocateBegin */
2866     PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
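    /* count block nonzeros per block row: fetch one point row per block row and translate its point
       column indices into block column indices before passing them to MatPreallocateSet() */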
2867     for (i = 0; i < mbs; i++) {
2868       PetscCall(MatGetRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL)); /* non-blocked nnz and indx */
2869       nnz = nnz / bs;
2870       for (j = 0; j < nnz; j++) bindx[j] = indx[j * bs] / bs;
2871       PetscCall(MatPreallocateSet(i + rstart, nnz, bindx, dnz, onz));
2872       PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i * bs, &nnz, &indx, NULL));
2873     }
2874     PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2875     PetscCall(PetscFree(bindx));
2876 
2877     PetscCall(MatCreate(comm, outmat));
2878     PetscCall(MatSetSizes(*outmat, m, n, PETSC_DETERMINE, PETSC_DETERMINE));
2879     PetscCall(MatSetBlockSizes(*outmat, bs, cbs));
2880     PetscCall(MatSetType(*outmat, MATSBAIJ));
2881     PetscCall(MatSeqSBAIJSetPreallocation(*outmat, bs, 0, dnz));
2882     PetscCall(MatMPISBAIJSetPreallocation(*outmat, bs, 0, dnz, 0, onz));
2883     MatPreallocateEnd(dnz, onz);
2884   }
2885 
2886   /* numeric phase */
2887   PetscCall(MatGetBlockSizes(inmat, &bs, &cbs));
2888   PetscCall(MatGetOwnershipRange(*outmat, &rstart, NULL));
2889 
2890   PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_TRUE));
2891   for (i = 0; i < m; i++) {
2892     PetscCall(MatGetRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2893     Ii = i + rstart;
2894     PetscCall(MatSetValues(*outmat, 1, &Ii, nnz, indx, values, INSERT_VALUES));
2895     PetscCall(MatRestoreRow_SeqSBAIJ(inmat, i, &nnz, &indx, &values));
2896   }
2897   PetscCall(MatSetOption(inmat, MAT_GETROW_UPPERTRIANGULAR, PETSC_FALSE));
2898   PetscCall(MatAssemblyBegin(*outmat, MAT_FINAL_ASSEMBLY));
2899   PetscCall(MatAssemblyEnd(*outmat, MAT_FINAL_ASSEMBLY));
2900   PetscFunctionReturn(PETSC_SUCCESS);
2901 }
2902