/*
   Defines the basic matrix operations for the AIJ (compressed row)
   matrix storage format.
*/

#include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
#include <petscblaslapack.h>
#include <petscbt.h>
#include <petsc/private/kernels/blocktranspose.h>

/* defines MatSetValues_Seq_Hash(), MatAssemblyEnd_Seq_Hash(), MatSetUp_Seq_Hash() */
#define TYPE AIJ
#define TYPE_BS
#include "../src/mat/impls/aij/seq/seqhashmatsetvalues.h"
#include "../src/mat/impls/aij/seq/seqhashmat.h"
#undef TYPE
#undef TYPE_BS

/* Query the options database for -mat_seqaij_type and, if set, convert A to that SeqAIJ subtype */
PetscErrorCode MatSeqAIJSetTypeFromOptions(Mat A)
{
  PetscBool flg;
  char      type[256];

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)A);
  PetscCall(PetscOptionsFList("-mat_seqaij_type", "Matrix SeqAIJ type", "MatSeqAIJSetType", MatSeqAIJList, "seqaij", type, 256, &flg));
  if (flg) PetscCall(MatSeqAIJSetType(A, type));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Compute a per-column reduction (norm, sum, or mean) over all stored entries of A.

  Input:
    A          - the SeqAIJ matrix
    type       - NORM_1, NORM_2, NORM_INFINITY, or one of the REDUCTION_{SUM,MEAN}_{REAL,IMAGINARY}PART values
  Output:
    reductions - caller-provided array of length n (number of columns); zeroed here, then filled

  NOTE(review): reads aij->a directly rather than via MatSeqAIJGetArrayRead() — assumes host values
  are current; confirm callers guarantee this for GPU subtypes.
  NOTE(review): for the MEAN reductions the division by m yields NaN/undefined results when m == 0 —
  presumably callers never pass an empty matrix; verify.
*/
PetscErrorCode MatGetColumnReductions_SeqAIJ(Mat A, PetscInt type, PetscReal *reductions)
{
  PetscInt    i, m, n;
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &m, &n));
  PetscCall(PetscArrayzero(reductions, n));
  /* aij->i[m] is the total number of stored nonzeros; each entry contributes to its column's slot */
  if (type == NORM_2) {
    /* accumulate |a|^2 per column; note |a*a| == |a|^2 also holds for complex scalars */
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscAbsScalar(aij->a[i] * aij->a[i]);
  } else if (type == NORM_1) {
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscAbsScalar(aij->a[i]);
  } else if (type == NORM_INFINITY) {
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] = PetscMax(PetscAbsScalar(aij->a[i]), reductions[aij->j[i]]);
  } else if (type == REDUCTION_SUM_REALPART || type == REDUCTION_MEAN_REALPART) {
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscRealPart(aij->a[i]);
  } else if (type == REDUCTION_SUM_IMAGINARYPART || type == REDUCTION_MEAN_IMAGINARYPART) {
    for (i = 0; i < aij->i[m]; i++) reductions[aij->j[i]] += PetscImaginaryPart(aij->a[i]);
  } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unknown reduction type");

  /* post-process: square root for the 2-norm, divide by the row count for the means */
  if (type == NORM_2) {
    for (i = 0; i < n; i++) reductions[i] = PetscSqrtReal(reductions[i]);
  } else if (type == REDUCTION_MEAN_REALPART || type == REDUCTION_MEAN_IMAGINARYPART) {
    for (i = 0; i < n; i++) reductions[i] /= m;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Build an index set of all rows that contain at least one entry outside the bs x bs
  block-diagonal of A. Relies on each row's column indices being sorted: only the first
  and last entries of the row need checking against the block bounds.
*/
PetscErrorCode MatFindOffBlockDiagonalEntries_SeqAIJ(Mat A, IS *is)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        i, m = A->rmap->n, cnt = 0, bs = A->rmap->bs;
  const PetscInt *jj = a->j, *ii = a->i;
  PetscInt       *rows;

  PetscFunctionBegin;
  /* first pass: count qualifying rows so the output array can be sized exactly */
  for (i = 0; i < m; i++) {
    if ((ii[i] != ii[i + 1]) && ((jj[ii[i]] < bs * (i / bs)) || (jj[ii[i + 1] - 1] > bs * ((i + bs) / bs) - 1))) cnt++;
  }
  PetscCall(PetscMalloc1(cnt, &rows));
  cnt = 0;
  /* second pass: record the row indices */
  for (i = 0; i < m; i++) {
    if ((ii[i] != ii[i + 1]) && ((jj[ii[i]] < bs * (i / bs)) || (jj[ii[i + 1] - 1] > bs * ((i + bs) / bs) - 1))) {
      rows[cnt] = i;
      cnt++;
    }
  }
  /* the IS takes ownership of rows[] */
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, cnt, rows, PETSC_OWN_POINTER, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  List the rows whose diagonal entry is missing from the nonzero structure or stored as
  exactly 0.0. On return *zrows is a freshly allocated array (caller takes ownership)
  of length *nrows.
*/
PetscErrorCode MatFindZeroDiagonals_SeqAIJ_Private(Mat A, PetscInt *nrows, PetscInt **zrows)
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  const MatScalar *aa;
  PetscInt         i, m = A->rmap->n, cnt = 0;
  const PetscInt  *ii = a->i, *jj = a->j, *diag;
  PetscInt        *rows;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  /* ensure a->diag[] is up to date; diag[i] indexes the diagonal slot of row i (or past it if absent) */
  PetscCall(MatMarkDiagonal_SeqAIJ(A));
  diag = a->diag;
  /* a row counts if diag[i] points beyond the row, at a non-diagonal column, or at a stored zero */
  for (i = 0; i < m; i++) {
    if ((diag[i] >= ii[i + 1]) || (jj[diag[i]] != i) || (aa[diag[i]] == 0.0)) cnt++;
  }
  PetscCall(PetscMalloc1(cnt, &rows));
  cnt = 0;
  for (i = 0; i < m; i++) {
    if ((diag[i] >= ii[i + 1]) || (jj[diag[i]] != i) || (aa[diag[i]] == 0.0)) rows[cnt++] = i;
  }
  *nrows = cnt;
  *zrows = rows;
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Wrap MatFindZeroDiagonals_SeqAIJ_Private() in an IS; the IS takes ownership of the row array */
PetscErrorCode MatFindZeroDiagonals_SeqAIJ(Mat A, IS *zrows)
{
  PetscInt nrows, *rows;

  PetscFunctionBegin;
  *zrows = NULL;
  PetscCall(MatFindZeroDiagonals_SeqAIJ_Private(A, &nrows, &rows));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), nrows, rows, PETSC_OWN_POINTER, zrows));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Build an IS of the rows that contain at least one stored nonzero value.
  If every row qualifies as "zero" (cnt == m counts zero rows), *keptrows stays NULL.
*/
PetscErrorCode MatFindNonzeroRows_SeqAIJ(Mat A, IS *keptrows)
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  const MatScalar *aa;
  PetscInt         m = A->rmap->n, cnt = 0;
  const PetscInt  *ii;
  PetscInt         n, i, j, *rows;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  *keptrows = NULL;
  ii        = a->i;
  /* first pass: cnt = number of rows that are empty or all-zero */
  for (i = 0; i < m; i++) {
    n = ii[i + 1] - ii[i];
    if (!n) {
      cnt++;
      goto ok1; /* structurally empty row */
    }
    for (j = ii[i]; j < ii[i + 1]; j++) {
      if (aa[j] != 0.0) goto ok1; /* row has a nonzero; not counted */
    }
    cnt++; /* all stored values are zero */
  ok1:;
  }
  if (!cnt) {
    /* no zero rows: returning NULL signals "keep everything" */
    PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* second pass: collect the m - cnt rows that do have a nonzero */
  PetscCall(PetscMalloc1(A->rmap->n - cnt, &rows));
  cnt = 0;
  for (i = 0; i < m; i++) {
    n = ii[i + 1] - ii[i];
    if (!n) continue;
    for (j = ii[i]; j < ii[i + 1]; j++) {
      if (aa[j] != 0.0) {
        rows[cnt++] = i;
        break;
      }
    }
  }
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, cnt, rows, PETSC_OWN_POINTER, keptrows));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Set (INSERT_VALUES) or add (other InsertMode) the vector D onto the diagonal of Y.
  Fast path: if Y is assembled and every diagonal entry already exists in the nonzero
  structure, write through a->diag[] directly; otherwise fall back to the generic
  MatDiagonalSet_Default(), first invalidating any cached inverse diagonal.
*/
PetscErrorCode MatDiagonalSet_SeqAIJ(Mat Y, Vec D, InsertMode is)
{
  Mat_SeqAIJ        *aij = (Mat_SeqAIJ *)Y->data;
  PetscInt           i, m = Y->rmap->n;
  const PetscInt    *diag;
  MatScalar         *aa;
  const PetscScalar *v;
  PetscBool          missing;

  PetscFunctionBegin;
  if (Y->assembled) {
    PetscCall(MatMissingDiagonal_SeqAIJ(Y, &missing, NULL));
    if (!missing) {
      diag = aij->diag;
      PetscCall(VecGetArrayRead(D, &v));
      PetscCall(MatSeqAIJGetArray(Y, &aa));
      if (is == INSERT_VALUES) {
        for (i = 0; i < m; i++) aa[diag[i]] = v[i];
      } else {
        for (i = 0; i < m; i++) aa[diag[i]] += v[i];
      }
      PetscCall(MatSeqAIJRestoreArray(Y, &aa));
      PetscCall(VecRestoreArrayRead(D, &v));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(MatSeqAIJInvalidateDiagonal(Y));
  }
  PetscCall(MatDiagonalSet_Default(Y, D, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Return row-oriented compressed-index (ia/ja) arrays for A.
  oshift selects 0- or 1-based indexing; symmetric requests the symmetrized pattern.
  Ownership: when the symmetrized or shifted path runs, *ia/*ja are freshly allocated
  and must be released via MatRestoreRowIJ_SeqAIJ(); otherwise they alias a->i/a->j.
*/
PetscErrorCode MatGetRowIJ_SeqAIJ(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *m, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, ishift;

  PetscFunctionBegin;
  if (m) *m = A->rmap->n;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);
  ishift = 0;
  if (symmetric && A->structurally_symmetric != PETSC_BOOL3_TRUE) {
    /* pattern must be symmetrized; produces newly allocated arrays */
    PetscCall(MatToSymmetricIJ_SeqAIJ(A->rmap->n, a->i, a->j, PETSC_TRUE, ishift, oshift, (PetscInt **)ia, (PetscInt **)ja));
  } else if (oshift == 1) {
    PetscInt *tia;
    PetscInt  nz = a->i[A->rmap->n];
    /* malloc space and add 1 to i and j indices */
    PetscCall(PetscMalloc1(A->rmap->n + 1, &tia));
    for (i = 0; i < A->rmap->n + 1; i++) tia[i] = a->i[i] + 1;
    *ia = tia;
    if (ja) {
      PetscInt *tja;
      PetscCall(PetscMalloc1(nz + 1, &tja));
      for (i = 0; i < nz; i++) tja[i] = a->j[i] + 1;
      *ja = tja;
    }
  } else {
    /* zero-based, unsymmetrized: hand out the internal arrays directly (no copy) */
    *ia = a->i;
    if (ja) *ja = a->j;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Release arrays obtained from MatGetRowIJ_SeqAIJ(); frees only when that call allocated copies */
PetscErrorCode MatRestoreRowIJ_SeqAIJ(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  PetscFunctionBegin;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);
  /* must mirror the allocation condition in MatGetRowIJ_SeqAIJ() */
  if ((symmetric && A->structurally_symmetric != PETSC_BOOL3_TRUE) || oshift == 1) {
    PetscCall(PetscFree(*ia));
    if (ja) PetscCall(PetscFree(*ja));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Return column-oriented (CSC-style) ia/ja arrays for A by transposing the row structure.
  Always allocates fresh arrays (released by MatRestoreColumnIJ_SeqAIJ()).
*/
PetscErrorCode MatGetColumnIJ_SeqAIJ(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *nn, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, *collengths, *cia, *cja, n = A->cmap->n, m = A->rmap->n;
  PetscInt    nz = a->i[m], row, *jj, mr, col;

  PetscFunctionBegin;
  *nn = n;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);
  if (symmetric) {
    PetscCall(MatToSymmetricIJ_SeqAIJ(A->rmap->n, a->i, a->j, PETSC_TRUE, 0, oshift, (PetscInt **)ia, (PetscInt **)ja));
  } else {
    /* classic two-pass CSR -> CSC transpose: count entries per column, prefix-sum, then scatter */
    PetscCall(PetscCalloc1(n, &collengths));
    PetscCall(PetscMalloc1(n + 1, &cia));
    PetscCall(PetscMalloc1(nz, &cja));
    jj = a->j;
    for (i = 0; i < nz; i++) collengths[jj[i]]++;
    cia[0] = oshift;
    for (i = 0; i < n; i++) cia[i + 1] = cia[i] + collengths[i];
    /* reuse collengths[] as per-column fill cursors */
    PetscCall(PetscArrayzero(collengths, n));
    jj = a->j;
    for (row = 0; row < m; row++) {
      mr = a->i[row + 1] - a->i[row];
      for (i = 0; i < mr; i++) {
        col = *jj++;

        cja[cia[col] + collengths[col]++ - oshift] = row + oshift;
      }
    }
    PetscCall(PetscFree(collengths));
    *ia = cia;
    *ja = cja;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Free the arrays allocated by MatGetColumnIJ_SeqAIJ() (both paths allocate) */
PetscErrorCode MatRestoreColumnIJ_SeqAIJ(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
{
  PetscFunctionBegin;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscFree(*ia));
  PetscCall(PetscFree(*ja));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  MatGetColumnIJ_SeqAIJ_Color() and MatRestoreColumnIJ_SeqAIJ_Color() are customized from
  MatGetColumnIJ_SeqAIJ() and MatRestoreColumnIJ_SeqAIJ() by adding an output
  spidx[], index of a->a, to be used in MatTransposeColoringCreate_SeqAIJ() and MatFDColoringCreate_SeqXAIJ()
*/
/*
  Column-oriented ia/ja for A plus spidx[], the position in a->a/a->j of each entry,
  in the same order as cja[]. Used by the coloring code; see the comment above.
  Note: the symmetric/inodecompressed arguments are accepted but ignored here.
*/
PetscErrorCode MatGetColumnIJ_SeqAIJ_Color(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *nn, const PetscInt *ia[], const PetscInt *ja[], PetscInt *spidx[], PetscBool *done)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        i, *collengths, *cia, *cja, n = A->cmap->n, m = A->rmap->n;
  PetscInt        nz = a->i[m], row, mr, col, tmp;
  PetscInt       *cspidx;
  const PetscInt *jj;

  PetscFunctionBegin;
  *nn = n;
  if (!ia) PetscFunctionReturn(PETSC_SUCCESS);

  /* two-pass CSR -> CSC transpose, carrying the source index along with each entry */
  PetscCall(PetscCalloc1(n, &collengths));
  PetscCall(PetscMalloc1(n + 1, &cia));
  PetscCall(PetscMalloc1(nz, &cja));
  PetscCall(PetscMalloc1(nz, &cspidx));
  jj = a->j;
  for (i = 0; i < nz; i++) collengths[jj[i]]++;
  cia[0] = oshift;
  for (i = 0; i < n; i++) cia[i + 1] = cia[i] + collengths[i];
  /* reuse collengths[] as per-column fill cursors */
  PetscCall(PetscArrayzero(collengths, n));
  jj = a->j;
  for (row = 0; row < m; row++) {
    mr = a->i[row + 1] - a->i[row];
    for (i = 0; i < mr; i++) {
      col         = *jj++;
      tmp         = cia[col] + collengths[col]++ - oshift;
      cspidx[tmp] = a->i[row] + i; /* index of a->j */
      cja[tmp]    = row + oshift;
    }
  }
  PetscCall(PetscFree(collengths));
  *ia    = cia;
  *ja    = cja;
  *spidx = cspidx;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Free everything handed out by MatGetColumnIJ_SeqAIJ_Color() */
PetscErrorCode MatRestoreColumnIJ_SeqAIJ_Color(Mat A, PetscInt oshift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscInt *spidx[], PetscBool *done)
{
  PetscFunctionBegin;
  PetscCall(MatRestoreColumnIJ_SeqAIJ(A, oshift, symmetric, inodecompressed, n, ia, ja, done));
  PetscCall(PetscFree(*spidx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Overwrite all stored values of one row with v[]; v must supply exactly the row's
  nonzero count (ai[row+1] - ai[row]) values, in storage order. Structure is unchanged.
*/
PetscErrorCode MatSetValuesRow_SeqAIJ(Mat A, PetscInt row, const PetscScalar v[])
{
  Mat_SeqAIJ  *a  = (Mat_SeqAIJ *)A->data;
  PetscInt    *ai = a->i;
  PetscScalar *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  PetscCall(PetscArraycpy(aa + ai[row], v, ai[row + 1] - ai[row]));
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
    MatSeqAIJSetValuesLocalFast - An optimized version of MatSetValuesLocal() for SeqAIJ matrices with several assumptions

      - a single row of values is set with each call
      - no row or column indices are negative or (in error) larger than the number of rows or columns
      - the values are always added to the matrix, not set
      - no new locations are introduced in the nonzero structure of the matrix

     This does NOT assume the global column indices are sorted

*/

#include <petsc/private/isimpl.h>
PetscErrorCode MatSeqAIJSetValuesLocalFast(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode is)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        low, high, t, row, nrow, i, col, l;
  const PetscInt *rp, *ai = a->i, *ailen = a->ilen, *aj = a->j;
  PetscInt        lastcol = -1;
  MatScalar      *ap, value, *aa;
  const PetscInt *ridx = A->rmap->mapping->indices, *cidx = A->cmap->mapping->indices;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  /* only im[0] is used: see the single-row assumption documented above */
  row  = ridx[im[0]];
  rp   = aj + ai[row];
  ap   = aa + ai[row];
  nrow = ailen[row];
  low  = 0;
  high = nrow;
  for (l = 0; l < n; l++) { /* loop over added columns */
    col   = cidx[in[l]];
    value = v[l];

    /* shrink the search window when columns arrive in ascending order */
    if (col <= lastcol) low = 0;
    else high = nrow;
    lastcol = col;
    /* binary search down to a window of <= 5, then scan linearly */
    while (high - low > 5) {
      t = (low + high) / 2;
      if (rp[t] > col) high = t;
      else low = t;
    }
    for (i = low; i < high; i++) {
      if (rp[i] == col) {
        ap[i] += value;
        low = i + 1;
        break;
      }
    }
  }
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  /* BUGFIX: was a bare 'return PETSC_SUCCESS;', which leaves the PetscFunctionBegin
     debug call stack unbalanced; must exit through PetscFunctionReturn() */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  General MatSetValues() implementation for SeqAIJ: insert or add an m x n logically
  dense block of values v[] at rows im[] / columns in[]. Negative indices are skipped.
  Existing entries are located by binary search within the row; new entries trigger a
  row shift and, when preallocation is exhausted, a reallocation via the
  MatSeqXAIJReallocateAIJ* macros (unless a->nonew forbids it).
*/
PetscErrorCode MatSetValues_SeqAIJ(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode is)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *rp, k, low, high, t, ii, row, nrow, i, col, l, rmax, N;
  PetscInt   *imax = a->imax, *ai = a->i, *ailen = a->ilen;
  PetscInt   *aj = a->j, nonew = a->nonew, lastcol = -1;
  MatScalar  *ap = NULL, value = 0.0, *aa;
  PetscBool   ignorezeroentries = a->ignorezeroentries;
  PetscBool   roworiented       = a->roworiented;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (k = 0; k < m; k++) { /* loop over added rows */
    row = im[k];
    if (row < 0) continue; /* negative rows are silently ignored */
    PetscCheck(row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, row, A->rmap->n - 1);
    rp = aj + ai[row];
    if (!A->structure_only) ap = aa + ai[row];
    rmax = imax[row];  /* allocated slots in this row */
    nrow = ailen[row]; /* currently used slots */
    low  = 0;
    high = nrow;
    for (l = 0; l < n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
      PetscCheck(in[l] < A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[l], A->cmap->n - 1);
      col = in[l];
      /* v may be NULL (structure-only insertion); row- vs column-major chosen by a->roworiented */
      if (v && !A->structure_only) value = roworiented ? v[l + k * n] : v[k + l * m];
      /* optionally skip off-diagonal zeros being added */
      if (!A->structure_only && value == 0.0 && ignorezeroentries && is == ADD_VALUES && row != col) continue;

      /* reuse the previous search window when columns arrive sorted */
      if (col <= lastcol) low = 0;
      else high = nrow;
      lastcol = col;
      /* binary search down to a window of <= 5, then scan linearly */
      while (high - low > 5) {
        t = (low + high) / 2;
        if (rp[t] > col) high = t;
        else low = t;
      }
      for (i = low; i < high; i++) {
        if (rp[i] > col) break; /* not present; i is the insertion point */
        if (rp[i] == col) {
          if (!A->structure_only) {
            if (is == ADD_VALUES) {
              ap[i] += value;
              (void)PetscLogFlops(1.0);
            } else ap[i] = value;
          }
          low = i + 1;
          goto noinsert;
        }
      }
      /* entry not found: decide whether a new nonzero may be created */
      if (value == 0.0 && ignorezeroentries && row != col) goto noinsert;
      if (nonew == 1) goto noinsert; /* MAT_NEW_NONZERO_LOCATIONS == FALSE: drop silently */
      PetscCheck(nonew != -1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero at (%" PetscInt_FMT ",%" PetscInt_FMT ") in the matrix", row, col);
      /* grow the row storage if the preallocation is exhausted */
      if (A->structure_only) {
        MatSeqXAIJReallocateAIJ_structure_only(A, A->rmap->n, 1, nrow, row, col, rmax, ai, aj, rp, imax, nonew, MatScalar);
      } else {
        MatSeqXAIJReallocateAIJ(A, A->rmap->n, 1, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
      }
      N = nrow++ - 1;
      a->nz++;
      high++;
      /* shift up all the later entries in this row */
      PetscCall(PetscArraymove(rp + i + 1, rp + i, N - i + 1));
      rp[i] = col;
      if (!A->structure_only) {
        PetscCall(PetscArraymove(ap + i + 1, ap + i, N - i + 1));
        ap[i] = value;
      }
      low = i + 1;
      A->nonzerostate++;
    noinsert:;
    }
    ailen[row] = nrow;
  }
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Fast-path MatSetValues() used after MatSeqAIJSetTotalPreallocation(): rows must be
  provided in order starting at row zero, each with sorted column indices; values are
  copied verbatim and the row pointers a->i are built on the fly. Installed as
  A->ops->setvalues by MatSeqAIJSetTotalPreallocation().
*/
PetscErrorCode MatSetValues_SeqAIJ_SortedFullNoPreallocation(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode is)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *rp, k, row;
  PetscInt   *ai = a->i;
  PetscInt   *aj = a->j;
  MatScalar  *aa, *ap;

  PetscFunctionBegin;
  PetscCheck(!A->was_assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot call on assembled matrix.");
  PetscCheck(m * n + a->nz <= a->maxnz, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of entries in matrix will be larger than maximum nonzeros allocated for %" PetscInt_FMT " in MatSeqAIJSetTotalPreallocation()", a->maxnz);

  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (k = 0; k < m; k++) { /* loop over added rows */
    row = im[k];
    rp  = aj + ai[row];
    ap  = aa + ai[row];

    PetscCall(PetscMemcpy(rp, in, n * sizeof(PetscInt)));
    if (!A->structure_only) {
      if (v) {
        PetscCall(PetscMemcpy(ap, v, n * sizeof(PetscScalar)));
        v += n; /* v holds m consecutive rows of n values each */
      } else {
        PetscCall(PetscMemzero(ap, n * sizeof(PetscScalar)));
      }
    }
    /* finalize this row's metadata and extend the row-pointer array */
    a->ilen[row]    = n;
    a->imax[row]    = n;
    a->i[row + 1]   = a->i[row] + n;
    a->nz          += n;
  }
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatSeqAIJSetTotalPreallocation - Sets an upper bound on the total number of expected nonzeros in the matrix.

  Input Parameters:
+ A       - the `MATSEQAIJ` matrix
- nztotal - bound on the number of nonzeros

  Level: advanced

  Notes:
  This can be called if you will be provided the matrix row by row (from row zero) with sorted column indices for each row.
  Simply call `MatSetValues()` after this call to provide the matrix entries in the usual manner. This matrix may be used
  as always with multiple matrix assemblies.

.seealso: `MatSetOption()`, `MAT_SORTED_FULL`, `MatSetValues()`, `MatSeqAIJSetPreallocation()`
@*/

PetscErrorCode MatSeqAIJSetTotalPreallocation(Mat A, PetscInt nztotal)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  a->maxnz = nztotal;
  /* per-row metadata arrays: allocate on first use, otherwise just reset ilen */
  if (!a->imax) { PetscCall(PetscMalloc1(A->rmap->n, &a->imax)); }
  if (!a->ilen) {
    PetscCall(PetscMalloc1(A->rmap->n, &a->ilen));
  } else {
    PetscCall(PetscMemzero(a->ilen, A->rmap->n * sizeof(PetscInt)));
  }

  /* allocate the matrix space */
  if (A->structure_only) {
    PetscCall(PetscMalloc1(nztotal, &a->j));
    PetscCall(PetscMalloc1(A->rmap->n + 1, &a->i));
  } else {
    /* single PetscMalloc3 for a, j, i -> flagged via singlemalloc for matching free */
    PetscCall(PetscMalloc3(nztotal, &a->a, nztotal, &a->j, A->rmap->n + 1, &a->i));
  }
  a->i[0] = 0;
  if (A->structure_only) {
    a->singlemalloc = PETSC_FALSE;
    a->free_a       = PETSC_FALSE;
  } else {
    a->singlemalloc = PETSC_TRUE;
    a->free_a       = PETSC_TRUE;
  }
  a->free_ij        = PETSC_TRUE;
  /* route subsequent MatSetValues() through the sorted-full fast path */
  A->ops->setvalues = MatSetValues_SeqAIJ_SortedFullNoPreallocation;
  A->preallocated   = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Sorted-full MatSetValues() for a matrix that already has its preallocation: each call
  supplies complete rows with sorted column indices. Column indices are only written on
  the first assembly; afterwards just the values are refreshed.
*/
PetscErrorCode MatSetValues_SeqAIJ_SortedFull(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], const PetscScalar v[], InsertMode is)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *rp, k, row;
  PetscInt   *ai = a->i, *ailen = a->ilen;
  PetscInt   *aj = a->j;
  MatScalar  *aa, *ap;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (k = 0; k < m; k++) { /* loop over added rows */
    row = im[k];
    /* BUGFIX: the message reports a row number, but the original passed n (the column
       count) as the argument, printing the wrong value in the error text */
    PetscCheck(n <= a->imax[row], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Preallocation for row %" PetscInt_FMT " does not match number of columns provided", row);
    rp = aj + ai[row];
    ap = aa + ai[row];
    if (!A->was_assembled) PetscCall(PetscMemcpy(rp, in, n * sizeof(PetscInt)));
    if (!A->structure_only) {
      if (v) {
        PetscCall(PetscMemcpy(ap, v, n * sizeof(PetscScalar)));
        v += n;
      } else {
        PetscCall(PetscMemzero(ap, n * sizeof(PetscScalar)));
      }
    }
    ailen[row]  = n;
    a->nz      += n;
  }
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Retrieve an m x n block of values into v[] (row-major). Missing entries yield 0.0;
  negative row/column indices skip the corresponding output slots. Rows are searched
  with the same binary-search-then-scan scheme as MatSetValues_SeqAIJ().
*/
PetscErrorCode MatGetValues_SeqAIJ(Mat A, PetscInt m, const PetscInt im[], PetscInt n, const PetscInt in[], PetscScalar v[])
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  PetscInt        *rp, k, low, high, t, row, nrow, i, col, l, *aj = a->j;
  PetscInt        *ai = a->i, *ailen = a->ilen;
  const MatScalar *ap, *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  for (k = 0; k < m; k++) { /* loop over rows */
    row = im[k];
    if (row < 0) {
      v += n; /* negative row: skip the whole output row */
      continue;
    }
    PetscCheck(row < A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row too large: row %" PetscInt_FMT " max %" PetscInt_FMT, row, A->rmap->n - 1);
    rp   = aj + ai[row];
    ap   = aa + ai[row];
    nrow = ailen[row];
    for (l = 0; l < n; l++) { /* loop over columns */
      if (in[l] < 0) {
        v++; /* negative column: skip one output slot */
        continue;
      }
      PetscCheck(in[l] < A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column too large: col %" PetscInt_FMT " max %" PetscInt_FMT, in[l], A->cmap->n - 1);
      col  = in[l];
      high = nrow;
      low  = 0; /* assume unsorted */
      while (high - low > 5) {
        t = (low + high) / 2;
        if (rp[t] > col) high = t;
        else low = t;
      }
      for (i = low; i < high; i++) {
        if (rp[i] > col) break;
        if (rp[i] == col) {
          *v++ = ap[i];
          goto finished;
        }
      }
      *v++ = 0.0; /* entry not stored */
    finished:;
    }
  }
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Write the matrix to a binary viewer: header (classid, M, N, nz), per-row lengths,
  column indices, then values; finally the block-size option into the .info file.
*/
PetscErrorCode MatView_SeqAIJ_Binary(Mat mat, PetscViewer viewer)
{
  Mat_SeqAIJ        *A = (Mat_SeqAIJ *)mat->data;
  const PetscScalar *av;
  PetscInt           header[4], M, N, m, nz, i;
  PetscInt          *rowlens;

  PetscFunctionBegin;
  PetscCall(PetscViewerSetUp(viewer));

  M  = mat->rmap->N;
  N  = mat->cmap->N;
  m  = mat->rmap->n;
  nz = A->nz;

  /* write matrix header */
  header[0] = MAT_FILE_CLASSID;
  header[1] = M;
  header[2] = N;
  header[3] = nz;
  PetscCall(PetscViewerBinaryWrite(viewer, header, 4, PETSC_INT));

  /* fill in and store row lengths */
  PetscCall(PetscMalloc1(m, &rowlens));
  for (i = 0; i < m; i++) rowlens[i] = A->i[i + 1] - A->i[i];
  PetscCall(PetscViewerBinaryWrite(viewer, rowlens, m, PETSC_INT));
  PetscCall(PetscFree(rowlens));
  /* store column indices */
  PetscCall(PetscViewerBinaryWrite(viewer, A->j, nz, PETSC_INT));
  /* store nonzero values */
  PetscCall(MatSeqAIJGetArrayRead(mat, &av));
  PetscCall(PetscViewerBinaryWrite(viewer, av, nz, PETSC_SCALAR));
  PetscCall(MatSeqAIJRestoreArrayRead(mat, &av));

  /* write block size option to the viewer's .info file */
  PetscCall(MatView_Binary_BlockSizes(mat, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ASCII view of a structure-only matrix: column indices per row, no values */
static PetscErrorCode MatView_SeqAIJ_ASCII_structonly(Mat A, PetscViewer viewer)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, k, m = A->rmap->N;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE));
  for (i = 0; i < m; i++) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "row %" PetscInt_FMT ":", i));
    for (k = a->i[i]; k < a->i[i + 1]; k++) PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ") ", a->j[k]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "\n"));
  }
  PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

extern PetscErrorCode MatSeqAIJFactorInfo_Matlab(Mat, PetscViewer);

PetscErrorCode MatView_SeqAIJ_ASCII(Mat A, PetscViewer viewer)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
708 const PetscScalar *av; 709 PetscInt i, j, m = A->rmap->n; 710 const char *name; 711 PetscViewerFormat format; 712 713 PetscFunctionBegin; 714 if (A->structure_only) { 715 PetscCall(MatView_SeqAIJ_ASCII_structonly(A, viewer)); 716 PetscFunctionReturn(PETSC_SUCCESS); 717 } 718 719 PetscCall(PetscViewerGetFormat(viewer, &format)); 720 if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS); 721 722 /* trigger copy to CPU if needed */ 723 PetscCall(MatSeqAIJGetArrayRead(A, &av)); 724 PetscCall(MatSeqAIJRestoreArrayRead(A, &av)); 725 if (format == PETSC_VIEWER_ASCII_MATLAB) { 726 PetscInt nofinalvalue = 0; 727 if (m && ((a->i[m] == a->i[m - 1]) || (a->j[a->nz - 1] != A->cmap->n - 1))) { 728 /* Need a dummy value to ensure the dimension of the matrix. */ 729 nofinalvalue = 1; 730 } 731 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 732 PetscCall(PetscViewerASCIIPrintf(viewer, "%% Size = %" PetscInt_FMT " %" PetscInt_FMT " \n", m, A->cmap->n)); 733 PetscCall(PetscViewerASCIIPrintf(viewer, "%% Nonzeros = %" PetscInt_FMT " \n", a->nz)); 734 #if defined(PETSC_USE_COMPLEX) 735 PetscCall(PetscViewerASCIIPrintf(viewer, "zzz = zeros(%" PetscInt_FMT ",4);\n", a->nz + nofinalvalue)); 736 #else 737 PetscCall(PetscViewerASCIIPrintf(viewer, "zzz = zeros(%" PetscInt_FMT ",3);\n", a->nz + nofinalvalue)); 738 #endif 739 PetscCall(PetscViewerASCIIPrintf(viewer, "zzz = [\n")); 740 741 for (i = 0; i < m; i++) { 742 for (j = a->i[i]; j < a->i[i + 1]; j++) { 743 #if defined(PETSC_USE_COMPLEX) 744 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %18.16e %18.16e\n", i + 1, a->j[j] + 1, (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 745 #else 746 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %18.16e\n", i + 1, a->j[j] + 1, (double)a->a[j])); 747 #endif 748 } 749 } 750 if (nofinalvalue) { 751 
#if defined(PETSC_USE_COMPLEX) 752 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %18.16e %18.16e\n", m, A->cmap->n, 0., 0.)); 753 #else 754 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %18.16e\n", m, A->cmap->n, 0.0)); 755 #endif 756 } 757 PetscCall(PetscObjectGetName((PetscObject)A, &name)); 758 PetscCall(PetscViewerASCIIPrintf(viewer, "];\n %s = spconvert(zzz);\n", name)); 759 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 760 } else if (format == PETSC_VIEWER_ASCII_COMMON) { 761 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 762 for (i = 0; i < m; i++) { 763 PetscCall(PetscViewerASCIIPrintf(viewer, "row %" PetscInt_FMT ":", i)); 764 for (j = a->i[i]; j < a->i[i + 1]; j++) { 765 #if defined(PETSC_USE_COMPLEX) 766 if (PetscImaginaryPart(a->a[j]) > 0.0 && PetscRealPart(a->a[j]) != 0.0) { 767 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 768 } else if (PetscImaginaryPart(a->a[j]) < 0.0 && PetscRealPart(a->a[j]) != 0.0) { 769 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)-PetscImaginaryPart(a->a[j]))); 770 } else if (PetscRealPart(a->a[j]) != 0.0) { 771 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(a->a[j]))); 772 } 773 #else 774 if (a->a[j] != 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)a->a[j])); 775 #endif 776 } 777 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 778 } 779 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 780 } else if (format == PETSC_VIEWER_ASCII_SYMMODU) { 781 PetscInt nzd = 0, fshift = 1, *sptr; 782 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 783 PetscCall(PetscMalloc1(m + 1, &sptr)); 784 for (i = 0; i < m; i++) { 785 sptr[i] = nzd + 1; 786 for (j = 
a->i[i]; j < a->i[i + 1]; j++) { 787 if (a->j[j] >= i) { 788 #if defined(PETSC_USE_COMPLEX) 789 if (PetscImaginaryPart(a->a[j]) != 0.0 || PetscRealPart(a->a[j]) != 0.0) nzd++; 790 #else 791 if (a->a[j] != 0.0) nzd++; 792 #endif 793 } 794 } 795 } 796 sptr[m] = nzd + 1; 797 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT "\n\n", m, nzd)); 798 for (i = 0; i < m + 1; i += 6) { 799 if (i + 4 < m) { 800 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1], sptr[i + 2], sptr[i + 3], sptr[i + 4], sptr[i + 5])); 801 } else if (i + 3 < m) { 802 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1], sptr[i + 2], sptr[i + 3], sptr[i + 4])); 803 } else if (i + 2 < m) { 804 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1], sptr[i + 2], sptr[i + 3])); 805 } else if (i + 1 < m) { 806 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1], sptr[i + 2])); 807 } else if (i < m) { 808 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " %" PetscInt_FMT "\n", sptr[i], sptr[i + 1])); 809 } else { 810 PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT "\n", sptr[i])); 811 } 812 } 813 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 814 PetscCall(PetscFree(sptr)); 815 for (i = 0; i < m; i++) { 816 for (j = a->i[i]; j < a->i[i + 1]; j++) { 817 if (a->j[j] >= i) PetscCall(PetscViewerASCIIPrintf(viewer, " %" PetscInt_FMT " ", a->j[j] + fshift)); 818 } 819 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 820 } 821 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 822 for (i = 0; i < m; i++) { 823 for (j = a->i[i]; j < a->i[i + 1]; j++) { 824 if (a->j[j] >= i) { 825 #if 
defined(PETSC_USE_COMPLEX) 826 if (PetscImaginaryPart(a->a[j]) != 0.0 || PetscRealPart(a->a[j]) != 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " %18.16e %18.16e ", (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 827 #else 828 if (a->a[j] != 0.0) PetscCall(PetscViewerASCIIPrintf(viewer, " %18.16e ", (double)a->a[j])); 829 #endif 830 } 831 } 832 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 833 } 834 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 835 } else if (format == PETSC_VIEWER_ASCII_DENSE) { 836 PetscInt cnt = 0, jcnt; 837 PetscScalar value; 838 #if defined(PETSC_USE_COMPLEX) 839 PetscBool realonly = PETSC_TRUE; 840 841 for (i = 0; i < a->i[m]; i++) { 842 if (PetscImaginaryPart(a->a[i]) != 0.0) { 843 realonly = PETSC_FALSE; 844 break; 845 } 846 } 847 #endif 848 849 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 850 for (i = 0; i < m; i++) { 851 jcnt = 0; 852 for (j = 0; j < A->cmap->n; j++) { 853 if (jcnt < a->i[i + 1] - a->i[i] && j == a->j[cnt]) { 854 value = a->a[cnt++]; 855 jcnt++; 856 } else { 857 value = 0.0; 858 } 859 #if defined(PETSC_USE_COMPLEX) 860 if (realonly) { 861 PetscCall(PetscViewerASCIIPrintf(viewer, " %7.5e ", (double)PetscRealPart(value))); 862 } else { 863 PetscCall(PetscViewerASCIIPrintf(viewer, " %7.5e+%7.5e i ", (double)PetscRealPart(value), (double)PetscImaginaryPart(value))); 864 } 865 #else 866 PetscCall(PetscViewerASCIIPrintf(viewer, " %7.5e ", (double)value)); 867 #endif 868 } 869 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 870 } 871 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 872 } else if (format == PETSC_VIEWER_ASCII_MATRIXMARKET) { 873 PetscInt fshift = 1; 874 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 875 #if defined(PETSC_USE_COMPLEX) 876 PetscCall(PetscViewerASCIIPrintf(viewer, "%%%%MatrixMarket matrix coordinate complex general\n")); 877 #else 878 PetscCall(PetscViewerASCIIPrintf(viewer, "%%%%MatrixMarket matrix coordinate real general\n")); 879 
#endif 880 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, A->cmap->n, a->nz)); 881 for (i = 0; i < m; i++) { 882 for (j = a->i[i]; j < a->i[i + 1]; j++) { 883 #if defined(PETSC_USE_COMPLEX) 884 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %g %g\n", i + fshift, a->j[j] + fshift, (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 885 #else 886 PetscCall(PetscViewerASCIIPrintf(viewer, "%" PetscInt_FMT " %" PetscInt_FMT " %g\n", i + fshift, a->j[j] + fshift, (double)a->a[j])); 887 #endif 888 } 889 } 890 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 891 } else { 892 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_FALSE)); 893 if (A->factortype) { 894 for (i = 0; i < m; i++) { 895 PetscCall(PetscViewerASCIIPrintf(viewer, "row %" PetscInt_FMT ":", i)); 896 /* L part */ 897 for (j = a->i[i]; j < a->i[i + 1]; j++) { 898 #if defined(PETSC_USE_COMPLEX) 899 if (PetscImaginaryPart(a->a[j]) > 0.0) { 900 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 901 } else if (PetscImaginaryPart(a->a[j]) < 0.0) { 902 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)(-PetscImaginaryPart(a->a[j])))); 903 } else { 904 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(a->a[j]))); 905 } 906 #else 907 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)a->a[j])); 908 #endif 909 } 910 /* diagonal */ 911 j = a->diag[i]; 912 #if defined(PETSC_USE_COMPLEX) 913 if (PetscImaginaryPart(a->a[j]) > 0.0) { 914 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(1.0 / a->a[j]), (double)PetscImaginaryPart(1.0 / a->a[j]))); 915 } else if (PetscImaginaryPart(a->a[j]) < 0.0) 
{ 916 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(1.0 / a->a[j]), (double)(-PetscImaginaryPart(1.0 / a->a[j])))); 917 } else { 918 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(1.0 / a->a[j]))); 919 } 920 #else 921 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)(1.0 / a->a[j]))); 922 #endif 923 924 /* U part */ 925 for (j = a->diag[i + 1] + 1; j < a->diag[i]; j++) { 926 #if defined(PETSC_USE_COMPLEX) 927 if (PetscImaginaryPart(a->a[j]) > 0.0) { 928 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 929 } else if (PetscImaginaryPart(a->a[j]) < 0.0) { 930 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)(-PetscImaginaryPart(a->a[j])))); 931 } else { 932 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(a->a[j]))); 933 } 934 #else 935 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)a->a[j])); 936 #endif 937 } 938 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 939 } 940 } else { 941 for (i = 0; i < m; i++) { 942 PetscCall(PetscViewerASCIIPrintf(viewer, "row %" PetscInt_FMT ":", i)); 943 for (j = a->i[i]; j < a->i[i + 1]; j++) { 944 #if defined(PETSC_USE_COMPLEX) 945 if (PetscImaginaryPart(a->a[j]) > 0.0) { 946 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g + %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)PetscImaginaryPart(a->a[j]))); 947 } else if (PetscImaginaryPart(a->a[j]) < 0.0) { 948 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g - %g i)", a->j[j], (double)PetscRealPart(a->a[j]), (double)-PetscImaginaryPart(a->a[j]))); 949 } else { 950 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" 
PetscInt_FMT ", %g) ", a->j[j], (double)PetscRealPart(a->a[j]))); 951 } 952 #else 953 PetscCall(PetscViewerASCIIPrintf(viewer, " (%" PetscInt_FMT ", %g) ", a->j[j], (double)a->a[j])); 954 #endif 955 } 956 PetscCall(PetscViewerASCIIPrintf(viewer, "\n")); 957 } 958 } 959 PetscCall(PetscViewerASCIIUseTabs(viewer, PETSC_TRUE)); 960 } 961 PetscCall(PetscViewerFlush(viewer)); 962 PetscFunctionReturn(PETSC_SUCCESS); 963 } 964 965 #include <petscdraw.h> 966 PetscErrorCode MatView_SeqAIJ_Draw_Zoom(PetscDraw draw, void *Aa) 967 { 968 Mat A = (Mat)Aa; 969 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 970 PetscInt i, j, m = A->rmap->n; 971 int color; 972 PetscReal xl, yl, xr, yr, x_l, x_r, y_l, y_r; 973 PetscViewer viewer; 974 PetscViewerFormat format; 975 const PetscScalar *aa; 976 977 PetscFunctionBegin; 978 PetscCall(PetscObjectQuery((PetscObject)A, "Zoomviewer", (PetscObject *)&viewer)); 979 PetscCall(PetscViewerGetFormat(viewer, &format)); 980 PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr)); 981 982 /* loop over matrix elements drawing boxes */ 983 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 984 if (format != PETSC_VIEWER_DRAW_CONTOUR) { 985 PetscDrawCollectiveBegin(draw); 986 /* Blue for negative, Cyan for zero and Red for positive */ 987 color = PETSC_DRAW_BLUE; 988 for (i = 0; i < m; i++) { 989 y_l = m - i - 1.0; 990 y_r = y_l + 1.0; 991 for (j = a->i[i]; j < a->i[i + 1]; j++) { 992 x_l = a->j[j]; 993 x_r = x_l + 1.0; 994 if (PetscRealPart(aa[j]) >= 0.) continue; 995 PetscCall(PetscDrawRectangle(draw, x_l, y_l, x_r, y_r, color, color, color, color)); 996 } 997 } 998 color = PETSC_DRAW_CYAN; 999 for (i = 0; i < m; i++) { 1000 y_l = m - i - 1.0; 1001 y_r = y_l + 1.0; 1002 for (j = a->i[i]; j < a->i[i + 1]; j++) { 1003 x_l = a->j[j]; 1004 x_r = x_l + 1.0; 1005 if (aa[j] != 0.) 
continue; /* cyan pass draws only exact (structural) zeros; nonzeros were/will be handled by the blue and red passes */
        PetscCall(PetscDrawRectangle(draw, x_l, y_l, x_r, y_r, color, color, color, color));
      }
    }
    /* third pass: red boxes for entries with positive real part */
    color = PETSC_DRAW_RED;
    for (i = 0; i < m; i++) {
      y_l = m - i - 1.0; /* flip vertically so row 0 is drawn at the top */
      y_r = y_l + 1.0;
      for (j = a->i[i]; j < a->i[i + 1]; j++) {
        x_l = a->j[j];
        x_r = x_l + 1.0;
        if (PetscRealPart(aa[j]) <= 0.) continue;
        PetscCall(PetscDrawRectangle(draw, x_l, y_l, x_r, y_r, color, color, color, color));
      }
    }
    PetscDrawCollectiveEnd(draw);
  } else {
    /* use contour shading to indicate magnitude of values */
    /* first determine max of all nonzero values */
    PetscReal minv = 0.0, maxv = 0.0;
    PetscInt  nz = a->nz, count = 0;
    PetscDraw popup;

    for (i = 0; i < nz; i++) {
      if (PetscAbsScalar(aa[i]) > maxv) maxv = PetscAbsScalar(aa[i]);
    }
    /* guard against an all-zero matrix so the color scale has a nonzero span */
    if (minv >= maxv) maxv = minv + PETSC_SMALL;
    PetscCall(PetscDrawGetPopup(draw, &popup));
    PetscCall(PetscDrawScalePopup(popup, minv, maxv));

    PetscDrawCollectiveBegin(draw);
    for (i = 0; i < m; i++) {
      y_l = m - i - 1.0;
      y_r = y_l + 1.0;
      for (j = a->i[i]; j < a->i[i + 1]; j++) {
        x_l = a->j[j];
        x_r = x_l + 1.0;
        /* count walks the aa[] array in storage order; it stays in sync with j over all rows */
        color = PetscDrawRealToColor(PetscAbsScalar(aa[count]), minv, maxv);
        PetscCall(PetscDrawRectangle(draw, x_l, y_l, x_r, y_r, color, color, color, color));
        count++;
      }
    }
    PetscDrawCollectiveEnd(draw);
  }
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petscdraw.h>
/*
   MatView_SeqAIJ_Draw - Draw-viewer entry point: sets up the drawing coordinate
   system (matrix dimensions plus a 10% margin) and delegates the actual
   rendering to MatView_SeqAIJ_Draw_Zoom() via PetscDrawZoom().
*/
PetscErrorCode MatView_SeqAIJ_Draw(Mat A, PetscViewer viewer)
{
  PetscDraw draw;
  PetscReal xr, yr, xl, yl, h, w;
  PetscBool isnull;

  PetscFunctionBegin;
  PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  if (isnull) PetscFunctionReturn(PETSC_SUCCESS);

  /* pad the view box by 10% of the matrix dimensions on every side */
  xr = A->cmap->n;
  yr = A->rmap->n;
  h  = yr / 10.0;
  w  = xr / 10.0;
  xr += w;
  yr += h;
  xl = -w;
  yl = -h;
  PetscCall(PetscDrawSetCoordinates(draw, xl, yl, xr, yr));
  /* the zoom callback retrieves the viewer back from the matrix via this composed reference */
  PetscCall(PetscObjectCompose((PetscObject)A, "Zoomviewer", (PetscObject)viewer));
  PetscCall(PetscDrawZoom(draw, MatView_SeqAIJ_Draw_Zoom, A));
  PetscCall(PetscObjectCompose((PetscObject)A, "Zoomviewer", NULL));
  PetscCall(PetscDrawSave(draw));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatView_SeqAIJ - Top-level viewer dispatch for SeqAIJ: routes to the ASCII,
   binary, or draw implementation depending on the viewer type, then lets the
   inode subcomponent append its own information.
*/
PetscErrorCode MatView_SeqAIJ(Mat A, PetscViewer viewer)
{
  PetscBool iascii, isbinary, isdraw;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (iascii) PetscCall(MatView_SeqAIJ_ASCII(A, viewer));
  else if (isbinary) PetscCall(MatView_SeqAIJ_Binary(A, viewer));
  else if (isdraw) PetscCall(MatView_SeqAIJ_Draw(A, viewer));
  PetscCall(MatView_SeqAIJ_Inode(A, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatAssemblyEnd_SeqAIJ - Finalizes assembly: compacts the row storage by
   squeezing out the unused preallocated slots between rows, rebuilds the
   row-length bookkeeping, marks the diagonal, and (re)checks whether the
   compressed-row optimization should be used.
*/
PetscErrorCode MatAssemblyEnd_SeqAIJ(Mat A, MatAssemblyType mode)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    fshift = 0, i, *ai = a->i, *aj = a->j, *imax = a->imax;
  PetscInt    m = A->rmap->n, *ip, N, *ailen = a->ilen, rmax = 0;
  MatScalar  *aa = a->a, *ap;
  PetscReal   ratio = 0.6; /* nonzero-row ratio below which compressed-row storage pays off */

  PetscFunctionBegin;
  if (mode == MAT_FLUSH_ASSEMBLY) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  if (A->was_assembled && A->ass_nonzerostate == A->nonzerostate) {
    /* we need to respect users asking to use or not the inodes routine in between matrix assemblies */
    PetscCall(MatAssemblyEnd_SeqAIJ_Inode(A, mode));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if (m) rmax = ailen[0]; /* determine row with most nonzeros */
  for (i = 1; i < m; i++) {
    /*
       move each row back by the amount of empty slots (fshift) before it*/
    fshift += imax[i - 1] - ailen[i - 1];
    rmax = PetscMax(rmax, ailen[i]);
    if (fshift) {
      ip = aj + ai[i];
      ap = aa + ai[i];
      N  = ailen[i];
      PetscCall(PetscArraymove(ip - fshift, ip, N));
      /* structure-only matrices carry no numerical values to move */
      if (!A->structure_only) PetscCall(PetscArraymove(ap - fshift, ap, N));
    }
    ai[i] = ai[i - 1] + ailen[i - 1]; /* row starts become cumulative sums of actual row lengths */
  }
  if (m) {
    fshift += imax[m - 1] - ailen[m - 1];
    ai[m] = ai[m - 1] + ailen[m - 1];
  }
  /* reset ilen and imax for each row */
  a->nonzerorowcnt = 0;
  if (A->structure_only) {
    PetscCall(PetscFree(a->imax));
    PetscCall(PetscFree(a->ilen));
  } else { /* !A->structure_only */
    for (i = 0; i < m; i++) {
      ailen[i] = imax[i] = ai[i + 1] - ai[i];
      a->nonzerorowcnt += ((ai[i + 1] - ai[i]) > 0);
    }
  }
  a->nz = ai[m];
  /* a->nounused == -1 means the user demanded an error if preallocation was not fully used */
  PetscCheck(!fshift || a->nounused != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unused space detected in matrix: %" PetscInt_FMT " X %" PetscInt_FMT ", %" PetscInt_FMT " unneeded", m, A->cmap->n, fshift);

  PetscCall(MatMarkDiagonal_SeqAIJ(A));
  PetscCall(PetscInfo(A, "Matrix size: %" PetscInt_FMT " X %" PetscInt_FMT "; storage space: %" PetscInt_FMT " unneeded,%" PetscInt_FMT " used\n", m, A->cmap->n, fshift, a->nz));
  PetscCall(PetscInfo(A, "Number of mallocs during MatSetValues() is %" PetscInt_FMT "\n", a->reallocs));
  PetscCall(PetscInfo(A, "Maximum nonzeros in any row is %" PetscInt_FMT "\n", rmax));

  A->info.mallocs += a->reallocs;
  a->reallocs         = 0;
  A->info.nz_unneeded = (PetscReal)fshift;
  a->rmax             = rmax;

  if (!A->structure_only) PetscCall(MatCheckCompressedRow(A, a->nonzerorowcnt, &a->compressedrow, a->i, m, ratio));
  PetscCall(MatAssemblyEnd_SeqAIJ_Inode(A, mode));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatRealPart_SeqAIJ - Replaces every stored value by its real part, in place.
*/
PetscErrorCode MatRealPart_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i,
              nz = a->nz;
  MatScalar  *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (i = 0; i < nz; i++) aa[i] = PetscRealPart(aa[i]);
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  /* stored inverse-diagonal caches are stale once values change */
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatImaginaryPart_SeqAIJ - Replaces every stored value by its imaginary part, in place.
*/
PetscErrorCode MatImaginaryPart_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, nz = a->nz;
  MatScalar  *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(A, &aa));
  for (i = 0; i < nz; i++) aa[i] = PetscImaginaryPart(aa[i]);
  PetscCall(MatSeqAIJRestoreArray(A, &aa));
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatZeroEntries_SeqAIJ - Zeros all stored numerical values while keeping the
   nonzero structure intact.
*/
PetscErrorCode MatZeroEntries_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  MatScalar  *aa;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayWrite(A, &aa));
  PetscCall(PetscArrayzero(aa, a->i[A->rmap->n])); /* a->i[m] == total number of stored entries */
  PetscCall(MatSeqAIJRestoreArrayWrite(A, &aa));
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatResetPreallocationCOO_SeqAIJ - Frees the auxiliary arrays built by the
   COO preallocation path (entry permutation and per-slot index map).
*/
PETSC_INTERN PetscErrorCode MatResetPreallocationCOO_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(a->perm));
  PetscCall(PetscFree(a->jmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatDestroy_SeqAIJ - Releases all storage owned by the SeqAIJ implementation
   and unregisters every composed method so the object can be retyped safely.
*/
PetscErrorCode MatDestroy_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscCall(PetscLogObjectState((PetscObject)A, "Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT, A->rmap->n, A->cmap->n, a->nz));
#endif
  PetscCall(MatSeqXAIJFreeAIJ(A, &a->a, &a->j, &a->i));
  PetscCall(MatResetPreallocationCOO_SeqAIJ(A));
  PetscCall(ISDestroy(&a->row));
  PetscCall(ISDestroy(&a->col));
  PetscCall(PetscFree(a->diag));
PetscCall(PetscFree(a->ibdiag));
  PetscCall(PetscFree(a->imax));
  PetscCall(PetscFree(a->ilen));
  PetscCall(PetscFree(a->ipre));
  PetscCall(PetscFree3(a->idiag, a->mdiag, a->ssor_work));
  PetscCall(PetscFree(a->solve_work));
  PetscCall(ISDestroy(&a->icol));
  PetscCall(PetscFree(a->saved_values));
  PetscCall(PetscFree2(a->compressedrow.i, a->compressedrow.rindex));
  PetscCall(MatDestroy_SeqAIJ_Inode(A));
  PetscCall(PetscFree(A->data));

  /* MatMatMultNumeric_SeqAIJ_SeqAIJ_Sorted may allocate this.
     That function is so heavily used (sometimes in an hidden way through multnumeric function pointers)
     that is hard to properly add this data to the MatProduct data. We free it here to avoid
     users reusing the matrix object with different data to incur in obscure segmentation faults
     due to different matrix sizes */
  PetscCall(PetscObjectCompose((PetscObject)A, "__PETSc__ab_dense", NULL));

  /* clear the type name and every composed method so a subsequent MatSetType() starts clean */
  PetscCall(PetscObjectChangeTypeName((PetscObject)A, NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "PetscMatlabEnginePut_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "PetscMatlabEngineGet_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqAIJSetColumnIndices_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatStoreValues_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatRetrieveValues_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqsbaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqbaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijperm_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijsell_C", NULL));
#if defined(PETSC_HAVE_MKL_SPARSE)
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijmkl_C", NULL));
#endif
#if defined(PETSC_HAVE_CUDA)
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijcusparse_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijcusparse_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_seqaijcusparse_C", NULL));
#endif
#if defined(PETSC_HAVE_HIP)
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijhipsparse_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijhipsparse_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_seqaijhipsparse_C", NULL));
#endif
#if defined(PETSC_HAVE_KOKKOS_KERNELS)
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijkokkos_C", NULL));
#endif
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijcrl_C", NULL));
#if defined(PETSC_HAVE_ELEMENTAL)
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_elemental_C", NULL));
#endif
#if defined(PETSC_HAVE_SCALAPACK)
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_scalapack_C", NULL));
#endif
#if defined(PETSC_HAVE_HYPRE)
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_transpose_seqaij_seqaij_C", NULL));
#endif
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqsell_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatIsTranspose_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatIsHermitianTranspose_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqAIJSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatResetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqAIJSetPreallocationCSR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatReorderForNonzeroDiagonal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_is_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqdense_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSeqAIJKron_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  /* these calls do not belong here: the subclasses Duplicate/Destroy are wrong */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaijsell_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaijperm_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_seqaij_seqaijviennacl_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijviennacl_seqdense_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaijviennacl_seqaij_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatSetOption_SeqAIJ - Translates generic MatOption settings into the SeqAIJ
   internal flags (see the switch below for the encoding of a->nonew etc.).
*/
PetscErrorCode MatSetOption_SeqAIJ(Mat A, MatOption op, PetscBool flg)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ
*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;
    break;
  case MAT_KEEP_NONZERO_PATTERN:
    a->keepnonzeropattern = flg;
    break;
  /* a->nonew encoding: 0 = allow new nonzeros, 1 = silently ignore them,
     -1 = error on new location, -2 = error if a malloc would be required */
  case MAT_NEW_NONZERO_LOCATIONS:
    a->nonew = (flg ? 0 : 1);
    break;
  case MAT_NEW_NONZERO_LOCATION_ERR:
    a->nonew = (flg ? -1 : 0);
    break;
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
    a->nonew = (flg ? -2 : 0);
    break;
  case MAT_UNUSED_NONZERO_LOCATION_ERR:
    a->nounused = (flg ? -1 : 0);
    break;
  case MAT_IGNORE_ZERO_ENTRIES:
    a->ignorezeroentries = flg;
    break;
  case MAT_SPD:
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_SYMMETRY_ETERNAL:
  case MAT_STRUCTURE_ONLY:
  case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
  case MAT_SPD_ETERNAL:
    /* if the diagonal matrix is square it inherits some of the properties above */
    break;
  case MAT_FORCE_DIAGONAL_ENTRIES:
  case MAT_IGNORE_OFF_PROC_ENTRIES:
  case MAT_USE_HASH_TABLE:
    PetscCall(PetscInfo(A, "Option %s ignored\n", MatOptions[op]));
    break;
  case MAT_USE_INODES:
    PetscCall(MatSetOption_SeqAIJ_Inode(A, MAT_USE_INODES, flg));
    break;
  case MAT_SUBMAT_SINGLEIS:
    A->submat_singleis = flg;
    break;
  case MAT_SORTED_FULL:
    if (flg) A->ops->setvalues = MatSetValues_SeqAIJ_SortedFull;
    else A->ops->setvalues = MatSetValues_SeqAIJ;
    break;
  case MAT_FORM_EXPLICIT_TRANSPOSE:
    A->form_explicit_transpose = flg;
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "unknown option %d", op);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatGetDiagonal_SeqAIJ - Extracts the diagonal into v. For LU/ILU-factored
   matrices the stored diagonal entries are returned inverted (1/aa[diag[i]]),
   matching the factor storage used by the triangular solves.
*/
PetscErrorCode MatGetDiagonal_SeqAIJ(Mat A, Vec v)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  PetscInt           i, j, n, *ai = a->i, *aj = a->j;
  PetscScalar       *x;
  const PetscScalar *aa;

  PetscFunctionBegin;
  PetscCall(VecGetLocalSize(v, &n));
  PetscCheck(n == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector");
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  if (A->factortype == MAT_FACTOR_ILU || A->factortype == MAT_FACTOR_LU) {
    PetscInt *diag = a->diag;
    PetscCall(VecGetArrayWrite(v, &x));
    for (i = 0; i < n; i++) x[i] = 1.0 / aa[diag[i]];
    PetscCall(VecRestoreArrayWrite(v, &x));
    PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(VecGetArrayWrite(v, &x));
  for (i = 0; i < n; i++) {
    x[i] = 0.0; /* rows with no stored diagonal entry yield an explicit zero */
    for (j = ai[i]; j < ai[i + 1]; j++) {
      if (aj[j] == i) {
        x[i] = aa[j];
        break;
      }
    }
  }
  PetscCall(VecRestoreArrayWrite(v, &x));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/seq/ftn-kernels/fmult.h>
/*
   MatMultTransposeAdd_SeqAIJ - Computes yy = zz + A^T * xx by scattering each
   row's contribution alpha = x[row] into y at the row's column indices.
*/
PetscErrorCode MatMultTransposeAdd_SeqAIJ(Mat A, Vec xx, Vec zz, Vec yy)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  const MatScalar   *aa;
  PetscScalar       *y;
  const PetscScalar *x;
  PetscInt           m = A->rmap->n;
#if !defined(PETSC_USE_FORTRAN_KERNEL_MULTTRANSPOSEAIJ)
  const MatScalar  *v;
  PetscScalar       alpha;
  PetscInt          n, i, j;
  const PetscInt   *idx, *ii, *ridx = NULL;
  Mat_CompressedRow cprow    = a->compressedrow;
  PetscBool         usecprow = cprow.use;
#endif

  PetscFunctionBegin;
  if (zz != yy) PetscCall(VecCopy(zz, yy));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(VecGetArray(yy, &y));
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));

#if defined(PETSC_USE_FORTRAN_KERNEL_MULTTRANSPOSEAIJ)
  fortranmulttransposeaddaij_(&m, x, a->i, a->j, aa, y);
#else
  if (usecprow) {
    /* compressed-row: iterate only over the nonzero rows, mapped through ridx */
    m    = cprow.nrows;
    ii   = cprow.i;
    ridx = cprow.rindex;
  } else {
    ii = a->i;
  }
  for (i = 0; i < m; i++) {
    idx = a->j + ii[i];
    v   = aa + ii[i];
    n   = ii[i + 1] - ii[i];
if (usecprow) {
      alpha = x[ridx[i]];
    } else {
      alpha = x[i];
    }
    /* scatter alpha * row_i into y at the row's column positions: y += alpha * A(i,:) */
    for (j = 0; j < n; j++) y[idx[j]] += alpha * v[j];
  }
#endif
  PetscCall(PetscLogFlops(2.0 * a->nz));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecRestoreArray(yy, &y));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatMultTranspose_SeqAIJ - Computes yy = A^T * xx by zeroing yy and reusing
   the transpose-add kernel.
*/
PetscErrorCode MatMultTranspose_SeqAIJ(Mat A, Vec xx, Vec yy)
{
  PetscFunctionBegin;
  PetscCall(VecSet(yy, 0.0));
  PetscCall(MatMultTransposeAdd_SeqAIJ(A, xx, yy, yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/seq/ftn-kernels/fmult.h>

/*
   MatMult_SeqAIJ - Sparse matrix-vector product yy = A * xx, with three paths:
   the inode kernel (when available), compressed-row storage for matrices with
   many empty rows, and the plain CSR row-by-row dot-product loop.
*/
PetscErrorCode MatMult_SeqAIJ(Mat A, Vec xx, Vec yy)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  PetscScalar       *y;
  const PetscScalar *x;
  const MatScalar   *aa, *a_a;
  PetscInt           m = A->rmap->n;
  const PetscInt    *aj, *ii, *ridx = NULL;
  PetscInt           n, i;
  PetscScalar        sum;
  PetscBool          usecprow = a->compressedrow.use;

#if defined(PETSC_HAVE_PRAGMA_DISJOINT)
#pragma disjoint(*x, *y, *aa)
#endif

  PetscFunctionBegin;
  if (a->inode.use && a->inode.checked) {
    PetscCall(MatMult_SeqAIJ_Inode(A, xx, yy));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(MatSeqAIJGetArrayRead(A, &a_a));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(VecGetArray(yy, &y));
  ii = a->i;
  if (usecprow) { /* use compressed row format */
    /* rows skipped by the compressed format get no write, so zero y up front */
    PetscCall(PetscArrayzero(y, m));
    m    = a->compressedrow.nrows;
    ii   = a->compressedrow.i;
    ridx = a->compressedrow.rindex;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = 0.0;
      PetscSparseDensePlusDot(sum, x, aa, aj, n);
      /* for (j=0; j<n; j++) sum += (*aa++)*x[*aj++]; */
      y[*ridx++] = sum;
    }
  } else { /* do not use compressed row format */
#if defined(PETSC_USE_FORTRAN_KERNEL_MULTAIJ)
    aj = a->j;
    aa = a_a;
    fortranmultaij_(&m, x, ii, aj, aa, y);
#else
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = 0.0;
      PetscSparseDensePlusDot(sum, x, aa, aj, n);
      y[i] = sum;
    }
#endif
  }
  PetscCall(PetscLogFlops(2.0 * a->nz - a->nonzerorowcnt));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecRestoreArray(yy, &y));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &a_a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatMultMax_SeqAIJ - Like MatMult but combines each row's products with
   PetscSparseDenseMaxDot instead of summation.
*/
PetscErrorCode MatMultMax_SeqAIJ(Mat A, Vec xx, Vec yy)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  PetscScalar       *y;
  const PetscScalar *x;
  const MatScalar   *aa, *a_a;
  PetscInt           m = A->rmap->n;
  const PetscInt    *aj, *ii, *ridx = NULL;
  PetscInt           n, i, nonzerorow = 0;
  PetscScalar        sum;
  PetscBool          usecprow = a->compressedrow.use;

#if defined(PETSC_HAVE_PRAGMA_DISJOINT)
#pragma disjoint(*x, *y, *aa)
#endif

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &a_a));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(VecGetArray(yy, &y));
  if (usecprow) { /* use compressed row format */
    m    = a->compressedrow.nrows;
    ii   = a->compressedrow.i;
    ridx = a->compressedrow.rindex;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = 0.0;
      nonzerorow += (n > 0);
      PetscSparseDenseMaxDot(sum, x, aa, aj, n);
      /* for (j=0; j<n; j++) sum += (*aa++)*x[*aj++]; */
      y[*ridx++] = sum;
    }
  } else { /* do not use compressed row format */
    ii = a->i;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = 0.0;
      nonzerorow += (n > 0);
      PetscSparseDenseMaxDot(sum, x, aa, aj, n);
      y[i] = sum;
    }
  }
  PetscCall(PetscLogFlops(2.0 * a->nz -
nonzerorow));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecRestoreArray(yy, &y));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &a_a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatMultAddMax_SeqAIJ - Max-combined variant of MatMultAdd: each result row
   starts from y and is combined with the row products via PetscSparseDenseMaxDot,
   the result stored in zz.
*/
PetscErrorCode MatMultAddMax_SeqAIJ(Mat A, Vec xx, Vec yy, Vec zz)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  PetscScalar       *y, *z;
  const PetscScalar *x;
  const MatScalar   *aa, *a_a;
  PetscInt           m = A->rmap->n, *aj, *ii;
  PetscInt           n, i, *ridx = NULL;
  PetscScalar        sum;
  PetscBool          usecprow = a->compressedrow.use;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &a_a));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(VecGetArrayPair(yy, zz, &y, &z));
  if (usecprow) { /* use compressed row format */
    /* rows absent from the compressed structure must still carry y into z */
    if (zz != yy) PetscCall(PetscArraycpy(z, y, m));
    m    = a->compressedrow.nrows;
    ii   = a->compressedrow.i;
    ridx = a->compressedrow.rindex;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = y[*ridx];
      PetscSparseDenseMaxDot(sum, x, aa, aj, n);
      z[*ridx++] = sum;
    }
  } else { /* do not use compressed row format */
    ii = a->i;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = y[i];
      PetscSparseDenseMaxDot(sum, x, aa, aj, n);
      z[i] = sum;
    }
  }
  PetscCall(PetscLogFlops(2.0 * a->nz));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecRestoreArrayPair(yy, zz, &y, &z));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &a_a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/aij/seq/ftn-kernels/fmultadd.h>
/*
   MatMultAdd_SeqAIJ - Computes zz = yy + A * xx, dispatching to the inode
   kernel, the compressed-row loop, or the plain CSR loop (optionally a
   Fortran kernel) as appropriate.
*/
PetscErrorCode MatMultAdd_SeqAIJ(Mat A, Vec xx, Vec yy, Vec zz)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  PetscScalar       *y, *z;
  const PetscScalar *x;
  const MatScalar   *aa, *a_a;
  const PetscInt    *aj, *ii, *ridx = NULL;
  PetscInt           m = A->rmap->n, n,
                     i;
  PetscScalar sum;
  PetscBool   usecprow = a->compressedrow.use;

  PetscFunctionBegin;
  if (a->inode.use && a->inode.checked) {
    PetscCall(MatMultAdd_SeqAIJ_Inode(A, xx, yy, zz));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(MatSeqAIJGetArrayRead(A, &a_a));
  PetscCall(VecGetArrayRead(xx, &x));
  PetscCall(VecGetArrayPair(yy, zz, &y, &z));
  if (usecprow) { /* use compressed row format */
    if (zz != yy) PetscCall(PetscArraycpy(z, y, m));
    m    = a->compressedrow.nrows;
    ii   = a->compressedrow.i;
    ridx = a->compressedrow.rindex;
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = y[*ridx];
      PetscSparseDensePlusDot(sum, x, aa, aj, n);
      z[*ridx++] = sum;
    }
  } else { /* do not use compressed row format */
    ii = a->i;
#if defined(PETSC_USE_FORTRAN_KERNEL_MULTADDAIJ)
    aj = a->j;
    aa = a_a;
    fortranmultaddaij_(&m, x, ii, aj, aa, y, z);
#else
    for (i = 0; i < m; i++) {
      n   = ii[i + 1] - ii[i];
      aj  = a->j + ii[i];
      aa  = a_a + ii[i];
      sum = y[i];
      PetscSparseDensePlusDot(sum, x, aa, aj, n);
      z[i] = sum;
    }
#endif
  }
  PetscCall(PetscLogFlops(2.0 * a->nz));
  PetscCall(VecRestoreArrayRead(xx, &x));
  PetscCall(VecRestoreArrayPair(yy, zz, &y, &z));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &a_a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Adds diagonal pointers to sparse matrix structure.
*/
PetscErrorCode MatMarkDiagonal_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt    i, j, m = A->rmap->n;
  PetscBool   alreadySet = PETSC_TRUE;

  PetscFunctionBegin;
  if (!a->diag) {
    PetscCall(PetscMalloc1(m, &a->diag));
    alreadySet = PETSC_FALSE;
  }
  for (i = 0; i < A->rmap->n; i++) {
    /* If A's diagonal is already correctly set, this fast track enables cheap and repeated MatMarkDiagonal_SeqAIJ() calls */
    if (alreadySet) {
      PetscInt pos = a->diag[i];
      if (pos >= a->i[i] && pos < a->i[i + 1] && a->j[pos] == i) continue;
    }

    /* sentinel: diag[i] == a->i[i+1] (one past the row) means "no diagonal stored in row i" */
    a->diag[i] = a->i[i + 1];
    for (j = a->i[i]; j < a->i[i + 1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   MatShift_SeqAIJ - Adds v to the diagonal of A. If every diagonal entry is
   already present the shift is done in place; otherwise the storage is
   reallocated with room for the missing diagonal entries and the old values
   are copied over.
*/
PetscErrorCode MatShift_SeqAIJ(Mat A, PetscScalar v)
{
  Mat_SeqAIJ     *a    = (Mat_SeqAIJ *)A->data;
  const PetscInt *diag = (const PetscInt *)a->diag;
  const PetscInt *ii   = (const PetscInt *)a->i;
  PetscInt        i, *mdiag = NULL;
  PetscInt        cnt = 0; /* how many diagonals are missing */

  PetscFunctionBegin;
  if (!A->preallocated || !a->nz) {
    /* empty matrix: give every row one slot and fall through to the basic shift */
    PetscCall(MatSeqAIJSetPreallocation(A, 1, NULL));
    PetscCall(MatShift_Basic(A, v));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  if (a->diagonaldense) {
    cnt = 0;
  } else {
    PetscCall(PetscCalloc1(A->rmap->n, &mdiag));
    for (i = 0; i < A->rmap->n; i++) {
      if (i < A->cmap->n && diag[i] >= ii[i + 1]) { /* 'out of range' rows never have diagonals */
        cnt++;
        mdiag[i] = 1;
      }
    }
  }
  if (!cnt) {
    PetscCall(MatShift_Basic(A, v));
  } else {
    PetscScalar *olda = a->a; /* preserve pointers to current matrix nonzeros structure and values */
    PetscInt    *oldj = a->j, *oldi = a->i;
    PetscBool    singlemalloc = a->singlemalloc, free_a = a->free_a, free_ij = a->free_ij;

    a->a = NULL;
    a->j = NULL;
    a->i = NULL;
    /* increase the values in imax for each row where a diagonal is being inserted then reallocate the matrix data structures */
    for (i = 0; i < PetscMin(A->rmap->n, A->cmap->n); i++) a->imax[i] += mdiag[i];
    PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(A, 0, a->imax));

    /* copy old values into new matrix data structure */
    for (i = 0; i < A->rmap->n; i++) {
      PetscCall(MatSetValues(A, 1, &i, a->imax[i] - mdiag[i], &oldj[oldi[i]], &olda[oldi[i]], ADD_VALUES));
      if (i < A->cmap->n) PetscCall(MatSetValue(A, i, i, v, ADD_VALUES));
    }
    PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
    /* release the old storage using the ownership flags captured above */
    if (singlemalloc) {
      PetscCall(PetscFree3(olda, oldj, oldi));
    } else {
      if (free_a) PetscCall(PetscFree(olda));
      if (free_ij) PetscCall(PetscFree(oldj));
      if (free_ij) PetscCall(PetscFree(oldi));
    }
  }
  PetscCall(PetscFree(mdiag));
  a->diagonaldense = PETSC_TRUE; /* after a successful shift every diagonal entry is stored */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
     Checks for missing diagonals
*/
PetscErrorCode MatMissingDiagonal_SeqAIJ(Mat A, PetscBool *missing, PetscInt *d)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *diag, *ii = a->i, i;

  PetscFunctionBegin;
  *missing = PETSC_FALSE;
  if (A->rmap->n > 0 && !ii) {
    /* no row pointers at all: the matrix has no entries, report row 0 as missing */
    *missing = PETSC_TRUE;
    if (d) *d = 0;
    PetscCall(PetscInfo(A, "Matrix has no entries therefore is missing diagonal\n"));
  } else {
    PetscInt n;
    n    = PetscMin(A->rmap->n, A->cmap->n);
    diag = a->diag;
    for (i = 0; i < n; i++) {
      /* diag[i] >= ii[i+1] is the MatMarkDiagonal_SeqAIJ() sentinel for "no diagonal in row i" */
      if (diag[i] >= ii[i + 1]) {
        *missing = PETSC_TRUE;
        if (d) *d = i;
        PetscCall(PetscInfo(A, "Matrix is missing diagonal number %" PetscInt_FMT "\n", i));
        break;
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petscblaslapack.h>
#include <petsc/private/kernels/blockinvert.h>

/*
Note that values is allocated externally by the PC and then passed into this routine 1808 */ 1809 PetscErrorCode MatInvertVariableBlockDiagonal_SeqAIJ(Mat A, PetscInt nblocks, const PetscInt *bsizes, PetscScalar *diag) 1810 { 1811 PetscInt n = A->rmap->n, i, ncnt = 0, *indx, j, bsizemax = 0, *v_pivots; 1812 PetscBool allowzeropivot, zeropivotdetected = PETSC_FALSE; 1813 const PetscReal shift = 0.0; 1814 PetscInt ipvt[5]; 1815 PetscCount flops = 0; 1816 PetscScalar work[25], *v_work; 1817 1818 PetscFunctionBegin; 1819 allowzeropivot = PetscNot(A->erroriffailure); 1820 for (i = 0; i < nblocks; i++) ncnt += bsizes[i]; 1821 PetscCheck(ncnt == n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Total blocksizes %" PetscInt_FMT " doesn't match number matrix rows %" PetscInt_FMT, ncnt, n); 1822 for (i = 0; i < nblocks; i++) bsizemax = PetscMax(bsizemax, bsizes[i]); 1823 PetscCall(PetscMalloc1(bsizemax, &indx)); 1824 if (bsizemax > 7) PetscCall(PetscMalloc2(bsizemax, &v_work, bsizemax, &v_pivots)); 1825 ncnt = 0; 1826 for (i = 0; i < nblocks; i++) { 1827 for (j = 0; j < bsizes[i]; j++) indx[j] = ncnt + j; 1828 PetscCall(MatGetValues(A, bsizes[i], indx, bsizes[i], indx, diag)); 1829 switch (bsizes[i]) { 1830 case 1: 1831 *diag = 1.0 / (*diag); 1832 break; 1833 case 2: 1834 PetscCall(PetscKernel_A_gets_inverse_A_2(diag, shift, allowzeropivot, &zeropivotdetected)); 1835 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1836 PetscCall(PetscKernel_A_gets_transpose_A_2(diag)); 1837 break; 1838 case 3: 1839 PetscCall(PetscKernel_A_gets_inverse_A_3(diag, shift, allowzeropivot, &zeropivotdetected)); 1840 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1841 PetscCall(PetscKernel_A_gets_transpose_A_3(diag)); 1842 break; 1843 case 4: 1844 PetscCall(PetscKernel_A_gets_inverse_A_4(diag, shift, allowzeropivot, &zeropivotdetected)); 1845 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1846 
PetscCall(PetscKernel_A_gets_transpose_A_4(diag)); 1847 break; 1848 case 5: 1849 PetscCall(PetscKernel_A_gets_inverse_A_5(diag, ipvt, work, shift, allowzeropivot, &zeropivotdetected)); 1850 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1851 PetscCall(PetscKernel_A_gets_transpose_A_5(diag)); 1852 break; 1853 case 6: 1854 PetscCall(PetscKernel_A_gets_inverse_A_6(diag, shift, allowzeropivot, &zeropivotdetected)); 1855 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1856 PetscCall(PetscKernel_A_gets_transpose_A_6(diag)); 1857 break; 1858 case 7: 1859 PetscCall(PetscKernel_A_gets_inverse_A_7(diag, shift, allowzeropivot, &zeropivotdetected)); 1860 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1861 PetscCall(PetscKernel_A_gets_transpose_A_7(diag)); 1862 break; 1863 default: 1864 PetscCall(PetscKernel_A_gets_inverse_A(bsizes[i], diag, v_pivots, v_work, allowzeropivot, &zeropivotdetected)); 1865 if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1866 PetscCall(PetscKernel_A_gets_transpose_A_N(diag, bsizes[i])); 1867 } 1868 ncnt += bsizes[i]; 1869 diag += bsizes[i] * bsizes[i]; 1870 flops += 2 * PetscPowInt(bsizes[i], 3) / 3; 1871 } 1872 PetscCall(PetscLogFlops(flops)); 1873 if (bsizemax > 7) PetscCall(PetscFree2(v_work, v_pivots)); 1874 PetscCall(PetscFree(indx)); 1875 PetscFunctionReturn(PETSC_SUCCESS); 1876 } 1877 1878 /* 1879 Negative shift indicates do not generate an error if there is a zero diagonal, just invert it anyways 1880 */ 1881 PetscErrorCode MatInvertDiagonal_SeqAIJ(Mat A, PetscScalar omega, PetscScalar fshift) 1882 { 1883 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 1884 PetscInt i, *diag, m = A->rmap->n; 1885 const MatScalar *v; 1886 PetscScalar *idiag, *mdiag; 1887 1888 PetscFunctionBegin; 1889 if (a->idiagvalid) PetscFunctionReturn(PETSC_SUCCESS); 1890 PetscCall(MatMarkDiagonal_SeqAIJ(A)); 1891 diag = a->diag; 1892 if (!a->idiag) { PetscCall(PetscMalloc3(m, 
&a->idiag, m, &a->mdiag, m, &a->ssor_work)); } 1893 1894 mdiag = a->mdiag; 1895 idiag = a->idiag; 1896 PetscCall(MatSeqAIJGetArrayRead(A, &v)); 1897 if (omega == 1.0 && PetscRealPart(fshift) <= 0.0) { 1898 for (i = 0; i < m; i++) { 1899 mdiag[i] = v[diag[i]]; 1900 if (!PetscAbsScalar(mdiag[i])) { /* zero diagonal */ 1901 if (PetscRealPart(fshift)) { 1902 PetscCall(PetscInfo(A, "Zero diagonal on row %" PetscInt_FMT "\n", i)); 1903 A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT; 1904 A->factorerror_zeropivot_value = 0.0; 1905 A->factorerror_zeropivot_row = i; 1906 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Zero diagonal on row %" PetscInt_FMT, i); 1907 } 1908 idiag[i] = 1.0 / v[diag[i]]; 1909 } 1910 PetscCall(PetscLogFlops(m)); 1911 } else { 1912 for (i = 0; i < m; i++) { 1913 mdiag[i] = v[diag[i]]; 1914 idiag[i] = omega / (fshift + v[diag[i]]); 1915 } 1916 PetscCall(PetscLogFlops(2.0 * m)); 1917 } 1918 a->idiagvalid = PETSC_TRUE; 1919 PetscCall(MatSeqAIJRestoreArrayRead(A, &v)); 1920 PetscFunctionReturn(PETSC_SUCCESS); 1921 } 1922 1923 #include <../src/mat/impls/aij/seq/ftn-kernels/frelax.h> 1924 PetscErrorCode MatSOR_SeqAIJ(Mat A, Vec bb, PetscReal omega, MatSORType flag, PetscReal fshift, PetscInt its, PetscInt lits, Vec xx) 1925 { 1926 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 1927 PetscScalar *x, d, sum, *t, scale; 1928 const MatScalar *v, *idiag = NULL, *mdiag, *aa; 1929 const PetscScalar *b, *bs, *xb, *ts; 1930 PetscInt n, m = A->rmap->n, i; 1931 const PetscInt *idx, *diag; 1932 1933 PetscFunctionBegin; 1934 if (a->inode.use && a->inode.checked && omega == 1.0 && fshift == 0.0) { 1935 PetscCall(MatSOR_SeqAIJ_Inode(A, bb, omega, flag, fshift, its, lits, xx)); 1936 PetscFunctionReturn(PETSC_SUCCESS); 1937 } 1938 its = its * lits; 1939 1940 if (fshift != a->fshift || omega != a->omega) a->idiagvalid = PETSC_FALSE; /* must recompute idiag[] */ 1941 if (!a->idiagvalid) PetscCall(MatInvertDiagonal_SeqAIJ(A, omega, fshift)); 1942 a->fshift = fshift; 1943 
a->omega = omega; 1944 1945 diag = a->diag; 1946 t = a->ssor_work; 1947 idiag = a->idiag; 1948 mdiag = a->mdiag; 1949 1950 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 1951 PetscCall(VecGetArray(xx, &x)); 1952 PetscCall(VecGetArrayRead(bb, &b)); 1953 /* We count flops by assuming the upper triangular and lower triangular parts have the same number of nonzeros */ 1954 if (flag == SOR_APPLY_UPPER) { 1955 /* apply (U + D/omega) to the vector */ 1956 bs = b; 1957 for (i = 0; i < m; i++) { 1958 d = fshift + mdiag[i]; 1959 n = a->i[i + 1] - diag[i] - 1; 1960 idx = a->j + diag[i] + 1; 1961 v = aa + diag[i] + 1; 1962 sum = b[i] * d / omega; 1963 PetscSparseDensePlusDot(sum, bs, v, idx, n); 1964 x[i] = sum; 1965 } 1966 PetscCall(VecRestoreArray(xx, &x)); 1967 PetscCall(VecRestoreArrayRead(bb, &b)); 1968 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 1969 PetscCall(PetscLogFlops(a->nz)); 1970 PetscFunctionReturn(PETSC_SUCCESS); 1971 } 1972 1973 PetscCheck(flag != SOR_APPLY_LOWER, PETSC_COMM_SELF, PETSC_ERR_SUP, "SOR_APPLY_LOWER is not implemented"); 1974 if (flag & SOR_EISENSTAT) { 1975 /* Let A = L + U + D; where L is lower triangular, 1976 U is upper triangular, E = D/omega; This routine applies 1977 1978 (L + E)^{-1} A (U + E)^{-1} 1979 1980 to a vector efficiently using Eisenstat's trick. 
1981 */ 1982 scale = (2.0 / omega) - 1.0; 1983 1984 /* x = (E + U)^{-1} b */ 1985 for (i = m - 1; i >= 0; i--) { 1986 n = a->i[i + 1] - diag[i] - 1; 1987 idx = a->j + diag[i] + 1; 1988 v = aa + diag[i] + 1; 1989 sum = b[i]; 1990 PetscSparseDenseMinusDot(sum, x, v, idx, n); 1991 x[i] = sum * idiag[i]; 1992 } 1993 1994 /* t = b - (2*E - D)x */ 1995 v = aa; 1996 for (i = 0; i < m; i++) t[i] = b[i] - scale * (v[*diag++]) * x[i]; 1997 1998 /* t = (E + L)^{-1}t */ 1999 ts = t; 2000 diag = a->diag; 2001 for (i = 0; i < m; i++) { 2002 n = diag[i] - a->i[i]; 2003 idx = a->j + a->i[i]; 2004 v = aa + a->i[i]; 2005 sum = t[i]; 2006 PetscSparseDenseMinusDot(sum, ts, v, idx, n); 2007 t[i] = sum * idiag[i]; 2008 /* x = x + t */ 2009 x[i] += t[i]; 2010 } 2011 2012 PetscCall(PetscLogFlops(6.0 * m - 1 + 2.0 * a->nz)); 2013 PetscCall(VecRestoreArray(xx, &x)); 2014 PetscCall(VecRestoreArrayRead(bb, &b)); 2015 PetscFunctionReturn(PETSC_SUCCESS); 2016 } 2017 if (flag & SOR_ZERO_INITIAL_GUESS) { 2018 if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) { 2019 for (i = 0; i < m; i++) { 2020 n = diag[i] - a->i[i]; 2021 idx = a->j + a->i[i]; 2022 v = aa + a->i[i]; 2023 sum = b[i]; 2024 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2025 t[i] = sum; 2026 x[i] = sum * idiag[i]; 2027 } 2028 xb = t; 2029 PetscCall(PetscLogFlops(a->nz)); 2030 } else xb = b; 2031 if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) { 2032 for (i = m - 1; i >= 0; i--) { 2033 n = a->i[i + 1] - diag[i] - 1; 2034 idx = a->j + diag[i] + 1; 2035 v = aa + diag[i] + 1; 2036 sum = xb[i]; 2037 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2038 if (xb == b) { 2039 x[i] = sum * idiag[i]; 2040 } else { 2041 x[i] = (1 - omega) * x[i] + sum * idiag[i]; /* omega in idiag */ 2042 } 2043 } 2044 PetscCall(PetscLogFlops(a->nz)); /* assumes 1/2 in upper */ 2045 } 2046 its--; 2047 } 2048 while (its--) { 2049 if (flag & SOR_FORWARD_SWEEP || flag & SOR_LOCAL_FORWARD_SWEEP) { 2050 for (i = 0; i < m; i++) { 2051 /* 
lower */ 2052 n = diag[i] - a->i[i]; 2053 idx = a->j + a->i[i]; 2054 v = aa + a->i[i]; 2055 sum = b[i]; 2056 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2057 t[i] = sum; /* save application of the lower-triangular part */ 2058 /* upper */ 2059 n = a->i[i + 1] - diag[i] - 1; 2060 idx = a->j + diag[i] + 1; 2061 v = aa + diag[i] + 1; 2062 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2063 x[i] = (1. - omega) * x[i] + sum * idiag[i]; /* omega in idiag */ 2064 } 2065 xb = t; 2066 PetscCall(PetscLogFlops(2.0 * a->nz)); 2067 } else xb = b; 2068 if (flag & SOR_BACKWARD_SWEEP || flag & SOR_LOCAL_BACKWARD_SWEEP) { 2069 for (i = m - 1; i >= 0; i--) { 2070 sum = xb[i]; 2071 if (xb == b) { 2072 /* whole matrix (no checkpointing available) */ 2073 n = a->i[i + 1] - a->i[i]; 2074 idx = a->j + a->i[i]; 2075 v = aa + a->i[i]; 2076 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2077 x[i] = (1. - omega) * x[i] + (sum + mdiag[i] * x[i]) * idiag[i]; 2078 } else { /* lower-triangular part has been saved, so only apply upper-triangular */ 2079 n = a->i[i + 1] - diag[i] - 1; 2080 idx = a->j + diag[i] + 1; 2081 v = aa + diag[i] + 1; 2082 PetscSparseDenseMinusDot(sum, x, v, idx, n); 2083 x[i] = (1. 
- omega) * x[i] + sum * idiag[i]; /* omega in idiag */ 2084 } 2085 } 2086 if (xb == b) { 2087 PetscCall(PetscLogFlops(2.0 * a->nz)); 2088 } else { 2089 PetscCall(PetscLogFlops(a->nz)); /* assumes 1/2 in upper */ 2090 } 2091 } 2092 } 2093 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2094 PetscCall(VecRestoreArray(xx, &x)); 2095 PetscCall(VecRestoreArrayRead(bb, &b)); 2096 PetscFunctionReturn(PETSC_SUCCESS); 2097 } 2098 2099 PetscErrorCode MatGetInfo_SeqAIJ(Mat A, MatInfoType flag, MatInfo *info) 2100 { 2101 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2102 2103 PetscFunctionBegin; 2104 info->block_size = 1.0; 2105 info->nz_allocated = a->maxnz; 2106 info->nz_used = a->nz; 2107 info->nz_unneeded = (a->maxnz - a->nz); 2108 info->assemblies = A->num_ass; 2109 info->mallocs = A->info.mallocs; 2110 info->memory = 0; /* REVIEW ME */ 2111 if (A->factortype) { 2112 info->fill_ratio_given = A->info.fill_ratio_given; 2113 info->fill_ratio_needed = A->info.fill_ratio_needed; 2114 info->factor_mallocs = A->info.factor_mallocs; 2115 } else { 2116 info->fill_ratio_given = 0; 2117 info->fill_ratio_needed = 0; 2118 info->factor_mallocs = 0; 2119 } 2120 PetscFunctionReturn(PETSC_SUCCESS); 2121 } 2122 2123 PetscErrorCode MatZeroRows_SeqAIJ(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 2124 { 2125 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2126 PetscInt i, m = A->rmap->n - 1; 2127 const PetscScalar *xx; 2128 PetscScalar *bb, *aa; 2129 PetscInt d = 0; 2130 2131 PetscFunctionBegin; 2132 if (x && b) { 2133 PetscCall(VecGetArrayRead(x, &xx)); 2134 PetscCall(VecGetArray(b, &bb)); 2135 for (i = 0; i < N; i++) { 2136 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2137 if (rows[i] >= A->cmap->n) continue; 2138 bb[rows[i]] = diag * xx[rows[i]]; 2139 } 2140 PetscCall(VecRestoreArrayRead(x, &xx)); 2141 PetscCall(VecRestoreArray(b, &bb)); 2142 } 2143 2144 PetscCall(MatSeqAIJGetArray(A, &aa)); 
2145 if (a->keepnonzeropattern) { 2146 for (i = 0; i < N; i++) { 2147 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2148 PetscCall(PetscArrayzero(&aa[a->i[rows[i]]], a->ilen[rows[i]])); 2149 } 2150 if (diag != 0.0) { 2151 for (i = 0; i < N; i++) { 2152 d = rows[i]; 2153 if (rows[i] >= A->cmap->n) continue; 2154 PetscCheck(a->diag[d] < a->i[d + 1], PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is missing diagonal entry in the zeroed row %" PetscInt_FMT, d); 2155 } 2156 for (i = 0; i < N; i++) { 2157 if (rows[i] >= A->cmap->n) continue; 2158 aa[a->diag[rows[i]]] = diag; 2159 } 2160 } 2161 } else { 2162 if (diag != 0.0) { 2163 for (i = 0; i < N; i++) { 2164 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2165 if (a->ilen[rows[i]] > 0) { 2166 if (rows[i] >= A->cmap->n) { 2167 a->ilen[rows[i]] = 0; 2168 } else { 2169 a->ilen[rows[i]] = 1; 2170 aa[a->i[rows[i]]] = diag; 2171 a->j[a->i[rows[i]]] = rows[i]; 2172 } 2173 } else if (rows[i] < A->cmap->n) { /* in case row was completely empty */ 2174 PetscCall(MatSetValues_SeqAIJ(A, 1, &rows[i], 1, &rows[i], &diag, INSERT_VALUES)); 2175 } 2176 } 2177 } else { 2178 for (i = 0; i < N; i++) { 2179 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2180 a->ilen[rows[i]] = 0; 2181 } 2182 } 2183 A->nonzerostate++; 2184 } 2185 PetscCall(MatSeqAIJRestoreArray(A, &aa)); 2186 PetscUseTypeMethod(A, assemblyend, MAT_FINAL_ASSEMBLY); 2187 PetscFunctionReturn(PETSC_SUCCESS); 2188 } 2189 2190 PetscErrorCode MatZeroRowsColumns_SeqAIJ(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 2191 { 2192 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2193 PetscInt i, j, m = A->rmap->n - 1, d = 0; 2194 PetscBool missing, *zeroed, vecs = PETSC_FALSE; 2195 const PetscScalar *xx; 2196 
PetscScalar *bb, *aa; 2197 2198 PetscFunctionBegin; 2199 if (!N) PetscFunctionReturn(PETSC_SUCCESS); 2200 PetscCall(MatSeqAIJGetArray(A, &aa)); 2201 if (x && b) { 2202 PetscCall(VecGetArrayRead(x, &xx)); 2203 PetscCall(VecGetArray(b, &bb)); 2204 vecs = PETSC_TRUE; 2205 } 2206 PetscCall(PetscCalloc1(A->rmap->n, &zeroed)); 2207 for (i = 0; i < N; i++) { 2208 PetscCheck(rows[i] >= 0 && rows[i] <= m, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "row %" PetscInt_FMT " out of range", rows[i]); 2209 PetscCall(PetscArrayzero(&aa[a->i[rows[i]]], a->ilen[rows[i]])); 2210 2211 zeroed[rows[i]] = PETSC_TRUE; 2212 } 2213 for (i = 0; i < A->rmap->n; i++) { 2214 if (!zeroed[i]) { 2215 for (j = a->i[i]; j < a->i[i + 1]; j++) { 2216 if (a->j[j] < A->rmap->n && zeroed[a->j[j]]) { 2217 if (vecs) bb[i] -= aa[j] * xx[a->j[j]]; 2218 aa[j] = 0.0; 2219 } 2220 } 2221 } else if (vecs && i < A->cmap->N) bb[i] = diag * xx[i]; 2222 } 2223 if (x && b) { 2224 PetscCall(VecRestoreArrayRead(x, &xx)); 2225 PetscCall(VecRestoreArray(b, &bb)); 2226 } 2227 PetscCall(PetscFree(zeroed)); 2228 if (diag != 0.0) { 2229 PetscCall(MatMissingDiagonal_SeqAIJ(A, &missing, &d)); 2230 if (missing) { 2231 for (i = 0; i < N; i++) { 2232 if (rows[i] >= A->cmap->N) continue; 2233 PetscCheck(!a->nonew || rows[i] < d, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Matrix is missing diagonal entry in row %" PetscInt_FMT " (%" PetscInt_FMT ")", d, rows[i]); 2234 PetscCall(MatSetValues_SeqAIJ(A, 1, &rows[i], 1, &rows[i], &diag, INSERT_VALUES)); 2235 } 2236 } else { 2237 for (i = 0; i < N; i++) aa[a->diag[rows[i]]] = diag; 2238 } 2239 } 2240 PetscCall(MatSeqAIJRestoreArray(A, &aa)); 2241 PetscUseTypeMethod(A, assemblyend, MAT_FINAL_ASSEMBLY); 2242 PetscFunctionReturn(PETSC_SUCCESS); 2243 } 2244 2245 PetscErrorCode MatGetRow_SeqAIJ(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) 2246 { 2247 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2248 const PetscScalar *aa; 2249 PetscInt *itmp; 2250 2251 PetscFunctionBegin; 
2252 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 2253 *nz = a->i[row + 1] - a->i[row]; 2254 if (v) *v = (PetscScalar *)(aa + a->i[row]); 2255 if (idx) { 2256 itmp = a->j + a->i[row]; 2257 if (*nz) *idx = itmp; 2258 else *idx = NULL; 2259 } 2260 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2261 PetscFunctionReturn(PETSC_SUCCESS); 2262 } 2263 2264 PetscErrorCode MatRestoreRow_SeqAIJ(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v) 2265 { 2266 PetscFunctionBegin; 2267 if (nz) *nz = 0; 2268 if (idx) *idx = NULL; 2269 if (v) *v = NULL; 2270 PetscFunctionReturn(PETSC_SUCCESS); 2271 } 2272 2273 PetscErrorCode MatNorm_SeqAIJ(Mat A, NormType type, PetscReal *nrm) 2274 { 2275 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2276 const MatScalar *v; 2277 PetscReal sum = 0.0; 2278 PetscInt i, j; 2279 2280 PetscFunctionBegin; 2281 PetscCall(MatSeqAIJGetArrayRead(A, &v)); 2282 if (type == NORM_FROBENIUS) { 2283 #if defined(PETSC_USE_REAL___FP16) 2284 PetscBLASInt one = 1, nz = a->nz; 2285 PetscCallBLAS("BLASnrm2", *nrm = BLASnrm2_(&nz, v, &one)); 2286 #else 2287 for (i = 0; i < a->nz; i++) { 2288 sum += PetscRealPart(PetscConj(*v) * (*v)); 2289 v++; 2290 } 2291 *nrm = PetscSqrtReal(sum); 2292 #endif 2293 PetscCall(PetscLogFlops(2.0 * a->nz)); 2294 } else if (type == NORM_1) { 2295 PetscReal *tmp; 2296 PetscInt *jj = a->j; 2297 PetscCall(PetscCalloc1(A->cmap->n + 1, &tmp)); 2298 *nrm = 0.0; 2299 for (j = 0; j < a->nz; j++) { 2300 tmp[*jj++] += PetscAbsScalar(*v); 2301 v++; 2302 } 2303 for (j = 0; j < A->cmap->n; j++) { 2304 if (tmp[j] > *nrm) *nrm = tmp[j]; 2305 } 2306 PetscCall(PetscFree(tmp)); 2307 PetscCall(PetscLogFlops(PetscMax(a->nz - 1, 0))); 2308 } else if (type == NORM_INFINITY) { 2309 *nrm = 0.0; 2310 for (j = 0; j < A->rmap->n; j++) { 2311 const PetscScalar *v2 = v + a->i[j]; 2312 sum = 0.0; 2313 for (i = 0; i < a->i[j + 1] - a->i[j]; i++) { 2314 sum += PetscAbsScalar(*v2); 2315 v2++; 2316 } 2317 if (sum > *nrm) *nrm = sum; 2318 } 2319 
PetscCall(PetscLogFlops(PetscMax(a->nz - 1, 0))); 2320 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "No support for two norm"); 2321 PetscCall(MatSeqAIJRestoreArrayRead(A, &v)); 2322 PetscFunctionReturn(PETSC_SUCCESS); 2323 } 2324 2325 PetscErrorCode MatIsTranspose_SeqAIJ(Mat A, Mat B, PetscReal tol, PetscBool *f) 2326 { 2327 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data, *bij = (Mat_SeqAIJ *)B->data; 2328 PetscInt *adx, *bdx, *aii, *bii, *aptr, *bptr; 2329 const MatScalar *va, *vb; 2330 PetscInt ma, na, mb, nb, i; 2331 2332 PetscFunctionBegin; 2333 PetscCall(MatGetSize(A, &ma, &na)); 2334 PetscCall(MatGetSize(B, &mb, &nb)); 2335 if (ma != nb || na != mb) { 2336 *f = PETSC_FALSE; 2337 PetscFunctionReturn(PETSC_SUCCESS); 2338 } 2339 PetscCall(MatSeqAIJGetArrayRead(A, &va)); 2340 PetscCall(MatSeqAIJGetArrayRead(B, &vb)); 2341 aii = aij->i; 2342 bii = bij->i; 2343 adx = aij->j; 2344 bdx = bij->j; 2345 PetscCall(PetscMalloc1(ma, &aptr)); 2346 PetscCall(PetscMalloc1(mb, &bptr)); 2347 for (i = 0; i < ma; i++) aptr[i] = aii[i]; 2348 for (i = 0; i < mb; i++) bptr[i] = bii[i]; 2349 2350 *f = PETSC_TRUE; 2351 for (i = 0; i < ma; i++) { 2352 while (aptr[i] < aii[i + 1]) { 2353 PetscInt idc, idr; 2354 PetscScalar vc, vr; 2355 /* column/row index/value */ 2356 idc = adx[aptr[i]]; 2357 idr = bdx[bptr[idc]]; 2358 vc = va[aptr[i]]; 2359 vr = vb[bptr[idc]]; 2360 if (i != idr || PetscAbsScalar(vc - vr) > tol) { 2361 *f = PETSC_FALSE; 2362 goto done; 2363 } else { 2364 aptr[i]++; 2365 if (B || i != idc) bptr[idc]++; 2366 } 2367 } 2368 } 2369 done: 2370 PetscCall(PetscFree(aptr)); 2371 PetscCall(PetscFree(bptr)); 2372 PetscCall(MatSeqAIJRestoreArrayRead(A, &va)); 2373 PetscCall(MatSeqAIJRestoreArrayRead(B, &vb)); 2374 PetscFunctionReturn(PETSC_SUCCESS); 2375 } 2376 2377 PetscErrorCode MatIsHermitianTranspose_SeqAIJ(Mat A, Mat B, PetscReal tol, PetscBool *f) 2378 { 2379 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data, *bij = (Mat_SeqAIJ *)B->data; 2380 PetscInt *adx, *bdx, *aii, *bii, *aptr, 
*bptr; 2381 MatScalar *va, *vb; 2382 PetscInt ma, na, mb, nb, i; 2383 2384 PetscFunctionBegin; 2385 PetscCall(MatGetSize(A, &ma, &na)); 2386 PetscCall(MatGetSize(B, &mb, &nb)); 2387 if (ma != nb || na != mb) { 2388 *f = PETSC_FALSE; 2389 PetscFunctionReturn(PETSC_SUCCESS); 2390 } 2391 aii = aij->i; 2392 bii = bij->i; 2393 adx = aij->j; 2394 bdx = bij->j; 2395 va = aij->a; 2396 vb = bij->a; 2397 PetscCall(PetscMalloc1(ma, &aptr)); 2398 PetscCall(PetscMalloc1(mb, &bptr)); 2399 for (i = 0; i < ma; i++) aptr[i] = aii[i]; 2400 for (i = 0; i < mb; i++) bptr[i] = bii[i]; 2401 2402 *f = PETSC_TRUE; 2403 for (i = 0; i < ma; i++) { 2404 while (aptr[i] < aii[i + 1]) { 2405 PetscInt idc, idr; 2406 PetscScalar vc, vr; 2407 /* column/row index/value */ 2408 idc = adx[aptr[i]]; 2409 idr = bdx[bptr[idc]]; 2410 vc = va[aptr[i]]; 2411 vr = vb[bptr[idc]]; 2412 if (i != idr || PetscAbsScalar(vc - PetscConj(vr)) > tol) { 2413 *f = PETSC_FALSE; 2414 goto done; 2415 } else { 2416 aptr[i]++; 2417 if (B || i != idc) bptr[idc]++; 2418 } 2419 } 2420 } 2421 done: 2422 PetscCall(PetscFree(aptr)); 2423 PetscCall(PetscFree(bptr)); 2424 PetscFunctionReturn(PETSC_SUCCESS); 2425 } 2426 2427 PetscErrorCode MatIsSymmetric_SeqAIJ(Mat A, PetscReal tol, PetscBool *f) 2428 { 2429 PetscFunctionBegin; 2430 PetscCall(MatIsTranspose_SeqAIJ(A, A, tol, f)); 2431 PetscFunctionReturn(PETSC_SUCCESS); 2432 } 2433 2434 PetscErrorCode MatIsHermitian_SeqAIJ(Mat A, PetscReal tol, PetscBool *f) 2435 { 2436 PetscFunctionBegin; 2437 PetscCall(MatIsHermitianTranspose_SeqAIJ(A, A, tol, f)); 2438 PetscFunctionReturn(PETSC_SUCCESS); 2439 } 2440 2441 PetscErrorCode MatDiagonalScale_SeqAIJ(Mat A, Vec ll, Vec rr) 2442 { 2443 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 2444 const PetscScalar *l, *r; 2445 PetscScalar x; 2446 MatScalar *v; 2447 PetscInt i, j, m = A->rmap->n, n = A->cmap->n, M, nz = a->nz; 2448 const PetscInt *jj; 2449 2450 PetscFunctionBegin; 2451 if (ll) { 2452 /* The local size is used so that VecMPI can be passed to 
this routine 2453 by MatDiagonalScale_MPIAIJ */ 2454 PetscCall(VecGetLocalSize(ll, &m)); 2455 PetscCheck(m == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Left scaling vector wrong length"); 2456 PetscCall(VecGetArrayRead(ll, &l)); 2457 PetscCall(MatSeqAIJGetArray(A, &v)); 2458 for (i = 0; i < m; i++) { 2459 x = l[i]; 2460 M = a->i[i + 1] - a->i[i]; 2461 for (j = 0; j < M; j++) (*v++) *= x; 2462 } 2463 PetscCall(VecRestoreArrayRead(ll, &l)); 2464 PetscCall(PetscLogFlops(nz)); 2465 PetscCall(MatSeqAIJRestoreArray(A, &v)); 2466 } 2467 if (rr) { 2468 PetscCall(VecGetLocalSize(rr, &n)); 2469 PetscCheck(n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Right scaling vector wrong length"); 2470 PetscCall(VecGetArrayRead(rr, &r)); 2471 PetscCall(MatSeqAIJGetArray(A, &v)); 2472 jj = a->j; 2473 for (i = 0; i < nz; i++) (*v++) *= r[*jj++]; 2474 PetscCall(MatSeqAIJRestoreArray(A, &v)); 2475 PetscCall(VecRestoreArrayRead(rr, &r)); 2476 PetscCall(PetscLogFlops(nz)); 2477 } 2478 PetscCall(MatSeqAIJInvalidateDiagonal(A)); 2479 PetscFunctionReturn(PETSC_SUCCESS); 2480 } 2481 2482 PetscErrorCode MatCreateSubMatrix_SeqAIJ(Mat A, IS isrow, IS iscol, PetscInt csize, MatReuse scall, Mat *B) 2483 { 2484 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data, *c; 2485 PetscInt *smap, i, k, kstart, kend, oldcols = A->cmap->n, *lens; 2486 PetscInt row, mat_i, *mat_j, tcol, first, step, *mat_ilen, sum, lensi; 2487 const PetscInt *irow, *icol; 2488 const PetscScalar *aa; 2489 PetscInt nrows, ncols; 2490 PetscInt *starts, *j_new, *i_new, *aj = a->j, *ai = a->i, ii, *ailen = a->ilen; 2491 MatScalar *a_new, *mat_a, *c_a; 2492 Mat C; 2493 PetscBool stride; 2494 2495 PetscFunctionBegin; 2496 PetscCall(ISGetIndices(isrow, &irow)); 2497 PetscCall(ISGetLocalSize(isrow, &nrows)); 2498 PetscCall(ISGetLocalSize(iscol, &ncols)); 2499 2500 PetscCall(PetscObjectTypeCompare((PetscObject)iscol, ISSTRIDE, &stride)); 2501 if (stride) { 2502 PetscCall(ISStrideGetInfo(iscol, &first, &step)); 2503 } else { 2504 first = 
0; 2505 step = 0; 2506 } 2507 if (stride && step == 1) { 2508 /* special case of contiguous rows */ 2509 PetscCall(PetscMalloc2(nrows, &lens, nrows, &starts)); 2510 /* loop over new rows determining lens and starting points */ 2511 for (i = 0; i < nrows; i++) { 2512 kstart = ai[irow[i]]; 2513 kend = kstart + ailen[irow[i]]; 2514 starts[i] = kstart; 2515 for (k = kstart; k < kend; k++) { 2516 if (aj[k] >= first) { 2517 starts[i] = k; 2518 break; 2519 } 2520 } 2521 sum = 0; 2522 while (k < kend) { 2523 if (aj[k++] >= first + ncols) break; 2524 sum++; 2525 } 2526 lens[i] = sum; 2527 } 2528 /* create submatrix */ 2529 if (scall == MAT_REUSE_MATRIX) { 2530 PetscInt n_cols, n_rows; 2531 PetscCall(MatGetSize(*B, &n_rows, &n_cols)); 2532 PetscCheck(n_rows == nrows && n_cols == ncols, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Reused submatrix wrong size"); 2533 PetscCall(MatZeroEntries(*B)); 2534 C = *B; 2535 } else { 2536 PetscInt rbs, cbs; 2537 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C)); 2538 PetscCall(MatSetSizes(C, nrows, ncols, PETSC_DETERMINE, PETSC_DETERMINE)); 2539 PetscCall(ISGetBlockSize(isrow, &rbs)); 2540 PetscCall(ISGetBlockSize(iscol, &cbs)); 2541 PetscCall(MatSetBlockSizes(C, rbs, cbs)); 2542 PetscCall(MatSetType(C, ((PetscObject)A)->type_name)); 2543 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(C, 0, lens)); 2544 } 2545 c = (Mat_SeqAIJ *)C->data; 2546 2547 /* loop over rows inserting into submatrix */ 2548 PetscCall(MatSeqAIJGetArrayWrite(C, &a_new)); // Not 'a_new = c->a-new', since that raw usage ignores offload state of C 2549 j_new = c->j; 2550 i_new = c->i; 2551 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 2552 for (i = 0; i < nrows; i++) { 2553 ii = starts[i]; 2554 lensi = lens[i]; 2555 for (k = 0; k < lensi; k++) *j_new++ = aj[ii + k] - first; 2556 PetscCall(PetscArraycpy(a_new, aa + starts[i], lensi)); 2557 a_new += lensi; 2558 i_new[i + 1] = i_new[i] + lensi; 2559 c->ilen[i] = lensi; 2560 } 2561 PetscCall(MatSeqAIJRestoreArrayWrite(C, &a_new)); 
// Set C's offload state properly 2562 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2563 PetscCall(PetscFree2(lens, starts)); 2564 } else { 2565 PetscCall(ISGetIndices(iscol, &icol)); 2566 PetscCall(PetscCalloc1(oldcols, &smap)); 2567 PetscCall(PetscMalloc1(1 + nrows, &lens)); 2568 for (i = 0; i < ncols; i++) { 2569 PetscCheck(icol[i] < oldcols, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Requesting column beyond largest column icol[%" PetscInt_FMT "] %" PetscInt_FMT " >= A->cmap->n %" PetscInt_FMT, i, icol[i], oldcols); 2570 smap[icol[i]] = i + 1; 2571 } 2572 2573 /* determine lens of each row */ 2574 for (i = 0; i < nrows; i++) { 2575 kstart = ai[irow[i]]; 2576 kend = kstart + a->ilen[irow[i]]; 2577 lens[i] = 0; 2578 for (k = kstart; k < kend; k++) { 2579 if (smap[aj[k]]) lens[i]++; 2580 } 2581 } 2582 /* Create and fill new matrix */ 2583 if (scall == MAT_REUSE_MATRIX) { 2584 PetscBool equal; 2585 2586 c = (Mat_SeqAIJ *)((*B)->data); 2587 PetscCheck((*B)->rmap->n == nrows && (*B)->cmap->n == ncols, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot reuse matrix. wrong size"); 2588 PetscCall(PetscArraycmp(c->ilen, lens, (*B)->rmap->n, &equal)); 2589 PetscCheck(equal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Cannot reuse matrix. 
wrong no of nonzeros"); 2590 PetscCall(PetscArrayzero(c->ilen, (*B)->rmap->n)); 2591 C = *B; 2592 } else { 2593 PetscInt rbs, cbs; 2594 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C)); 2595 PetscCall(MatSetSizes(C, nrows, ncols, PETSC_DETERMINE, PETSC_DETERMINE)); 2596 PetscCall(ISGetBlockSize(isrow, &rbs)); 2597 PetscCall(ISGetBlockSize(iscol, &cbs)); 2598 PetscCall(MatSetBlockSizes(C, rbs, cbs)); 2599 PetscCall(MatSetType(C, ((PetscObject)A)->type_name)); 2600 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(C, 0, lens)); 2601 } 2602 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 2603 2604 c = (Mat_SeqAIJ *)(C->data); 2605 PetscCall(MatSeqAIJGetArrayWrite(C, &c_a)); // Not 'c->a', since that raw usage ignores offload state of C 2606 for (i = 0; i < nrows; i++) { 2607 row = irow[i]; 2608 kstart = ai[row]; 2609 kend = kstart + a->ilen[row]; 2610 mat_i = c->i[i]; 2611 mat_j = c->j + mat_i; 2612 mat_a = c_a + mat_i; 2613 mat_ilen = c->ilen + i; 2614 for (k = kstart; k < kend; k++) { 2615 if ((tcol = smap[a->j[k]])) { 2616 *mat_j++ = tcol - 1; 2617 *mat_a++ = aa[k]; 2618 (*mat_ilen)++; 2619 } 2620 } 2621 } 2622 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 2623 /* Free work space */ 2624 PetscCall(ISRestoreIndices(iscol, &icol)); 2625 PetscCall(PetscFree(smap)); 2626 PetscCall(PetscFree(lens)); 2627 /* sort */ 2628 for (i = 0; i < nrows; i++) { 2629 PetscInt ilen; 2630 2631 mat_i = c->i[i]; 2632 mat_j = c->j + mat_i; 2633 mat_a = c_a + mat_i; 2634 ilen = c->ilen[i]; 2635 PetscCall(PetscSortIntWithScalarArray(ilen, mat_j, mat_a)); 2636 } 2637 PetscCall(MatSeqAIJRestoreArrayWrite(C, &c_a)); 2638 } 2639 #if defined(PETSC_HAVE_DEVICE) 2640 PetscCall(MatBindToCPU(C, A->boundtocpu)); 2641 #endif 2642 PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY)); 2643 PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY)); 2644 2645 PetscCall(ISRestoreIndices(isrow, &irow)); 2646 *B = C; 2647 PetscFunctionReturn(PETSC_SUCCESS); 2648 } 2649 2650 PetscErrorCode MatGetMultiProcBlock_SeqAIJ(Mat 
mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
{
  Mat B;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) {
    /* Build a fresh SeqAIJ copy of mat living on subComm */
    PetscCall(MatCreate(subComm, &B));
    PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->n, mat->cmap->n));
    PetscCall(MatSetBlockSizesFromMats(B, mat, mat));
    PetscCall(MatSetType(B, MATSEQAIJ));
    PetscCall(MatDuplicateNoCreate_SeqAIJ(B, mat, MAT_COPY_VALUES, PETSC_TRUE));
    *subMat = B;
  } else {
    /* Reuse path: only values are refreshed; the nonzero pattern must already match */
    PetscCall(MatCopy_SeqAIJ(mat, *subMat, SAME_NONZERO_PATTERN));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* In-place ILU(0) factorization of a SeqAIJ matrix with the given row/column permutations */
PetscErrorCode MatILUFactor_SeqAIJ(Mat inA, IS row, IS col, const MatFactorInfo *info)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)inA->data;
  Mat         outA;
  PetscBool   row_identity, col_identity;

  PetscFunctionBegin;
  PetscCheck(info->levels == 0, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only levels=0 supported for in-place ilu");

  PetscCall(ISIdentity(row, &row_identity));
  PetscCall(ISIdentity(col, &col_identity));

  /* the factorization overwrites inA itself */
  outA             = inA;
  outA->factortype = MAT_FACTOR_LU;
  PetscCall(PetscFree(inA->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPETSC, &inA->solvertype));

  /* reference before destroy, in case the incoming IS is the one already stored */
  PetscCall(PetscObjectReference((PetscObject)row));
  PetscCall(ISDestroy(&a->row));

  a->row = row;

  PetscCall(PetscObjectReference((PetscObject)col));
  PetscCall(ISDestroy(&a->col));

  a->col = col;

  /* Create the inverse permutation so that it can be used in MatLUFactorNumeric() */
  PetscCall(ISDestroy(&a->icol));
  PetscCall(ISInvertPermutation(col, PETSC_DECIDE, &a->icol));

  if (!a->solve_work) { /* this matrix may have been factored before */
    PetscCall(PetscMalloc1(inA->rmap->n + 1, &a->solve_work));
  }

  PetscCall(MatMarkDiagonal_SeqAIJ(inA));
  if (row_identity && col_identity) {
    PetscCall(MatLUFactorNumeric_SeqAIJ_inplace(outA, inA, info));
  } else {
    PetscCall(MatLUFactorNumeric_SeqAIJ_InplaceWithPerm(outA, inA, info));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Scales every stored nonzero of inA by alpha via BLAS scal */
PetscErrorCode MatScale_SeqAIJ(Mat inA, PetscScalar alpha)
{
  Mat_SeqAIJ  *a = (Mat_SeqAIJ *)inA->data;
  PetscScalar *v;
  PetscBLASInt one = 1, bnz;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArray(inA, &v));
  PetscCall(PetscBLASIntCast(a->nz, &bnz));
  PetscCallBLAS("BLASscal", BLASscal_(&bnz, &alpha, v, &one));
  PetscCall(PetscLogFlops(a->nz));
  PetscCall(MatSeqAIJRestoreArray(inA, &v));
  /* any cached (inverse) diagonal is now stale */
  PetscCall(MatSeqAIJInvalidateDiagonal(inA));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Frees the parallel-communication scaffolding attached to a submatrix by MatCreateSubMatrices() */
PetscErrorCode MatDestroySubMatrix_Private(Mat_SubSppt *submatj)
{
  PetscInt i;

  PetscFunctionBegin;
  if (!submatj->id) { /* delete data that are linked only to submats[id=0] */
    PetscCall(PetscFree4(submatj->sbuf1, submatj->ptr, submatj->tmp, submatj->ctr));

    for (i = 0; i < submatj->nrqr; ++i) PetscCall(PetscFree(submatj->sbuf2[i]));
    PetscCall(PetscFree3(submatj->sbuf2, submatj->req_size, submatj->req_source1));

    if (submatj->rbuf1) {
      PetscCall(PetscFree(submatj->rbuf1[0]));
      PetscCall(PetscFree(submatj->rbuf1));
    }

    for (i = 0; i < submatj->nrqs; ++i) PetscCall(PetscFree(submatj->rbuf3[i]));
    PetscCall(PetscFree3(submatj->req_source2, submatj->rbuf2, submatj->rbuf3));
    PetscCall(PetscFree(submatj->pa));
  }

#if defined(PETSC_USE_CTABLE)
  PetscCall(PetscHMapIDestroy(&submatj->rmap));
  if (submatj->cmap_loc) PetscCall(PetscFree(submatj->cmap_loc));
  PetscCall(PetscFree(submatj->rmap_loc));
#else
  PetscCall(PetscFree(submatj->rmap));
#endif

  if (!submatj->allcolumns) {
#if defined(PETSC_USE_CTABLE)
    PetscCall(PetscHMapIDestroy((PetscHMapI *)&submatj->cmap));
#else
    PetscCall(PetscFree(submatj->cmap));
#endif
  }
  PetscCall(PetscFree(submatj->row2proc));

  PetscCall(PetscFree(submatj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Destroys one reusable submatrix: runs the saved destroy routine, then frees the support data */
PetscErrorCode MatDestroySubMatrix_SeqAIJ(Mat C)
{
  Mat_SeqAIJ  *c       = (Mat_SeqAIJ *)C->data;
  Mat_SubSppt *submatj = c->submatis1;

  PetscFunctionBegin;
  PetscCall((*submatj->destroy)(C));
  PetscCall(MatDestroySubMatrix_Private(submatj));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Note this has code duplication with MatDestroySubMatrices_SeqBAIJ() */
PetscErrorCode MatDestroySubMatrices_SeqAIJ(PetscInt n, Mat *mat[])
{
  PetscInt     i;
  Mat          C;
  Mat_SeqAIJ  *c;
  Mat_SubSppt *submatj;

  PetscFunctionBegin;
  for (i = 0; i < n; i++) {
    C       = (*mat)[i];
    c       = (Mat_SeqAIJ *)C->data;
    submatj = c->submatis1;
    if (submatj) {
      if (--((PetscObject)C)->refct <= 0) {
        /* manual teardown instead of MatDestroy() because the submatrix shares data with siblings */
        PetscCall(PetscFree(C->factorprefix));
        PetscCall((*submatj->destroy)(C));
        PetscCall(MatDestroySubMatrix_Private(submatj));
        PetscCall(PetscFree(C->defaultvectype));
        PetscCall(PetscFree(C->defaultrandtype));
        PetscCall(PetscLayoutDestroy(&C->rmap));
        PetscCall(PetscLayoutDestroy(&C->cmap));
        PetscCall(PetscHeaderDestroy(&C));
      }
    } else {
      PetscCall(MatDestroy(&C));
    }
  }

  /* Destroy Dummy submatrices created for reuse */
  PetscCall(MatDestroySubMatrices_Dummy(n, mat));

  PetscCall(PetscFree(*mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Extracts n submatrices of A; allocates the output array when scall == MAT_INITIAL_MATRIX */
PetscErrorCode MatCreateSubMatrices_SeqAIJ(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *B[])
{
  PetscInt i;

  PetscFunctionBegin;
  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscCalloc1(n + 1, B));

  for (i = 0; i < n; i++) PetscCall(MatCreateSubMatrix_SeqAIJ(A, irow[i], icol[i], PETSC_DECIDE, scall, &(*B)[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode
MatIncreaseOverlap_SeqAIJ(Mat A, PetscInt is_max, IS is[], PetscInt ov)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        row, i, j, k, l, ll, m, n, *nidx, isz, val;
  const PetscInt *idx;
  PetscInt        start, end, *ai, *aj, bs = (A->rmap->bs > 0 && A->rmap->bs == A->cmap->bs) ? A->rmap->bs : 1;
  PetscBT         table;

  PetscFunctionBegin;
  m  = A->rmap->n / bs;
  ai = a->i;
  aj = a->j;

  PetscCheck(ov >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "illegal negative overlap value used");

  PetscCall(PetscMalloc1(m + 1, &nidx));
  PetscCall(PetscBTCreate(m, &table));

  for (i = 0; i < is_max; i++) {
    /* Initialize the two local arrays */
    isz = 0;
    PetscCall(PetscBTMemzero(m, table));

    /* Extract the indices, assume there can be duplicate entries */
    PetscCall(ISGetIndices(is[i], &idx));
    PetscCall(ISGetLocalSize(is[i], &n));

    if (bs > 1) {
      /* Enter these into the temp arrays. I.e., mark table[row], enter row into new index */
      for (j = 0; j < n; ++j) {
        if (!PetscBTLookupSet(table, idx[j] / bs)) nidx[isz++] = idx[j] / bs;
      }
      PetscCall(ISRestoreIndices(is[i], &idx));
      PetscCall(ISDestroy(&is[i]));

      k = 0;
      for (j = 0; j < ov; j++) { /* for each overlap */
        n = isz;
        for (; k < n; k++) { /* do only those rows in nidx[k], which are not done yet */
          for (ll = 0; ll < bs; ll++) {
            row   = bs * nidx[k] + ll;
            start = ai[row];
            end   = ai[row + 1];
            for (l = start; l < end; l++) {
              val = aj[l] / bs;
              if (!PetscBTLookupSet(table, val)) nidx[isz++] = val;
            }
          }
        }
      }
      PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, isz, nidx, PETSC_COPY_VALUES, (is + i)));
    } else {
      /* Enter these into the temp arrays. I.e., mark table[row], enter row into new index */
      for (j = 0; j < n; ++j) {
        if (!PetscBTLookupSet(table, idx[j])) nidx[isz++] = idx[j];
      }
      PetscCall(ISRestoreIndices(is[i], &idx));
      PetscCall(ISDestroy(&is[i]));

      k = 0;
      for (j = 0; j < ov; j++) { /* for each overlap */
        n = isz;
        for (; k < n; k++) { /* do only those rows in nidx[k], which are not done yet */
          row   = nidx[k];
          start = ai[row];
          end   = ai[row + 1];
          for (l = start; l < end; l++) {
            val = aj[l];
            if (!PetscBTLookupSet(table, val)) nidx[isz++] = val;
          }
        }
      }
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, isz, nidx, PETSC_COPY_VALUES, (is + i)));
    }
  }
  PetscCall(PetscBTDestroy(&table));
  PetscCall(PetscFree(nidx));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* -------------------------------------------------------------- */
/* Produces B = permuted A; rowp/colp give old->new position maps */
PetscErrorCode MatPermute_SeqAIJ(Mat A, IS rowp, IS colp, Mat *B)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        i, nz = 0, m = A->rmap->n, n = A->cmap->n;
  const PetscInt *row, *col;
  PetscInt       *cnew, j, *lens;
  IS              icolp, irowp;
  PetscInt       *cwork = NULL;
  PetscScalar    *vwork = NULL;

  PetscFunctionBegin;
  /* work with the inverse permutations: row[i]/col[j] are the destinations of old row i / old col j */
  PetscCall(ISInvertPermutation(rowp, PETSC_DECIDE, &irowp));
  PetscCall(ISGetIndices(irowp, &row));
  PetscCall(ISInvertPermutation(colp, PETSC_DECIDE, &icolp));
  PetscCall(ISGetIndices(icolp, &col));

  /* determine lengths of permuted rows */
  PetscCall(PetscMalloc1(m + 1, &lens));
  for (i = 0; i < m; i++) lens[row[i]] = a->i[i + 1] - a->i[i];
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B));
  PetscCall(MatSetSizes(*B, m, n, m, n));
  PetscCall(MatSetBlockSizesFromMats(*B, A, A));
  PetscCall(MatSetType(*B, ((PetscObject)A)->type_name));
  PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(*B, 0, lens));
  PetscCall(PetscFree(lens));

  PetscCall(PetscMalloc1(n, &cnew));
  for (i = 0; i < m; i++) {
    PetscCall(MatGetRow_SeqAIJ(A, i, &nz, &cwork, &vwork));
    for (j = 0; j < nz; j++) cnew[j] = col[cwork[j]];
    PetscCall(MatSetValues_SeqAIJ(*B, 1, &row[i], nz, cnew, vwork, INSERT_VALUES));
    PetscCall(MatRestoreRow_SeqAIJ(A, i, &nz, &cwork, &vwork));
  }
  PetscCall(PetscFree(cnew));

  (*B)->assembled = PETSC_FALSE;

#if defined(PETSC_HAVE_DEVICE)
  PetscCall(MatBindToCPU(*B, A->boundtocpu));
#endif
  PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
  PetscCall(ISRestoreIndices(irowp, &row));
  PetscCall(ISRestoreIndices(icolp, &col));
  PetscCall(ISDestroy(&irowp));
  PetscCall(ISDestroy(&icolp));
  if (rowp == colp) PetscCall(MatPropagateSymmetryOptions(A, *B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatCopy_SeqAIJ(Mat A, Mat B, MatStructure str)
{
  PetscFunctionBegin;
  /* If the two matrices have the same copy implementation, use fast copy.
*/
  if (str == SAME_NONZERO_PATTERN && (A->ops->copy == B->ops->copy)) {
    Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
    Mat_SeqAIJ        *b = (Mat_SeqAIJ *)B->data;
    const PetscScalar *aa;

    PetscCall(MatSeqAIJGetArrayRead(A, &aa));
    PetscCheck(a->i[A->rmap->n] == b->i[B->rmap->n], PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Number of nonzeros in two matrices are different %" PetscInt_FMT " != %" PetscInt_FMT, a->i[A->rmap->n], b->i[B->rmap->n]);
    PetscCall(PetscArraycpy(b->a, aa, a->i[A->rmap->n]));
    PetscCall(PetscObjectStateIncrease((PetscObject)B));
    PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  } else {
    PetscCall(MatCopy_Basic(A, B, str));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hands out the raw value array of A; pair with MatSeqAIJRestoreArray_SeqAIJ() */
PETSC_INTERN PetscErrorCode MatSeqAIJGetArray_SeqAIJ(Mat A, PetscScalar *array[])
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  *array = a->a;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Counterpart of MatSeqAIJGetArray_SeqAIJ(); just nulls the caller's pointer */
PETSC_INTERN PetscErrorCode MatSeqAIJRestoreArray_SeqAIJ(Mat A, PetscScalar *array[])
{
  PetscFunctionBegin;
  *array = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Computes the number of nonzeros per row needed for preallocation when X and Y
   have different nonzero structure.
*/
PetscErrorCode MatAXPYGetPreallocation_SeqX_private(PetscInt m, const PetscInt *xi, const PetscInt *xj, const PetscInt *yi, const PetscInt *yj, PetscInt *nnz)
{
  PetscInt i, j, k, nzx, nzy;

  PetscFunctionBegin;
  /* Set the number of nonzeros in the new matrix: per row, merge the two sorted column lists */
  for (i = 0; i < m; i++) {
    const PetscInt *xjj = xj + xi[i], *yjj = yj + yi[i];
    nzx    = xi[i + 1] - xi[i];
    nzy    = yi[i + 1] - yi[i];
    nnz[i] = 0;
    for (j = 0, k = 0; j < nzx; j++) {                  /* Point in X */
      for (; k < nzy && yjj[k] < xjj[j]; k++) nnz[i]++; /* Catch up to X */
      if (k < nzy && yjj[k] == xjj[j]) k++;             /* Skip duplicate */
      nnz[i]++;
    }
    for (; k < nzy; k++) nnz[i]++;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Per-row preallocation counts for the union pattern of X and Y (SeqAIJ front end) */
PetscErrorCode MatAXPYGetPreallocation_SeqAIJ(Mat Y, Mat X, PetscInt *nnz)
{
  PetscInt    m = Y->rmap->N;
  Mat_SeqAIJ *x = (Mat_SeqAIJ *)X->data;
  Mat_SeqAIJ *y = (Mat_SeqAIJ *)Y->data;

  PetscFunctionBegin;
  /* Set the number of nonzeros in the new matrix */
  PetscCall(MatAXPYGetPreallocation_SeqX_private(m, x->i, x->j, y->i, y->j, nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Y = a*X + Y, choosing the fastest path based on how the two nonzero patterns relate */
PetscErrorCode MatAXPY_SeqAIJ(Mat Y, PetscScalar a, Mat X, MatStructure str)
{
  Mat_SeqAIJ *x = (Mat_SeqAIJ *)X->data, *y = (Mat_SeqAIJ *)Y->data;

  PetscFunctionBegin;
  if (str == UNKNOWN_NONZERO_PATTERN || (PetscDefined(USE_DEBUG) && str == SAME_NONZERO_PATTERN)) {
    /* detect (or, in debug builds, verify) that the two patterns are identical */
    PetscBool e = x->nz == y->nz ? PETSC_TRUE : PETSC_FALSE;
    if (e) {
      PetscCall(PetscArraycmp(x->i, y->i, Y->rmap->n + 1, &e));
      if (e) {
        PetscCall(PetscArraycmp(x->j, y->j, y->nz, &e));
        if (e) str = SAME_NONZERO_PATTERN;
      }
    }
    if (!e) PetscCheck(str != SAME_NONZERO_PATTERN, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MatStructure is not SAME_NONZERO_PATTERN");
  }
  if (str == SAME_NONZERO_PATTERN) {
    const PetscScalar *xa;
    PetscScalar       *ya, alpha = a;
    PetscBLASInt       one = 1, bnz;

    PetscCall(PetscBLASIntCast(x->nz, &bnz));
    PetscCall(MatSeqAIJGetArray(Y, &ya));
    PetscCall(MatSeqAIJGetArrayRead(X, &xa));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &alpha, xa, &one, ya, &one));
    PetscCall(MatSeqAIJRestoreArrayRead(X, &xa));
    PetscCall(MatSeqAIJRestoreArray(Y, &ya));
    PetscCall(PetscLogFlops(2.0 * bnz));
    PetscCall(MatSeqAIJInvalidateDiagonal(Y));
    PetscCall(PetscObjectStateIncrease((PetscObject)Y));
  } else if (str == SUBSET_NONZERO_PATTERN) { /* nonzeros of X is a subset of Y's */
    PetscCall(MatAXPY_Basic(Y, a, X, str));
  } else {
    /* general case: preallocate the union pattern, compute into B, then swap B's guts into Y */
    Mat       B;
    PetscInt *nnz;
    PetscCall(PetscMalloc1(Y->rmap->N, &nnz));
    PetscCall(MatCreate(PetscObjectComm((PetscObject)Y), &B));
    PetscCall(PetscObjectSetName((PetscObject)B, ((PetscObject)Y)->name));
    PetscCall(MatSetLayouts(B, Y->rmap, Y->cmap));
    PetscCall(MatSetType(B, ((PetscObject)Y)->type_name));
    PetscCall(MatAXPYGetPreallocation_SeqAIJ(Y, X, nnz));
    PetscCall(MatSeqAIJSetPreallocation(B, 0, nnz));
    PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
    PetscCall(MatHeaderMerge(Y, &B));
    PetscCall(MatSeqAIJCheckInode(Y));
    PetscCall(PetscFree(nnz));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Conjugates all stored values in place (no-op in real-scalar builds) */
PETSC_INTERN PetscErrorCode MatConjugate_SeqAIJ(Mat mat)
{
#if defined(PETSC_USE_COMPLEX)
  Mat_SeqAIJ  *aij = (Mat_SeqAIJ *)mat->data;
  PetscInt     i, nz;
  PetscScalar *a;

PetscFunctionBegin;
  nz = aij->nz;
  PetscCall(MatSeqAIJGetArray(mat, &a));
  for (i = 0; i < nz; i++) a[i] = PetscConj(a[i]);
  PetscCall(MatSeqAIJRestoreArray(mat, &a));
#else
  PetscFunctionBegin;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* v[i] = max_j |A(i,j)| over stored entries; optional idx[i] gets the column of the max */
PetscErrorCode MatGetRowMaxAbs_SeqAIJ(Mat A, Vec v, PetscInt idx[])
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  PetscInt         i, j, m = A->rmap->n, *ai, *aj, ncols, n;
  PetscReal        atmp;
  PetscScalar     *x;
  const MatScalar *aa, *av;

  PetscFunctionBegin;
  PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(MatSeqAIJGetArrayRead(A, &av));
  aa = av;
  ai = a->i;
  aj = a->j;

  PetscCall(VecSet(v, 0.0));
  PetscCall(VecGetArrayWrite(v, &x));
  PetscCall(VecGetLocalSize(v, &n));
  PetscCheck(n == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector");
  for (i = 0; i < m; i++) {
    ncols = ai[1] - ai[0]; /* entries stored in row i; ai walks the row pointers */
    ai++;
    for (j = 0; j < ncols; j++) {
      atmp = PetscAbsScalar(*aa);
      if (PetscAbsScalar(x[i]) < atmp) {
        x[i] = atmp;
        if (idx) idx[i] = *aj;
      }
      aa++;
      aj++;
    }
  }
  PetscCall(VecRestoreArrayWrite(v, &x));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &av));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* v[i] = max_j A(i,j); implicit zeros of sparse rows take part in the max */
PetscErrorCode MatGetRowMax_SeqAIJ(Mat A, Vec v, PetscInt idx[])
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  PetscInt         i, j, m = A->rmap->n, *ai, *aj, ncols, n;
  PetscScalar     *x;
  const MatScalar *aa, *av;

  PetscFunctionBegin;
  PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(MatSeqAIJGetArrayRead(A, &av));
  aa = av;
  ai = a->i;
  aj = a->j;

  PetscCall(VecSet(v, 0.0));
  PetscCall(VecGetArrayWrite(v, &x));
  PetscCall(VecGetLocalSize(v, &n));
  PetscCheck(n == A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector");
  for (i = 0; i < m; i++) {
    ncols = ai[1] - ai[0];
    ai++;
    if (ncols == A->cmap->n) { /* row is dense */
      x[i] = *aa;
      if (idx) idx[i] = 0;
    } else { /* row is sparse so already KNOW maximum is 0.0 or higher */
      x[i] = 0.0;
      if (idx) {
        for (j = 0; j < ncols; j++) { /* find first implicit 0.0 in the row */
          if (aj[j] > j) {
            idx[i] = j;
            break;
          }
        }
        /* in case first implicit 0.0 in the row occurs at ncols-th column */
        if (j == ncols && j < A->cmap->n) idx[i] = j;
      }
    }
    for (j = 0; j < ncols; j++) {
      if (PetscRealPart(x[i]) < PetscRealPart(*aa)) {
        x[i] = *aa;
        if (idx) idx[i] = *aj;
      }
      aa++;
      aj++;
    }
  }
  PetscCall(VecRestoreArrayWrite(v, &x));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &av));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* v[i] = min_j |A(i,j)|; implicit zeros of sparse rows drive the min to 0.0 */
PetscErrorCode MatGetRowMinAbs_SeqAIJ(Mat A, Vec v, PetscInt idx[])
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  PetscInt         i, j, m = A->rmap->n, *ai, *aj, ncols, n;
  PetscScalar     *x;
  const MatScalar *aa, *av;

  PetscFunctionBegin;
  PetscCall(MatSeqAIJGetArrayRead(A, &av));
  aa = av;
  ai = a->i;
  aj = a->j;

  PetscCall(VecSet(v, 0.0));
  PetscCall(VecGetArrayWrite(v, &x));
  PetscCall(VecGetLocalSize(v, &n));
  PetscCheck(n == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector, %" PetscInt_FMT " vs. %" PetscInt_FMT " rows", m, n);
  for (i = 0; i < m; i++) {
    ncols = ai[1] - ai[0];
    ai++;
    if (ncols == A->cmap->n) { /* row is dense */
      x[i] = *aa;
      if (idx) idx[i] = 0;
    } else { /* row is sparse so already KNOW minimum is 0.0 or higher */
      x[i] = 0.0;
      if (idx) { /* find first implicit 0.0 in the row */
        for (j = 0; j < ncols; j++) {
          if (aj[j] > j) {
            idx[i] = j;
            break;
          }
        }
        /* in case first implicit 0.0 in the row occurs at ncols-th column */
        if (j == ncols && j < A->cmap->n) idx[i] = j;
      }
    }
    for (j = 0; j < ncols; j++) {
      if (PetscAbsScalar(x[i]) > PetscAbsScalar(*aa)) {
        x[i] = *aa;
        if (idx) idx[i] = *aj;
      }
      aa++;
      aj++;
    }
  }
  PetscCall(VecRestoreArrayWrite(v, &x));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &av));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* v[i] = min_j A(i,j); implicit zeros of sparse rows take part in the min */
PetscErrorCode MatGetRowMin_SeqAIJ(Mat A, Vec v, PetscInt idx[])
{
  Mat_SeqAIJ      *a = (Mat_SeqAIJ *)A->data;
  PetscInt         i, j, m = A->rmap->n, ncols, n;
  const PetscInt  *ai, *aj;
  PetscScalar     *x;
  const MatScalar *aa, *av;

  PetscFunctionBegin;
  PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCall(MatSeqAIJGetArrayRead(A, &av));
  aa = av;
  ai = a->i;
  aj = a->j;

  PetscCall(VecSet(v, 0.0));
  PetscCall(VecGetArrayWrite(v, &x));
  PetscCall(VecGetLocalSize(v, &n));
  PetscCheck(n == m, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming matrix and vector");
  for (i = 0; i < m; i++) {
    ncols = ai[1] - ai[0];
    ai++;
    if (ncols == A->cmap->n) { /* row is dense */
      x[i] = *aa;
      if (idx) idx[i] = 0;
    } else { /* row is sparse so already KNOW minimum is 0.0 or lower */
      x[i] = 0.0;
      if (idx) { /* find first implicit 0.0 in the row */
        for (j = 0; j < ncols; j++) {
          if (aj[j] > j) {
            idx[i] = j;
            break;
}
        }
        /* in case first implicit 0.0 in the row occurs at ncols-th column */
        if (j == ncols && j < A->cmap->n) idx[i] = j;
      }
    }
    for (j = 0; j < ncols; j++) {
      if (PetscRealPart(x[i]) > PetscRealPart(*aa)) {
        x[i] = *aa;
        if (idx) idx[i] = *aj;
      }
      aa++;
      aj++;
    }
  }
  PetscCall(VecRestoreArrayWrite(v, &x));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &av));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Inverts each bs x bs diagonal block of A, caching the result in a->ibdiag; blocks are stored transposed */
PetscErrorCode MatInvertBlockDiagonal_SeqAIJ(Mat A, const PetscScalar **values)
{
  Mat_SeqAIJ     *a = (Mat_SeqAIJ *)A->data;
  PetscInt        i, bs = PetscAbs(A->rmap->bs), mbs = A->rmap->n / bs, ipvt[5], bs2 = bs * bs, *v_pivots, ij[7], *IJ, j;
  MatScalar      *diag, work[25], *v_work;
  const PetscReal shift = 0.0;
  PetscBool       allowzeropivot, zeropivotdetected = PETSC_FALSE;

  PetscFunctionBegin;
  allowzeropivot = PetscNot(A->erroriffailure);
  if (a->ibdiagvalid) { /* cached inverse blocks still valid */
    if (values) *values = a->ibdiag;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(MatMarkDiagonal_SeqAIJ(A));
  if (!a->ibdiag) { PetscCall(PetscMalloc1(bs2 * mbs, &a->ibdiag)); }
  diag = a->ibdiag;
  if (values) *values = a->ibdiag;
  /* factor and invert each block */
  switch (bs) {
  case 1:
    for (i = 0; i < mbs; i++) {
      PetscCall(MatGetValues(A, 1, &i, 1, &i, diag + i));
      if (PetscAbsScalar(diag[i] + shift) < PETSC_MACHINE_EPSILON) {
        if (allowzeropivot) {
          A->factorerrortype             = MAT_FACTOR_NUMERIC_ZEROPIVOT;
          A->factorerror_zeropivot_value = PetscAbsScalar(diag[i]);
          A->factorerror_zeropivot_row   = i;
          PetscCall(PetscInfo(A, "Zero pivot, row %" PetscInt_FMT " pivot %g tolerance %g\n", i, (double)PetscAbsScalar(diag[i]), (double)PETSC_MACHINE_EPSILON));
        } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_MAT_LU_ZRPVT, "Zero pivot, row %" PetscInt_FMT " pivot %g tolerance %g", i, (double)PetscAbsScalar(diag[i]), (double)PETSC_MACHINE_EPSILON);
      }
      diag[i] = (PetscScalar)1.0 / (diag[i] + shift);
    }
    break;
  case 2:
    for (i = 0; i < mbs; i++) {
      ij[0] = 2 * i;
      ij[1] = 2 * i + 1;
      PetscCall(MatGetValues(A, 2, ij, 2, ij, diag));
      PetscCall(PetscKernel_A_gets_inverse_A_2(diag, shift, allowzeropivot, &zeropivotdetected));
      if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      PetscCall(PetscKernel_A_gets_transpose_A_2(diag));
      diag += 4;
    }
    break;
  case 3:
    for (i = 0; i < mbs; i++) {
      ij[0] = 3 * i;
      ij[1] = 3 * i + 1;
      ij[2] = 3 * i + 2;
      PetscCall(MatGetValues(A, 3, ij, 3, ij, diag));
      PetscCall(PetscKernel_A_gets_inverse_A_3(diag, shift, allowzeropivot, &zeropivotdetected));
      if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      PetscCall(PetscKernel_A_gets_transpose_A_3(diag));
      diag += 9;
    }
    break;
  case 4:
    for (i = 0; i < mbs; i++) {
      ij[0] = 4 * i;
      ij[1] = 4 * i + 1;
      ij[2] = 4 * i + 2;
      ij[3] = 4 * i + 3;
      PetscCall(MatGetValues(A, 4, ij, 4, ij, diag));
      PetscCall(PetscKernel_A_gets_inverse_A_4(diag, shift, allowzeropivot, &zeropivotdetected));
      if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      PetscCall(PetscKernel_A_gets_transpose_A_4(diag));
      diag += 16;
    }
    break;
  case 5:
    for (i = 0; i < mbs; i++) {
      ij[0] = 5 * i;
      ij[1] = 5 * i + 1;
      ij[2] = 5 * i + 2;
      ij[3] = 5 * i + 3;
      ij[4] = 5 * i + 4;
      PetscCall(MatGetValues(A, 5, ij, 5, ij, diag));
      PetscCall(PetscKernel_A_gets_inverse_A_5(diag, ipvt, work, shift, allowzeropivot, &zeropivotdetected));
      if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      PetscCall(PetscKernel_A_gets_transpose_A_5(diag));
      diag += 25;
    }
    break;
  case 6:
    for (i = 0; i < mbs; i++) {
      ij[0] = 6 * i;
      ij[1] = 6 * i + 1;
      ij[2] = 6 * i + 2;
      ij[3] = 6 * i + 3;
      ij[4] = 6 * i + 4;
      ij[5] = 6 * i + 5;
      PetscCall(MatGetValues(A, 6, ij, 6, ij, diag));
      PetscCall(PetscKernel_A_gets_inverse_A_6(diag, shift, allowzeropivot, &zeropivotdetected));
      if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      PetscCall(PetscKernel_A_gets_transpose_A_6(diag));
      diag += 36;
    }
    break;
  case 7:
    for (i = 0; i < mbs; i++) {
      ij[0] = 7 * i;
      ij[1] = 7 * i + 1;
      ij[2] = 7 * i + 2;
      ij[3] = 7 * i + 3;
      ij[4] = 7 * i + 4;
      ij[5] = 7 * i + 5;
      ij[6] = 7 * i + 6;
      PetscCall(MatGetValues(A, 7, ij, 7, ij, diag));
      PetscCall(PetscKernel_A_gets_inverse_A_7(diag, shift, allowzeropivot, &zeropivotdetected));
      if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      PetscCall(PetscKernel_A_gets_transpose_A_7(diag));
      diag += 49;
    }
    break;
  default:
    /* arbitrary block size: allocate work space sized bs and use the general kernel */
    PetscCall(PetscMalloc3(bs, &v_work, bs, &v_pivots, bs, &IJ));
    for (i = 0; i < mbs; i++) {
      for (j = 0; j < bs; j++) IJ[j] = bs * i + j;
      PetscCall(MatGetValues(A, bs, IJ, bs, IJ, diag));
      PetscCall(PetscKernel_A_gets_inverse_A(bs, diag, v_pivots, v_work, allowzeropivot, &zeropivotdetected));
      if (zeropivotdetected) A->factorerrortype = MAT_FACTOR_NUMERIC_ZEROPIVOT;
      PetscCall(PetscKernel_A_gets_transpose_A_N(diag, bs));
      diag += bs2;
    }
    PetscCall(PetscFree3(v_work, v_pivots, IJ));
  }
  a->ibdiagvalid = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Fills x with random values: random columns per row before assembly, existing nonzeros after */
static PetscErrorCode MatSetRandom_SeqAIJ(Mat x, PetscRandom rctx)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)x->data;
  PetscScalar a, *aa;
  PetscInt    m, n, i, j, col;

  PetscFunctionBegin;
  if (!x->assembled) {
    PetscCall(MatGetSize(x, &m, &n));
    for (i = 0; i < m; i++) {
      for (j = 0; j < aij->imax[i]; j++) {
        PetscCall(PetscRandomGetValue(rctx, &a));
        col =
(PetscInt)(n * PetscRealPart(a)); 3434 PetscCall(MatSetValues(x, 1, &i, 1, &col, &a, ADD_VALUES)); 3435 } 3436 } 3437 } else { 3438 PetscCall(MatSeqAIJGetArrayWrite(x, &aa)); 3439 for (i = 0; i < aij->nz; i++) PetscCall(PetscRandomGetValue(rctx, aa + i)); 3440 PetscCall(MatSeqAIJRestoreArrayWrite(x, &aa)); 3441 } 3442 PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY)); 3443 PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY)); 3444 PetscFunctionReturn(PETSC_SUCCESS); 3445 } 3446 3447 /* Like MatSetRandom_SeqAIJ, but do not set values on columns in range of [low, high) */ 3448 PetscErrorCode MatSetRandomSkipColumnRange_SeqAIJ_Private(Mat x, PetscInt low, PetscInt high, PetscRandom rctx) 3449 { 3450 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)x->data; 3451 PetscScalar a; 3452 PetscInt m, n, i, j, col, nskip; 3453 3454 PetscFunctionBegin; 3455 nskip = high - low; 3456 PetscCall(MatGetSize(x, &m, &n)); 3457 n -= nskip; /* shrink number of columns where nonzeros can be set */ 3458 for (i = 0; i < m; i++) { 3459 for (j = 0; j < aij->imax[i]; j++) { 3460 PetscCall(PetscRandomGetValue(rctx, &a)); 3461 col = (PetscInt)(n * PetscRealPart(a)); 3462 if (col >= low) col += nskip; /* shift col rightward to skip the hole */ 3463 PetscCall(MatSetValues(x, 1, &i, 1, &col, &a, ADD_VALUES)); 3464 } 3465 } 3466 PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY)); 3467 PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY)); 3468 PetscFunctionReturn(PETSC_SUCCESS); 3469 } 3470 3471 /* -------------------------------------------------------------------*/ 3472 static struct _MatOps MatOps_Values = {MatSetValues_SeqAIJ, 3473 MatGetRow_SeqAIJ, 3474 MatRestoreRow_SeqAIJ, 3475 MatMult_SeqAIJ, 3476 /* 4*/ MatMultAdd_SeqAIJ, 3477 MatMultTranspose_SeqAIJ, 3478 MatMultTransposeAdd_SeqAIJ, 3479 NULL, 3480 NULL, 3481 NULL, 3482 /* 10*/ NULL, 3483 MatLUFactor_SeqAIJ, 3484 NULL, 3485 MatSOR_SeqAIJ, 3486 MatTranspose_SeqAIJ, 3487 /*1 5*/ MatGetInfo_SeqAIJ, 3488 MatEqual_SeqAIJ, 3489 MatGetDiagonal_SeqAIJ, 3490 
                                       MatDiagonalScale_SeqAIJ,
                                       MatNorm_SeqAIJ,
                                       /* 20*/ NULL,
                                       MatAssemblyEnd_SeqAIJ,
                                       MatSetOption_SeqAIJ,
                                       MatZeroEntries_SeqAIJ,
                                       /* 24*/ MatZeroRows_SeqAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 29*/ MatSetUp_Seq_Hash,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 34*/ MatDuplicate_SeqAIJ,
                                       NULL,
                                       NULL,
                                       MatILUFactor_SeqAIJ,
                                       NULL,
                                       /* 39*/ MatAXPY_SeqAIJ,
                                       MatCreateSubMatrices_SeqAIJ,
                                       MatIncreaseOverlap_SeqAIJ,
                                       MatGetValues_SeqAIJ,
                                       MatCopy_SeqAIJ,
                                       /* 44*/ MatGetRowMax_SeqAIJ,
                                       MatScale_SeqAIJ,
                                       MatShift_SeqAIJ,
                                       MatDiagonalSet_SeqAIJ,
                                       MatZeroRowsColumns_SeqAIJ,
                                       /* 49*/ MatSetRandom_SeqAIJ,
                                       MatGetRowIJ_SeqAIJ,
                                       MatRestoreRowIJ_SeqAIJ,
                                       MatGetColumnIJ_SeqAIJ,
                                       MatRestoreColumnIJ_SeqAIJ,
                                       /* 54*/ MatFDColoringCreate_SeqXAIJ,
                                       NULL,
                                       NULL,
                                       MatPermute_SeqAIJ,
                                       NULL,
                                       /* 59*/ NULL,
                                       MatDestroy_SeqAIJ,
                                       MatView_SeqAIJ,
                                       NULL,
                                       NULL,
                                       /* 64*/ NULL,
                                       MatMatMatMultNumeric_SeqAIJ_SeqAIJ_SeqAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 69*/ MatGetRowMaxAbs_SeqAIJ,
                                       MatGetRowMinAbs_SeqAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 74*/ NULL,
                                       MatFDColoringApply_AIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 79*/ MatFindZeroDiagonals_SeqAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatLoad_SeqAIJ,
                                       /* 84*/ MatIsSymmetric_SeqAIJ,
                                       MatIsHermitian_SeqAIJ,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /* 89*/ NULL,
                                       NULL,
                                       MatMatMultNumeric_SeqAIJ_SeqAIJ,
                                       NULL,
                                       NULL,
                                       /* 94*/ MatPtAPNumeric_SeqAIJ_SeqAIJ_SparseAxpy,
                                       NULL,
                                       NULL,
                                       MatMatTransposeMultNumeric_SeqAIJ_SeqAIJ,
                                       NULL,
                                       /* 99*/ MatProductSetFromOptions_SeqAIJ,
                                       NULL,
                                       NULL,
                                       MatConjugate_SeqAIJ,
                                       NULL,
                                       /*104*/ MatSetValuesRow_SeqAIJ,
                                       MatRealPart_SeqAIJ,
                                       MatImaginaryPart_SeqAIJ,
                                       NULL,
                                       NULL,
                                       /*109*/ MatMatSolve_SeqAIJ,
                                       NULL,
                                       MatGetRowMin_SeqAIJ,
                                       NULL,
                                       MatMissingDiagonal_SeqAIJ,
                                       /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatGetMultiProcBlock_SeqAIJ,
                                       /*124*/ MatFindNonzeroRows_SeqAIJ,
                                       MatGetColumnReductions_SeqAIJ,
                                       MatInvertBlockDiagonal_SeqAIJ,
                                       MatInvertVariableBlockDiagonal_SeqAIJ,
                                       NULL,
                                       /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       MatTransposeMatMultNumeric_SeqAIJ_SeqAIJ,
                                       MatTransposeColoringCreate_SeqAIJ,
                                       /*134*/ MatTransColoringApplySpToDen_SeqAIJ,
                                       MatTransColoringApplyDenToSp_SeqAIJ,
                                       NULL,
                                       NULL,
                                       MatRARtNumeric_SeqAIJ_SeqAIJ,
                                       /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       MatFDColoringSetUp_SeqXAIJ,
                                       MatFindOffBlockDiagonalEntries_SeqAIJ,
                                       MatCreateMPIMatConcatenateSeqMat_SeqAIJ,
                                       /*145*/ MatDestroySubMatrices_SeqAIJ,
                                       NULL,
                                       NULL,
                                       MatCreateGraph_Simple_AIJ,
                                       NULL,
                                       /*150*/ MatTransposeSymbolic_SeqAIJ,
                                       MatEliminateZeros_SeqAIJ};

/*
   Copy the user-provided column indices for every (preallocated) nonzero into the matrix
   and mark every row as full (ilen = imax). Assumes imax[] was already set by the
   preallocation call; see the MatSeqAIJSetColumnIndices() man page for the contract.
*/
PetscErrorCode MatSeqAIJSetColumnIndices_SeqAIJ(Mat mat, PetscInt *indices)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data;
  PetscInt    i, nz, n;

  PetscFunctionBegin;
  nz = aij->maxnz;
  n  = mat->rmap->n;
  for (i = 0; i < nz; i++) aij->j[i] = indices[i];
  aij->nz = nz;
  /* every row is now considered completely filled */
  for (i = 0; i < n; i++) aij->ilen[i] = aij->imax[i];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
 * Given a sparse matrix with global column indices, compact it by using a local column space.
 * The result matrix helps saving memory in other algorithms, such as MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable()
 */
PetscErrorCode MatSeqAIJCompactOutExtraColumns_SeqAIJ(Mat mat, ISLocalToGlobalMapping *mapping)
{
  Mat_SeqAIJ    *aij = (Mat_SeqAIJ *)mat->data;
  PetscHMapI     gid1_lid1; /* one-based global-to-local column map */
  PetscHashIter  tpos;
  PetscInt       gid, lid, i, ec, nz = aij->nz;
  PetscInt      *garray, *jj = aij->j;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscValidPointer(mapping, 2);
  /* use a table */
  PetscCall(PetscHMapICreateWithSize(mat->rmap->n, &gid1_lid1));
  ec = 0;
  /* first pass: count and enumerate the distinct columns actually used */
  for (i = 0; i < nz; i++) {
    PetscInt data, gid1 = jj[i] + 1;
    PetscCall(PetscHMapIGetWithDefault(gid1_lid1, gid1, 0, &data));
    if (!data) {
      /* one based table */
      PetscCall(PetscHMapISet(gid1_lid1, gid1, ++ec));
    }
  }
  /* form array of columns we need */
  PetscCall(PetscMalloc1(ec, &garray));
  PetscHashIterBegin(gid1_lid1, tpos);
  while (!PetscHashIterAtEnd(gid1_lid1, tpos)) {
    PetscHashIterGetKey(gid1_lid1, tpos, gid);
    PetscHashIterGetVal(gid1_lid1, tpos, lid);
    PetscHashIterNext(gid1_lid1, tpos);
    gid--;
    lid--;
    garray[lid] = gid;
  }
  PetscCall(PetscSortInt(ec, garray)); /* sort, and rebuild */
  PetscCall(PetscHMapIClear(gid1_lid1));
  for (i = 0; i < ec; i++) PetscCall(PetscHMapISet(gid1_lid1, garray[i] + 1, i + 1));
  /* compact out the extra columns in B */
  for (i = 0; i < nz; i++) {
    PetscInt gid1 = jj[i] + 1;
    PetscCall(PetscHMapIGetWithDefault(gid1_lid1, gid1, 0, &lid));
    lid--;
    jj[i] = lid;
  }
  /* shrink the column layout of the matrix to the compacted local column space */
  PetscCall(PetscLayoutDestroy(&mat->cmap));
  PetscCall(PetscHMapIDestroy(&gid1_lid1));
  PetscCall(PetscLayoutCreateFromSizes(PetscObjectComm((PetscObject)mat), ec, ec, 1, &mat->cmap));
  /* garray ownership is transferred to the mapping (PETSC_OWN_POINTER) */
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, mat->cmap->bs, mat->cmap->n, garray, PETSC_OWN_POINTER, mapping));
  PetscCall(ISLocalToGlobalMappingSetType(*mapping, ISLOCALTOGLOBALMAPPINGHASH));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatSeqAIJSetColumnIndices - Set the column indices for all the rows
  in the matrix.

  Input Parameters:
+ mat     - the `MATSEQAIJ` matrix
- indices - the column indices

  Level: advanced

  Notes:
  This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the matrix object to improve the performance
  of the `MatSetValues()` operation.

  You MUST have set the correct numbers of nonzeros per row in the call to
  `MatCreateSeqAIJ()`, and the column indices MUST be sorted.

  MUST be called before any calls to `MatSetValues()`

  The indices should start with zero, not one.

@*/
PetscErrorCode MatSeqAIJSetColumnIndices(Mat mat, PetscInt *indices)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscValidIntPointer(indices, 2);
  PetscUseMethod(mat, "MatSeqAIJSetColumnIndices_C", (Mat, PetscInt *), (mat, indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ----------------------------------------------------------------------------------------*/

/*
   Save a copy of the current numerical values (aij->a) into aij->saved_values so they can
   later be restored with MatRetrieveValues_SeqAIJ(). Requires that the nonzero structure
   is frozen (aij->nonew set via MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE).
*/
PetscErrorCode MatStoreValues_SeqAIJ(Mat mat)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data;
  size_t      nz  = aij->i[mat->rmap->n];

  PetscFunctionBegin;
  PetscCheck(aij->nonew, PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call MatSetOption(A,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);first");

  /* allocate space for values if not already there */
  if (!aij->saved_values) { PetscCall(PetscMalloc1(nz + 1, &aij->saved_values)); }

  /* copy values over */
  PetscCall(PetscArraycpy(aij->saved_values, aij->a, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatStoreValues - Stashes a copy of the matrix values; this allows,
  for
  example, reuse of the linear part of a Jacobian, while recomputing the
  nonlinear portion.

  Logically Collective

  Input Parameter:
. mat - the matrix (currently only `MATAIJ` matrices support this option)

  Level: advanced

  Usage:
.vb
    Using `SNES`
    Create Jacobian matrix
    Set linear terms into matrix
    Apply boundary conditions to matrix, at this time matrix must have
    final nonzero structure (i.e. setting the nonlinear terms and applying
    boundary conditions again will not change the nonzero structure)
    MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);
    MatStoreValues(mat);
    Call SNESSetJacobian() with matrix
    In your Jacobian routine
      MatRetrieveValues(mat);
      Set nonlinear terms in matrix

    Without `SNESSolve()`, i.e. when you handle nonlinear solve yourself:
    // build linear portion of Jacobian
    MatSetOption(mat,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);
    MatStoreValues(mat);
    loop over nonlinear iterations
       MatRetrieveValues(mat);
       // call MatSetValues(mat,...) to set nonlinear portion of Jacobian
       // call MatAssemblyBegin/End() on matrix
       Solve linear system with Jacobian
    endloop
.ve

  Notes:
  Matrix must already be assembled before calling this routine
  Must set the matrix option `MatSetOption`(mat,`MAT_NEW_NONZERO_LOCATIONS`,`PETSC_FALSE`); before
  calling this routine.

  When this is called multiple times it overwrites the previous set of stored values
  and does not allocate additional space.

.seealso: `Mat`, `MatRetrieveValues()`
@*/
PetscErrorCode MatStoreValues(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscUseMethod(mat, "MatStoreValues_C", (Mat), (mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Restore the numerical values previously saved by MatStoreValues_SeqAIJ() back into aij->a.
   Errors if the structure was not frozen or if no values were ever stored.
*/
PetscErrorCode MatRetrieveValues_SeqAIJ(Mat mat)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data;
  PetscInt    nz  = aij->i[mat->rmap->n];

  PetscFunctionBegin;
  PetscCheck(aij->nonew, PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call MatSetOption(A,MAT_NEW_NONZERO_LOCATIONS,PETSC_FALSE);first");
  PetscCheck(aij->saved_values, PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call MatStoreValues(A);first");
  /* copy values over */
  PetscCall(PetscArraycpy(aij->a, aij->saved_values, nz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatRetrieveValues - Retrieves the copy of the matrix values; this allows, for
  example, reuse of the linear part of a Jacobian, while recomputing the
  nonlinear portion.

  Logically Collective

  Input Parameter:
. mat - the matrix (currently only `MATAIJ` matrices support this option)

  Level: advanced

.seealso: `MatStoreValues()`
@*/
PetscErrorCode MatRetrieveValues(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscUseMethod(mat, "MatRetrieveValues_C", (Mat), (mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* --------------------------------------------------------------------------------*/
/*@C
  MatCreateSeqAIJ - Creates a sparse matrix in `MATSEQAIJ` (compressed row) format
  (the default parallel PETSc format). For good matrix assembly performance
  the user should preallocate the matrix storage by setting the parameter nz
  (or the array nnz). By setting these parameters accurately, performance
  during matrix assembly can be increased by more than a factor of 50.

  Collective

  Input Parameters:
+ comm - MPI communicator, set to `PETSC_COMM_SELF`
. m    - number of rows
. n    - number of columns
. nz   - number of nonzeros per row (same for all rows)
- nnz  - array containing the number of nonzeros in the various rows
         (possibly different for each row) or NULL

  Output Parameter:
. A - the matrix

  It is recommended that one use the `MatCreate()`, `MatSetType()` and/or `MatSetFromOptions()`,
  MatXXXXSetPreallocation() paradigm instead of this routine directly.
  [MatXXXXSetPreallocation() is, for example, `MatSeqAIJSetPreallocation()`]

  Notes:
  If nnz is given then nz is ignored

  The AIJ format, also called
  compressed row storage, is fully compatible with standard Fortran 77
  storage.
  That is, the stored row and column indices can begin at
  either one (as in Fortran) or zero. See the users' manual for details.

  Specify the preallocated storage with either nz or nnz (not both).
  Set nz = `PETSC_DEFAULT` and nnz = NULL for PETSc to control dynamic memory
  allocation. For large problems you MUST preallocate memory or you
  will get TERRIBLE performance, see the users' manual chapter on matrices.

  By default, this format uses inodes (identical nodes) when possible, to
  improve numerical efficiency of matrix-vector products and solves. We
  search for consecutive rows with the same nonzero structure, thereby
  reusing matrix information to achieve increased efficiency.

  Options Database Keys:
+ -mat_no_inode            - Do not use inodes
- -mat_inode_limit <limit> - Sets inode limit (max limit=5)

  Level: intermediate

.seealso: [Sparse Matrix Creation](sec_matsparse), `MatCreate()`, `MatCreateAIJ()`, `MatSetValues()`, `MatSeqAIJSetColumnIndices()`, `MatCreateSeqAIJWithArrays()`
@*/
PetscErrorCode MatCreateSeqAIJ(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt nz, const PetscInt nnz[], Mat *A)
{
  PetscFunctionBegin;
  /* convenience wrapper: create, type, and preallocate in one call */
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, n, m, n));
  PetscCall(MatSetType(*A, MATSEQAIJ));
  PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(*A, nz, nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJSetPreallocation - For good matrix assembly performance
  the user should preallocate the matrix storage by setting the parameter nz
  (or the array nnz). By setting these parameters accurately, performance
  during matrix assembly can be increased by more than a factor of 50.

  Collective

  Input Parameters:
+ B   - The matrix
. nz  - number of nonzeros per row (same for all rows)
- nnz - array containing the number of nonzeros in the various rows
        (possibly different for each row) or NULL

  Notes:
  If nnz is given then nz is ignored

  The `MATSEQAIJ` format also called
  compressed row storage, is fully compatible with standard Fortran 77
  storage. That is, the stored row and column indices can begin at
  either one (as in Fortran) or zero. See the users' manual for details.

  Specify the preallocated storage with either nz or nnz (not both).
  Set nz = `PETSC_DEFAULT` and nnz = NULL for PETSc to control dynamic memory
  allocation. For large problems you MUST preallocate memory or you
  will get TERRIBLE performance, see the users' manual chapter on matrices.

  You can call `MatGetInfo()` to get information on how effective the preallocation was;
  for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
  You can also run with the option -info and look for messages with the string
  malloc in them to see if additional memory allocation was needed.

  Developer Notes:
  Use nz of `MAT_SKIP_ALLOCATION` to not allocate any space for the matrix
  entries or columns indices

  By default, this format uses inodes (identical nodes) when possible, to
  improve numerical efficiency of matrix-vector products and solves. We
  search for consecutive rows with the same nonzero structure, thereby
  reusing matrix information to achieve increased efficiency.

  Options Database Keys:
+ -mat_no_inode            - Do not use inodes
- -mat_inode_limit <limit> - Sets inode limit (max limit=5)

  Level: intermediate

.seealso: `MatCreate()`, `MatCreateAIJ()`, `MatSetValues()`, `MatSeqAIJSetColumnIndices()`, `MatCreateSeqAIJWithArrays()`, `MatGetInfo()`,
          `MatSeqAIJSetTotalPreallocation()`
@*/
PetscErrorCode MatSeqAIJSetPreallocation(Mat B, PetscInt nz, const PetscInt nnz[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidType(B, 1);
  PetscTryMethod(B, "MatSeqAIJSetPreallocation_C", (Mat, PetscInt, const PetscInt[]), (B, nz, nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Implementation of preallocation for MATSEQAIJ: validates nz/nnz, allocates the
   CSR arrays (a, j, i), fills imax/ilen/ipre bookkeeping, and resets assembly state.
*/
PetscErrorCode MatSeqAIJSetPreallocation_SeqAIJ(Mat B, PetscInt nz, const PetscInt *nnz)
{
  Mat_SeqAIJ *b              = (Mat_SeqAIJ *)B->data;
  PetscBool   skipallocation = PETSC_FALSE, realalloc = PETSC_FALSE;
  PetscInt    i;

  PetscFunctionBegin;
  if (B->hash_active) {
    /* the matrix was in hash-based MatSetValues mode; restore the cached ops table and drop the hash storage */
    PetscCall(PetscMemcpy(&B->ops, &b->cops, sizeof(*(B->ops))));
    PetscCall(PetscHMapIJVDestroy(&b->ht));
    PetscCall(PetscFree(b->dnz));
    B->hash_active = PETSC_FALSE;
  }
  if (nz >= 0 || nnz) realalloc = PETSC_TRUE;
  if (nz == MAT_SKIP_ALLOCATION) {
    skipallocation = PETSC_TRUE;
    nz             = 0;
  }
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));

  if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 5;
  PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nz cannot be less than 0: value %" PetscInt_FMT, nz);
  if (PetscUnlikelyDebug(nnz)) {
    /* debug-only validation of the per-row nonzero counts */
    for (i = 0; i < B->rmap->n; i++) {
      PetscCheck(nnz[i] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nnz cannot be less than 0: local row %" PetscInt_FMT " value %" PetscInt_FMT, i, nnz[i]);
      PetscCheck(nnz[i] <= B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "nnz cannot be greater than row length: local row %" PetscInt_FMT " value %" PetscInt_FMT " rowlength %" PetscInt_FMT, i, nnz[i], B->cmap->n);
    }
  }

  B->preallocated = PETSC_TRUE;
  if (!skipallocation) {
    if (!b->imax) { PetscCall(PetscMalloc1(B->rmap->n, &b->imax)); }
    if (!b->ilen) {
      /* b->ilen will count nonzeros in each row so far. */
      PetscCall(PetscCalloc1(B->rmap->n, &b->ilen));
    } else {
      PetscCall(PetscMemzero(b->ilen, B->rmap->n * sizeof(PetscInt)));
    }
    if (!b->ipre) PetscCall(PetscMalloc1(B->rmap->n, &b->ipre));
    if (!nnz) {
      /* NOTE(review): nz was already defaulted to 5 and checked >= 0 above, so these two
         re-defaulting lines look unreachable -- kept for safety, confirm before removing */
      if (nz == PETSC_DEFAULT || nz == PETSC_DECIDE) nz = 10;
      else if (nz < 0) nz = 1;
      nz = PetscMin(nz, B->cmap->n);
      for (i = 0; i < B->rmap->n; i++) b->imax[i] = nz;
      nz = nz * B->rmap->n;
    } else {
      /* sum in 64-bit to avoid PetscInt overflow before the checked cast */
      PetscInt64 nz64 = 0;
      for (i = 0; i < B->rmap->n; i++) {
        b->imax[i] = nnz[i];
        nz64 += nnz[i];
      }
      PetscCall(PetscIntCast(nz64, &nz));
    }

    /* allocate the matrix space */
    /* FIXME: should B's old memory be unlogged? */
    PetscCall(MatSeqXAIJFreeAIJ(B, &b->a, &b->j, &b->i));
    if (B->structure_only) {
      /* structure-only matrices carry no numerical values */
      PetscCall(PetscMalloc1(nz, &b->j));
      PetscCall(PetscMalloc1(B->rmap->n + 1, &b->i));
    } else {
      PetscCall(PetscMalloc3(nz, &b->a, nz, &b->j, B->rmap->n + 1, &b->i));
    }
    b->i[0] = 0;
    for (i = 1; i < B->rmap->n + 1; i++) b->i[i] = b->i[i - 1] + b->imax[i - 1];
    if (B->structure_only) {
      b->singlemalloc = PETSC_FALSE;
      b->free_a       = PETSC_FALSE;
    } else {
      b->singlemalloc = PETSC_TRUE;
      b->free_a       = PETSC_TRUE;
    }
    b->free_ij = PETSC_TRUE;
  } else {
    b->free_a  = PETSC_FALSE;
    b->free_ij = PETSC_FALSE;
  }

  if (b->ipre && nnz != b->ipre && b->imax) {
    /* reserve user-requested sparsity */
    PetscCall(PetscArraycpy(b->ipre, b->imax, B->rmap->n));
  }

  b->nz               = 0;
  b->maxnz            = nz;
  B->info.nz_unneeded = (double)b->maxnz;
  if (realalloc) PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  B->was_assembled = PETSC_FALSE;
  B->assembled     = PETSC_FALSE;
  /* We simply deem preallocation has changed nonzero state. Updating the state
     will give clients (like AIJKokkos) a chance to know something has happened.
  */
  B->nonzerostate++;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Restore the matrix to its freshly-preallocated state using the per-row counts
   saved in a->ipre, discarding all previously set values.
*/
PetscErrorCode MatResetPreallocation_SeqAIJ(Mat A)
{
  Mat_SeqAIJ *a;
  PetscInt    i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);

  /* Check local size.
     If zero, then return */
  if (!A->rmap->n) PetscFunctionReturn(PETSC_SUCCESS);

  a = (Mat_SeqAIJ *)A->data;
  /* if no saved info, we error out */
  PetscCheck(a->ipre, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "No saved preallocation info ");

  PetscCheck(a->i && a->j && a->a && a->imax && a->ilen, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Memory info is incomplete, and can not reset preallocation ");

  /* restore per-row capacities and clear per-row fill counts */
  PetscCall(PetscArraycpy(a->imax, a->ipre, A->rmap->n));
  PetscCall(PetscArrayzero(a->ilen, A->rmap->n));
  a->i[0] = 0;
  for (i = 1; i < A->rmap->n + 1; i++) a->i[i] = a->i[i - 1] + a->imax[i - 1];
  A->preallocated     = PETSC_TRUE;
  a->nz               = 0;
  a->maxnz            = a->i[A->rmap->n];
  A->info.nz_unneeded = (double)a->maxnz;
  A->was_assembled    = PETSC_FALSE;
  A->assembled        = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatSeqAIJSetPreallocationCSR - Allocates memory for a sparse sequential matrix in `MATSEQAIJ` format.

  Input Parameters:
+ B - the matrix
. i - the indices into j for the start of each row (starts with zero)
. j - the column indices for each row (starts with zero) these must be sorted for each row
- v - optional values in the matrix

  Level: developer

  Notes:
  The i,j,v values are COPIED with this routine; to avoid the copy use `MatCreateSeqAIJWithArrays()`

  This routine may be called multiple times with different nonzero patterns (or the same nonzero pattern). The nonzero
  structure will be the union of all the previous nonzero structures.

  Developer Notes:
  An optimization could be added to the implementation where it checks if the i, and j are identical to the current i and j and
  then just copies the v values directly with `PetscMemcpy()`.

  This routine could also take a `PetscCopyMode` argument to allow sharing the values instead of always copying them.

.seealso: `MatCreate()`, `MatCreateSeqAIJ()`, `MatSetValues()`, `MatSeqAIJSetPreallocation()`, `MatCreateSeqAIJ()`, `MATSEQAIJ`, `MatResetPreallocation()`
@*/
PetscErrorCode MatSeqAIJSetPreallocationCSR(Mat B, const PetscInt i[], const PetscInt j[], const PetscScalar v[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidType(B, 1);
  PetscTryMethod(B, "MatSeqAIJSetPreallocationCSR_C", (Mat, const PetscInt[], const PetscInt[], const PetscScalar[]), (B, i, j, v));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   CSR-based preallocation: derive per-row counts from the row-pointer array Ii,
   preallocate, then insert the column indices (and optional values) row by row
   and assemble the matrix.
*/
PetscErrorCode MatSeqAIJSetPreallocationCSR_SeqAIJ(Mat B, const PetscInt Ii[], const PetscInt J[], const PetscScalar v[])
{
  PetscInt  i;
  PetscInt  m, n;
  PetscInt  nz;
  PetscInt *nnz;

  PetscFunctionBegin;
  PetscCheck(Ii[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ii[0] must be 0 it is %" PetscInt_FMT, Ii[0]);

  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));

  PetscCall(MatGetSize(B, &m, &n));
  PetscCall(PetscMalloc1(m + 1, &nnz));
  for (i = 0; i < m; i++) {
    nz = Ii[i + 1] - Ii[i];
    PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local row %" PetscInt_FMT " has a negative number of columns %" PetscInt_FMT, i, nz);
    nnz[i] = nz;
  }
  PetscCall(MatSeqAIJSetPreallocation(B, 0, nnz));
  PetscCall(PetscFree(nnz));

  for (i = 0; i < m; i++) PetscCall(MatSetValues_SeqAIJ(B, 1, &i, Ii[i + 1] - Ii[i], J + Ii[i], v ? v + Ii[i] : NULL, INSERT_VALUES));

  PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));

  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  MatSeqAIJKron - Computes C, the Kronecker product of A and B.

  Input Parameters:
+ A - left-hand side matrix
.
B - right-hand side matrix
- reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`

  Output Parameter:
. C - Kronecker product of A and B

  Level: intermediate

  Note:
  `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the product matrix has not changed from that last call to `MatSeqAIJKron()`.

.seealso: `MatCreateSeqAIJ()`, `MATSEQAIJ`, `MATKAIJ`, `MatReuse`
@*/
PetscErrorCode MatSeqAIJKron(Mat A, Mat B, MatReuse reuse, Mat *C)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscValidHeaderSpecific(B, MAT_CLASSID, 2);
  PetscValidType(B, 2);
  PetscValidPointer(C, 4);
  if (reuse == MAT_REUSE_MATRIX) {
    PetscValidHeaderSpecific(*C, MAT_CLASSID, 4);
    PetscValidType(*C, 4);
  }
  PetscTryMethod(A, "MatSeqAIJKron_C", (Mat, Mat, MatReuse, Mat *), (A, B, reuse, C));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   Kronecker product C = A (x) B for two SeqAIJ matrices.

   For MAT_INITIAL_MATRIX the symbolic (i,j) structure is built first and handed to
   MatSeqAIJSetPreallocationCSR(); for MAT_REUSE_MATRIX the existing structure of *C
   is assumed unchanged. In both cases the numerical values are then filled in place.
*/
PetscErrorCode MatSeqAIJKron_SeqAIJ(Mat A, Mat B, MatReuse reuse, Mat *C)
{
  Mat                newmat;
  Mat_SeqAIJ        *a = (Mat_SeqAIJ *)A->data;
  Mat_SeqAIJ        *b = (Mat_SeqAIJ *)B->data;
  PetscScalar       *v;
  const PetscScalar *aa, *ba;
  PetscInt          *i, *j, m, n, p, q, nnz = 0, am = A->rmap->n, bm = B->rmap->n, an = A->cmap->n, bn = B->cmap->n;
  PetscBool          flg;

  PetscFunctionBegin;
  PetscCheck(!A->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(A->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCheck(!B->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
  PetscCheck(B->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATSEQAIJ, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatType %s", ((PetscObject)B)->type_name);
  PetscCheck(reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatReuse %d", (int)reuse);
  if (reuse == MAT_INITIAL_MATRIX) {
    /* symbolic phase: row (m,p) of C has (row m of A nnz) * (row p of B nnz) entries */
    PetscCall(PetscMalloc2(am * bm + 1, &i, a->i[am] * b->i[bm], &j));
    PetscCall(MatCreate(PETSC_COMM_SELF, &newmat));
    PetscCall(MatSetSizes(newmat, am * bm, an * bn, am * bm, an * bn));
    PetscCall(MatSetType(newmat, MATAIJ));
    i[0] = 0;
    for (m = 0; m < am; ++m) {
      for (p = 0; p < bm; ++p) {
        i[m * bm + p + 1] = i[m * bm + p] + (a->i[m + 1] - a->i[m]) * (b->i[p + 1] - b->i[p]);
        for (n = a->i[m]; n < a->i[m + 1]; ++n) {
          for (q = b->i[p]; q < b->i[p + 1]; ++q) j[nnz++] = a->j[n] * bn + b->j[q];
        }
      }
    }
    PetscCall(MatSeqAIJSetPreallocationCSR(newmat, i, j, NULL));
    *C = newmat;
    PetscCall(PetscFree2(i, j));
    nnz = 0; /* reset so the numeric fill below starts at the first entry */
  }
  /* numeric phase: entries are generated in the same order as the symbolic pass */
  PetscCall(MatSeqAIJGetArray(*C, &v));
  PetscCall(MatSeqAIJGetArrayRead(A, &aa));
  PetscCall(MatSeqAIJGetArrayRead(B, &ba));
  for (m = 0; m < am; ++m) {
    for (p = 0; p < bm; ++p) {
      for (n = a->i[m]; n < a->i[m + 1]; ++n) {
        for (q = b->i[p]; q < b->i[p + 1]; ++q) v[nnz++] = aa[n] * ba[q];
      }
    }
  }
  PetscCall(MatSeqAIJRestoreArray(*C, &v));
  PetscCall(MatSeqAIJRestoreArrayRead(A, &aa));
  PetscCall(MatSeqAIJRestoreArrayRead(B, &ba));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#include <../src/mat/impls/dense/seq/dense.h>
#include <petsc/private/kernels/petscaxpy.h>

/*
    Computes (B'*A')' since computing B*A directly is untenable

               n                       p                          p
        [             ]       [             ]         [                 ]
      m [      A      ]  *  n [       B     ]   =   m [         C      ]
        [             ]       [             ]         [                 ]

*/
PetscErrorCode MatMatMultNumeric_SeqDense_SeqAIJ(Mat A, Mat B, Mat C)
{
  Mat_SeqDense      *sub_a = (Mat_SeqDense *)A->data;
  Mat_SeqAIJ        *sub_b = (Mat_SeqAIJ *)B->data;
  Mat_SeqDense      *sub_c = (Mat_SeqDense *)C->data;
  PetscInt           i, j, n, m, q, p;
  const PetscInt    *ii, *idx;
  const PetscScalar *b, *a, *a_q;
  PetscScalar       *c, *c_q;
  PetscInt           clda = sub_c->lda;
  PetscInt           alda = sub_a->lda;

  PetscFunctionBegin;
  m = A->rmap->n;
  n = A->cmap->n;
  p = B->cmap->n;
  a = sub_a->v;
  b = sub_b->a;
  c = sub_c->v;
  /* zero C; the fast path applies when C's columns are stored contiguously (lda == m) */
  if (clda == m) {
    PetscCall(PetscArrayzero(c, m * p));
  } else {
    for (j = 0; j < p; j++)
      for (i = 0; i < m; i++) c[j * clda + i] = 0.0;
  }
  ii  = sub_b->i;
  idx = sub_b->j;
  /* for each nonzero B(i,k): C(:,k) += B(i,k) * A(:,i) */
  for (i = 0; i < n; i++) {
    q = ii[i + 1] - ii[i];
    while (q-- > 0) {
      c_q = c + clda * (*idx);
      a_q = a + alda * i;
      PetscKernelAXPY(c_q, *b, a_q, m);
      idx++;
      b++;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Symbolic phase for dense*AIJ: size/type C as dense and install the numeric routine */
PetscErrorCode MatMatMultSymbolic_SeqDense_SeqAIJ(Mat A, Mat B, PetscReal fill, Mat C)
{
  PetscInt  m = A->rmap->n, n = B->cmap->n;
  PetscBool cisdense;

  PetscFunctionBegin;
  PetscCheck(A->cmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "A->cmap->n %" PetscInt_FMT " != B->rmap->n %" PetscInt_FMT, A->cmap->n, B->rmap->n);
  PetscCall(MatSetSizes(C, m, n, m, n));
  PetscCall(MatSetBlockSizesFromMats(C, A, B));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)C, &cisdense, MATSEQDENSE, MATSEQDENSECUDA, MATSEQDENSEHIP, ""));
  if (!cisdense) PetscCall(MatSetType(C, MATDENSE));
  PetscCall(MatSetUp(C));

  C->ops->matmultnumeric = MatMatMultNumeric_SeqDense_SeqAIJ;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ----------------------------------------------------------------*/
/*MC
   MATSEQAIJ - MATSEQAIJ = "seqaij" - A matrix type to be used for sequential sparse matrices,
   based on compressed sparse row format.

   Options Database Keys:
.
-mat_type seqaij - sets the matrix type to "seqaij" during a call to MatSetFromOptions() 4316 4317 Level: beginner 4318 4319 Notes: 4320 `MatSetValues()` may be called for this matrix type with a NULL argument for the numerical values, 4321 in this case the values associated with the rows and columns one passes in are set to zero 4322 in the matrix 4323 4324 `MatSetOptions`(,`MAT_STRUCTURE_ONLY`,`PETSC_TRUE`) may be called for this matrix type. In this no 4325 space is allocated for the nonzero entries and any entries passed with `MatSetValues()` are ignored 4326 4327 Developer Note: 4328 It would be nice if all matrix formats supported passing NULL in for the numerical values 4329 4330 .seealso: `MatCreateSeqAIJ()`, `MatSetFromOptions()`, `MatSetType()`, `MatCreate()`, `MatType`, `MATSELL`, `MATSEQSELL`, `MATMPISELL` 4331 M*/ 4332 4333 /*MC 4334 MATAIJ - MATAIJ = "aij" - A matrix type to be used for sparse matrices. 4335 4336 This matrix type is identical to `MATSEQAIJ` when constructed with a single process communicator, 4337 and `MATMPIAIJ` otherwise. As a result, for single process communicators, 4338 `MatSeqAIJSetPreallocation()` is supported, and similarly `MatMPIAIJSetPreallocation()` is supported 4339 for communicators controlling multiple processes. It is recommended that you call both of 4340 the above preallocation routines for simplicity. 4341 4342 Options Database Keys: 4343 . -mat_type aij - sets the matrix type to "aij" during a call to `MatSetFromOptions()` 4344 4345 Note: 4346 Subclasses include `MATAIJCUSPARSE`, `MATAIJPERM`, `MATAIJSELL`, `MATAIJMKL`, `MATAIJCRL`, and also automatically switches over to use inodes when 4347 enough exist. 4348 4349 Level: beginner 4350 4351 .seealso: `MatCreateAIJ()`, `MatCreateSeqAIJ()`, `MATSEQAIJ`, `MATMPIAIJ`, `MATSELL`, `MATSEQSELL`, `MATMPISELL` 4352 M*/ 4353 4354 /*MC 4355 MATAIJCRL - MATAIJCRL = "aijcrl" - A matrix type to be used for sparse matrices. 
4356 4357 This matrix type is identical to `MATSEQAIJCRL` when constructed with a single process communicator, 4358 and `MATMPIAIJCRL` otherwise. As a result, for single process communicators, 4359 `MatSeqAIJSetPreallocation()` is supported, and similarly `MatMPIAIJSetPreallocation()` is supported 4360 for communicators controlling multiple processes. It is recommended that you call both of 4361 the above preallocation routines for simplicity. 4362 4363 Options Database Keys: 4364 . -mat_type aijcrl - sets the matrix type to "aijcrl" during a call to `MatSetFromOptions()` 4365 4366 Level: beginner 4367 4368 .seealso: `MatCreateMPIAIJCRL`, `MATSEQAIJCRL`, `MATMPIAIJCRL`, `MATSEQAIJCRL`, `MATMPIAIJCRL` 4369 M*/ 4370 4371 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJCRL(Mat, MatType, MatReuse, Mat *); 4372 #if defined(PETSC_HAVE_ELEMENTAL) 4373 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat, MatType, MatReuse, Mat *); 4374 #endif 4375 #if defined(PETSC_HAVE_SCALAPACK) 4376 PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *); 4377 #endif 4378 #if defined(PETSC_HAVE_HYPRE) 4379 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType, MatReuse, Mat *); 4380 #endif 4381 4382 PETSC_EXTERN PetscErrorCode MatConvert_SeqAIJ_SeqSELL(Mat, MatType, MatReuse, Mat *); 4383 PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat, MatType, MatReuse, Mat *); 4384 PETSC_INTERN PetscErrorCode MatProductSetFromOptions_IS_XAIJ(Mat); 4385 4386 /*@C 4387 MatSeqAIJGetArray - gives read/write access to the array where the data for a `MATSEQAIJ` matrix is stored 4388 4389 Not Collective 4390 4391 Input Parameter: 4392 . mat - a `MATSEQAIJ` matrix 4393 4394 Output Parameter: 4395 . 
array - pointer to the data

  Level: intermediate

  Fortran Note:
  `MatSeqAIJGetArray()` Fortran binding is deprecated (since PETSc 3.19), use `MatSeqAIJGetArrayF90()`

.seealso: `MatSeqAIJRestoreArray()`, `MatSeqAIJGetArrayF90()`
@*/
PetscErrorCode MatSeqAIJGetArray(Mat A, PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  /* subclasses (e.g. device back-ends) may override how the host array is exposed */
  if (aij->ops->getarray) {
    PetscCall((*aij->ops->getarray)(A, array));
  } else {
    *array = aij->a;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJRestoreArray - returns access to the array where the data for a `MATSEQAIJ` matrix is stored obtained by `MatSeqAIJGetArray()`

  Not Collective

  Input Parameters:
+ mat - a `MATSEQAIJ` matrix
- array - pointer to the data

  Level: intermediate

  Fortran Note:
  `MatSeqAIJRestoreArray()` Fortran binding is deprecated (since PETSc 3.19), use `MatSeqAIJRestoreArrayF90()`

.seealso: `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayF90()`
@*/
PetscErrorCode MatSeqAIJRestoreArray(Mat A, PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->restorearray) {
    PetscCall((*aij->ops->restorearray)(A, array));
  } else {
    *array = NULL; /* the caller's pointer is invalidated on return */
  }
  /* values may have been changed through the array: drop cached diagonals and bump the object state */
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJGetArrayRead - gives read-only access to the array where the data for a `MATSEQAIJ` matrix is stored

  Not Collective; No Fortran Support

  Input Parameter:
. mat - a `MATSEQAIJ` matrix

  Output Parameter:
. array - pointer to the data

  Level: intermediate

.seealso: `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayRead()`
@*/
PetscErrorCode MatSeqAIJGetArrayRead(Mat A, const PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->getarrayread) {
    PetscCall((*aij->ops->getarrayread)(A, array));
  } else {
    *array = aij->a;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJRestoreArrayRead - restore the read-only access array obtained from `MatSeqAIJGetArrayRead()`

  Not Collective; No Fortran Support

  Input Parameter:
. mat - a `MATSEQAIJ` matrix

  Output Parameter:
. array - pointer to the data

  Level: intermediate

.seealso: `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayRead()`
@*/
PetscErrorCode MatSeqAIJRestoreArrayRead(Mat A, const PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->restorearrayread) {
    PetscCall((*aij->ops->restorearrayread)(A, array));
  } else {
    *array = NULL;
  }
  /* note: unlike the writable variants, read access does not invalidate diagonals or change the object state */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJGetArrayWrite - gives write-only access to the array where the data for a `MATSEQAIJ` matrix is stored

  Not Collective; No Fortran Support

  Input Parameter:
. mat - a `MATSEQAIJ` matrix

  Output Parameter:
. array - pointer to the data

  Level: intermediate

.seealso: `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArrayWrite()`
@*/
PetscErrorCode MatSeqAIJGetArrayWrite(Mat A, PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->getarraywrite) {
    PetscCall((*aij->ops->getarraywrite)(A, array));
  } else {
    *array = aij->a;
  }
  /* the caller is expected to overwrite the values, so invalidate cached data up front */
  PetscCall(MatSeqAIJInvalidateDiagonal(A));
  PetscCall(PetscObjectStateIncrease((PetscObject)A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJRestoreArrayWrite - restore the write-only access array obtained from `MatSeqAIJGetArrayWrite()`

  Not Collective; No Fortran Support

  Input Parameter:
. mat - a `MATSEQAIJ` matrix

  Output Parameter:
. array - pointer to the data

  Level: intermediate

.seealso: `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayWrite()`
@*/
PetscErrorCode MatSeqAIJRestoreArrayWrite(Mat A, PetscScalar **array)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  if (aij->ops->restorearraywrite) {
    PetscCall((*aij->ops->restorearraywrite)(A, array));
  } else {
    *array = NULL;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJGetCSRAndMemType - Get the CSR arrays and the memory type of the `MATSEQAIJ` matrix

  Not Collective; No Fortran Support

  Input Parameter:
. mat - a matrix of type `MATSEQAIJ` or its subclasses

  Output Parameters:
+ i - row map array of the matrix
. j - column index array of the matrix
. a - data array of the matrix
- mtype - memory type of the arrays

  Notes:
  Any of the output parameters can be NULL, in which case the corresponding value is not returned.
  If mat is a device matrix, the arrays are on the device. Otherwise, they are on the host.
One can call this routine on a preallocated but not assembled matrix to just get the memory of the CSR underneath the matrix.
  If the matrix is assembled, the data array 'a' is guaranteed to have the latest values of the matrix.

  Level: Developer

.seealso: `MatSeqAIJGetArray()`, `MatSeqAIJGetArrayRead()`
@*/
PetscErrorCode MatSeqAIJGetCSRAndMemType(Mat mat, const PetscInt **i, const PetscInt **j, PetscScalar **a, PetscMemType *mtype)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)mat->data;

  PetscFunctionBegin;
  PetscCheck(mat->preallocated, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "matrix is not preallocated");
  if (aij->ops->getcsrandmemtype) {
    /* subclasses (e.g. device matrices) report their own arrays and memory type */
    PetscCall((*aij->ops->getcsrandmemtype)(mat, i, j, a, mtype));
  } else {
    /* plain host matrix: hand out the internal CSR arrays directly */
    if (i) *i = aij->i;
    if (j) *j = aij->j;
    if (a) *a = aij->a;
    if (mtype) *mtype = PETSC_MEMTYPE_HOST;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJGetMaxRowNonzeros - returns the maximum number of nonzeros in any row

  Not Collective

  Input Parameter:
. mat - a `MATSEQAIJ` matrix

  Output Parameter:
.
nz - the maximum number of nonzeros in any row

  Level: intermediate

.seealso: `MatSeqAIJRestoreArray()`, `MatSeqAIJGetArrayF90()`
@*/
PetscErrorCode MatSeqAIJGetMaxRowNonzeros(Mat A, PetscInt *nz)
{
  Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  /* rmax is a cached field of Mat_SeqAIJ, so this is O(1) */
  *nz = aij->rmax;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Build the CSR skeleton of the matrix from coo_n (i,j) coordinates, which may contain repeated
   and negative (ignored) indices. Also record, for each unique nonzero, where its duplicates live
   in the caller's value array (jmap/perm) so MatSetValuesCOO can sum them quickly. Note: coo_i and
   coo_j are sorted in place. */
PetscErrorCode MatSetPreallocationCOO_SeqAIJ(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
{
  MPI_Comm     comm;
  PetscInt    *i, *j;
  PetscInt     M, N, row;
  PetscCount   k, p, q, nneg, nnz, start, end; /* Index the coo array, so use PetscCount as their type */
  PetscInt    *Ai;                             /* Change to PetscCount once we use it for row pointers */
  PetscInt    *Aj;
  PetscScalar *Aa;
  Mat_SeqAIJ  *seqaij = (Mat_SeqAIJ *)(mat->data);
  MatType      rtype;
  PetscCount  *perm, *jmap;

  PetscFunctionBegin;
  PetscCall(MatResetPreallocationCOO_SeqAIJ(mat));
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCall(MatGetSize(mat, &M, &N));
  i = coo_i;
  j = coo_j;
  PetscCall(PetscMalloc1(coo_n, &perm));
  /* Ignore entries with negative row or col indices: a negative column is folded into a negative
     row so that, after sorting by row, all ignored entries sit at the front of the arrays */
  for (k = 0; k < coo_n; k++) {
    if (j[k] < 0) i[k] = -1;
    perm[k] = k;
  }

  /* Sort by row */
  PetscCall(PetscSortIntWithIntCountArrayPair(coo_n, i, j, perm));
  for (k = 0; k < coo_n; k++) {
    if (i[k] >= 0) break;
  } /* Advance k to the first row with a non-negative index */
  nneg = k;
  PetscCall(PetscMalloc1(coo_n - nneg + 1, &jmap)); /* +1 to make a CSR-like data structure. jmap[i] originally is the number of repeats for i-th nonzero */
  nnz = 0; /* Total number of unique nonzeros to be counted */
  jmap++;  /* Inc jmap by 1 for convenience */

  PetscCall(PetscCalloc1(M + 1, &Ai));        /* CSR of A */
  PetscCall(PetscMalloc1(coo_n - nneg, &Aj)); /* We have at most coo_n-nneg unique nonzeros */

  /* In each row, sort by column, then unique column indices to get row length */
  Ai++;  /* Inc by 1 for convenience */
  q = 0; /* q-th unique nonzero, with q starting from 0 */
  while (k < coo_n) {
    row   = i[k];
    start = k; /* [start,end) indices for this row */
    while (k < coo_n && i[k] == row) k++;
    end = k;
    PetscCall(PetscSortIntWithCountArray(end - start, j + start, perm + start));
    /* Find number of unique col entries in this row */
    Aj[q]   = j[start]; /* Log the first nonzero in this row */
    jmap[q] = 1;        /* Number of repeats of this nonzero entry */
    Ai[row] = 1;
    nnz++;

    for (p = start + 1; p < end; p++) { /* Scan remaining nonzeros in this row */
      if (j[p] != j[p - 1]) {           /* Meet a new nonzero */
        q++;
        jmap[q] = 1;
        Aj[q]   = j[p];
        Ai[row]++;
        nnz++;
      } else {
        jmap[q]++;
      }
    }
    q++; /* Move to next row and thus next unique nonzero */
  }

  Ai--;   /* Back to the beginning of Ai[] */
  for (k = 0; k < M; k++) Ai[k + 1] += Ai[k];
  jmap--; /* Back to the beginning of jmap[] */
  jmap[0] = 0;
  for (k = 0; k < nnz; k++) jmap[k + 1] += jmap[k];
  if (nnz < coo_n - nneg) { /* Realloc with actual number of unique nonzeros */
    PetscCount *jmap_new;
    PetscInt   *Aj_new;

    PetscCall(PetscMalloc1(nnz + 1, &jmap_new));
    PetscCall(PetscArraycpy(jmap_new, jmap, nnz + 1));
    PetscCall(PetscFree(jmap));
    jmap = jmap_new;

    PetscCall(PetscMalloc1(nnz, &Aj_new));
    PetscCall(PetscArraycpy(Aj_new, Aj, nnz));
    PetscCall(PetscFree(Aj));
    Aj = Aj_new;
  }

if (nneg) { /* Discard heading entries with negative indices in perm[], as we'll access it from index 0 in MatSetValuesCOO */ 4716 PetscCount *perm_new; 4717 4718 PetscCall(PetscMalloc1(coo_n - nneg, &perm_new)); 4719 PetscCall(PetscArraycpy(perm_new, perm + nneg, coo_n - nneg)); 4720 PetscCall(PetscFree(perm)); 4721 perm = perm_new; 4722 } 4723 4724 PetscCall(MatGetRootType_Private(mat, &rtype)); 4725 PetscCall(PetscCalloc1(nnz, &Aa)); /* Zero the matrix */ 4726 PetscCall(MatSetSeqAIJWithArrays_private(PETSC_COMM_SELF, M, N, Ai, Aj, Aa, rtype, mat)); 4727 4728 seqaij->singlemalloc = PETSC_FALSE; /* Ai, Aj and Aa are not allocated in one big malloc */ 4729 seqaij->free_a = seqaij->free_ij = PETSC_TRUE; /* Let newmat own Ai, Aj and Aa */ 4730 /* Record COO fields */ 4731 seqaij->coo_n = coo_n; 4732 seqaij->Atot = coo_n - nneg; /* Annz is seqaij->nz, so no need to record that again */ 4733 seqaij->jmap = jmap; /* of length nnz+1 */ 4734 seqaij->perm = perm; 4735 PetscFunctionReturn(PETSC_SUCCESS); 4736 } 4737 4738 static PetscErrorCode MatSetValuesCOO_SeqAIJ(Mat A, const PetscScalar v[], InsertMode imode) 4739 { 4740 Mat_SeqAIJ *aseq = (Mat_SeqAIJ *)A->data; 4741 PetscCount i, j, Annz = aseq->nz; 4742 PetscCount *perm = aseq->perm, *jmap = aseq->jmap; 4743 PetscScalar *Aa; 4744 4745 PetscFunctionBegin; 4746 PetscCall(MatSeqAIJGetArray(A, &Aa)); 4747 for (i = 0; i < Annz; i++) { 4748 PetscScalar sum = 0.0; 4749 for (j = jmap[i]; j < jmap[i + 1]; j++) sum += v[perm[j]]; 4750 Aa[i] = (imode == INSERT_VALUES ? 
0.0 : Aa[i]) + sum; 4751 } 4752 PetscCall(MatSeqAIJRestoreArray(A, &Aa)); 4753 PetscFunctionReturn(PETSC_SUCCESS); 4754 } 4755 4756 #if defined(PETSC_HAVE_CUDA) 4757 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJCUSPARSE(Mat, MatType, MatReuse, Mat *); 4758 #endif 4759 #if defined(PETSC_HAVE_HIP) 4760 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJHIPSPARSE(Mat, MatType, MatReuse, Mat *); 4761 #endif 4762 #if defined(PETSC_HAVE_KOKKOS_KERNELS) 4763 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJKokkos(Mat, MatType, MatReuse, Mat *); 4764 #endif 4765 4766 PETSC_EXTERN PetscErrorCode MatCreate_SeqAIJ(Mat B) 4767 { 4768 Mat_SeqAIJ *b; 4769 PetscMPIInt size; 4770 4771 PetscFunctionBegin; 4772 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)B), &size)); 4773 PetscCheck(size <= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Comm must be of size 1"); 4774 4775 PetscCall(PetscNew(&b)); 4776 4777 B->data = (void *)b; 4778 4779 PetscCall(PetscMemcpy(B->ops, &MatOps_Values, sizeof(struct _MatOps))); 4780 if (B->sortedfull) B->ops->setvalues = MatSetValues_SeqAIJ_SortedFull; 4781 4782 b->row = NULL; 4783 b->col = NULL; 4784 b->icol = NULL; 4785 b->reallocs = 0; 4786 b->ignorezeroentries = PETSC_FALSE; 4787 b->roworiented = PETSC_TRUE; 4788 b->nonew = 0; 4789 b->diag = NULL; 4790 b->solve_work = NULL; 4791 B->spptr = NULL; 4792 b->saved_values = NULL; 4793 b->idiag = NULL; 4794 b->mdiag = NULL; 4795 b->ssor_work = NULL; 4796 b->omega = 1.0; 4797 b->fshift = 0.0; 4798 b->idiagvalid = PETSC_FALSE; 4799 b->ibdiagvalid = PETSC_FALSE; 4800 b->keepnonzeropattern = PETSC_FALSE; 4801 4802 PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATSEQAIJ)); 4803 #if defined(PETSC_HAVE_MATLAB) 4804 PetscCall(PetscObjectComposeFunction((PetscObject)B, "PetscMatlabEnginePut_C", MatlabEnginePut_SeqAIJ)); 4805 PetscCall(PetscObjectComposeFunction((PetscObject)B, "PetscMatlabEngineGet_C", MatlabEngineGet_SeqAIJ)); 4806 #endif 4807 
PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqAIJSetColumnIndices_C", MatSeqAIJSetColumnIndices_SeqAIJ)); 4808 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatStoreValues_C", MatStoreValues_SeqAIJ)); 4809 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatRetrieveValues_C", MatRetrieveValues_SeqAIJ)); 4810 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqsbaij_C", MatConvert_SeqAIJ_SeqSBAIJ)); 4811 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqbaij_C", MatConvert_SeqAIJ_SeqBAIJ)); 4812 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijperm_C", MatConvert_SeqAIJ_SeqAIJPERM)); 4813 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijsell_C", MatConvert_SeqAIJ_SeqAIJSELL)); 4814 #if defined(PETSC_HAVE_MKL_SPARSE) 4815 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijmkl_C", MatConvert_SeqAIJ_SeqAIJMKL)); 4816 #endif 4817 #if defined(PETSC_HAVE_CUDA) 4818 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijcusparse_C", MatConvert_SeqAIJ_SeqAIJCUSPARSE)); 4819 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijcusparse_seqaij_C", MatProductSetFromOptions_SeqAIJ)); 4820 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_seqaijcusparse_C", MatProductSetFromOptions_SeqAIJ)); 4821 #endif 4822 #if defined(PETSC_HAVE_HIP) 4823 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijhipsparse_C", MatConvert_SeqAIJ_SeqAIJHIPSPARSE)); 4824 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaijhipsparse_seqaij_C", MatProductSetFromOptions_SeqAIJ)); 4825 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_seqaijhipsparse_C", MatProductSetFromOptions_SeqAIJ)); 4826 #endif 4827 #if defined(PETSC_HAVE_KOKKOS_KERNELS) 4828 
PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijkokkos_C", MatConvert_SeqAIJ_SeqAIJKokkos)); 4829 #endif 4830 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqaijcrl_C", MatConvert_SeqAIJ_SeqAIJCRL)); 4831 #if defined(PETSC_HAVE_ELEMENTAL) 4832 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_elemental_C", MatConvert_SeqAIJ_Elemental)); 4833 #endif 4834 #if defined(PETSC_HAVE_SCALAPACK) 4835 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_scalapack_C", MatConvert_AIJ_ScaLAPACK)); 4836 #endif 4837 #if defined(PETSC_HAVE_HYPRE) 4838 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_hypre_C", MatConvert_AIJ_HYPRE)); 4839 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_transpose_seqaij_seqaij_C", MatProductSetFromOptions_Transpose_AIJ_AIJ)); 4840 #endif 4841 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqdense_C", MatConvert_SeqAIJ_SeqDense)); 4842 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_seqsell_C", MatConvert_SeqAIJ_SeqSELL)); 4843 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_seqaij_is_C", MatConvert_XAIJ_IS)); 4844 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatIsTranspose_C", MatIsTranspose_SeqAIJ)); 4845 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatIsHermitianTranspose_C", MatIsTranspose_SeqAIJ)); 4846 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqAIJSetPreallocation_C", MatSeqAIJSetPreallocation_SeqAIJ)); 4847 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatResetPreallocation_C", MatResetPreallocation_SeqAIJ)); 4848 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqAIJSetPreallocationCSR_C", MatSeqAIJSetPreallocationCSR_SeqAIJ)); 4849 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatReorderForNonzeroDiagonal_C", MatReorderForNonzeroDiagonal_SeqAIJ)); 4850 
PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_is_seqaij_C", MatProductSetFromOptions_IS_XAIJ)); 4851 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqdense_seqaij_C", MatProductSetFromOptions_SeqDense_SeqAIJ)); 4852 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_seqaij_C", MatProductSetFromOptions_SeqAIJ)); 4853 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSeqAIJKron_C", MatSeqAIJKron_SeqAIJ)); 4854 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_SeqAIJ)); 4855 PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_SeqAIJ)); 4856 PetscCall(MatCreate_SeqAIJ_Inode(B)); 4857 PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATSEQAIJ)); 4858 PetscCall(MatSeqAIJSetTypeFromOptions(B)); /* this allows changing the matrix subtype to say MATSEQAIJPERM */ 4859 PetscFunctionReturn(PETSC_SUCCESS); 4860 } 4861 4862 /* 4863 Given a matrix generated with MatGetFactor() duplicates all the information in A into C 4864 */ 4865 PetscErrorCode MatDuplicateNoCreate_SeqAIJ(Mat C, Mat A, MatDuplicateOption cpvalues, PetscBool mallocmatspace) 4866 { 4867 Mat_SeqAIJ *c = (Mat_SeqAIJ *)C->data, *a = (Mat_SeqAIJ *)A->data; 4868 PetscInt m = A->rmap->n, i; 4869 4870 PetscFunctionBegin; 4871 PetscCheck(A->assembled || cpvalues == MAT_DO_NOT_COPY_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot duplicate unassembled matrix"); 4872 4873 C->factortype = A->factortype; 4874 c->row = NULL; 4875 c->col = NULL; 4876 c->icol = NULL; 4877 c->reallocs = 0; 4878 4879 C->assembled = A->assembled; 4880 4881 if (A->preallocated) { 4882 PetscCall(PetscLayoutReference(A->rmap, &C->rmap)); 4883 PetscCall(PetscLayoutReference(A->cmap, &C->cmap)); 4884 4885 if (!A->hash_active) { 4886 PetscCall(PetscMalloc1(m, &c->imax)); 4887 PetscCall(PetscMemcpy(c->imax, a->imax, m * sizeof(PetscInt))); 4888 
PetscCall(PetscMalloc1(m, &c->ilen)); 4889 PetscCall(PetscMemcpy(c->ilen, a->ilen, m * sizeof(PetscInt))); 4890 4891 /* allocate the matrix space */ 4892 if (mallocmatspace) { 4893 PetscCall(PetscMalloc3(a->i[m], &c->a, a->i[m], &c->j, m + 1, &c->i)); 4894 4895 c->singlemalloc = PETSC_TRUE; 4896 4897 PetscCall(PetscArraycpy(c->i, a->i, m + 1)); 4898 if (m > 0) { 4899 PetscCall(PetscArraycpy(c->j, a->j, a->i[m])); 4900 if (cpvalues == MAT_COPY_VALUES) { 4901 const PetscScalar *aa; 4902 4903 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 4904 PetscCall(PetscArraycpy(c->a, aa, a->i[m])); 4905 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 4906 } else { 4907 PetscCall(PetscArrayzero(c->a, a->i[m])); 4908 } 4909 } 4910 } 4911 C->preallocated = PETSC_TRUE; 4912 } else { 4913 PetscCheck(mallocmatspace, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Cannot malloc matrix memory from a non-preallocated matrix"); 4914 PetscCall(MatSetUp(C)); 4915 } 4916 4917 c->ignorezeroentries = a->ignorezeroentries; 4918 c->roworiented = a->roworiented; 4919 c->nonew = a->nonew; 4920 if (a->diag) { 4921 PetscCall(PetscMalloc1(m + 1, &c->diag)); 4922 PetscCall(PetscMemcpy(c->diag, a->diag, m * sizeof(PetscInt))); 4923 } else c->diag = NULL; 4924 4925 c->solve_work = NULL; 4926 c->saved_values = NULL; 4927 c->idiag = NULL; 4928 c->ssor_work = NULL; 4929 c->keepnonzeropattern = a->keepnonzeropattern; 4930 c->free_a = PETSC_TRUE; 4931 c->free_ij = PETSC_TRUE; 4932 4933 c->rmax = a->rmax; 4934 c->nz = a->nz; 4935 c->maxnz = a->nz; /* Since we allocate exactly the right amount */ 4936 4937 c->compressedrow.use = a->compressedrow.use; 4938 c->compressedrow.nrows = a->compressedrow.nrows; 4939 if (a->compressedrow.use) { 4940 i = a->compressedrow.nrows; 4941 PetscCall(PetscMalloc2(i + 1, &c->compressedrow.i, i, &c->compressedrow.rindex)); 4942 PetscCall(PetscArraycpy(c->compressedrow.i, a->compressedrow.i, i + 1)); 4943 PetscCall(PetscArraycpy(c->compressedrow.rindex, a->compressedrow.rindex, 
i)); 4944 } else { 4945 c->compressedrow.use = PETSC_FALSE; 4946 c->compressedrow.i = NULL; 4947 c->compressedrow.rindex = NULL; 4948 } 4949 c->nonzerorowcnt = a->nonzerorowcnt; 4950 C->nonzerostate = A->nonzerostate; 4951 4952 PetscCall(MatDuplicate_SeqAIJ_Inode(A, cpvalues, &C)); 4953 } 4954 PetscCall(PetscFunctionListDuplicate(((PetscObject)A)->qlist, &((PetscObject)C)->qlist)); 4955 PetscFunctionReturn(PETSC_SUCCESS); 4956 } 4957 4958 PetscErrorCode MatDuplicate_SeqAIJ(Mat A, MatDuplicateOption cpvalues, Mat *B) 4959 { 4960 PetscFunctionBegin; 4961 PetscCall(MatCreate(PetscObjectComm((PetscObject)A), B)); 4962 PetscCall(MatSetSizes(*B, A->rmap->n, A->cmap->n, A->rmap->n, A->cmap->n)); 4963 if (!(A->rmap->n % A->rmap->bs) && !(A->cmap->n % A->cmap->bs)) PetscCall(MatSetBlockSizesFromMats(*B, A, A)); 4964 PetscCall(MatSetType(*B, ((PetscObject)A)->type_name)); 4965 PetscCall(MatDuplicateNoCreate_SeqAIJ(*B, A, cpvalues, PETSC_TRUE)); 4966 PetscFunctionReturn(PETSC_SUCCESS); 4967 } 4968 4969 PetscErrorCode MatLoad_SeqAIJ(Mat newMat, PetscViewer viewer) 4970 { 4971 PetscBool isbinary, ishdf5; 4972 4973 PetscFunctionBegin; 4974 PetscValidHeaderSpecific(newMat, MAT_CLASSID, 1); 4975 PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2); 4976 /* force binary viewer to load .info file if it has not yet done so */ 4977 PetscCall(PetscViewerSetUp(viewer)); 4978 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 4979 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5)); 4980 if (isbinary) { 4981 PetscCall(MatLoad_SeqAIJ_Binary(newMat, viewer)); 4982 } else if (ishdf5) { 4983 #if defined(PETSC_HAVE_HDF5) 4984 PetscCall(MatLoad_AIJ_HDF5(newMat, viewer)); 4985 #else 4986 SETERRQ(PetscObjectComm((PetscObject)newMat), PETSC_ERR_SUP, "HDF5 not supported in this build.\nPlease reconfigure using --download-hdf5"); 4987 #endif 4988 } else { 4989 SETERRQ(PetscObjectComm((PetscObject)newMat), PETSC_ERR_SUP, "Viewer 
type %s not yet supported for reading %s matrices", ((PetscObject)viewer)->type_name, ((PetscObject)newMat)->type_name); 4990 } 4991 PetscFunctionReturn(PETSC_SUCCESS); 4992 } 4993 4994 PetscErrorCode MatLoad_SeqAIJ_Binary(Mat mat, PetscViewer viewer) 4995 { 4996 Mat_SeqAIJ *a = (Mat_SeqAIJ *)mat->data; 4997 PetscInt header[4], *rowlens, M, N, nz, sum, rows, cols, i; 4998 4999 PetscFunctionBegin; 5000 PetscCall(PetscViewerSetUp(viewer)); 5001 5002 /* read in matrix header */ 5003 PetscCall(PetscViewerBinaryRead(viewer, header, 4, NULL, PETSC_INT)); 5004 PetscCheck(header[0] == MAT_FILE_CLASSID, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Not a matrix object in file"); 5005 M = header[1]; 5006 N = header[2]; 5007 nz = header[3]; 5008 PetscCheck(M >= 0, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Matrix row size (%" PetscInt_FMT ") in file is negative", M); 5009 PetscCheck(N >= 0, PetscObjectComm((PetscObject)viewer), PETSC_ERR_FILE_UNEXPECTED, "Matrix column size (%" PetscInt_FMT ") in file is negative", N); 5010 PetscCheck(nz >= 0, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Matrix stored in special format on disk, cannot load as SeqAIJ"); 5011 5012 /* set block sizes from the viewer's .info file */ 5013 PetscCall(MatLoad_Binary_BlockSizes(mat, viewer)); 5014 /* set local and global sizes if not set already */ 5015 if (mat->rmap->n < 0) mat->rmap->n = M; 5016 if (mat->cmap->n < 0) mat->cmap->n = N; 5017 if (mat->rmap->N < 0) mat->rmap->N = M; 5018 if (mat->cmap->N < 0) mat->cmap->N = N; 5019 PetscCall(PetscLayoutSetUp(mat->rmap)); 5020 PetscCall(PetscLayoutSetUp(mat->cmap)); 5021 5022 /* check if the matrix sizes are correct */ 5023 PetscCall(MatGetSize(mat, &rows, &cols)); 5024 PetscCheck(M == rows && N == cols, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Matrix in file of different sizes (%" PetscInt_FMT ", %" PetscInt_FMT ") than the input matrix (%" PetscInt_FMT ", %" PetscInt_FMT ")", M, N, rows, cols); 5025 5026 /* read in row lengths 
*/ 5027 PetscCall(PetscMalloc1(M, &rowlens)); 5028 PetscCall(PetscViewerBinaryRead(viewer, rowlens, M, NULL, PETSC_INT)); 5029 /* check if sum(rowlens) is same as nz */ 5030 sum = 0; 5031 for (i = 0; i < M; i++) sum += rowlens[i]; 5032 PetscCheck(sum == nz, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Inconsistent matrix data in file: nonzeros = %" PetscInt_FMT ", sum-row-lengths = %" PetscInt_FMT, nz, sum); 5033 /* preallocate and check sizes */ 5034 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(mat, 0, rowlens)); 5035 PetscCall(MatGetSize(mat, &rows, &cols)); 5036 PetscCheck(M == rows && N == cols, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Matrix in file of different length (%" PetscInt_FMT ", %" PetscInt_FMT ") than the input matrix (%" PetscInt_FMT ", %" PetscInt_FMT ")", M, N, rows, cols); 5037 /* store row lengths */ 5038 PetscCall(PetscArraycpy(a->ilen, rowlens, M)); 5039 PetscCall(PetscFree(rowlens)); 5040 5041 /* fill in "i" row pointers */ 5042 a->i[0] = 0; 5043 for (i = 0; i < M; i++) a->i[i + 1] = a->i[i] + a->ilen[i]; 5044 /* read in "j" column indices */ 5045 PetscCall(PetscViewerBinaryRead(viewer, a->j, nz, NULL, PETSC_INT)); 5046 /* read in "a" nonzero values */ 5047 PetscCall(PetscViewerBinaryRead(viewer, a->a, nz, NULL, PETSC_SCALAR)); 5048 5049 PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY)); 5050 PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); 5051 PetscFunctionReturn(PETSC_SUCCESS); 5052 } 5053 5054 PetscErrorCode MatEqual_SeqAIJ(Mat A, Mat B, PetscBool *flg) 5055 { 5056 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data, *b = (Mat_SeqAIJ *)B->data; 5057 const PetscScalar *aa, *ba; 5058 #if defined(PETSC_USE_COMPLEX) 5059 PetscInt k; 5060 #endif 5061 5062 PetscFunctionBegin; 5063 /* If the matrix dimensions are not equal,or no of nonzeros */ 5064 if ((A->rmap->n != B->rmap->n) || (A->cmap->n != B->cmap->n) || (a->nz != b->nz)) { 5065 *flg = PETSC_FALSE; 5066 PetscFunctionReturn(PETSC_SUCCESS); 5067 } 5068 5069 /* if the a->i are the same */ 5070 
PetscCall(PetscArraycmp(a->i, b->i, A->rmap->n + 1, flg)); 5071 if (!*flg) PetscFunctionReturn(PETSC_SUCCESS); 5072 5073 /* if a->j are the same */ 5074 PetscCall(PetscArraycmp(a->j, b->j, a->nz, flg)); 5075 if (!*flg) PetscFunctionReturn(PETSC_SUCCESS); 5076 5077 PetscCall(MatSeqAIJGetArrayRead(A, &aa)); 5078 PetscCall(MatSeqAIJGetArrayRead(B, &ba)); 5079 /* if a->a are the same */ 5080 #if defined(PETSC_USE_COMPLEX) 5081 for (k = 0; k < a->nz; k++) { 5082 if (PetscRealPart(aa[k]) != PetscRealPart(ba[k]) || PetscImaginaryPart(aa[k]) != PetscImaginaryPart(ba[k])) { 5083 *flg = PETSC_FALSE; 5084 PetscFunctionReturn(PETSC_SUCCESS); 5085 } 5086 } 5087 #else 5088 PetscCall(PetscArraycmp(aa, ba, a->nz, flg)); 5089 #endif 5090 PetscCall(MatSeqAIJRestoreArrayRead(A, &aa)); 5091 PetscCall(MatSeqAIJRestoreArrayRead(B, &ba)); 5092 PetscFunctionReturn(PETSC_SUCCESS); 5093 } 5094 5095 /*@ 5096 MatCreateSeqAIJWithArrays - Creates an sequential `MATSEQAIJ` matrix using matrix elements (in CSR format) 5097 provided by the user. 5098 5099 Collective 5100 5101 Input Parameters: 5102 + comm - must be an MPI communicator of size 1 5103 . m - number of rows 5104 . n - number of columns 5105 . i - row indices; that is i[0] = 0, i[row] = i[row-1] + number of elements in that row of the matrix 5106 . j - column indices 5107 - a - matrix values 5108 5109 Output Parameter: 5110 . mat - the matrix 5111 5112 Level: intermediate 5113 5114 Notes: 5115 The i, j, and a arrays are not copied by this routine, the user must free these arrays 5116 once the matrix is destroyed and not before 5117 5118 You cannot set new nonzero locations into this matrix, that will generate an error. 5119 5120 The i and j indices are 0 based 5121 5122 The format which is used for the sparse matrix input, is equivalent to a 5123 row-major ordering.. 
i.e for the following matrix, the input data expected is 5124 as shown 5125 5126 $ 1 0 0 5127 $ 2 0 3 5128 $ 4 5 6 5129 $ 5130 $ i = {0,1,3,6} [size = nrow+1 = 3+1] 5131 $ j = {0,0,2,0,1,2} [size = 6]; values must be sorted for each row 5132 $ v = {1,2,3,4,5,6} [size = 6] 5133 5134 .seealso: `MatCreate()`, `MatCreateAIJ()`, `MatCreateSeqAIJ()`, `MatCreateMPIAIJWithArrays()`, `MatMPIAIJSetPreallocationCSR()` 5135 @*/ 5136 PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt i[], PetscInt j[], PetscScalar a[], Mat *mat) 5137 { 5138 PetscInt ii; 5139 Mat_SeqAIJ *aij; 5140 PetscInt jj; 5141 5142 PetscFunctionBegin; 5143 PetscCheck(m <= 0 || i[0] == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "i (row indices) must start with 0"); 5144 PetscCall(MatCreate(comm, mat)); 5145 PetscCall(MatSetSizes(*mat, m, n, m, n)); 5146 /* PetscCall(MatSetBlockSizes(*mat,,)); */ 5147 PetscCall(MatSetType(*mat, MATSEQAIJ)); 5148 PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(*mat, MAT_SKIP_ALLOCATION, NULL)); 5149 aij = (Mat_SeqAIJ *)(*mat)->data; 5150 PetscCall(PetscMalloc1(m, &aij->imax)); 5151 PetscCall(PetscMalloc1(m, &aij->ilen)); 5152 5153 aij->i = i; 5154 aij->j = j; 5155 aij->a = a; 5156 aij->singlemalloc = PETSC_FALSE; 5157 aij->nonew = -1; /*this indicates that inserting a new value in the matrix that generates a new nonzero is an error*/ 5158 aij->free_a = PETSC_FALSE; 5159 aij->free_ij = PETSC_FALSE; 5160 5161 for (ii = 0, aij->nonzerorowcnt = 0, aij->rmax = 0; ii < m; ii++) { 5162 aij->ilen[ii] = aij->imax[ii] = i[ii + 1] - i[ii]; 5163 if (PetscDefined(USE_DEBUG)) { 5164 PetscCheck(i[ii + 1] - i[ii] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative row length in i (row indices) row = %" PetscInt_FMT " length = %" PetscInt_FMT, ii, i[ii + 1] - i[ii]); 5165 for (jj = i[ii] + 1; jj < i[ii + 1]; jj++) { 5166 PetscCheck(j[jj] >= j[jj - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column entry number %" PetscInt_FMT " (actual column %" 
PetscInt_FMT ") in row %" PetscInt_FMT " is not sorted", jj - i[ii], j[jj], ii); 5167 PetscCheck(j[jj] != j[jj - 1], PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column entry number %" PetscInt_FMT " (actual column %" PetscInt_FMT ") in row %" PetscInt_FMT " is identical to previous entry", jj - i[ii], j[jj], ii); 5168 } 5169 } 5170 } 5171 if (PetscDefined(USE_DEBUG)) { 5172 for (ii = 0; ii < aij->i[m]; ii++) { 5173 PetscCheck(j[ii] >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Negative column index at location = %" PetscInt_FMT " index = %" PetscInt_FMT, ii, j[ii]); 5174 PetscCheck(j[ii] <= n - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column index to large at location = %" PetscInt_FMT " index = %" PetscInt_FMT, ii, j[ii]); 5175 } 5176 } 5177 5178 PetscCall(MatAssemblyBegin(*mat, MAT_FINAL_ASSEMBLY)); 5179 PetscCall(MatAssemblyEnd(*mat, MAT_FINAL_ASSEMBLY)); 5180 PetscFunctionReturn(PETSC_SUCCESS); 5181 } 5182 5183 /*@ 5184 MatCreateSeqAIJFromTriple - Creates an sequential `MATSEQAIJ` matrix using matrix elements (in COO format) 5185 provided by the user. 5186 5187 Collective 5188 5189 Input Parameters: 5190 + comm - must be an MPI communicator of size 1 5191 . m - number of rows 5192 . n - number of columns 5193 . i - row indices 5194 . j - column indices 5195 . a - matrix values 5196 . nz - number of nonzeros 5197 - idx - if the i and j indices start with 1 use `PETSC_TRUE` otherwise use `PETSC_FALSE` 5198 5199 Output Parameter: 5200 . mat - the matrix 5201 5202 Level: intermediate 5203 5204 Example: 5205 For the following matrix, the input data expected is as shown (using 0 based indexing) 5206 .vb 5207 1 0 0 5208 2 0 3 5209 4 5 6 5210 5211 i = {0,1,1,2,2,2} 5212 j = {0,0,2,0,1,2} 5213 v = {1,2,3,4,5,6} 5214 .ve 5215 Notes: 5216 Instead of using this function, users should also consider `MatSetPreallocationCOO()` and `MatSetValuesCOO()`, which allow repeated or remote entries, 5217 and are particularly useful in iterative applications. 

.seealso: `MatCreate()`, `MatCreateAIJ()`, `MatCreateSeqAIJ()`, `MatCreateSeqAIJWithArrays()`, `MatMPIAIJSetPreallocationCSR()`, `MatSetValuesCOO()`, `MatSetPreallocationCOO()`
@*/
PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm comm, PetscInt m, PetscInt n, PetscInt i[], PetscInt j[], PetscScalar a[], Mat *mat, PetscInt nz, PetscBool idx)
{
  PetscInt ii, *nnz, one = 1, row, col;

  PetscFunctionBegin;
  /* first pass: count the entries destined for each row so the matrix can be preallocated exactly */
  PetscCall(PetscCalloc1(m, &nnz));
  /* !!idx maps the one-based flag to the 0/1 offset subtracted from the row index */
  /* NOTE(review): i[] entries are not range-checked here; an out-of-range row would index nnz[] out of bounds — presumably callers pass valid triples */
  for (ii = 0; ii < nz; ii++) nnz[i[ii] - !!idx] += 1;
  PetscCall(MatCreate(comm, mat));
  PetscCall(MatSetSizes(*mat, m, n, m, n));
  PetscCall(MatSetType(*mat, MATSEQAIJ));
  PetscCall(MatSeqAIJSetPreallocation_SeqAIJ(*mat, 0, nnz));
  /* second pass: insert the values, shifting one-based indices down when idx is set;
     ADD_VALUES makes repeated (row,col) triples accumulate */
  for (ii = 0; ii < nz; ii++) {
    if (idx) {
      row = i[ii] - 1;
      col = j[ii] - 1;
    } else {
      row = i[ii];
      col = j[ii];
    }
    PetscCall(MatSetValues(*mat, one, &row, one, &col, &a[ii], ADD_VALUES));
  }
  PetscCall(MatAssemblyBegin(*mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*mat, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree(nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Marks the cached (inverse) diagonal of A as stale; it will be recomputed on next use */
PetscErrorCode MatSeqAIJInvalidateDiagonal(Mat A)
{
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;

  PetscFunctionBegin;
  a->idiagvalid  = PETSC_FALSE;
  a->ibdiagvalid = PETSC_FALSE;

  /* propagate the invalidation to the inode-specific cached data as well */
  PetscCall(MatSeqAIJInvalidateDiagonal_Inode(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Thin dispatcher: the MPIAIJ implementation handles the SeqAIJ case too */
PetscErrorCode MatCreateMPIMatConcatenateSeqMat_SeqAIJ(MPI_Comm comm, Mat inmat, PetscInt n, MatReuse scall, Mat *outmat)
{
  PetscFunctionBegin;
  PetscCall(MatCreateMPIMatConcatenateSeqMat_MPIAIJ(comm, inmat, n, scall, outmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Permute A into C's *local* index space using rowemb,colemb.
  The embeddings are supposed to be injections and the above implies that the range of rowemb is a subset
  of [0,m), colemb is in [0,n).
  If pattern == DIFFERENT_NONZERO_PATTERN, C is preallocated according to A.
*/
PetscErrorCode MatSetSeqMat_SeqAIJ(Mat C, IS rowemb, IS colemb, MatStructure pattern, Mat B)
{
  /* If making this function public, change the error returned in this function away from _PLIB. */
  Mat_SeqAIJ     *Baij;
  PetscBool       seqaij;
  PetscInt        m, n, *nz, i, j, count;
  PetscScalar     v;
  const PetscInt *rowindices, *colindices;

  PetscFunctionBegin;
  /* nothing to copy */
  if (!B) PetscFunctionReturn(PETSC_SUCCESS);
  /* Check to make sure the target matrix (and embeddings) are compatible with C and each other. */
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)B, MATSEQAIJ, &seqaij));
  PetscCheck(seqaij, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Input matrix is of wrong type");
  /* with an embedding, B's local size must match the embedding's size; without one, B must match C directly */
  if (rowemb) {
    PetscCall(ISGetLocalSize(rowemb, &m));
    PetscCheck(m == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Row IS of size %" PetscInt_FMT " is incompatible with matrix row size %" PetscInt_FMT, m, B->rmap->n);
  } else {
    PetscCheck(C->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Input matrix is row-incompatible with the target matrix");
  }
  if (colemb) {
    PetscCall(ISGetLocalSize(colemb, &n));
    PetscCheck(n == B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Diag col IS of size %" PetscInt_FMT " is incompatible with input matrix col size %" PetscInt_FMT, n, B->cmap->n);
  } else {
    PetscCheck(C->cmap->n == B->cmap->n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Input matrix is col-incompatible with the target matrix");
  }

  Baij = (Mat_SeqAIJ *)(B->data);
  if (pattern == DIFFERENT_NONZERO_PATTERN) {
    /* preallocate C row-by-row from B's CSR row offsets */
    PetscCall(PetscMalloc1(B->rmap->n, &nz));
    for (i = 0; i < B->rmap->n; i++) nz[i] = Baij->i[i + 1] - Baij->i[i];
    PetscCall(MatSeqAIJSetPreallocation(C, 0, nz));
5305 PetscCall(PetscFree(nz)); 5306 } 5307 if (pattern == SUBSET_NONZERO_PATTERN) PetscCall(MatZeroEntries(C)); 5308 count = 0; 5309 rowindices = NULL; 5310 colindices = NULL; 5311 if (rowemb) PetscCall(ISGetIndices(rowemb, &rowindices)); 5312 if (colemb) PetscCall(ISGetIndices(colemb, &colindices)); 5313 for (i = 0; i < B->rmap->n; i++) { 5314 PetscInt row; 5315 row = i; 5316 if (rowindices) row = rowindices[i]; 5317 for (j = Baij->i[i]; j < Baij->i[i + 1]; j++) { 5318 PetscInt col; 5319 col = Baij->j[count]; 5320 if (colindices) col = colindices[col]; 5321 v = Baij->a[count]; 5322 PetscCall(MatSetValues(C, 1, &row, 1, &col, &v, INSERT_VALUES)); 5323 ++count; 5324 } 5325 } 5326 /* FIXME: set C's nonzerostate correctly. */ 5327 /* Assembly for C is necessary. */ 5328 C->preallocated = PETSC_TRUE; 5329 C->assembled = PETSC_TRUE; 5330 C->was_assembled = PETSC_FALSE; 5331 PetscFunctionReturn(PETSC_SUCCESS); 5332 } 5333 5334 PetscErrorCode MatEliminateZeros_SeqAIJ(Mat A) 5335 { 5336 Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data; 5337 MatScalar *aa = a->a; 5338 PetscInt m = A->rmap->n, fshift = 0, fshift_prev = 0, i, k; 5339 PetscInt *ailen = a->ilen, *imax = a->imax, *ai = a->i, *aj = a->j, rmax = 0; 5340 5341 PetscFunctionBegin; 5342 PetscCheck(A->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot eliminate zeros for unassembled matrix"); 5343 if (m) rmax = ailen[0]; /* determine row with most nonzeros */ 5344 for (i = 1; i <= m; i++) { 5345 /* move each nonzero entry back by the amount of zero slots (fshift) before it*/ 5346 for (k = ai[i - 1]; k < ai[i]; k++) { 5347 if (aa[k] == 0 && aj[k] != i - 1) fshift++; 5348 else { 5349 if (aa[k] == 0 && aj[k] == i - 1) PetscCall(PetscInfo(A, "Keep the diagonal zero at row %" PetscInt_FMT "\n", i - 1)); 5350 aa[k - fshift] = aa[k]; 5351 aj[k - fshift] = aj[k]; 5352 } 5353 } 5354 ai[i - 1] -= fshift_prev; // safe to update ai[i-1] now since it will not be used in the next iteration 5355 fshift_prev = fshift; 5356 /* reset 
ilen and imax for each row */
    ailen[i - 1] = imax[i - 1] = ai[i] - fshift - ai[i - 1];
    /* NOTE(review): a->nonzerorowcnt is accumulated without being zeroed first; presumably the MatAssemblyEnd() below recomputes it — confirm */
    a->nonzerorowcnt += ((ai[i] - fshift - ai[i - 1]) > 0);
    rmax = PetscMax(rmax, ailen[i - 1]);
  }
  if (m) {
    /* fshift is now the total number of eliminated entries */
    ai[m] -= fshift;
    a->nz = ai[m];
  }
  PetscCall(PetscInfo(A, "Matrix size: %" PetscInt_FMT " X %" PetscInt_FMT "; zeros eliminated: %" PetscInt_FMT "; nonzeros left: %" PetscInt_FMT "\n", m, A->cmap->n, fshift, a->nz));
  /* NOTE(review): decreasing nonzerostate is unusual (it normally only grows) — confirm this is the intended signal for the structural change */
  A->nonzerostate -= fshift;
  A->info.nz_unneeded += (PetscReal)fshift;
  a->rmax = rmax;
  /* the inode structure depends on the sparsity pattern, so recheck it after compaction */
  if (a->inode.use && a->inode.checked) PetscCall(MatSeqAIJCheckInode(A));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* registry of subtypes that a MATSEQAIJ matrix can be converted to via MatSeqAIJSetType() */
PetscFunctionList MatSeqAIJList = NULL;

/*@C
  MatSeqAIJSetType - Converts a `MATSEQAIJ` matrix to a subtype

  Collective

  Input Parameters:
+ mat    - the matrix object
- matype - matrix type

  Options Database Key:
. -mat_seqaij_type <method> - for example seqaijcrl

  Level: intermediate

.seealso: `PCSetType()`, `VecSetType()`, `MatCreate()`, `MatType`, `Mat`
@*/
PetscErrorCode MatSeqAIJSetType(Mat mat, MatType matype)
{
  PetscBool sametype;
  PetscErrorCode (*r)(Mat, MatType, MatReuse, Mat *);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  /* no-op when the matrix already has the requested type */
  PetscCall(PetscObjectTypeCompare((PetscObject)mat, matype, &sametype));
  if (sametype) PetscFunctionReturn(PETSC_SUCCESS);

  /* look up the registered conversion routine and apply it in place */
  PetscCall(PetscFunctionListFind(MatSeqAIJList, matype, &r));
  PetscCheck(r, PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Mat type given: %s", matype);
  PetscCall((*r)(mat, matype, MAT_INPLACE_MATRIX, &mat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatSeqAIJRegister - Adds a new sub-matrix type for sequential `MATSEQAIJ` matrices

  Not Collective

  Input Parameters:
+ name     - name of a new user-defined matrix type, for example `MATSEQAIJCRL`
- function - routine to convert to subtype

  Notes:
  `MatSeqAIJRegister()` may be called multiple times to add several user-defined solvers.
5420 5421 Then, your matrix can be chosen with the procedural interface at runtime via the option 5422 $ -mat_seqaij_type my_mat 5423 5424 Level: advanced 5425 5426 .seealso: `MatSeqAIJRegisterAll()` 5427 @*/ 5428 PetscErrorCode MatSeqAIJRegister(const char sname[], PetscErrorCode (*function)(Mat, MatType, MatReuse, Mat *)) 5429 { 5430 PetscFunctionBegin; 5431 PetscCall(MatInitializePackage()); 5432 PetscCall(PetscFunctionListAdd(&MatSeqAIJList, sname, function)); 5433 PetscFunctionReturn(PETSC_SUCCESS); 5434 } 5435 5436 PetscBool MatSeqAIJRegisterAllCalled = PETSC_FALSE; 5437 5438 /*@C 5439 MatSeqAIJRegisterAll - Registers all of the matrix subtypes of `MATSSEQAIJ` 5440 5441 Not Collective 5442 5443 Level: advanced 5444 5445 .seealso: `MatRegisterAll()`, `MatSeqAIJRegister()` 5446 @*/ 5447 PetscErrorCode MatSeqAIJRegisterAll(void) 5448 { 5449 PetscFunctionBegin; 5450 if (MatSeqAIJRegisterAllCalled) PetscFunctionReturn(PETSC_SUCCESS); 5451 MatSeqAIJRegisterAllCalled = PETSC_TRUE; 5452 5453 PetscCall(MatSeqAIJRegister(MATSEQAIJCRL, MatConvert_SeqAIJ_SeqAIJCRL)); 5454 PetscCall(MatSeqAIJRegister(MATSEQAIJPERM, MatConvert_SeqAIJ_SeqAIJPERM)); 5455 PetscCall(MatSeqAIJRegister(MATSEQAIJSELL, MatConvert_SeqAIJ_SeqAIJSELL)); 5456 #if defined(PETSC_HAVE_MKL_SPARSE) 5457 PetscCall(MatSeqAIJRegister(MATSEQAIJMKL, MatConvert_SeqAIJ_SeqAIJMKL)); 5458 #endif 5459 #if defined(PETSC_HAVE_CUDA) 5460 PetscCall(MatSeqAIJRegister(MATSEQAIJCUSPARSE, MatConvert_SeqAIJ_SeqAIJCUSPARSE)); 5461 #endif 5462 #if defined(PETSC_HAVE_HIP) 5463 PetscCall(MatSeqAIJRegister(MATSEQAIJHIPSPARSE, MatConvert_SeqAIJ_SeqAIJHIPSPARSE)); 5464 #endif 5465 #if defined(PETSC_HAVE_KOKKOS_KERNELS) 5466 PetscCall(MatSeqAIJRegister(MATSEQAIJKOKKOS, MatConvert_SeqAIJ_SeqAIJKokkos)); 5467 #endif 5468 #if defined(PETSC_HAVE_VIENNACL) && defined(PETSC_HAVE_VIENNACL_NO_CUDA) 5469 PetscCall(MatSeqAIJRegister(MATMPIAIJVIENNACL, MatConvert_SeqAIJ_SeqAIJViennaCL)); 5470 #endif 5471 PetscFunctionReturn(PETSC_SUCCESS); 
}

/*
  Special version for direct calls from Fortran
*/
#include <petsc/private/fortranimpl.h>
#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define matsetvaluesseqaij_ MATSETVALUESSEQAIJ
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define matsetvaluesseqaij_ matsetvaluesseqaij
#endif

/* Change these macros so can be used in void function */
/* Identical to PetscCallVoid, except it assigns to *_ierr */
#undef PetscCall
#define PetscCall(...) \
  do { \
    PetscErrorCode ierr_msv_mpiaij = __VA_ARGS__; \
    if (PetscUnlikely(ierr_msv_mpiaij)) { \
      *_ierr = PetscError(PETSC_COMM_SELF, __LINE__, PETSC_FUNCTION_NAME, __FILE__, ierr_msv_mpiaij, PETSC_ERROR_REPEAT, " "); \
      return; \
    } \
  } while (0)

#undef SETERRQ
#define SETERRQ(comm, ierr, ...) \
  do { \
    *_ierr = PetscError(comm, __LINE__, PETSC_FUNCTION_NAME, __FILE__, ierr, PETSC_ERROR_INITIAL, __VA_ARGS__); \
    return; \
  } while (0)

/* Fortran-callable MatSetValues() specialized for SeqAIJ: inserts/adds a logically dense
   m-by-n block of values into rows im[] and columns in[]; errors are reported through *_ierr */
PETSC_EXTERN void matsetvaluesseqaij_(Mat *AA, PetscInt *mm, const PetscInt im[], PetscInt *nn, const PetscInt in[], const PetscScalar v[], InsertMode *isis, PetscErrorCode *_ierr)
{
  Mat         A = *AA;
  PetscInt    m = *mm, n = *nn;
  InsertMode  is = *isis;
  Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
  PetscInt   *rp, k, low, high, t, ii, row, nrow, i, col, l, rmax, N;
  PetscInt   *imax, *ai, *ailen;
  PetscInt   *aj, nonew = a->nonew, lastcol = -1;
  MatScalar  *ap, value, *aa;
  PetscBool   ignorezeroentries = a->ignorezeroentries;
  PetscBool   roworiented = a->roworiented;

  PetscFunctionBegin;
  MatCheckPreallocated(A, 1);
  imax  = a->imax;
  ai    = a->i;
  ailen = a->ilen;
  aj    = a->j;
  aa    = a->a;

  for (k = 0; k < m; k++) { /* loop over added rows */
    row = im[k];
    if (row < 0) continue; /* negative row indices are ignored, matching MatSetValues() semantics */
    PetscCheck(row < A->rmap->n, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Row too large");
    rp   = aj + ai[row]; /* start of this row's column indices */
    ap   = aa + ai[row]; /* start of this row's values */
    rmax = imax[row];    /* allocated slots in this row */
    nrow = ailen[row];   /* slots currently in use */
    low  = 0;
    high = nrow;
    for (l = 0; l < n; l++) { /* loop over added columns */
      if (in[l] < 0) continue;
      PetscCheck(in[l] < A->cmap->n, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Column too large");
      col = in[l];
      /* v is m x n: row-major for Fortran callers passing row-oriented data, column-major otherwise */
      if (roworiented) value = v[l + k * n];
      else value = v[k + l * m];

      if (value == 0.0 && ignorezeroentries && (is == ADD_VALUES)) continue;

      /* reuse the previous search window while columns arrive in increasing order */
      if (col <= lastcol) low = 0;
      else high = nrow;
      lastcol = col;
      /* binary search narrows to a window of at most 5, then finish linearly */
      while (high - low > 5) {
        t = (low + high) / 2;
        if (rp[t] > col) high = t;
        else low = t;
      }
      for (i = low; i < high; i++) {
        if (rp[i] > col) break;
        if (rp[i] == col) {
          if (is == ADD_VALUES) ap[i] += value;
          else ap[i] = value;
          goto noinsert;
        }
      }
      if (value == 0.0 && ignorezeroentries) goto noinsert;
      if (nonew == 1) goto noinsert; /* silently drop new nonzero locations */
      PetscCheck(nonew != -1, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_OUTOFRANGE, "Inserting a new nonzero in the matrix");
      /* grow the row storage if needed (may move aa/aj and update rp/ap/rmax) */
      MatSeqXAIJReallocateAIJ(A, A->rmap->n, 1, nrow, row, col, rmax, aa, ai, aj, rp, ap, imax, nonew, MatScalar);
      N = nrow++ - 1;
      a->nz++;
      high++;
      /* shift up all the later entries in this row */
      for (ii = N; ii >= i; ii--) {
        rp[ii + 1] = rp[ii];
        ap[ii + 1] = ap[ii];
      }
      rp[i] = col;
      ap[i] = value;
      A->nonzerostate++;
    noinsert:;
      low = i + 1;
    }
    ailen[row] = nrow;
  }
  PetscFunctionReturnVoid();
}
/* Undefining these here since they were redefined from their original definition above! No
 * other PETSc functions should be defined past this point, as it is impossible to recover the
 * original definitions */
#undef PetscCall
#undef SETERRQ