/*
    Creates hypre ijmatrix from PETSc matrix
*/

#include <petscpkg_version.h>
#include <petsc/private/petschypre.h>
#include <petscmathypre.h>
#include <petsc/private/matimpl.h>
#include <petsc/private/deviceimpl.h>
#include <../src/mat/impls/hypre/mhypre.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/vec/vec/impls/hypre/vhyp.h>
#include <HYPRE.h>
#include <HYPRE_utilities.h>
#include <_hypre_parcsr_ls.h>
#include <_hypre_sstruct_ls.h>

#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  #define hypre_ParCSRMatrixClone(A, B) hypre_ParCSRMatrixCompleteClone(A)
#endif

static PetscErrorCode MatHYPRE_CreateFromMat(Mat, Mat_HYPRE *);
static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat, Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat, HYPRE_IJMatrix);
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat, HYPRE_Complex, Vec, HYPRE_Complex, Vec, PetscBool);
static PetscErrorCode hypre_array_destroy(void *);
static PetscErrorCode MatSetValues_HYPRE(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode ins);

static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMatrix ij)
{
  PetscInt        i, n_d, n_o;
  const PetscInt *ia_d, *ia_o;
  PetscBool       done_d = PETSC_FALSE, done_o = PETSC_FALSE;
  HYPRE_Int      *nnz_d = NULL, *nnz_o = NULL;

  PetscFunctionBegin;
  if (A_d) { /* determine number of nonzero entries in local diagonal part */
    PetscCall(MatGetRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, &n_d, &ia_d, NULL, &done_d));
    if (done_d) {
      PetscCall(PetscMalloc1(n_d, &nnz_d));
      for (i = 0; i < n_d; i++) nnz_d[i] = ia_d[i + 1] - ia_d[i];
    }
    PetscCall(MatRestoreRowIJ(A_d, 0, PETSC_FALSE, PETSC_FALSE, NULL, &ia_d, NULL, &done_d));
  }
  if (A_o) { /* determine number of nonzero entries in local off-diagonal part */
    PetscCall(MatGetRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
    if (done_o) {
      PetscCall(PetscMalloc1(n_o, &nnz_o));
      for (i = 0; i < n_o; i++) nnz_o[i] = ia_o[i + 1] - ia_o[i];
    }
    PetscCall(MatRestoreRowIJ(A_o, 0, PETSC_FALSE, PETSC_FALSE, &n_o, &ia_o, NULL, &done_o));
  }
  if (done_d) { /* set number of nonzeros in HYPRE IJ matrix */
    if (!done_o) { /* only diagonal part */
      PetscCall(PetscCalloc1(n_d, &nnz_o));
    }
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
    { /* If we don't do this, the columns of the matrix will be all zeros! */
      hypre_AuxParCSRMatrix *aux_matrix;
      aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixDestroy(aux_matrix);
      hypre_IJMatrixTranslator(ij) = NULL;
      PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
      /* it seems they partially fixed it in 2.19.0 */
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
  #endif
    }
#else
    PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, ij, nnz_d, nnz_o);
#endif
    PetscCall(PetscFree(nnz_d));
    PetscCall(PetscFree(nnz_o));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

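/* Worked example (illustrative only, not part of the code path): for a 3-row
   diagonal block with CSR row pointers ia_d = {0, 2, 3, 6}, the loop above
   produces nnz_d = {2, 1, 3}, since nnz_d[i] = ia_d[i + 1] - ia_d[i]. These
   per-row counts are exactly what HYPRE_IJMatrixSetDiagOffdSizes() consumes
   for the diagonal part; nnz_o is derived the same way from the off-diagonal
   block. */
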
static PetscErrorCode MatHYPRE_CreateFromMat(Mat A, Mat_HYPRE *hA)
{
  PetscInt rstart, rend, cstart, cend;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  rstart = A->rmap->rstart;
  rend   = A->rmap->rend;
  cstart = A->cmap->rstart;
  cend   = A->cmap->rend;
  PetscHYPREInitialize();
  PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
  PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
  {
    PetscBool       same;
    Mat             A_d, A_o;
    const PetscInt *colmap;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &same));
    if (same) {
      PetscCall(MatMPIAIJGetSeqAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &same));
    if (same) {
      PetscCall(MatMPIBAIJGetSeqBAIJ(A, &A_d, &A_o, &colmap));
      PetscCall(MatHYPRE_IJMatrixPreallocate(A_d, A_o, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQBAIJ, &same));
    if (same) {
      PetscCall(MatHYPRE_IJMatrixPreallocate(A, NULL, hA->ij));
      PetscFunctionReturn(PETSC_SUCCESS);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ(Mat A, HYPRE_IJMatrix ij)
{
  PetscBool flg;

  PetscFunctionBegin;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
  PetscCallExternal(HYPRE_IJMatrixInitialize, ij);
#else
  PetscCallExternal(HYPRE_IJMatrixInitialize_v2, ij, HYPRE_MEMORY_HOST);
#endif
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &flg));
  if (flg) {
    PetscCall(MatHYPRE_IJMatrixCopyIJ_MPIAIJ(A, ij));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &flg));
  if (flg) {
    PetscCall(MatHYPRE_IJMatrixCopyIJ_SeqAIJ(A, ij));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCheck(PETSC_FALSE, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "No support for matrix type %s", ((PetscObject)A)->type_name);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_SeqAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_SeqAIJ            *pdiag = (Mat_SeqAIJ *)A->data;
  HYPRE_Int              type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  /*
       this is the Hack part where we monkey directly with the hypre datastructures
  */
  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, A->rmap->n + 1));
    PetscCall(PetscArraycpy(hdiag->j, pdiag->j, pdiag->nz));
  } else {
    PetscInt i;

    for (i = 0; i < A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)pdiag->i[i];
    for (i = 0; i < pdiag->nz; i++) hdiag->j[i] = (HYPRE_Int)pdiag->j[i];
  }

  aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPRE_IJMatrixCopyIJ_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
{
  Mat_MPIAIJ            *pA = (Mat_MPIAIJ *)A->data;
  Mat_SeqAIJ            *pdiag, *poffd;
  PetscInt               i, *garray = pA->garray, *jj, cstart, *pjj;
  HYPRE_Int             *hjj, type;
  hypre_ParCSRMatrix    *par_matrix;
  hypre_AuxParCSRMatrix *aux_matrix;
  hypre_CSRMatrix       *hdiag, *hoffd;
  PetscBool              sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));

  PetscFunctionBegin;
  pdiag = (Mat_SeqAIJ *)pA->A->data;
  poffd = (Mat_SeqAIJ *)pA->B->data;
  /* cstart is only valid for square MPIAIJ laid out in the usual way */
  PetscCall(MatGetOwnershipRange(A, &cstart, NULL));

  PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix);
  hdiag = hypre_ParCSRMatrixDiag(par_matrix);
  hoffd = hypre_ParCSRMatrixOffd(par_matrix);

  if (sameint) {
    PetscCall(PetscArraycpy(hdiag->i, pdiag->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hdiag->i[i] = (HYPRE_Int)(pdiag->i[i]);
  }

  hjj = hdiag->j;
  pjj = pdiag->j;
#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  for (i = 0; i < pdiag->nz; i++) hjj[i] = pjj[i];
#else
  for (i = 0; i < pdiag->nz; i++) hjj[i] = cstart + pjj[i];
#endif
  if (sameint) {
    PetscCall(PetscArraycpy(hoffd->i, poffd->i, pA->A->rmap->n + 1));
  } else {
    for (i = 0; i < pA->A->rmap->n + 1; i++) hoffd->i[i] = (HYPRE_Int)(poffd->i[i]);
  }

#if PETSC_PKG_HYPRE_VERSION_GE(2, 16, 0)
  PetscCallExternal(hypre_CSRMatrixBigInitialize, hoffd);
  jj = (PetscInt *)hoffd->big_j;
#else
  jj = (PetscInt *)hoffd->j;
#endif
  pjj = poffd->j;
  for (i = 0; i < poffd->nz; i++) jj[i] = garray[pjj[i]];

  aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(ij);
  hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

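/* Illustrative sketch of the column translation above: PETSc stores the
   off-diagonal block with compressed local column indices, so an entry
   poffd->j[k] == 2 together with a (hypothetical) garray = {5, 9, 17} maps to
   global column 17 in hypre's big_j array. The diagonal block needs no
   translation for hypre >= 2.16, which indexes the diagonal part locally,
   while older versions expect global indices (hence the cstart shift in the
   pre-2.16 branch). */
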
static PetscErrorCode MatConvert_HYPRE_IS(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE             *mhA = (Mat_HYPRE *)(A->data);
  Mat                    lA;
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  hypre_ParCSRMatrix    *hA;
  hypre_CSRMatrix       *hdiag, *hoffd;
  MPI_Comm               comm;
  HYPRE_Complex         *hdd, *hod, *aa;
  PetscScalar           *data;
  HYPRE_BigInt          *col_map_offd;
  HYPRE_Int             *hdi, *hdj, *hoi, *hoj;
  PetscInt              *ii, *jj, *iptr, *jptr;
  PetscInt               cum, dr, dc, oc, str, stc, nnz, i, jd, jo, M, N;
  HYPRE_Int              type;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, mhA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, mhA->ij, (void **)&hA);
  M     = hypre_ParCSRMatrixGlobalNumRows(hA);
  N     = hypre_ParCSRMatrixGlobalNumCols(hA);
  str   = hypre_ParCSRMatrixFirstRowIndex(hA);
  stc   = hypre_ParCSRMatrixFirstColDiag(hA);
  hdiag = hypre_ParCSRMatrixDiag(hA);
  hoffd = hypre_ParCSRMatrixOffd(hA);
  dr    = hypre_CSRMatrixNumRows(hdiag);
  dc    = hypre_CSRMatrixNumCols(hdiag);
  nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
  hdi   = hypre_CSRMatrixI(hdiag);
  hdj   = hypre_CSRMatrixJ(hdiag);
  hdd   = hypre_CSRMatrixData(hdiag);
  oc    = hypre_CSRMatrixNumCols(hoffd);
  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
  hoi = hypre_CSRMatrixI(hoffd);
  hoj = hypre_CSRMatrixJ(hoffd);
  hod = hypre_CSRMatrixData(hoffd);
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt *aux;

    /* generate l2g maps for rows and cols */
    PetscCall(ISCreateStride(comm, dr, str, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
    PetscCall(ISDestroy(&is));
    col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
    PetscCall(PetscMalloc1(dc + oc, &aux));
    for (i = 0; i < dc; i++) aux[i] = i + stc;
    for (i = 0; i < oc; i++) aux[i + dc] = col_map_offd[i];
    PetscCall(ISCreateGeneral(comm, dc + oc, aux, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
    /* create MATIS object */
    PetscCall(MatCreate(comm, B));
    PetscCall(MatSetSizes(*B, dr, dc, M, N));
    PetscCall(MatSetType(*B, MATIS));
    PetscCall(MatSetLocalToGlobalMapping(*B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

    /* allocate CSR for local matrix */
    PetscCall(PetscMalloc1(dr + 1, &iptr));
    PetscCall(PetscMalloc1(nnz, &jptr));
    PetscCall(PetscMalloc1(nnz, &data));
  } else {
    PetscInt  nr;
    PetscBool done;
    PetscCall(MatISGetLocalMat(*B, &lA));
    PetscCall(MatGetRowIJ(lA, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&iptr, (const PetscInt **)&jptr, &done));
    PetscCheck(nr == dr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of rows in local mat! %" PetscInt_FMT " != %" PetscInt_FMT, nr, dr);
    PetscCheck(iptr[nr] >= nnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in local mat! reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz);
    PetscCall(MatSeqAIJGetArray(lA, &data));
  }
reuse %" PetscInt_FMT " requested %" PetscInt_FMT, iptr[nr], nnz); 308 PetscCall(MatSeqAIJGetArray(lA, &data)); 309 } 310 /* merge local matrices */ 311 ii = iptr; 312 jj = jptr; 313 aa = (HYPRE_Complex *)data; /* this cast fixes the clang error when doing the assignments below: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 314 *ii = *(hdi++) + *(hoi++); 315 for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) { 316 PetscScalar *aold = (PetscScalar *)aa; 317 PetscInt *jold = jj, nc = jd + jo; 318 for (; jd < *hdi; jd++) { 319 *jj++ = *hdj++; 320 *aa++ = *hdd++; 321 } 322 for (; jo < *hoi; jo++) { 323 *jj++ = *hoj++ + dc; 324 *aa++ = *hod++; 325 } 326 *(++ii) = *(hdi++) + *(hoi++); 327 PetscCall(PetscSortIntWithScalarArray(jd + jo - nc, jold, aold)); 328 } 329 for (; cum < dr; cum++) *(++ii) = nnz; 330 if (reuse != MAT_REUSE_MATRIX) { 331 Mat_SeqAIJ *a; 332 333 PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, dc + oc, iptr, jptr, data, &lA)); 334 PetscCall(MatISSetLocalMat(*B, lA)); 335 /* hack SeqAIJ */ 336 a = (Mat_SeqAIJ *)(lA->data); 337 a->free_a = PETSC_TRUE; 338 a->free_ij = PETSC_TRUE; 339 PetscCall(MatDestroy(&lA)); 340 } 341 PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY)); 342 PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY)); 343 if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, B)); 344 PetscFunctionReturn(PETSC_SUCCESS); 345 } 346 347 /* Move diagonal elements of the diagonal block to the front of their row, as needed by ParCSRMatrix. So damn hacky */ 348 static PetscErrorCode EnsureHypreCSR_SeqAIJ(Mat A, hypre_CSRMatrix *hA) 349 { 350 Mat_SeqAIJ *aij = (Mat_SeqAIJ *)A->data; 351 PetscBool isseqaij; 352 HYPRE_Int tmpj, *hj; 353 HYPRE_Complex tmpv, *ha; 354 355 PetscFunctionBegin; 356 PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij)); 357 PetscCheck(isseqaij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Not for type %s\n", ((PetscObject)A)->type_name); 358 hj = hypre_CSRMatrixJ(hA); 359 ha = hypre_CSRMatrixData(hA); 360 #define swap_with_tmp(a, b, t) \ 361 { \ 362 t = a; \ 363 a = b; \ 364 b = t; \ 365 } 366 for (PetscInt i = 0; i < A->rmap->n; i++) { 367 if (aij->diag[i] >= aij->i[i] && aij->diag[i] < aij->i[i + 1]) { /* Diagonal element of this row exists in a[] and j[] */ 368 swap_with_tmp(ha[aij->i[i]], ha[aij->diag[i]], tmpv); 369 if (hj[aij->i[i]] != i) swap_with_tmp(hj[aij->i[i]], hj[aij->diag[i]], tmpj); 370 } 371 } 372 #undef swap_with_tmp 373 PetscFunctionReturn(PETSC_SUCCESS); 374 } 375 376 static PetscErrorCode MatHYPRE_IJMatrixCopyValues(Mat A, HYPRE_IJMatrix ij) 377 { 378 HYPRE_Int type; 379 hypre_ParCSRMatrix *par_matrix; 380 hypre_CSRMatrix *hdiag, *hoffd; 381 PetscBool ismpiaij; 382 Mat dA = A, oA = NULL; 383 PetscInt nnz; 384 const PetscScalar *pa; 385 386 PetscFunctionBegin; 387 PetscCallExternal(HYPRE_IJMatrixGetObjectType, ij, &type); 388 PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported"); 389 PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij)); 390 if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL)); 391 PetscCallExternal(HYPRE_IJMatrixGetObject, ij, (void **)&par_matrix); 392 393 hdiag = hypre_ParCSRMatrixDiag(par_matrix); 394 nnz = hypre_CSRMatrixNumNonzeros(hdiag); 395 PetscCall(MatSeqAIJGetArrayRead(dA, &pa)); 396 PetscCall(PetscArraycpy(hdiag->data, pa, nnz)); 397 PetscCall(MatSeqAIJRestoreArrayRead(dA, &pa)); 398 if (oA) { 399 hoffd = 
PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat A, MatType type, MatReuse reuse, Mat *B)
{
  MPI_Comm   comm = PetscObjectComm((PetscObject)A);
  Mat        M    = NULL;
  Mat        dA = A, oA = NULL;
  PetscBool  ismpiaij, issbaij, isbaij;
  Mat_HYPRE *hA;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &issbaij, MATSEQSBAIJ, MATMPISBAIJ, ""));
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &isbaij, MATSEQBAIJ, MATMPIBAIJ, ""));
  if (isbaij || issbaij) { /* handle BAIJ and SBAIJ */
    PetscBool ismpi;
    MatType   newtype;

    PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &ismpi, MATMPISBAIJ, MATMPIBAIJ, ""));
    newtype = ismpi ? MATMPIAIJ : MATSEQAIJ;
    if (reuse == MAT_REUSE_MATRIX) {
      PetscCall(MatConvert(*B, newtype, MAT_INPLACE_MATRIX, B));
      PetscCall(MatConvert(A, newtype, MAT_REUSE_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatConvert(A, newtype, MAT_INITIAL_MATRIX, B));
      PetscCall(MatConvert(*B, MATHYPRE, MAT_INPLACE_MATRIX, B));
    } else {
      PetscCall(MatConvert(A, newtype, MAT_INPLACE_MATRIX, &A));
      PetscCall(MatConvert(A, MATHYPRE, MAT_INPLACE_MATRIX, &A));
    }
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  if (ismpiaij) PetscCall(MatMPIAIJGetSeqAIJ(A, &dA, &oA, NULL));
  if (reuse != MAT_REUSE_MATRIX) {
    PetscCall(MatCreate(comm, &M));
    PetscCall(MatSetType(M, MATHYPRE));
    PetscCall(MatSetSizes(M, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatSetOption(M, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatSetOption(M, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
    PetscCall(PetscLayoutSetUp(M->rmap));
    PetscCall(PetscLayoutSetUp(M->cmap));

    hA = (Mat_HYPRE *)M->data;
    PetscCall(MatHYPRE_CreateFromMat(A, hA));      /* Create hmat->ij and preallocate it */
    PetscCall(MatHYPRE_IJMatrixCopyIJ(A, hA->ij)); /* Copy A's (i,j) to hmat->ij */
    M->preallocated = PETSC_TRUE;
    if (reuse == MAT_INITIAL_MATRIX) *B = M;
  } else M = *B;

  hA = (Mat_HYPRE *)M->data;
  PetscCall(MatHYPRE_IJMatrixCopyValues(A, hA->ij));
  PetscCall(MatAssemblyBegin(M, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(M, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */

  if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &M));
  PetscFunctionReturn(PETSC_SUCCESS);
}

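/* Typical usage sketch (assuming a PETSc build configured with hypre): the
   conversion above is normally reached through the generic MatConvert()
   interface, e.g.

     Mat A, B; // A is an assembled MATAIJ matrix
     PetscCall(MatConvert(A, MATHYPRE, MAT_INITIAL_MATRIX, &B));
     // ... use B with hypre-based functionality ...
     PetscCall(MatDestroy(&B));

   MAT_REUSE_MATRIX recycles a previously created B, while MAT_INPLACE_MATRIX
   replaces A itself. */
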
static PetscErrorCode MatConvert_HYPRE_AIJ(Mat A, MatType mtype, MatReuse reuse, Mat *B)
{
  Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *hdiag, *hoffd;
  MPI_Comm            comm;
  PetscScalar        *da, *oa, *aptr;
  PetscInt           *dii, *djj, *oii, *ojj, *iptr;
  PetscInt            i, dnnz, onnz, m, n;
  HYPRE_Int           type;
  PetscMPIInt         size;
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  PetscBool           downs = PETSC_TRUE, oowns = PETSC_TRUE;

  PetscFunctionBegin;
  comm = PetscObjectComm((PetscObject)A);
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismpiaij, isseqaij;
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATMPIAIJ, &ismpiaij));
    PetscCall(PetscObjectBaseTypeCompare((PetscObject)*B, MATSEQAIJ, &isseqaij));
    PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Only MATMPIAIJ or MATSEQAIJ are supported");
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCheck(HYPRE_MEMORY_DEVICE != hypre_IJMatrixMemoryLocation(hA->ij), comm, PETSC_ERR_SUP, "Not yet implemented");
#endif
  PetscCallMPI(MPI_Comm_size(comm, &size));

  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
  hdiag = hypre_ParCSRMatrixDiag(parcsr);
  hoffd = hypre_ParCSRMatrixOffd(parcsr);
  m     = hypre_CSRMatrixNumRows(hdiag);
  n     = hypre_CSRMatrixNumCols(hdiag);
  dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
  onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(PetscMalloc1(m + 1, &dii));
    PetscCall(PetscMalloc1(dnnz, &djj));
    PetscCall(PetscMalloc1(dnnz, &da));
  } else if (reuse == MAT_REUSE_MATRIX) {
    PetscInt  nr;
    PetscBool done;
    if (size > 1) {
      Mat_MPIAIJ *b = (Mat_MPIAIJ *)((*B)->data);

      PetscCall(MatGetRowIJ(b->A, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&dii, (const PetscInt **)&djj, &done));
      PetscCheck(nr == m, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of local rows in diag part! %" PetscInt_FMT " != %" PetscInt_FMT, nr, m);
      PetscCheck(dii[nr] >= dnnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros in diag part! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT, dii[nr], dnnz);
      PetscCall(MatSeqAIJGetArray(b->A, &da));
    } else {
      PetscCall(MatGetRowIJ(*B, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&dii, (const PetscInt **)&djj, &done));
      PetscCheck(nr == m, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of local rows! %" PetscInt_FMT " != %" PetscInt_FMT, nr, m);
      PetscCheck(dii[nr] >= dnnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of nonzeros! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT, dii[nr], dnnz);
      PetscCall(MatSeqAIJGetArray(*B, &da));
    }
reuse %" PetscInt_FMT " hypre %" PetscInt_FMT, dii[nr], dnnz); 520 PetscCall(MatSeqAIJGetArray(*B, &da)); 521 } 522 } else { /* MAT_INPLACE_MATRIX */ 523 downs = (PetscBool)(hypre_CSRMatrixOwnsData(hdiag)); 524 if (!sameint || !downs) { 525 PetscCall(PetscMalloc1(m + 1, &dii)); 526 PetscCall(PetscMalloc1(dnnz, &djj)); 527 } else { 528 dii = (PetscInt *)hypre_CSRMatrixI(hdiag); 529 djj = (PetscInt *)hypre_CSRMatrixJ(hdiag); 530 } 531 if (!downs) { 532 PetscCall(PetscMalloc1(dnnz, &da)); 533 } else da = (PetscScalar *)hypre_CSRMatrixData(hdiag); 534 } 535 536 if (!sameint) { 537 if (reuse != MAT_REUSE_MATRIX) { 538 for (i = 0; i < m + 1; i++) dii[i] = (PetscInt)(hypre_CSRMatrixI(hdiag)[i]); 539 } 540 for (i = 0; i < dnnz; i++) djj[i] = (PetscInt)(hypre_CSRMatrixJ(hdiag)[i]); 541 } else { 542 if (reuse != MAT_REUSE_MATRIX) PetscCall(PetscArraycpy(dii, hypre_CSRMatrixI(hdiag), m + 1)); 543 PetscCall(PetscArraycpy(djj, hypre_CSRMatrixJ(hdiag), dnnz)); 544 } 545 PetscCall(PetscArraycpy(da, hypre_CSRMatrixData(hdiag), dnnz)); 546 iptr = djj; 547 aptr = da; 548 for (i = 0; i < m; i++) { 549 PetscInt nc = dii[i + 1] - dii[i]; 550 PetscCall(PetscSortIntWithScalarArray(nc, iptr, aptr)); 551 iptr += nc; 552 aptr += nc; 553 } 554 if (size > 1) { 555 HYPRE_BigInt *coffd; 556 HYPRE_Int *offdj; 557 558 if (reuse == MAT_INITIAL_MATRIX) { 559 PetscCall(PetscMalloc1(m + 1, &oii)); 560 PetscCall(PetscMalloc1(onnz, &ojj)); 561 PetscCall(PetscMalloc1(onnz, &oa)); 562 } else if (reuse == MAT_REUSE_MATRIX) { 563 Mat_MPIAIJ *b = (Mat_MPIAIJ *)((*B)->data); 564 PetscInt nr, hr = hypre_CSRMatrixNumRows(hoffd); 565 PetscBool done; 566 567 PetscCall(MatGetRowIJ(b->B, 0, PETSC_FALSE, PETSC_FALSE, &nr, (const PetscInt **)&oii, (const PetscInt **)&ojj, &done)); 568 PetscCheck(nr == hr, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse mat: invalid number of local rows in offdiag part! %" PetscInt_FMT " != %" PetscInt_FMT, nr, hr); 569 PetscCheck(oii[nr] >= onnz, PETSC_COMM_SELF, PETSC_ERR_USER, "Cannot reuse matrix: different number of nonzeros in off-diagonal part of matrix! reuse %" PetscInt_FMT " hypre %" PetscInt_FMT, oii[nr], onnz); 570 PetscCall(MatSeqAIJGetArray(b->B, &oa)); 571 } else { /* MAT_INPLACE_MATRIX */ 572 oowns = (PetscBool)(hypre_CSRMatrixOwnsData(hoffd)); 573 if (!sameint || !oowns) { 574 PetscCall(PetscMalloc1(m + 1, &oii)); 575 PetscCall(PetscMalloc1(onnz, &ojj)); 576 } else { 577 oii = (PetscInt *)hypre_CSRMatrixI(hoffd); 578 ojj = (PetscInt *)hypre_CSRMatrixJ(hoffd); 579 } 580 if (!oowns) { 581 PetscCall(PetscMalloc1(onnz, &oa)); 582 } else oa = (PetscScalar *)hypre_CSRMatrixData(hoffd); 583 } 584 if (reuse != MAT_REUSE_MATRIX) { 585 if (!sameint) { 586 for (i = 0; i < m + 1; i++) oii[i] = (PetscInt)(hypre_CSRMatrixI(hoffd)[i]); 587 } else { 588 PetscCall(PetscArraycpy(oii, hypre_CSRMatrixI(hoffd), m + 1)); 589 } 590 } 591 PetscCall(PetscArraycpy(oa, hypre_CSRMatrixData(hoffd), onnz)); 592 593 offdj = hypre_CSRMatrixJ(hoffd); 594 coffd = hypre_ParCSRMatrixColMapOffd(parcsr); 595 /* we only need the permutation to be computed properly, I don't know if HYPRE 596 messes up with the ordering. 
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_MPIAIJ *b;
      Mat_SeqAIJ *d, *o;

      PetscCall(MatCreateMPIAIJWithSplitArrays(comm, m, n, PETSC_DECIDE, PETSC_DECIDE, dii, djj, da, oii, ojj, oa, B));
      /* hack MPIAIJ */
      b          = (Mat_MPIAIJ *)((*B)->data);
      d          = (Mat_SeqAIJ *)b->A->data;
      o          = (Mat_SeqAIJ *)b->B->data;
      d->free_a  = PETSC_TRUE;
      d->free_ij = PETSC_TRUE;
      o->free_a  = PETSC_TRUE;
      o->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      PetscCall(MatCreateMPIAIJWithSplitArrays(comm, m, n, PETSC_DECIDE, PETSC_DECIDE, dii, djj, da, oii, ojj, oa, &T));
      if (sameint && downs) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* Hack MPIAIJ -> free ij but maybe not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ *)(T->data);
        Mat_SeqAIJ *d = (Mat_SeqAIJ *)(b->A->data);

        d->free_ij = PETSC_TRUE;
        d->free_a  = downs ? PETSC_FALSE : PETSC_TRUE;
      }
      if (sameint && oowns) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hoffd) = NULL;
        hypre_CSRMatrixJ(hoffd) = NULL;
      } else { /* Hack MPIAIJ -> free ij but maybe not a */
        Mat_MPIAIJ *b = (Mat_MPIAIJ *)(T->data);
        Mat_SeqAIJ *o = (Mat_SeqAIJ *)(b->B->data);

        o->free_ij = PETSC_TRUE;
        o->free_a  = oowns ? PETSC_FALSE : PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      hypre_CSRMatrixData(hoffd) = NULL;
      PetscCall(MatHeaderReplace(A, &T));
    }
  } else {
    oii = NULL;
    ojj = NULL;
    oa  = NULL;
    if (reuse == MAT_INITIAL_MATRIX) {
      Mat_SeqAIJ *b;

      PetscCall(MatCreateSeqAIJWithArrays(comm, m, n, dii, djj, da, B));
      /* hack SeqAIJ */
      b          = (Mat_SeqAIJ *)((*B)->data);
      b->free_a  = PETSC_TRUE;
      b->free_ij = PETSC_TRUE;
    } else if (reuse == MAT_INPLACE_MATRIX) {
      Mat T;

      PetscCall(MatCreateSeqAIJWithArrays(comm, m, n, dii, djj, da, &T));
      if (sameint && downs) { /* ownership of CSR pointers is transferred to PETSc */
        hypre_CSRMatrixI(hdiag) = NULL;
        hypre_CSRMatrixJ(hdiag) = NULL;
      } else { /* free ij but maybe not a */
        Mat_SeqAIJ *b = (Mat_SeqAIJ *)(T->data);

        b->free_ij = PETSC_TRUE;
        b->free_a  = downs ? PETSC_FALSE : PETSC_TRUE;
      }
      hypre_CSRMatrixData(hdiag) = NULL;
      PetscCall(MatHeaderReplace(A, &T));
    }
  }

  /* we have to use hypre_Tfree to free the HYPRE arrays
     that PETSc now owns */
  if (reuse == MAT_INPLACE_MATRIX) {
    const char *names[6] = {"_hypre_csr_da", "_hypre_csr_oa", "_hypre_csr_dii", "_hypre_csr_djj", "_hypre_csr_oii", "_hypre_csr_ojj"};
    // clang-format off
    void *ptrs[6] = {downs ? da : NULL,
                     oowns ? oa : NULL,
                     downs && sameint ? dii : NULL,
                     downs && sameint ? djj : NULL,
                     oowns && sameint ? oii : NULL,
                     oowns && sameint ? ojj : NULL};
    // clang-format on
    for (i = 0; i < 6; i++) {
      PetscContainer c;

      PetscCall(PetscContainerCreate(comm, &c));
      PetscCall(PetscContainerSetPointer(c, ptrs[i]));
      PetscCall(PetscContainerSetUserDestroy(c, hypre_array_destroy));
      PetscCall(PetscObjectCompose((PetscObject)(*B), names[i], (PetscObject)c));
      PetscCall(PetscContainerDestroy(&c));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

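/* The reverse conversion is also driven through MatConvert(); a minimal
   sketch:

     Mat C; // B is an assembled MATHYPRE matrix
     PetscCall(MatConvert(B, MATAIJ, MAT_INITIAL_MATRIX, &C));

   With MAT_INPLACE_MATRIX the CSR arrays are stolen from hypre whenever the
   integer sizes match, which is why the ownership bookkeeping above is so
   delicate. */
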
static PetscErrorCode MatAIJGetParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_ParCSRMatrix *tA;
  hypre_CSRMatrix    *hdiag, *hoffd;
  Mat_SeqAIJ         *diag, *offd;
  PetscInt           *garray, i, noffd, dnnz, onnz, *row_starts, *col_starts;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ismpiaij, isseqaij;
  PetscBool           sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
  HYPRE_Int          *hdi = NULL, *hdj = NULL, *hoi = NULL, *hoj = NULL;
  PetscInt           *pdi = NULL, *pdj = NULL, *poi = NULL, *poj = NULL;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATSEQAIJ, &isseqaij));
  PetscCheck(ismpiaij || isseqaij, comm, PETSC_ERR_SUP, "Unsupported type %s", ((PetscObject)A)->type_name);
  PetscHYPREInitialize();
  if (ismpiaij) {
    Mat_MPIAIJ *a = (Mat_MPIAIJ *)(A->data);

    diag = (Mat_SeqAIJ *)a->A->data;
    offd = (Mat_SeqAIJ *)a->B->data;
#if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE) && defined(HYPRE_USING_CUDA)
    PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIAIJCUSPARSE, &iscuda));
    if (iscuda && !A->boundtocpu) {
      sameint = PETSC_TRUE;
      PetscCall(MatSeqAIJCUSPARSEGetIJ(a->A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
      PetscCall(MatSeqAIJCUSPARSEGetIJ(a->B, PETSC_FALSE, (const HYPRE_Int **)&hoi, (const HYPRE_Int **)&hoj));
    } else {
#else
    {
#endif
      pdi = diag->i;
      pdj = diag->j;
      poi = offd->i;
      poj = offd->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
        hoi = (HYPRE_Int *)poi;
        hoj = (HYPRE_Int *)poj;
      }
    }
    garray = a->garray;
    noffd  = a->B->cmap->N;
    dnnz   = diag->nz;
    onnz   = offd->nz;
  } else {
    diag = (Mat_SeqAIJ *)A->data;
    offd = NULL;
#if defined(PETSC_HAVE_CUDA) && defined(PETSC_HAVE_HYPRE_DEVICE)
    PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJCUSPARSE, &iscuda));
    if (iscuda && !A->boundtocpu) {
      sameint = PETSC_TRUE;
      PetscCall(MatSeqAIJCUSPARSEGetIJ(A, PETSC_FALSE, (const HYPRE_Int **)&hdi, (const HYPRE_Int **)&hdj));
    } else {
#else
    {
#endif
      pdi = diag->i;
      pdj = diag->j;
      if (sameint) {
        hdi = (HYPRE_Int *)pdi;
        hdj = (HYPRE_Int *)pdj;
      }
    }
    garray = NULL;
    noffd  = 0;
    dnnz   = diag->nz;
    onnz   = 0;
  }

  /* create a temporary ParCSR */
  if (HYPRE_AssumedPartitionCheck()) {
    PetscMPIInt myid;

    PetscCallMPI(MPI_Comm_rank(comm, &myid));
    row_starts = A->rmap->range + myid;
    col_starts = A->cmap->range + myid;
  } else {
    row_starts = A->rmap->range;
    col_starts = A->cmap->range;
  }
  tA = hypre_ParCSRMatrixCreate(comm, A->rmap->N, A->cmap->N, (HYPRE_BigInt *)row_starts, (HYPRE_BigInt *)col_starts, noffd, dnnz, onnz);
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(tA, 0);
  hypre_ParCSRMatrixSetColStartsOwner(tA, 0);
#endif
  /* set diagonal part */
  hdiag = hypre_ParCSRMatrixDiag(tA);
  if (!sameint) { /* malloc CSR pointers */
    PetscCall(PetscMalloc2(A->rmap->n + 1, &hdi, dnnz, &hdj));
    for (i = 0; i < A->rmap->n + 1; i++) hdi[i] = (HYPRE_Int)(pdi[i]);
    for (i = 0; i < dnnz; i++) hdj[i] = (HYPRE_Int)(pdj[i]);
  }
  hypre_CSRMatrixI(hdiag)           = hdi;
  hypre_CSRMatrixJ(hdiag)           = hdj;
  hypre_CSRMatrixData(hdiag)        = (HYPRE_Complex *)diag->a;
  hypre_CSRMatrixNumNonzeros(hdiag) = diag->nz;
  hypre_CSRMatrixSetRownnz(hdiag);
  hypre_CSRMatrixSetDataOwner(hdiag, 0);

  /* set offdiagonal part */
  hoffd = hypre_ParCSRMatrixOffd(tA);
  if (offd) {
    if (!sameint) { /* malloc CSR pointers */
      PetscCall(PetscMalloc2(A->rmap->n + 1, &hoi, onnz, &hoj));
      for (i = 0; i < A->rmap->n + 1; i++) hoi[i] = (HYPRE_Int)(poi[i]);
      for (i = 0; i < onnz; i++) hoj[i] = (HYPRE_Int)(poj[i]);
    }
    hypre_CSRMatrixI(hoffd)           = hoi;
    hypre_CSRMatrixJ(hoffd)           = hoj;
    hypre_CSRMatrixData(hoffd)        = (HYPRE_Complex *)offd->a;
    hypre_CSRMatrixNumNonzeros(hoffd) = offd->nz;
    hypre_CSRMatrixSetRownnz(hoffd);
    hypre_CSRMatrixSetDataOwner(hoffd, 0);
  }
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, iscuda ? HYPRE_MEMORY_DEVICE : HYPRE_MEMORY_HOST);
#else
  #if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
  PetscCallExternal(hypre_ParCSRMatrixInitialize, tA);
  #else
  PetscCallExternal(hypre_ParCSRMatrixInitialize_v2, tA, HYPRE_MEMORY_HOST);
  #endif
#endif
  hypre_TFree(hypre_ParCSRMatrixColMapOffd(tA), HYPRE_MEMORY_HOST);
  hypre_ParCSRMatrixSetNumNonzeros(tA);
  hypre_ParCSRMatrixColMapOffd(tA) = (HYPRE_BigInt *)garray;
  if (!hypre_ParCSRMatrixCommPkg(tA)) PetscCallExternal(hypre_MatvecCommPkgCreate, tA);
  *hA = tA;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatAIJRestoreParCSR_Private(Mat A, hypre_ParCSRMatrix **hA)
{
  hypre_CSRMatrix *hdiag, *hoffd;
  PetscBool        ismpiaij, sameint = (PetscBool)(sizeof(PetscInt) == sizeof(HYPRE_Int));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscBool iscuda = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(PetscObjectTypeCompareAny((PetscObject)A, &iscuda, MATSEQAIJCUSPARSE, MATMPIAIJCUSPARSE, ""));
  if (iscuda) sameint = PETSC_TRUE;
#endif
  hdiag = hypre_ParCSRMatrixDiag(*hA);
  hoffd = hypre_ParCSRMatrixOffd(*hA);
  /* free temporary memory allocated by PETSc
     set pointers to NULL before destroying tA */
  if (!sameint) {
    HYPRE_Int *hi, *hj;

    hi = hypre_CSRMatrixI(hdiag);
    hj = hypre_CSRMatrixJ(hdiag);
    PetscCall(PetscFree2(hi, hj));
    if (ismpiaij) {
      hi = hypre_CSRMatrixI(hoffd);
      hj = hypre_CSRMatrixJ(hoffd);
      PetscCall(PetscFree2(hi, hj));
    }
  }
  hypre_CSRMatrixI(hdiag)    = NULL;
  hypre_CSRMatrixJ(hdiag)    = NULL;
  hypre_CSRMatrixData(hdiag) = NULL;
  if (ismpiaij) {
    hypre_CSRMatrixI(hoffd)    = NULL;
    hypre_CSRMatrixJ(hoffd)    = NULL;
    hypre_CSRMatrixData(hoffd) = NULL;
  }
  hypre_ParCSRMatrixColMapOffd(*hA) = NULL;
  hypre_ParCSRMatrixDestroy(*hA);
  *hA = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls RAP from BoomerAMG:
   the resulting ParCSR will not own the column and row starts
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_RAP(hypre_ParCSRMatrix *hR, hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hP, hypre_ParCSRMatrix **hRAP)
{
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  HYPRE_Int P_owns_col_starts, R_owns_row_starts;
#endif

  PetscFunctionBegin;
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  P_owns_col_starts = hypre_ParCSRMatrixOwnsColStarts(hP);
  R_owns_row_starts = hypre_ParCSRMatrixOwnsRowStarts(hR);
#endif
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatrixRAP");
  *hRAP = hypre_ParCSRMatrixRAP(hR, hA, hP);
  PetscStackPop;
#else
  PetscCallExternal(hypre_BoomerAMGBuildCoarseOperator, hR, hA, hP, hRAP);
  PetscCallExternal(hypre_ParCSRMatrixSetNumNonzeros, *hRAP);
#endif
  /* hypre_BoomerAMGBuildCoarseOperator steals the col_starts from P and the row_starts from R */
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
  hypre_ParCSRMatrixSetRowStartsOwner(*hRAP, 0);
  hypre_ParCSRMatrixSetColStartsOwner(*hRAP, 0);
  if (P_owns_col_starts) hypre_ParCSRMatrixSetColStartsOwner(hP, 1);
  if (R_owns_row_starts) hypre_ParCSRMatrixSetRowStartsOwner(hR, 1);
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *hA, *hP, *hPtAP = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(P, &hP));
  PetscCall(MatHYPRE_ParCSR_RAP(hP, hA, hP, &hPtAP));
  PetscCall(MatCreateFromParCSR(hPtAP, MATAIJ, PETSC_OWN_POINTER, &B));

  PetscCall(MatHeaderMerge(C, &B));
  C->product = product;

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(P, &hP));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat P, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->ptapnumeric    = MatPtAPNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatPtAPNumeric_AIJ_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  Mat_HYPRE          *hP;
  hypre_ParCSRMatrix *hA = NULL, *Pparcsr, *ptapparcsr = NULL;
  HYPRE_Int           type;
  MPI_Comm            comm = PetscObjectComm((PetscObject)A);
  PetscBool           ishypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, comm, PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, comm, PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);

  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, hA, Pparcsr, &ptapparcsr));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));

  /* create temporary matrix and merge to C */
  PetscCall(MatCreateFromParCSR(ptapparcsr, ((PetscObject)C)->type_name, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

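/* The numeric routines above back the generic MatPtAP()/MatProduct
   interface; a hedged usage sketch:

     Mat C; // A is MATAIJ, P is MATHYPRE
     PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C));

   The output type of C is selectable at runtime, see
   MatProductSetFromOptions_HYPRE_PtAP() below. */
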
static PetscErrorCode MatPtAPNumeric_HYPRE_HYPRE(Mat A, Mat P, Mat C)
{
  Mat                 B;
  hypre_ParCSRMatrix *Aparcsr, *Pparcsr, *ptapparcsr = NULL;
  Mat_HYPRE          *hA, *hP;
  PetscBool           ishypre;
  HYPRE_Int           type;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)P, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)P), PETSC_ERR_USER, "P should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hP = (Mat_HYPRE *)P->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hP->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)P), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hP->ij, (void **)&Pparcsr);
  PetscCall(MatHYPRE_ParCSR_RAP(Pparcsr, Aparcsr, Pparcsr, &ptapparcsr));
  PetscCall(MatCreateFromParCSR(ptapparcsr, MATHYPRE, PETSC_OWN_POINTER, &B));
  PetscCall(MatHeaderMerge(C, &B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* calls hypre_ParMatmul
   hypre_ParMatmul uses hypre_ParMatrixCreate with the communicator of hA
   hypre_ParMatrixCreate does not duplicate the communicator
   It looks like we don't need to have the diagonal entries ordered first */
static PetscErrorCode MatHYPRE_ParCSR_MatMatMult(hypre_ParCSRMatrix *hA, hypre_ParCSRMatrix *hB, hypre_ParCSRMatrix **hAB)
{
  PetscFunctionBegin;
  /* can be replaced by version test later */
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscStackPushExternal("hypre_ParCSRMatMat");
  *hAB = hypre_ParCSRMatMat(hA, hB);
#else
  PetscStackPushExternal("hypre_ParMatmul");
  *hAB = hypre_ParMatmul(hA, hB);
#endif
  PetscStackPop;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatMultNumeric_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *hA, *hB, *hAB = NULL;
  Mat_Product        *product = C->product;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatHYPRE_ParCSR_MatMatMult(hA, hB, &hAB));
  PetscCall(MatCreateFromParCSR(hAB, MATAIJ, PETSC_OWN_POINTER, &D));

  PetscCall(MatHeaderMerge(C, &D));
  C->product = product;

  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat A, Mat B, PetscReal fill, Mat C)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(C, MATAIJ));
  C->ops->matmultnumeric = MatMatMultNumeric_AIJ_AIJ_wHYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMatMultNumeric_HYPRE_HYPRE(Mat A, Mat B, Mat C)
{
  Mat                 D;
  hypre_ParCSRMatrix *Aparcsr, *Bparcsr, *ABparcsr = NULL;
  Mat_HYPRE          *hA, *hB;
  PetscBool           ishypre;
  HYPRE_Int           type;
  Mat_Product        *product;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)B, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)B), PETSC_ERR_USER, "B should be of type %s", MATHYPRE);
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &ishypre));
  PetscCheck(ishypre, PetscObjectComm((PetscObject)A), PETSC_ERR_USER, "A should be of type %s", MATHYPRE);
  hA = (Mat_HYPRE *)A->data;
  hB = (Mat_HYPRE *)B->data;
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hB->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Only HYPRE_PARCSR is supported");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&Aparcsr);
  PetscCallExternal(HYPRE_IJMatrixGetObject, hB->ij, (void **)&Bparcsr);
  PetscCall(MatHYPRE_ParCSR_MatMatMult(Aparcsr, Bparcsr, &ABparcsr));
  PetscCall(MatCreateFromParCSR(ABparcsr, MATHYPRE, PETSC_OWN_POINTER, &D));

  /* need to use HeaderReplace because HeaderMerge messes up with the communicator */
  product    = C->product; /* save it from MatHeaderReplace() */
  C->product = NULL;
  PetscCall(MatHeaderReplace(C, &D));
  C->product             = product;
  C->ops->matmultnumeric = MatMatMultNumeric_HYPRE_HYPRE;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatTransposeMatMatMultNumeric_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, Mat D)
{
  Mat                 E;
  hypre_ParCSRMatrix *hA, *hB, *hC, *hABC = NULL;

  PetscFunctionBegin;
  PetscCall(MatAIJGetParCSR_Private(A, &hA));
  PetscCall(MatAIJGetParCSR_Private(B, &hB));
  PetscCall(MatAIJGetParCSR_Private(C, &hC));
  PetscCall(MatHYPRE_ParCSR_RAP(hA, hB, hC, &hABC));
  PetscCall(MatCreateFromParCSR(hABC, MATAIJ, PETSC_OWN_POINTER, &E));
  PetscCall(MatHeaderMerge(D, &E));
  PetscCall(MatAIJRestoreParCSR_Private(A, &hA));
  PetscCall(MatAIJRestoreParCSR_Private(B, &hB));
  PetscCall(MatAIJRestoreParCSR_Private(C, &hC));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode MatTransposeMatMatMultSymbolic_AIJ_AIJ_AIJ_wHYPRE(Mat A, Mat B, Mat C, PetscReal fill, Mat D)
{
  PetscFunctionBegin;
  PetscCall(MatSetType(D, MATAIJ));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSymbolic_AB_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_AB;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE_AB(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)product->A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_AB_HYPRE;
    C->ops->matmultnumeric  = MatMatMultNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSymbolic_PtAP_HYPRE(Mat C)
{
  PetscFunctionBegin;
  C->ops->productnumeric = MatProductNumeric_PtAP;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE_PtAP(Mat C)
{
  Mat_Product *product = C->product;
  PetscBool    flg;
  PetscInt     type        = 0;
  const char  *outTypes[4] = {"aij", "seqaij", "mpiaij", "hypre"};
  PetscInt     ntype       = 4;
  Mat          A           = product->A;
  PetscBool    Ahypre;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATHYPRE, &Ahypre));
  if (Ahypre) { /* A is a Hypre matrix */
    PetscCall(MatSetType(C, MATHYPRE));
    C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
    C->ops->ptapnumeric     = MatPtAPNumeric_HYPRE_HYPRE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* A is AIJ, P is Hypre, C = PtAP can be either AIJ or Hypre format */
  /* Get runtime option */
  if (product->api_user) {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatPtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-matptap_hypre_outtype", "MatPtAP outtype", "MatPtAP outtype", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  } else {
    PetscOptionsBegin(PetscObjectComm((PetscObject)C), ((PetscObject)C)->prefix, "MatProduct_PtAP_HYPRE", "Mat");
    PetscCall(PetscOptionsEList("-mat_product_algorithm_hypre_outtype", "MatProduct_PtAP outtype", "MatProduct_PtAP", outTypes, ntype, outTypes[type], &type, &flg));
    PetscOptionsEnd();
  }

  if (type == 0 || type == 1 || type == 2) {
    PetscCall(MatSetType(C, MATAIJ));
  } else if (type == 3) {
    PetscCall(MatSetType(C, MATHYPRE));
  } else SETERRQ(PetscObjectComm((PetscObject)C), PETSC_ERR_SUP, "MatPtAP outtype is not supported");
  C->ops->productsymbolic = MatProductSymbolic_PtAP_HYPRE;
  C->ops->ptapnumeric     = MatPtAPNumeric_AIJ_HYPRE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatProductSetFromOptions_HYPRE(Mat C)
{
  Mat_Product *product = C->product;

  PetscFunctionBegin;
  switch (product->type) {
  case MATPRODUCT_AB:
    PetscCall(MatProductSetFromOptions_HYPRE_AB(C));
    break;
  case MATPRODUCT_PtAP:
    PetscCall(MatProductSetFromOptions_HYPRE_PtAP(C));
    break;
  default:
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMultTranspose_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMult_HYPRE(Mat A, Vec x, Vec y)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 0.0, y, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMultAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  if (y != z) PetscCall(VecCopy(y, z));
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMultTransposeAdd_HYPRE(Mat A, Vec x, Vec y, Vec z)
{
  PetscFunctionBegin;
  if (y != z) PetscCall(VecCopy(y, z));
  PetscCall(MatHYPRE_MultKernel_Private(A, 1.0, x, 1.0, z, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* y = a * A * x + b * y or y = a * A^t * x + b * y depending on trans */
static PetscErrorCode MatHYPRE_MultKernel_Private(Mat A, HYPRE_Complex a, Vec x, HYPRE_Complex b, Vec y, PetscBool trans)
{
  Mat_HYPRE          *hA = (Mat_HYPRE *)A->data;
  hypre_ParCSRMatrix *parcsr;
  hypre_ParVector    *hx, *hy;

  PetscFunctionBegin;
  if (trans) {
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->b, x));
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->x, y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->x, y));
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hx);
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hy);
  } else {
    PetscCall(VecHYPRE_IJVectorPushVecRead(hA->x, x));
    if (b != 0.0) PetscCall(VecHYPRE_IJVectorPushVec(hA->b, y));
    else PetscCall(VecHYPRE_IJVectorPushVecWrite(hA->b, y));
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->x->ij, (void **)&hx);
    PetscCallExternal(HYPRE_IJVectorGetObject, hA->b->ij, (void **)&hy);
  }
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
  if (trans) {
    PetscCallExternal(hypre_ParCSRMatrixMatvecT, a, parcsr, hx, b, hy);
  } else {
    PetscCallExternal(hypre_ParCSRMatrixMatvec, a, parcsr, hx, b, hy);
  }
  PetscCall(VecHYPRE_IJVectorPopVec(hA->x));
  PetscCall(VecHYPRE_IJVectorPopVec(hA->b));
  PetscFunctionReturn(PETSC_SUCCESS);
}

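/* The four wrappers above map the standard PETSc operations onto this
   kernel: for example MatMult(A, x, y) computes y = A*x via a = 1, b = 0,
   trans = PETSC_FALSE, while MatMultTransposeAdd(A, x, y, z) first copies y
   into z and then accumulates with b = 1, trans = PETSC_TRUE. */
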
static PetscErrorCode MatDestroy_HYPRE(Mat A)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->x));
  PetscCall(VecHYPRE_IJVectorDestroy(&hA->b));
  if (hA->ij) {
    if (!hA->inner_free) hypre_IJMatrixObject(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixDestroy, hA->ij);
  }
  if (hA->comm) PetscCall(PetscCommRestoreComm(PetscObjectComm((PetscObject)A), &hA->comm));

  PetscCall(MatStashDestroy_Private(&A->stash));
  PetscCall(PetscFree(hA->array));

  if (hA->cooMat) {
    PetscCall(MatDestroy(&hA->cooMat));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hA->diagJ, hA->memType));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hA->offdJ, hA->memType));
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hA->diag, hA->memType));
  }

  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_hypre_is_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_seqaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatProductSetFromOptions_mpiaij_hypre_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPRESetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatHYPREGetParCSR_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetUp_HYPRE(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatHYPRESetPreallocation(A, PETSC_DEFAULT, NULL, PETSC_DEFAULT, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

//TODO FIX hypre_CSRMatrixMatvecOutOfPlace
#if defined(PETSC_HAVE_HYPRE_DEVICE)
static PetscErrorCode MatBindToCPU_HYPRE(Mat A, PetscBool bind)
{
  Mat_HYPRE           *hA   = (Mat_HYPRE *)A->data;
  HYPRE_MemoryLocation hmem = bind ? HYPRE_MEMORY_HOST : HYPRE_MEMORY_DEVICE;

  PetscFunctionBegin;
  A->boundtocpu = bind;
  if (hA->ij && hypre_IJMatrixAssembleFlag(hA->ij) && hmem != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;
    PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
    PetscCallExternal(hypre_ParCSRMatrixMigrate, parcsr, hmem);
  }
  if (hA->x) PetscCall(VecHYPRE_IJBindToCPU(hA->x, bind));
  if (hA->b) PetscCall(VecHYPRE_IJBindToCPU(hA->b, bind));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

static PetscErrorCode MatAssemblyEnd_HYPRE(Mat A, MatAssemblyType mode)
{
  Mat_HYPRE   *hA = (Mat_HYPRE *)A->data;
  PetscMPIInt  n;
  PetscInt     i, j, rstart, ncols, flg;
  PetscInt    *row, *col;
  PetscScalar *val;

  PetscFunctionBegin;
  PetscCheck(mode != MAT_FLUSH_ASSEMBLY, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_FLUSH_ASSEMBLY currently not supported with MATHYPRE");

  if (!A->nooffprocentries) {
    while (1) {
      PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
      if (!flg) break;

      for (i = 0; i < n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j = i, rstart = row[j]; j < n; j++) {
          if (row[j] != rstart) break;
        }
        if (j < n) ncols = j - i;
        else ncols = n - i;
        /* Now assemble all these values with a single function call */
        PetscCall(MatSetValues_HYPRE(A, 1, row + i, ncols, col + i, val + i, A->insertmode));

        i = j;
      }
    }
    PetscCall(MatStashScatterEnd_Private(&A->stash));
  }

  PetscCallExternal(HYPRE_IJMatrixAssemble, hA->ij);
  /* The assembly routine destroys the aux_matrix, we recreate it here by calling HYPRE_IJMatrixInitialize */
  /* If the option MAT_SORTED_FULL is set to true, the indices and values can be passed to hypre directly, so we don't need the aux_matrix */
  if (!hA->sorted_full) {
    hypre_AuxParCSRMatrix *aux_matrix;

    /* call destroy just to make sure we do not leak anything */
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    PetscCallExternal(hypre_AuxParCSRMatrixDestroy, aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;

    /* Initialize with assembled flag -> it only recreates the aux_par_matrix */
    PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    if (aux_matrix) {
      hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1; /* see comment in MatHYPRESetPreallocation_HYPRE */
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
      PetscCallExternal(hypre_AuxParCSRMatrixInitialize, aux_matrix);
#else
      PetscCallExternal(hypre_AuxParCSRMatrixInitialize_v2, aux_matrix, HYPRE_MEMORY_HOST);
#endif
    }
  }
  {
    hypre_ParCSRMatrix *parcsr;

    PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)&parcsr);
    if (!hypre_ParCSRMatrixCommPkg(parcsr)) PetscCallExternal(hypre_MatvecCommPkgCreate, parcsr);
  }
  if (!hA->x) PetscCall(VecHYPRE_IJVectorCreate(A->cmap, &hA->x));
  if (!hA->b) PetscCall(VecHYPRE_IJVectorCreate(A->rmap, &hA->b));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  PetscCall(MatBindToCPU_HYPRE(A, A->boundtocpu));
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetArray_HYPRE(Mat A, PetscInt size, void **array)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  PetscCheck(hA->available, PETSC_COMM_SELF, PETSC_ERR_ARG_NULL, "Temporary space is in use");

  if (hA->size >= size) {
    *array = hA->array;
  } else {
    PetscCall(PetscFree(hA->array));
    hA->size = size;
    PetscCall(PetscMalloc(hA->size, &hA->array));
    *array = hA->array;
  }

  hA->available = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRestoreArray_HYPRE(Mat A, void **array)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  *array        = NULL;
  hA->available = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

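/* Scratch-space arithmetic (illustrative): a MatSetValues_HYPRE() call with
   nr rows and nc columns requests sizeof(PetscInt) * 2 * nc bytes for the
   compressed column indices plus sizeof(HYPRE_Complex) * nc * nr bytes for
   the cast values; the buffer only ever grows, and MatRestoreArray_HYPRE()
   simply marks it available again. */
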
static PetscErrorCode MatSetValues_HYPRE(Mat A, PetscInt nr, const PetscInt rows[], PetscInt nc, const PetscInt cols[], const PetscScalar v[], InsertMode ins)
{
  Mat_HYPRE     *hA   = (Mat_HYPRE *)A->data;
  PetscScalar   *vals = (PetscScalar *)v;
  HYPRE_Complex *sscr;
  PetscInt      *cscr[2];
  PetscInt       i, nzc;
  void          *array = NULL;

  PetscFunctionBegin;
  PetscCall(MatGetArray_HYPRE(A, sizeof(PetscInt) * (2 * nc) + sizeof(HYPRE_Complex) * nc * nr, &array));
  cscr[0] = (PetscInt *)array;
  cscr[1] = ((PetscInt *)array) + nc;
  sscr    = (HYPRE_Complex *)(((PetscInt *)array) + nc * 2);
  for (i = 0, nzc = 0; i < nc; i++) {
    if (cols[i] >= 0) {
      cscr[0][nzc]   = cols[i];
      cscr[1][nzc++] = i;
    }
  }
  if (!nzc) {
    PetscCall(MatRestoreArray_HYPRE(A, &array));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

#if 0 //defined(PETSC_HAVE_HYPRE_DEVICE)
  if (HYPRE_MEMORY_HOST != hypre_IJMatrixMemoryLocation(hA->ij)) {
    hypre_ParCSRMatrix *parcsr;

    PetscCallExternal(HYPRE_IJMatrixGetObject,hA->ij,(void**)&parcsr);
    PetscCallExternal(hypre_ParCSRMatrixMigrate,parcsr, HYPRE_MEMORY_HOST);
  }
#endif

  if (ins == ADD_VALUES) {
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        PetscCallExternal(HYPRE_IJMatrixAddToValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
      }
      vals += nc;
    }
  } else { /* INSERT_VALUES */
    PetscInt rst, ren;

    PetscCall(MatGetOwnershipRange(A, &rst, &ren));
    for (i = 0; i < nr; i++) {
      if (rows[i] >= 0) {
        PetscInt  j;
        HYPRE_Int hnc = (HYPRE_Int)nzc;

        PetscCheck((PetscInt)hnc == nzc, PETSC_COMM_SELF, PETSC_ERR_SUP, "Hypre overflow! number of columns %" PetscInt_FMT " for row %" PetscInt_FMT, nzc, rows[i]);
        for (j = 0; j < nzc; j++) PetscCall(PetscHYPREScalarCast(vals[cscr[1][j]], &sscr[j]));
        /* nonlocal values */
        if (rows[i] < rst || rows[i] >= ren) PetscCall(MatStashValuesRow_Private(&A->stash, rows[i], nzc, cscr[0], (PetscScalar *)sscr, PETSC_FALSE));
        /* local values */
        else PetscCallExternal(HYPRE_IJMatrixSetValues, hA->ij, 1, &hnc, (HYPRE_BigInt *)(rows + i), (HYPRE_BigInt *)cscr[0], sscr);
      }
      vals += nc;
    }
  }

  PetscCall(MatRestoreArray_HYPRE(A, &array));
  PetscFunctionReturn(PETSC_SUCCESS);
}


static PetscErrorCode MatHYPRESetPreallocation_HYPRE(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
  HYPRE_Int  *hdnnz, *honnz;
  PetscInt    i, rs, re, cs, ce, bs;
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  rs = A->rmap->rstart;
  re = A->rmap->rend;
  cs = A->cmap->rstart;
  ce = A->cmap->rend;
  if (!hA->ij) {
    PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rs, re - 1, cs, ce - 1, &hA->ij);
    PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);
  } else {
    HYPRE_BigInt hrs, hre, hcs, hce;
    PetscCallExternal(HYPRE_IJMatrixGetLocalRange, hA->ij, &hrs, &hre, &hcs, &hce);
    PetscCheck(hre - hrs + 1 == re - rs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local rows: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hrs, hre + 1, rs, re);
    PetscCheck(hce - hcs + 1 == ce - cs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Inconsistent local cols: IJMatrix [%" PetscHYPRE_BigInt_FMT ",%" PetscHYPRE_BigInt_FMT "), PETSc [%" PetscInt_FMT ",%" PetscInt_FMT ")", hcs, hce + 1, cs, ce);
  }
  PetscCall(MatGetBlockSize(A, &bs));
  if (dnz == PETSC_DEFAULT || dnz == PETSC_DECIDE) dnz = 10 * bs;
  if (onz == PETSC_DEFAULT || onz == PETSC_DECIDE) onz = 10 * bs;

  if (!dnnz) {
    PetscCall(PetscMalloc1(A->rmap->n, &hdnnz));
    for (i = 0; i < A->rmap->n; i++) hdnnz[i] = dnz;
  } else {
    hdnnz = (HYPRE_Int *)dnnz;
  }
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  if (size > 1) {
    hypre_AuxParCSRMatrix *aux_matrix;
    if (!onnz) {
      PetscCall(PetscMalloc1(A->rmap->n, &honnz));
      for (i = 0; i < A->rmap->n; i++) honnz[i] = onz;
    } else honnz = (HYPRE_Int *)onnz;
    /* SetDiagOffdSizes sets hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 0, since it seems
       they assume the user will input the entire row values, properly sorted.
       In PETSc, we don't make such an assumption and set this flag to 1,
       unless the option MAT_SORTED_FULL is set to true.
       Also, to avoid possible memory leaks, we destroy and recreate the translator.
       This has to be done here, as HYPRE_IJMatrixInitialize will properly initialize
       the IJ matrix for us */
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixDestroy(aux_matrix);
    hypre_IJMatrixTranslator(hA->ij) = NULL;
    PetscCallExternal(HYPRE_IJMatrixSetDiagOffdSizes, hA->ij, hdnnz, honnz);
    aux_matrix = (hypre_AuxParCSRMatrix *)hypre_IJMatrixTranslator(hA->ij);
    hypre_AuxParCSRMatrixNeedAux(aux_matrix) = !hA->sorted_full;
  } else {
    honnz = NULL;
    PetscCallExternal(HYPRE_IJMatrixSetRowSizes, hA->ij, hdnnz);
  }

  /* reset assembled flag and call the initialize method */
  hypre_IJMatrixAssembleFlag(hA->ij) = 0;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
  PetscCallExternal(HYPRE_IJMatrixInitialize, hA->ij);
#else
  PetscCallExternal(HYPRE_IJMatrixInitialize_v2, hA->ij, HYPRE_MEMORY_HOST);
#endif
  if (!dnnz) PetscCall(PetscFree(hdnnz));
  if (!onnz && honnz) PetscCall(PetscFree(honnz));
  /* Match AIJ logic */
  A->preallocated = PETSC_TRUE;
  A->assembled    = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatHYPRESetPreallocation - Preallocates memory for a sparse parallel matrix in HYPRE IJ format

  Collective

  Input Parameters:
+ A    - the matrix
. dnz  - number of nonzeros per row in DIAGONAL portion of local submatrix
         (same value is used for all local rows)
. dnnz - array containing the number of nonzeros in the various rows of the
         DIAGONAL portion of the local submatrix (possibly different for each row)
         or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `dnz` is used to specify the nonzero structure.
         The size of this array is equal to the number of local rows, i.e. `m`.
         For matrices that will be factored, you must leave room for (and set)
         the diagonal entry even if it is zero.
. onz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
         submatrix (same value is used for all local rows).
- onnz - array containing the number of nonzeros in the various rows of the
         OFF-DIAGONAL portion of the local submatrix (possibly different for
         each row) or `NULL` (`PETSC_NULL_INTEGER` in Fortran), if `onz` is used to specify the nonzero
         structure. The size of this array is equal to the number
         of local rows, i.e. `m`.

  Level: intermediate

  Note:
  If the *nnz parameter is given then the *nz parameter is ignored; for sequential matrices, `onz` and `onnz` are ignored.
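
  Example Usage (a minimal sketch; the sizes are illustrative):
.vb
  Mat      A;
  PetscInt m = 8; // number of local rows

  MatCreate(PETSC_COMM_WORLD, &A);
  MatSetSizes(A, m, m, PETSC_DECIDE, PETSC_DECIDE);
  MatSetType(A, MATHYPRE);
  MatHYPRESetPreallocation(A, 5, NULL, 2, NULL); // at most 5 diagonal and 2 off-diagonal nonzeros per row
.ve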

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatMPIAIJSetPreallocation()`, `MATHYPRE`, `MATAIJ`
@*/
PetscErrorCode MatHYPRESetPreallocation(Mat A, PetscInt dnz, const PetscInt dnnz[], PetscInt onz, const PetscInt onnz[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscTryMethod(A, "MatHYPRESetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (A, dnz, dnnz, onz, onnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatCreateFromParCSR - Creates a `Mat` from a `hypre_ParCSRMatrix`

  Collective

  Input Parameters:
+ parcsr   - the pointer to the `hypre_ParCSRMatrix`
. mtype    - matrix type to be created. Currently `MATAIJ`, `MATIS` and `MATHYPRE` are supported.
- copymode - PETSc copying options, see `PetscCopyMode`

  Output Parameter:
. A - the matrix

  Level: intermediate
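
  Example Usage (sketch; assumes `parcsr` was obtained elsewhere, e.g. from a hypre routine):
.vb
  hypre_ParCSRMatrix *parcsr; // previously created
  Mat                 A;

  MatCreateFromParCSR(parcsr, MATAIJ, PETSC_COPY_VALUES, &A); // A owns an independent copy of the values
.ve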

.seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
@*/
PETSC_EXTERN PetscErrorCode MatCreateFromParCSR(hypre_ParCSRMatrix *parcsr, MatType mtype, PetscCopyMode copymode, Mat *A)
{
  Mat        T;
  Mat_HYPRE *hA;
  MPI_Comm   comm;
  PetscInt   rstart, rend, cstart, cend, M, N;
  PetscBool  isseqaij, isseqaijmkl, ismpiaij, isaij, ishyp, isis;

  PetscFunctionBegin;
  comm = hypre_ParCSRMatrixComm(parcsr);
  PetscCall(PetscStrcmp(mtype, MATSEQAIJ, &isseqaij));
  PetscCall(PetscStrcmp(mtype, MATSEQAIJMKL, &isseqaijmkl));
  PetscCall(PetscStrcmp(mtype, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscStrcmp(mtype, MATAIJ, &isaij));
  PetscCall(PetscStrcmp(mtype, MATHYPRE, &ishyp));
  PetscCall(PetscStrcmp(mtype, MATIS, &isis));
  isaij = (PetscBool)(isseqaij || isseqaijmkl || ismpiaij || isaij);
  /* TODO */
  PetscCheck(isaij || ishyp || isis, comm, PETSC_ERR_SUP, "Unsupported MatType %s! Supported types are %s, %s, %s, %s, %s, and %s", mtype, MATAIJ, MATSEQAIJ, MATSEQAIJMKL, MATMPIAIJ, MATIS, MATHYPRE);
  /* access ParCSRMatrix */
  rstart = hypre_ParCSRMatrixFirstRowIndex(parcsr);
  rend   = hypre_ParCSRMatrixLastRowIndex(parcsr);
  cstart = hypre_ParCSRMatrixFirstColDiag(parcsr);
  cend   = hypre_ParCSRMatrixLastColDiag(parcsr);
  M      = hypre_ParCSRMatrixGlobalNumRows(parcsr);
  N      = hypre_ParCSRMatrixGlobalNumCols(parcsr);

  /* fix for empty local rows/columns */
  if (rend < rstart) rend = rstart;
  if (cend < cstart) cend = cstart;

  /* PETSc convention */
  rend++;
  cend++;
  rend = PetscMin(rend, M);
  cend = PetscMin(cend, N);

  /* create PETSc matrix with MatHYPRE */
  PetscCall(MatCreate(comm, &T));
  PetscCall(MatSetSizes(T, rend - rstart, cend - cstart, M, N));
  PetscCall(MatSetType(T, MATHYPRE));
  hA = (Mat_HYPRE *)(T->data);

  /* create HYPRE_IJMatrix */
  PetscCallExternal(HYPRE_IJMatrixCreate, hA->comm, rstart, rend - 1, cstart, cend - 1, &hA->ij);
  PetscCallExternal(HYPRE_IJMatrixSetObjectType, hA->ij, HYPRE_PARCSR);

  // TODO DEV
  /* create new ParCSR object if needed */
  if (ishyp && copymode == PETSC_COPY_VALUES) {
    hypre_ParCSRMatrix *new_parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 18, 0)
    hypre_CSRMatrix *hdiag, *hoffd, *ndiag, *noffd;

    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
    hdiag      = hypre_ParCSRMatrixDiag(parcsr);
    hoffd      = hypre_ParCSRMatrixOffd(parcsr);
    ndiag      = hypre_ParCSRMatrixDiag(new_parcsr);
    noffd      = hypre_ParCSRMatrixOffd(new_parcsr);
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(ndiag), hypre_CSRMatrixData(hdiag), hypre_CSRMatrixNumNonzeros(hdiag)));
    PetscCall(PetscArraycpy(hypre_CSRMatrixData(noffd), hypre_CSRMatrixData(hoffd), hypre_CSRMatrixNumNonzeros(hoffd)));
#else
    new_parcsr = hypre_ParCSRMatrixClone(parcsr, 1);
#endif
    parcsr   = new_parcsr;
    copymode = PETSC_OWN_POINTER;
  }

  /* set ParCSR object */
  hypre_IJMatrixObject(hA->ij) = parcsr;
  T->preallocated              = PETSC_TRUE;

  /* set assembled flag */
  hypre_IJMatrixAssembleFlag(hA->ij) = 1;
#if 0
  PetscCallExternal(HYPRE_IJMatrixInitialize,hA->ij);
#endif
  if (ishyp) {
    PetscMPIInt myid = 0;

    /* make sure we always have row_starts and col_starts available */
    if (HYPRE_AssumedPartitionCheck()) PetscCallMPI(MPI_Comm_rank(comm, &myid));
#if defined(hypre_ParCSRMatrixOwnsRowStarts)
    if (!hypre_ParCSRMatrixOwnsColStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, NULL, &map));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixColStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
    if (!hypre_ParCSRMatrixOwnsRowStarts(parcsr)) {
      PetscLayout map;

      PetscCall(MatGetLayouts(T, &map, NULL));
      PetscCall(PetscLayoutSetUp(map));
      hypre_ParCSRMatrixRowStarts(parcsr) = (HYPRE_BigInt *)(map->range + myid);
    }
#endif
    /* prevent freeing the pointer */
    if (copymode == PETSC_USE_POINTER) hA->inner_free = PETSC_FALSE;
    *A = T;
    PetscCall(MatSetOption(*A, MAT_SORTED_FULL, PETSC_TRUE));
    PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
  } else if (isaij) {
    if (copymode != PETSC_OWN_POINTER) {
      /* prevent freeing the pointer */
      hA->inner_free = PETSC_FALSE;
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INITIAL_MATRIX, A));
      PetscCall(MatDestroy(&T));
    } else { /* AIJ return type with PETSC_OWN_POINTER */
      PetscCall(MatConvert_HYPRE_AIJ(T, MATAIJ, MAT_INPLACE_MATRIX, &T));
      *A = T;
    }
  } else if (isis) {
    PetscCall(MatConvert_HYPRE_IS(T, MATIS, MAT_INITIAL_MATRIX, A));
    if (copymode != PETSC_OWN_POINTER) hA->inner_free = PETSC_FALSE;
    PetscCall(MatDestroy(&T));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatHYPREGetParCSR_HYPRE(Mat A, hypre_ParCSRMatrix **parcsr)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
  HYPRE_Int  type;

  PetscFunctionBegin;
  PetscCheck(hA->ij, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "HYPRE_IJMatrix not present");
  PetscCallExternal(HYPRE_IJMatrixGetObjectType, hA->ij, &type);
  PetscCheck(type == HYPRE_PARCSR, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "HYPRE_IJMatrix is not of type HYPRE_PARCSR");
  PetscCallExternal(HYPRE_IJMatrixGetObject, hA->ij, (void **)parcsr);
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  MatHYPREGetParCSR - Gets the pointer to the ParCSR matrix

  Not Collective

  Input Parameter:
. A - the `MATHYPRE` object

  Output Parameter:
. parcsr - the pointer to the `hypre_ParCSRMatrix`

  Level: intermediate
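
  Example Usage (sketch): the returned pointer is owned by `A` and must not be destroyed
.vb
  hypre_ParCSRMatrix *parcsr;

  MatHYPREGetParCSR(A, &parcsr);
  // parcsr can now be passed directly to hypre routines
.ve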

.seealso: [](ch_matrices), `Mat`, `MATHYPRE`, `PetscCopyMode`
@*/
PetscErrorCode MatHYPREGetParCSR(Mat A, hypre_ParCSRMatrix **parcsr)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscUseMethod(A, "MatHYPREGetParCSR_C", (Mat, hypre_ParCSRMatrix **), (A, parcsr));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatMissingDiagonal_HYPRE(Mat A, PetscBool *missing, PetscInt *dd)
{
  hypre_ParCSRMatrix *parcsr;
  hypre_CSRMatrix    *ha;
  PetscInt            rst;

  PetscFunctionBegin;
  PetscCheck(A->rmap->n == A->cmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented with non-square diagonal blocks");
  PetscCall(MatGetOwnershipRange(A, &rst, NULL));
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  if (missing) *missing = PETSC_FALSE;
  if (dd) *dd = -1;
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt   size, i;
    HYPRE_Int *ii, *jj;

    size = hypre_CSRMatrixNumRows(ha);
    ii   = hypre_CSRMatrixI(ha);
    jj   = hypre_CSRMatrixJ(ha);
    for (i = 0; i < size; i++) {
      PetscInt  j;
      PetscBool found = PETSC_FALSE;

      for (j = ii[i]; j < ii[i + 1] && !found; j++) found = (jj[j] == i) ? PETSC_TRUE : PETSC_FALSE;

      if (!found) {
        PetscCall(PetscInfo(A, "Matrix is missing local diagonal entry %" PetscInt_FMT "\n", i));
        if (missing) *missing = PETSC_TRUE;
        if (dd) *dd = i + rst;
        PetscFunctionReturn(PETSC_SUCCESS);
      }
    }
    if (!size) {
      PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
      if (missing) *missing = PETSC_TRUE;
      if (dd) *dd = rst;
    }
  } else {
    PetscCall(PetscInfo(A, "Matrix has no diagonal entries therefore is missing diagonal\n"));
    if (missing) *missing = PETSC_TRUE;
    if (dd) *dd = rst;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatScale_HYPRE(Mat A, PetscScalar s)
{
  hypre_ParCSRMatrix *parcsr;
#if PETSC_PKG_HYPRE_VERSION_LT(2, 19, 0)
  hypre_CSRMatrix *ha;
#endif
  HYPRE_Complex hs;

  PetscFunctionBegin;
  PetscCall(PetscHYPREScalarCast(s, &hs));
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
#if PETSC_PKG_HYPRE_VERSION_GE(2, 19, 0)
  PetscCallExternal(hypre_ParCSRMatrixScale, parcsr, hs);
#else /* diagonal part */
  ha = hypre_ParCSRMatrixDiag(parcsr);
  if (ha) {
    PetscInt       size, i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
  /* off-diagonal part */
  ha = hypre_ParCSRMatrixOffd(parcsr);
  if (ha) {
    PetscInt       size, i;
    HYPRE_Int     *ii;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);
    for (i = 0; i < ii[size]; i++) a[i] *= hs;
  }
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatZeroRowsColumns_HYPRE(Mat A, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int          *lrows;
  PetscInt            rst, ren, i;

  PetscFunctionBegin;
  PetscCheck(!x && !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "To be implemented");
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  PetscCall(PetscMalloc1(numRows, &lrows));
  PetscCall(MatGetOwnershipRange(A, &rst, &ren));
  for (i = 0; i < numRows; i++) {
    PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported");
    lrows[i] = rows[i] - rst;
  }
  PetscCallExternal(hypre_ParCSRMatrixEliminateRowsCols, parcsr, numRows, lrows);
  PetscCall(PetscFree(lrows));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha)
{
  PetscFunctionBegin;
  if (ha) {
    HYPRE_Int     *ii, size;
    HYPRE_Complex *a;

    size = hypre_CSRMatrixNumRows(ha);
    a    = hypre_CSRMatrixData(ha);
    ii   = hypre_CSRMatrixI(ha);

    if (a) PetscCall(PetscArrayzero(a, ii[size]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatZeroEntries_HYPRE(Mat A)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) {
    PetscCallExternal(HYPRE_IJMatrixSetConstantValues, hA->ij, 0.0);
  } else {
    hypre_ParCSRMatrix *parcsr;

    PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
    PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr)));
    PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
implemented"); 1888 PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr)); 1889 PetscCall(PetscMalloc1(numRows, &lrows)); 1890 PetscCall(MatGetOwnershipRange(A, &rst, &ren)); 1891 for (i = 0; i < numRows; i++) { 1892 PetscCheck(rows[i] >= rst && rows[i] < ren, PETSC_COMM_SELF, PETSC_ERR_SUP, "Non-local rows not yet supported"); 1893 lrows[i] = rows[i] - rst; 1894 } 1895 PetscCallExternal(hypre_ParCSRMatrixEliminateRowsCols, parcsr, numRows, lrows); 1896 PetscCall(PetscFree(lrows)); 1897 PetscFunctionReturn(PETSC_SUCCESS); 1898 } 1899 1900 static PetscErrorCode MatZeroEntries_HYPRE_CSRMatrix(hypre_CSRMatrix *ha) 1901 { 1902 PetscFunctionBegin; 1903 if (ha) { 1904 HYPRE_Int *ii, size; 1905 HYPRE_Complex *a; 1906 1907 size = hypre_CSRMatrixNumRows(ha); 1908 a = hypre_CSRMatrixData(ha); 1909 ii = hypre_CSRMatrixI(ha); 1910 1911 if (a) PetscCall(PetscArrayzero(a, ii[size])); 1912 } 1913 PetscFunctionReturn(PETSC_SUCCESS); 1914 } 1915 1916 PetscErrorCode MatZeroEntries_HYPRE(Mat A) 1917 { 1918 Mat_HYPRE *hA = (Mat_HYPRE *)A->data; 1919 1920 PetscFunctionBegin; 1921 if (HYPRE_MEMORY_DEVICE == hypre_IJMatrixMemoryLocation(hA->ij)) { 1922 PetscCallExternal(HYPRE_IJMatrixSetConstantValues, hA->ij, 0.0); 1923 } else { 1924 hypre_ParCSRMatrix *parcsr; 1925 1926 PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr)); 1927 PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr))); 1928 PetscCall(MatZeroEntries_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr))); 1929 } 1930 PetscFunctionReturn(PETSC_SUCCESS); 1931 } 1932 1933 static PetscErrorCode MatZeroRows_HYPRE_CSRMatrix(hypre_CSRMatrix *hA, PetscInt N, const PetscInt rows[], HYPRE_Complex diag) 1934 { 1935 PetscInt ii; 1936 HYPRE_Int *i, *j; 1937 HYPRE_Complex *a; 1938 1939 PetscFunctionBegin; 1940 if (!hA) PetscFunctionReturn(PETSC_SUCCESS); 1941 1942 i = hypre_CSRMatrixI(hA); 1943 j = hypre_CSRMatrixJ(hA); 1944 a = hypre_CSRMatrixData(hA); 1945 1946 for (ii = 0; ii < N; ii++) { 1947 HYPRE_Int jj, ibeg, iend, irow; 1948 1949 irow = rows[ii]; 1950 ibeg = i[irow]; 1951 iend = i[irow + 1]; 1952 for (jj = ibeg; jj < iend; jj++) 1953 if (j[jj] == irow) a[jj] = diag; 1954 else a[jj] = 0.0; 1955 } 1956 PetscFunctionReturn(PETSC_SUCCESS); 1957 } 1958 1959 static PetscErrorCode MatZeroRows_HYPRE(Mat A, PetscInt N, const PetscInt rows[], PetscScalar diag, Vec x, Vec b) 1960 { 1961 hypre_ParCSRMatrix *parcsr; 1962 PetscInt *lrows, len; 1963 HYPRE_Complex hdiag; 1964 1965 PetscFunctionBegin; 1966 PetscCheck(!x && !b, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Does not support to modify the solution and the right hand size"); 1967 PetscCall(PetscHYPREScalarCast(diag, &hdiag)); 1968 /* retrieve the internal matrix */ 1969 PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr)); 1970 /* get locally owned rows */ 1971 PetscCall(MatZeroRowsMapLocal_Private(A, N, rows, &len, &lrows)); 1972 /* zero diagonal part */ 1973 PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixDiag(parcsr), len, lrows, hdiag)); 1974 /* zero off-diagonal part */ 1975 PetscCall(MatZeroRows_HYPRE_CSRMatrix(hypre_ParCSRMatrixOffd(parcsr), len, lrows, 0.0)); 1976 1977 PetscCall(PetscFree(lrows)); 1978 PetscFunctionReturn(PETSC_SUCCESS); 1979 } 1980 1981 static PetscErrorCode MatAssemblyBegin_HYPRE(Mat mat, MatAssemblyType mode) 1982 { 1983 PetscFunctionBegin; 1984 if (mat->nooffprocentries) PetscFunctionReturn(PETSC_SUCCESS); 1985 1986 PetscCall(MatStashScatterBegin_Private(mat, &mat->stash, mat->rmap->range)); 1987 PetscFunctionReturn(PETSC_SUCCESS); 1988 } 1989 1990 static PetscErrorCode 

static PetscErrorCode MatGetRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int           hnz;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  /* call HYPRE API */
  PetscCallExternal(HYPRE_ParCSRMatrixGetRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
  if (nz) *nz = (PetscInt)hnz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatRestoreRow_HYPRE(Mat A, PetscInt row, PetscInt *nz, PetscInt **idx, PetscScalar **v)
{
  hypre_ParCSRMatrix *parcsr;
  HYPRE_Int           hnz;

  PetscFunctionBegin;
  /* retrieve the internal matrix */
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  /* call HYPRE API */
  hnz = nz ? (HYPRE_Int)(*nz) : 0;
  PetscCallExternal(HYPRE_ParCSRMatrixRestoreRow, parcsr, row, &hnz, (HYPRE_BigInt **)idx, (HYPRE_Complex **)v);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetValues_HYPRE(Mat A, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;
  PetscInt   i;

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
  /* Ignore negative row indices; negative column indices should be automatically ignored by hypre */
  for (i = 0; i < m; i++) {
    if (idxm[i] >= 0) {
      HYPRE_Int hn = (HYPRE_Int)n;
      PetscCallExternal(HYPRE_IJMatrixGetValues, hA->ij, 1, &hn, (HYPRE_BigInt *)&idxm[i], (HYPRE_BigInt *)idxn, (HYPRE_Complex *)(v + i * n));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetOption_HYPRE(Mat A, MatOption op, PetscBool flg)
{
  Mat_HYPRE *hA = (Mat_HYPRE *)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_NO_OFF_PROC_ENTRIES:
    if (flg) PetscCallExternal(HYPRE_IJMatrixSetMaxOffProcElmts, hA->ij, 0);
    break;
  case MAT_SORTED_FULL:
    hA->sorted_full = flg;
    break;
  default:
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatView_HYPRE(Mat A, PetscViewer view)
{
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(view, &format));
  if (format == PETSC_VIEWER_ASCII_FACTOR_INFO || format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscFunctionReturn(PETSC_SUCCESS);
  if (format != PETSC_VIEWER_NATIVE) {
    Mat                 B;
    hypre_ParCSRMatrix *parcsr;
    PetscErrorCode (*mview)(Mat, PetscViewer) = NULL;

    PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
    PetscCall(MatCreateFromParCSR(parcsr, MATAIJ, PETSC_USE_POINTER, &B));
    PetscCall(MatGetOperation(B, MATOP_VIEW, (void (**)(void))&mview));
    PetscCheck(mview, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing view operation");
    PetscCall((*mview)(B, view));
    PetscCall(MatDestroy(&B));
  } else {
    Mat_HYPRE  *hA = (Mat_HYPRE *)A->data;
    PetscMPIInt size;
    PetscBool   isascii;
    const char *filename;

    /* HYPRE uses only text files */
    PetscCall(PetscObjectTypeCompare((PetscObject)view, PETSCVIEWERASCII, &isascii));
    PetscCheck(isascii, PetscObjectComm((PetscObject)view), PETSC_ERR_SUP, "PetscViewerType %s: native HYPRE format needs PETSCVIEWERASCII", ((PetscObject)view)->type_name);
    PetscCall(PetscViewerFileGetName(view, &filename));
    PetscCallExternal(HYPRE_IJMatrixPrint, hA->ij, filename);
    PetscCallMPI(MPI_Comm_size(hA->comm, &size));
    if (size > 1) {
      PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1));
    } else {
      PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
&filename)); 2083 PetscCallExternal(HYPRE_IJMatrixPrint, hA->ij, filename); 2084 PetscCallMPI(MPI_Comm_size(hA->comm, &size)); 2085 if (size > 1) { 2086 PetscCall(PetscViewerASCIIPrintf(view, "Matrix files: %s.%05d ... %s.%05d\n", filename, 0, filename, size - 1)); 2087 } else { 2088 PetscCall(PetscViewerASCIIPrintf(view, "Matrix file: %s.%05d\n", filename, 0)); 2089 } 2090 } 2091 PetscFunctionReturn(PETSC_SUCCESS); 2092 } 2093 2094 static PetscErrorCode MatCopy_HYPRE(Mat A, Mat B, MatStructure str) 2095 { 2096 hypre_ParCSRMatrix *acsr, *bcsr; 2097 2098 PetscFunctionBegin; 2099 if (str == SAME_NONZERO_PATTERN && A->ops->copy == B->ops->copy) { 2100 PetscCall(MatHYPREGetParCSR_HYPRE(A, &acsr)); 2101 PetscCall(MatHYPREGetParCSR_HYPRE(B, &bcsr)); 2102 PetscCallExternal(hypre_ParCSRMatrixCopy, acsr, bcsr, 1); 2103 PetscCall(MatSetOption(B, MAT_SORTED_FULL, PETSC_TRUE)); /* "perfect" preallocation, so no need for hypre_AuxParCSRMatrixNeedAux */ 2104 PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY)); 2105 PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY)); 2106 } else { 2107 PetscCall(MatCopy_Basic(A, B, str)); 2108 } 2109 PetscFunctionReturn(PETSC_SUCCESS); 2110 } 2111 2112 static PetscErrorCode MatGetDiagonal_HYPRE(Mat A, Vec d) 2113 { 2114 hypre_ParCSRMatrix *parcsr; 2115 hypre_CSRMatrix *dmat; 2116 HYPRE_Complex *a; 2117 HYPRE_Complex *data = NULL; 2118 HYPRE_Int *diag = NULL; 2119 PetscInt i; 2120 PetscBool cong; 2121 2122 PetscFunctionBegin; 2123 PetscCall(MatHasCongruentLayouts(A, &cong)); 2124 PetscCheck(cong, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Only for square matrices with same local distributions of rows and columns"); 2125 if (PetscDefined(USE_DEBUG)) { 2126 PetscBool miss; 2127 PetscCall(MatMissingDiagonal(A, &miss, NULL)); 2128 PetscCheck(!miss || !A->rmap->n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not implemented when diagonal entries are missing"); 2129 } 2130 PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr)); 2131 dmat = hypre_ParCSRMatrixDiag(parcsr); 2132 if (dmat) { 2133 /* this cast fixes the clang error: implicit conversion from 'HYPRE_Complex' (aka '_Complex double') to 'double' is not permitted in C++ */ 2134 PetscCall(VecGetArray(d, (PetscScalar **)&a)); 2135 diag = hypre_CSRMatrixI(dmat); 2136 data = hypre_CSRMatrixData(dmat); 2137 for (i = 0; i < A->rmap->n; i++) a[i] = data[diag[i]]; 2138 PetscCall(VecRestoreArray(d, (PetscScalar **)&a)); 2139 } 2140 PetscFunctionReturn(PETSC_SUCCESS); 2141 } 2142 2143 #include <petscblaslapack.h> 2144 2145 static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str) 2146 { 2147 PetscFunctionBegin; 2148 #if defined(PETSC_HAVE_HYPRE_DEVICE) 2149 { 2150 Mat B; 2151 hypre_ParCSRMatrix *x, *y, *z; 2152 2153 PetscCall(MatHYPREGetParCSR(Y, &y)); 2154 PetscCall(MatHYPREGetParCSR(X, &x)); 2155 PetscCallExternal(hypre_ParCSRMatrixAdd, 1.0, y, 1.0, x, &z); 2156 PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B)); 2157 PetscCall(MatHeaderMerge(Y, &B)); 2158 } 2159 #else 2160 if (str == SAME_NONZERO_PATTERN) { 2161 hypre_ParCSRMatrix *x, *y; 2162 hypre_CSRMatrix *xloc, *yloc; 2163 PetscInt xnnz, ynnz; 2164 HYPRE_Complex *xarr, *yarr; 2165 PetscBLASInt one = 1, bnz; 2166 2167 PetscCall(MatHYPREGetParCSR(Y, &y)); 2168 PetscCall(MatHYPREGetParCSR(X, &x)); 2169 2170 /* diagonal block */ 2171 xloc = hypre_ParCSRMatrixDiag(x); 2172 yloc = hypre_ParCSRMatrixDiag(y); 2173 xnnz = 0; 2174 ynnz = 0; 2175 xarr = NULL; 2176 yarr = NULL; 2177 if (xloc) { 2178 xarr = hypre_CSRMatrixData(xloc); 2179 xnnz = 

#include <petscblaslapack.h>

static PetscErrorCode MatAXPY_HYPRE(Mat Y, PetscScalar a, Mat X, MatStructure str)
{
  PetscFunctionBegin;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  {
    Mat                 B;
    hypre_ParCSRMatrix *x, *y, *z;
    HYPRE_Complex       ha;

    PetscCall(PetscHYPREScalarCast(a, &ha));
    PetscCall(MatHYPREGetParCSR(Y, &y));
    PetscCall(MatHYPREGetParCSR(X, &x));
    PetscCallExternal(hypre_ParCSRMatrixAdd, 1.0, y, ha, x, &z); /* z = y + a*x */
    PetscCall(MatCreateFromParCSR(z, MATHYPRE, PETSC_OWN_POINTER, &B));
    PetscCall(MatHeaderMerge(Y, &B));
  }
#else
  if (str == SAME_NONZERO_PATTERN) {
    hypre_ParCSRMatrix *x, *y;
    hypre_CSRMatrix    *xloc, *yloc;
    PetscInt            xnnz, ynnz;
    HYPRE_Complex      *xarr, *yarr;
    PetscBLASInt        one = 1, bnz;

    PetscCall(MatHYPREGetParCSR(Y, &y));
    PetscCall(MatHYPREGetParCSR(X, &x));

    /* diagonal block */
    xloc = hypre_ParCSRMatrixDiag(x);
    yloc = hypre_ParCSRMatrixDiag(y);
    xnnz = 0;
    ynnz = 0;
    xarr = NULL;
    yarr = NULL;
    if (xloc) {
      xarr = hypre_CSRMatrixData(xloc);
      xnnz = hypre_CSRMatrixNumNonzeros(xloc);
    }
    if (yloc) {
      yarr = hypre_CSRMatrixData(yloc);
      ynnz = hypre_CSRMatrixNumNonzeros(yloc);
    }
    PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
    PetscCall(PetscBLASIntCast(xnnz, &bnz));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));

    /* off-diagonal block */
    xloc = hypre_ParCSRMatrixOffd(x);
    yloc = hypre_ParCSRMatrixOffd(y);
    xnnz = 0;
    ynnz = 0;
    xarr = NULL;
    yarr = NULL;
    if (xloc) {
      xarr = hypre_CSRMatrixData(xloc);
      xnnz = hypre_CSRMatrixNumNonzeros(xloc);
    }
    if (yloc) {
      yarr = hypre_CSRMatrixData(yloc);
      ynnz = hypre_CSRMatrixNumNonzeros(yloc);
    }
    PetscCheck(xnnz == ynnz, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Different number of nonzeros in off-diagonal block %" PetscInt_FMT " != %" PetscInt_FMT, xnnz, ynnz);
    PetscCall(PetscBLASIntCast(xnnz, &bnz));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&bnz, &a, (PetscScalar *)xarr, &one, (PetscScalar *)yarr, &one));
  } else if (str == SUBSET_NONZERO_PATTERN) {
    PetscCall(MatAXPY_Basic(Y, a, X, str));
  } else {
    Mat B;

    PetscCall(MatAXPY_Basic_Preallocate(Y, X, &B));
    PetscCall(MatAXPY_BasicWithPreallocation(B, Y, a, X, str));
    PetscCall(MatHeaderReplace(Y, &B));
  }
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Attach cooMat to hypre matrix mat. The two matrices will share the same data array */
static PetscErrorCode MatAttachCOOMat_HYPRE(Mat mat, Mat cooMat)
{
  Mat_HYPRE           *hmat = (Mat_HYPRE *)mat->data;
  hypre_CSRMatrix     *diag, *offd;
  hypre_ParCSRMatrix  *parCSR;
  HYPRE_MemoryLocation hypreMemtype = HYPRE_MEMORY_HOST;
  PetscMemType         petscMemtype;
  Mat                  A, B;
  PetscScalar         *Aa, *Ba;
  PetscMPIInt          size;
  MPI_Comm             comm;
  PetscLayout          rmap;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(MatGetLayouts(mat, &rmap, NULL));

  /* Alias cooMat's data array to IJMatrix's */
  PetscCallExternal(HYPRE_IJMatrixGetObject, hmat->ij, (void **)&parCSR);
  diag = hypre_ParCSRMatrixDiag(parCSR);
  offd = hypre_ParCSRMatrixOffd(parCSR);

  hypreMemtype = hypre_CSRMatrixMemoryLocation(diag);
  A            = (size == 1) ? cooMat : ((Mat_MPIAIJ *)cooMat->data)->A;
  PetscCall(MatSeqAIJGetCSRAndMemType(A, NULL, NULL, &Aa, &petscMemtype));
  PetscAssert((PetscMemTypeHost(petscMemtype) && hypreMemtype == HYPRE_MEMORY_HOST) || (PetscMemTypeDevice(petscMemtype) && hypreMemtype == HYPRE_MEMORY_DEVICE), comm, PETSC_ERR_PLIB, "PETSc and hypre's memory types mismatch");

  hmat->diagJ = hypre_CSRMatrixJ(diag);
  PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(diag), hypreMemtype));
  hypre_CSRMatrixData(diag)     = (HYPRE_Complex *)Aa;
  hypre_CSRMatrixOwnsData(diag) = 0; /* Take ownership of (j,a) away from hypre. As a result, we need to free them on our own */

  /* Copy diagonal pointers of A to device to facilitate MatSeqAIJMoveDiagonalValuesFront_SeqAIJKokkos */
  if (hypreMemtype == HYPRE_MEMORY_DEVICE) {
    PetscStackCallExternalVoid("hypre_TAlloc", hmat->diag = hypre_TAlloc(PetscInt, rmap->n, hypreMemtype));
    PetscCall(MatMarkDiagonal_SeqAIJ(A)); /* We need updated diagonal positions */
    PetscStackCallExternalVoid("hypre_TMemcpy", hypre_TMemcpy(hmat->diag, ((Mat_SeqAIJ *)A->data)->diag, PetscInt, rmap->n, hypreMemtype, HYPRE_MEMORY_HOST));
  }

  if (size > 1) {
    B = ((Mat_MPIAIJ *)cooMat->data)->B;
    PetscCall(MatSeqAIJGetCSRAndMemType(B, NULL, NULL, &Ba, &petscMemtype));
    hmat->offdJ = hypre_CSRMatrixJ(offd);
    PetscStackCallExternalVoid("hypre_TFree", hypre_TFree(hypre_CSRMatrixData(offd), hypreMemtype));
    hypre_CSRMatrixData(offd)     = (HYPRE_Complex *)Ba;
    hypre_CSRMatrixOwnsData(offd) = 0;
  }

  /* Record cooMat for use in MatSetValuesCOO_HYPRE */
  hmat->cooMat  = cooMat;
  hmat->memType = hypreMemtype;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatDuplicate_HYPRE(Mat A, MatDuplicateOption op, Mat *B)
{
  hypre_ParCSRMatrix *parcsr = NULL;
  PetscCopyMode       cpmode;
  Mat_HYPRE          *hA;
  Mat                 cooMat;

  PetscFunctionBegin;
  PetscCall(MatHYPREGetParCSR_HYPRE(A, &parcsr));
  if (op == MAT_DO_NOT_COPY_VALUES || op == MAT_SHARE_NONZERO_PATTERN) {
    parcsr = hypre_ParCSRMatrixClone(parcsr, 0);
    cpmode = PETSC_OWN_POINTER;
  } else {
    cpmode = PETSC_COPY_VALUES;
  }
  PetscCall(MatCreateFromParCSR(parcsr, MATHYPRE, cpmode, B));
  hA = (Mat_HYPRE *)A->data;
  if (hA->cooMat) {
    op = (op == MAT_DO_NOT_COPY_VALUES) ? op : MAT_COPY_VALUES;
    /* Cannot simply increase the reference count of hA->cooMat, since B needs to share cooMat's data array */
    PetscCall(MatDuplicate(hA->cooMat, op, &cooMat));
    PetscCall(MatAttachCOOMat_HYPRE(*B, cooMat));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetPreallocationCOO_HYPRE(Mat mat, PetscCount coo_n, PetscInt coo_i[], PetscInt coo_j[])
{
  MPI_Comm    comm;
  PetscMPIInt size;
  PetscLayout rmap, cmap;
  Mat_HYPRE  *hmat;
  Mat         cooMat;
  MatType     matType = MATAIJ; /* default type of cooMat */

  PetscFunctionBegin;
  /* Build a delegate matrix cooMat whose type is either MATAIJ or MATAIJKOKKOS.
     It has the same sparsity pattern as mat, and also shares the data array with mat. We use cooMat to do the COO work.
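     A typical user-facing sequence is (illustrative sketch):
       MatSetPreallocationCOO(mat, ncoo, coo_i, coo_j);  - this routine: builds cooMat and mirrors its pattern in hmat->ij
       MatSetValuesCOO(mat, coo_v, INSERT_VALUES);       - delegated to cooMat, whose value array is shared with hmat->ij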
  */
  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  PetscCall(MatGetLayouts(mat, &rmap, &cmap));

#if defined(PETSC_HAVE_DEVICE)
  if (!mat->boundtocpu) { /* mat will be on device, so will cooMat */
#if defined(PETSC_HAVE_KOKKOS)
    matType = MATAIJKOKKOS;
#else
    SETERRQ(comm, PETSC_ERR_SUP, "To support MATHYPRE COO assembly on device, we need Kokkos, e.g., --download-kokkos --download-kokkos-kernels");
#endif
  }
#endif

  /* Do COO preallocation through cooMat */
  hmat = (Mat_HYPRE *)mat->data;
  PetscCall(MatDestroy(&hmat->cooMat));
  PetscCall(MatCreate(comm, &cooMat));
  PetscCall(MatSetType(cooMat, matType));
  PetscCall(MatSetLayouts(cooMat, rmap, cmap));
  PetscCall(MatSetPreallocationCOO(cooMat, coo_n, coo_i, coo_j));
  cooMat->assembled = PETSC_TRUE;

  /* Copy the sparsity pattern from cooMat to hypre IJMatrix hmat->ij */
  PetscCall(MatSetOption(mat, MAT_SORTED_FULL, PETSC_TRUE));
  PetscCall(MatSetOption(mat, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
  PetscCall(MatHYPRE_CreateFromMat(cooMat, hmat));      /* Create hmat->ij and preallocate it */
  PetscCall(MatHYPRE_IJMatrixCopyIJ(cooMat, hmat->ij)); /* Copy cooMat's (i,j) to hmat->ij */

  mat->preallocated = PETSC_TRUE;
  PetscCall(MatAssemblyBegin(mat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(mat, MAT_FINAL_ASSEMBLY)); /* Migrate mat to device if it is bound to. Hypre builds its own SpMV context here */

  /* Attach cooMat to mat */
  PetscCall(MatAttachCOOMat_HYPRE(mat, cooMat));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatSetValuesCOO_HYPRE(Mat mat, const PetscScalar v[], InsertMode imode)
{
  Mat_HYPRE *hmat = (Mat_HYPRE *)mat->data;
  PetscBool  ismpiaij;
  Mat        A;

  PetscFunctionBegin;
  PetscCheck(hmat->cooMat, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "HYPRE COO delegate matrix has not been created yet");
  PetscCall(MatSetValuesCOO(hmat->cooMat, v, imode));

  PetscCall(PetscObjectBaseTypeCompare((PetscObject)hmat->cooMat, MATMPIAIJ, &ismpiaij));

  /* Move diagonal elements of the diagonal block to the front of their row, as needed by ParCSRMatrix. So damn hacky */
  A = ismpiaij ? ((Mat_MPIAIJ *)hmat->cooMat->data)->A : hmat->cooMat;
  if (hmat->memType == HYPRE_MEMORY_HOST) {
    Mat_SeqAIJ  *aij = (Mat_SeqAIJ *)A->data;
    PetscInt     i, m, *Ai = aij->i, *Adiag = aij->diag;
    PetscScalar *Aa = aij->a, tmp;

    PetscCall(MatGetSize(A, &m, NULL));
    for (i = 0; i < m; i++) {
      if (Adiag[i] >= Ai[i] && Adiag[i] < Ai[i + 1]) { /* Diagonal element of this row exists in a[] and j[] */
        tmp          = Aa[Ai[i]];
        Aa[Ai[i]]    = Aa[Adiag[i]];
        Aa[Adiag[i]] = tmp;
      }
    }
  } else {
#if defined(PETSC_HAVE_KOKKOS_KERNELS)
    PetscCall(MatSeqAIJMoveDiagonalValuesFront_SeqAIJKokkos(A, hmat->diag));
#endif
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   MATHYPRE - "hypre" - A matrix type to be used for sequential and parallel sparse matrices
   based on the hypre IJ interface.
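
   Example Usage (a minimal sketch; the sizes are illustrative):
.vb
   Mat      A;
   PetscInt m = 8; // number of local rows

   MatCreate(PETSC_COMM_WORLD, &A);
   MatSetSizes(A, m, m, PETSC_DECIDE, PETSC_DECIDE);
   MatSetType(A, MATHYPRE);
   MatHYPRESetPreallocation(A, 5, NULL, 2, NULL);
   // MatSetValues(...) as usual, then
   MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
.ve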

   Level: intermediate

.seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatHYPRESetPreallocation()`
M*/

PETSC_EXTERN PetscErrorCode MatCreate_HYPRE(Mat B)
{
  Mat_HYPRE *hB;

  PetscFunctionBegin;
  PetscCall(PetscNew(&hB));

  hB->inner_free  = PETSC_TRUE;
  hB->available   = PETSC_TRUE;
  hB->sorted_full = PETSC_FALSE; /* no assumption whether column indices are sorted or not */
  hB->size        = 0;
  hB->array       = NULL;

  B->data      = (void *)hB;
  B->assembled = PETSC_FALSE;

  PetscCall(PetscMemzero(B->ops, sizeof(struct _MatOps)));
  B->ops->mult                  = MatMult_HYPRE;
  B->ops->multtranspose         = MatMultTranspose_HYPRE;
  B->ops->multadd               = MatMultAdd_HYPRE;
  B->ops->multtransposeadd      = MatMultTransposeAdd_HYPRE;
  B->ops->setup                 = MatSetUp_HYPRE;
  B->ops->destroy               = MatDestroy_HYPRE;
  B->ops->assemblyend           = MatAssemblyEnd_HYPRE;
  B->ops->assemblybegin         = MatAssemblyBegin_HYPRE;
  B->ops->setvalues             = MatSetValues_HYPRE;
  B->ops->missingdiagonal       = MatMissingDiagonal_HYPRE;
  B->ops->scale                 = MatScale_HYPRE;
  B->ops->zerorowscolumns       = MatZeroRowsColumns_HYPRE;
  B->ops->zeroentries           = MatZeroEntries_HYPRE;
  B->ops->zerorows              = MatZeroRows_HYPRE;
  B->ops->getrow                = MatGetRow_HYPRE;
  B->ops->restorerow            = MatRestoreRow_HYPRE;
  B->ops->getvalues             = MatGetValues_HYPRE;
  B->ops->setoption             = MatSetOption_HYPRE;
  B->ops->duplicate             = MatDuplicate_HYPRE;
  B->ops->copy                  = MatCopy_HYPRE;
  B->ops->view                  = MatView_HYPRE;
  B->ops->getdiagonal           = MatGetDiagonal_HYPRE;
  B->ops->axpy                  = MatAXPY_HYPRE;
  B->ops->productsetfromoptions = MatProductSetFromOptions_HYPRE;
#if defined(PETSC_HAVE_HYPRE_DEVICE)
  B->ops->bindtocpu = MatBindToCPU_HYPRE;
  B->boundtocpu     = PETSC_FALSE;
#endif

  /* build cache for off-process entries formed during MatSetValues */
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)B), 1, &B->stash));

  PetscCall(PetscCommGetComm(PetscObjectComm((PetscObject)B), &hB->comm));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B, MATHYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_aij_C", MatConvert_HYPRE_AIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatConvert_hypre_is_C", MatConvert_HYPRE_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_seqaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatProductSetFromOptions_mpiaij_hypre_C", MatProductSetFromOptions_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPRESetPreallocation_C", MatHYPRESetPreallocation_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatHYPREGetParCSR_C", MatHYPREGetParCSR_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_HYPRE));
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatSetValuesCOO_C", MatSetValuesCOO_HYPRE));
#if defined(PETSC_HAVE_HYPRE_DEVICE)
#if defined(HYPRE_USING_HIP)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_HIP));
  PetscCall(MatSetVecType(B, VECHIP));
#endif
#if defined(HYPRE_USING_CUDA)
  PetscCall(PetscDeviceInitialize(PETSC_DEVICE_CUDA));
  PetscCall(MatSetVecType(B, VECCUDA));
#endif
#endif
  PetscHYPREInitialize();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode hypre_array_destroy(void *ptr)
{
  PetscFunctionBegin;
  if (ptr) hypre_TFree(ptr, HYPRE_MEMORY_HOST);
  PetscFunctionReturn(PETSC_SUCCESS);
}